docstrings / formatting

2025-09-23 13:17:34 -04:00
parent c2044711fb
commit 19afb287cd
16 changed files with 1165 additions and 428 deletions

@@ -223,6 +223,7 @@ class RadioUtil:
"artist": double_space.sub(" ", result["artist"].strip()),
"song": double_space.sub(" ", result["song"].strip()),
"artistsong": result["artistsong"].strip(),
"album": result["album"].strip() if result["album"] else "N/A",
"genre": self.get_genre(
double_space.sub(" ", result["artist"].strip())
),
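
For reference, the normalization these fields rely on collapses runs of whitespace and substitutes "N/A" for a missing album. A minimal sketch, assuming `double_space` is a precompiled pattern along the lines of `re.compile(r"\s{2,}")` (its definition sits outside this hunk):

    import re

    double_space = re.compile(r"\s{2,}")  # assumed definition; not shown in the hunk

    result = {"artist": " Daft  Punk ", "song": "One More Time", "album": None}
    artist = double_space.sub(" ", result["artist"].strip())      # "Daft Punk"
    album = result["album"].strip() if result["album"] else "N/A"  # "N/A" fallback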

@@ -47,7 +47,13 @@ sr = SRUtil()
# ---------- Discord helper ----------
async def discord_notify(webhook_url: str, title: str, description: str, target: Optional[str] = None, color: int = 0x00FF00):
async def discord_notify(
webhook_url: str,
title: str,
description: str,
target: Optional[str] = None,
color: int = 0x00FF00,
):
embed = {
"title": title,
"description": description[:1900] if description else "",
@@ -64,15 +70,20 @@ async def discord_notify(webhook_url: str, title: str, description: str, target:
while True: # permanent retry
try:
async with aiohttp.ClientSession() as session:
async with session.post(webhook_url, json=payload, timeout=aiohttp.ClientTimeout(total=10)) as resp:
async with session.post(
webhook_url, json=payload, timeout=aiohttp.ClientTimeout(total=10)
) as resp:
if resp.status >= 400:
text = await resp.text()
raise RuntimeError(f"Discord webhook failed ({resp.status}): {text}")
raise RuntimeError(
f"Discord webhook failed ({resp.status}): {text}"
)
break
except Exception as e:
print(f"Discord send failed, retrying: {e}")
await asyncio.sleep(5)
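
`discord_notify` retries forever, so a failed webhook blocks its caller rather than dropping the message; synchronous code drives it with `asyncio.run`, as the job-start notification further down does. A minimal usage sketch (the webhook URL is a placeholder):

    import asyncio

    asyncio.run(
        discord_notify(
            "https://discord.com/api/webhooks/...",  # placeholder; real code reads DISCORD_WEBHOOK
            title="Job Started: example",
            description="Processing `3` track(s)",
            color=0x00FFFF,
        )
    )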
def send_log_to_discord(message: str, level: str, target: Optional[str] = None):
colors = {"WARNING": 0xFFA500, "ERROR": 0xFF0000, "CRITICAL": 0xFF0000}
color = colors.get(level.upper(), 0xFFFF00)
@@ -83,7 +94,7 @@ def send_log_to_discord(message: str, level: str, target: Optional[str] = None):
title=f"{level} in bulk_download",
description=message,
target=target,
color=color
color=color,
)
try:
@@ -98,6 +109,7 @@ def send_log_to_discord(message: str, level: str, target: Optional[str] = None):
# ---------- Helpers ----------
def tag_with_mediafile(file_path: str, meta: dict):
f = MediaFile(file_path)
def safe_set(attr, value, default=None, cast=None):
if value is None:
value = default
@@ -106,6 +118,7 @@ def tag_with_mediafile(file_path: str, meta: dict):
setattr(f, attr, cast(value))
else:
setattr(f, attr, str(value))
safe_set("title", meta.get("title"), default="Unknown Title")
safe_set("artist", meta.get("artist"), default="Unknown Artist")
safe_set("albumartist", meta.get("album_artist"), default="Unknown Artist")
@@ -136,6 +149,7 @@ def tag_with_mediafile(file_path: str, meta: dict):
if not cover_bytes and cover_url:
try:
import requests
resp = requests.get(cover_url, timeout=10)
resp.raise_for_status()
cover_bytes = resp.content
@@ -188,7 +202,7 @@ def ensure_unique_filename_in_dir(parent: Path, filename: str) -> Path:
# special-case .tar.gz
if filename.lower().endswith(".tar.gz"):
ext = ".tar.gz"
base = filename[:-len(ext)]
base = filename[: -len(ext)]
else:
p = Path(filename)
ext = p.suffix
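
The `.tar.gz` special case is needed because `Path.suffix` only returns the final extension, so a uniquifying counter would otherwise land between `.tar` and `.gz`. A quick illustration:

    from pathlib import Path

    print(Path("album.tar.gz").suffix)  # ".gz" -- only the last extension
    # with the special case: base = "album", ext = ".tar.gz",
    # so any counter can be appended before the full ".tar.gz"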
@@ -235,13 +249,15 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
send_log_to_discord(f"Failed to init job.meta: {e}", "WARNING", target)
# Job started Discord message
asyncio.run(discord_notify(
DISCORD_WEBHOOK,
title=f"Job Started: {job_id}",
description=f"Processing `{len(track_list)}` track(s)",
target=target,
color=0x00FFFF
))
asyncio.run(
discord_notify(
DISCORD_WEBHOOK,
title=f"Job Started: {job_id}",
description=f"Processing `{len(track_list)}` track(s)",
target=target,
color=0x00FFFF,
)
)
async def process_tracks():
per_track_meta = []
@@ -253,7 +269,11 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
# Set up a one-time rate-limit callback to notify on the first 429 seen by SRUtil
async def _rate_limit_notify(exc: Exception):
try:
send_log_to_discord(f"Rate limit observed while fetching metadata: {exc}", "WARNING", target)
send_log_to_discord(
f"Rate limit observed while fetching metadata: {exc}",
"WARNING",
target,
)
except Exception:
pass
@@ -265,7 +285,13 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
pass
total = len(track_list or [])
for i, track_id in enumerate(track_list or []):
track_info = {"track_id": str(track_id), "status": "Pending", "file_path": None, "error": None, "attempts": 0}
track_info = {
"track_id": str(track_id),
"status": "Pending",
"file_path": None,
"error": None,
"attempts": 0,
}
attempt = 0
while attempt < MAX_RETRIES:
@@ -326,13 +352,19 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
# Try to fetch cover art via SRUtil (use album_id from metadata)
try:
album_field = md.get("album")
album_id = md.get("album_id") or (album_field.get("id") if isinstance(album_field, dict) else None)
album_id = md.get("album_id") or (
album_field.get("id")
if isinstance(album_field, dict)
else None
)
except Exception:
album_id = None
if album_id:
try:
cover_url = await sr.get_cover_by_album_id(album_id, size=640)
cover_url = await sr.get_cover_by_album_id(
album_id, size=640
)
except Exception:
cover_url = None
else:
@@ -344,7 +376,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
if cover_url:
try:
timeout = aiohttp.ClientTimeout(total=15)
async with session.get(cover_url, timeout=timeout) as img_resp:
async with session.get(
cover_url, timeout=timeout
) as img_resp:
if img_resp.status == 200:
img_bytes = await img_resp.read()
else:
@@ -375,23 +409,24 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
# Prefer music_tag if available (keeps compatibility with add_cover_art.py)
try:
from music_tag import load_file as mt_load_file # type: ignore
try:
mf = mt_load_file(str(final_file))
# set basic tags
if md.get('title'):
mf['title'] = md.get('title')
if md.get('artist'):
mf['artist'] = md.get('artist')
if md.get('album'):
mf['album'] = md.get('album')
tracknum = md.get('track_number')
if md.get("title"):
mf["title"] = md.get("title")
if md.get("artist"):
mf["artist"] = md.get("artist")
if md.get("album"):
mf["album"] = md.get("album")
tracknum = md.get("track_number")
if tracknum is not None:
try:
mf['tracknumber'] = int(tracknum)
mf["tracknumber"] = int(tracknum)
except Exception:
pass
if img_bytes:
mf['artwork'] = img_bytes
mf["artwork"] = img_bytes
mf.save()
embedded = True
except Exception:
@@ -438,7 +473,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
wait_time = min(60, 2**attempt)
await asyncio.sleep(wait_time)
else:
await asyncio.sleep(random.uniform(THROTTLE_MIN, THROTTLE_MAX))
await asyncio.sleep(
random.uniform(THROTTLE_MIN, THROTTLE_MAX)
)
except Exception as e:
tb = traceback.format_exc()
@@ -447,7 +484,11 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
track_info["error"] = str(e)
if attempt >= MAX_RETRIES:
track_info["status"] = "Failed"
send_log_to_discord(f"Track {track_id} failed after {attempt} attempts", "ERROR", target)
send_log_to_discord(
f"Track {track_id} failed after {attempt} attempts",
"ERROR",
target,
)
await asyncio.sleep(random.uniform(THROTTLE_MIN, THROTTLE_MAX))
finally:
@@ -464,7 +505,11 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
job.meta["tarball"] = None
job.meta["status"] = "Failed"
job.save_meta()
send_log_to_discord(f"No tracks were successfully downloaded for job `{job_id}`", "CRITICAL", target)
send_log_to_discord(
f"No tracks were successfully downloaded for job `{job_id}`",
"CRITICAL",
target,
)
return []
# Tarball creation
@@ -476,7 +521,11 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
except Exception:
artist = "Unknown Artist"
artist_counts[artist] = artist_counts.get(artist, 0) + 1
top_artist = sorted(artist_counts.items(), key=lambda kv: (-kv[1], kv[0]))[0][0] if artist_counts else "Unknown Artist"
top_artist = (
sorted(artist_counts.items(), key=lambda kv: (-kv[1], kv[0]))[0][0]
if artist_counts
else "Unknown Artist"
)
# Prefer `job.meta['target']` when provided by the enqueuer. Fall back to the top artist.
target_name = None
try:
@@ -485,7 +534,11 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
except Exception:
target_name = None
base_label = sanitize_filename(target_name) if target_name else sanitize_filename(top_artist)
base_label = (
sanitize_filename(target_name)
if target_name
else sanitize_filename(top_artist)
)
staged_tarball = staging_root / f"{base_label}.tar.gz"
counter = 1
@@ -504,14 +557,24 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
job.save_meta()
logging.info("Creating tarball: %s", staged_tarball)
await discord_notify(DISCORD_WEBHOOK,
title=f"Compressing: Job {job_id}",
description=f"Creating tarball: `{len(all_final_files)}` track(s).\nStaging path: {staged_tarball}",
color=0xFFA500,
target=target)
await discord_notify(
DISCORD_WEBHOOK,
title=f"Compressing: Job {job_id}",
description=f"Creating tarball: `{len(all_final_files)}` track(s).\nStaging path: {staged_tarball}",
color=0xFFA500,
target=target,
)
try:
subprocess.run(
["tar", "-I", "pigz -9", "-cf", str(staged_tarball), "-C", str(staging_root)]
[
"tar",
"-I",
"pigz -9",
"-cf",
str(staged_tarball),
"-C",
str(staging_root),
]
+ [str(f.relative_to(staging_root)) for f in all_final_files],
check=True,
)
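
`tar -I 'pigz -9'` routes the archive through pigz, a parallel gzip, at maximum compression; the `FileNotFoundError` fallback to `tarfile` follows in the next hunk. A minimal sketch of the same decision with illustrative paths, probing for pigz up front with `shutil.which` (which also covers the case where `tar` runs but pigz is absent):

    import shutil
    import subprocess
    import tarfile

    if shutil.which("pigz"):
        # tar spawns `pigz -9` as its compressor via -I
        subprocess.run(
            ["tar", "-I", "pigz -9", "-cf", "out.tar.gz", "-C", "staging", "."],
            check=True,
        )
    else:
        # single-threaded stdlib fallback, mirroring the except branch below
        with tarfile.open("out.tar.gz", "w:gz") as tar:
            tar.add("staging", arcname=".")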
@@ -521,7 +584,11 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
except Exception:
pass
except FileNotFoundError:
send_log_to_discord("pigz not available, falling back to tarfile (slower).", "WARNING", target)
send_log_to_discord(
"pigz not available, falling back to tarfile (slower).",
"WARNING",
target,
)
with tarfile.open(staged_tarball, "w:gz") as tar:
for f in all_final_files:
try:
@@ -535,7 +602,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
pass
if not staged_tarball.exists():
send_log_to_discord(f"Tarball was not created: `{staged_tarball}`", "CRITICAL", target)
send_log_to_discord(
f"Tarball was not created: `{staged_tarball}`", "CRITICAL", target
)
if job:
job.meta["status"] = "compress_failed"
job.save_meta()
@@ -556,13 +625,13 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
# Job completed Discord message
completed = len(all_final_files)
failed = (len(track_list) - completed)
failed = len(track_list) - completed
await discord_notify(
DISCORD_WEBHOOK,
title=f"Job Completed: {job_id}",
description=f"Processed `{len(track_list)}` track(s).\nCompleted: `{completed}`\nFailed: `{failed}`\nTarball: `{final_tarball}`",
target=target,
color=0x00FF00
color=0x00FF00,
)
return [str(final_tarball)]
@@ -572,7 +641,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
try:
return loop.run_until_complete(process_tracks())
except Exception as e:
send_log_to_discord(f"bulk_download failed: {e}\n{traceback.format_exc()}", "CRITICAL", target)
send_log_to_discord(
f"bulk_download failed: {e}\n{traceback.format_exc()}", "CRITICAL", target
)
if job:
job.meta["status"] = "Failed"
job.save_meta()
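
The enclosing `bulk_download` is a synchronous RQ task that owns a private event loop and reports progress through `job.meta`, which is the shape visible across these hunks. A stripped-down sketch of that pattern (names illustrative):

    import asyncio
    from rq import get_current_job

    def my_task(items: list):
        job = get_current_job()

        async def process():
            # async work happens here, updating job.meta along the way
            return [str(i) for i in items]

        loop = asyncio.new_event_loop()
        try:
            return loop.run_until_complete(process())
        except Exception:
            if job:
                job.meta["status"] = "Failed"
                job.save_meta()
            raise
        finally:
            loop.close()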

@@ -30,7 +30,6 @@ for name in [__name__, "utils.sr_wrapper"]:
logging.getLogger().setLevel(logging.CRITICAL)
load_dotenv()
@@ -66,7 +65,9 @@ class SRUtil:
self.streamrip_client = TidalClient(self.streamrip_config)
self.MAX_CONCURRENT_METADATA_REQUESTS = 2
self.METADATA_RATE_LIMIT = 1.25
self.METADATA_SEMAPHORE = asyncio.Semaphore(self.MAX_CONCURRENT_METADATA_REQUESTS)
self.METADATA_SEMAPHORE = asyncio.Semaphore(
self.MAX_CONCURRENT_METADATA_REQUESTS
)
self.LAST_METADATA_REQUEST = 0
self.MAX_METADATA_RETRIES = 5
self.METADATA_ALBUM_CACHE: dict[str, dict] = {}
@@ -77,16 +78,18 @@ class SRUtil:
self._rate_limit_notified = False
async def rate_limited_request(self, func, *args, **kwargs):
async with self.METADATA_SEMAPHORE:
now = time.time()
elapsed = now - self.LAST_METADATA_REQUEST
if elapsed < self.METADATA_RATE_LIMIT:
await asyncio.sleep(self.METADATA_RATE_LIMIT - elapsed)
result = await func(*args, **kwargs)
self.LAST_METADATA_REQUEST = time.time()
return result
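
Every metadata call funnels through this limiter: the semaphore caps concurrency at `MAX_CONCURRENT_METADATA_REQUESTS` and the timestamp check enforces at least `METADATA_RATE_LIMIT` seconds between requests. Callers simply wrap the coroutine function, as this file does later for track metadata:

    metadata = await self.rate_limited_request(
        self.streamrip_client.get_metadata, str(track_id), "track"
    )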
async def _safe_api_call(self, func, *args, retries: int = 2, backoff: float = 0.5, **kwargs):
async def _safe_api_call(
self, func, *args, retries: int = 2, backoff: float = 0.5, **kwargs
):
"""Call an async API function with resilient retry behavior.
- On AttributeError: attempt a `login()` once and retry.
@@ -116,7 +119,11 @@ class SRUtil:
if ("400" in msg or "429" in msg) and attempt < retries - 1:
# Notify on the first observed 429 (if a callback is set)
try:
if "429" in msg and not self._rate_limit_notified and self.on_rate_limit:
if (
"429" in msg
and not self._rate_limit_notified
and self.on_rate_limit
):
self._rate_limit_notified = True
try:
if asyncio.iscoroutinefunction(self.on_rate_limit):
@@ -128,17 +135,29 @@ class SRUtil:
pass
except Exception:
pass
await asyncio.sleep(backoff * (2 ** attempt))
await asyncio.sleep(backoff * (2**attempt))
continue
# Connection-related errors: try to re-login, then retry
if isinstance(e, (aiohttp.ClientError, OSError, ConnectionError, asyncio.TimeoutError)) or "Connection" in msg or "closed" in msg.lower():
if (
isinstance(
e,
(
aiohttp.ClientError,
OSError,
ConnectionError,
asyncio.TimeoutError,
),
)
or "Connection" in msg
or "closed" in msg.lower()
):
try:
await self.streamrip_client.login()
except Exception:
pass
if attempt < retries - 1:
await asyncio.sleep(backoff * (2 ** attempt))
await asyncio.sleep(backoff * (2**attempt))
continue
# Unhandled / permanent error: re-raise after loop ends
@@ -151,10 +170,23 @@ class SRUtil:
if not expected or not actual:
return False
return fuzz.token_set_ratio(expected.lower(), actual.lower()) >= threshold
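
`token_set_ratio` compares token sets, so word order and extra tokens are forgiven, which is what makes the 80-point threshold workable for artist/title strings. A quick illustration (`fuzz` here could come from either `thefuzz` or `rapidfuzz`; both expose this function):

    from rapidfuzz import fuzz  # assumption: thefuzz behaves the same for these cases

    print(fuzz.token_set_ratio("daft punk", "punk daft"))       # 100 -- order-insensitive
    print(fuzz.token_set_ratio("radiohead", "radiohead live"))  # 100 -- subset scores fully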
def is_metadata_match(self, expected_artist, expected_album, expected_title, found_artist, found_album, found_title, threshold=80):
def is_metadata_match(
self,
expected_artist,
expected_album,
expected_title,
found_artist,
found_album,
found_title,
threshold=80,
):
artist_match = self.is_fuzzy_match(expected_artist, found_artist, threshold)
album_match = self.is_fuzzy_match(expected_album, found_album, threshold) if expected_album else True
album_match = (
self.is_fuzzy_match(expected_album, found_album, threshold)
if expected_album
else True
)
title_match = self.is_fuzzy_match(expected_title, found_title, threshold)
return artist_match and album_match and title_match
@@ -166,7 +198,9 @@ class SRUtil:
deduped[norm] = entry
return list(deduped.values())
def group_artists_by_name(self, entries: list[dict], query: Optional[str] = None) -> list[dict]:
def group_artists_by_name(
self, entries: list[dict], query: Optional[str] = None
) -> list[dict]:
"""
Group artist entries by normalized display name and pick a primary candidate per name.
@@ -199,10 +233,15 @@ class SRUtil:
score = 0.0
if query:
try:
if it.get("artist", "").strip().lower() == query.strip().lower():
if (
it.get("artist", "").strip().lower()
== query.strip().lower()
):
score += 1000.0
else:
score += float(fuzz.token_set_ratio(query, it.get("artist", "")))
score += float(
fuzz.token_set_ratio(query, it.get("artist", ""))
)
except Exception:
score += 0.0
# add small weight for popularity if present
@@ -216,12 +255,14 @@ class SRUtil:
primary = scored[0][1]
alternatives = [it for _, it in scored[1:]]
out.append({
"artist": primary.get("artist"),
"id": primary.get("id"),
"popularity": primary.get("popularity"),
"alternatives": alternatives,
})
out.append(
{
"artist": primary.get("artist"),
"id": primary.get("id"),
"popularity": primary.get("popularity"),
"alternatives": alternatives,
}
)
return out
@@ -230,14 +271,16 @@ class SRUtil:
return None
m, s = divmod(seconds, 60)
return f"{m}:{s:02}"
def _get_tidal_cover_url(self, uuid, size):
"""Generate a tidal cover url.
:param uuid: VALID uuid string
:param size:
"""
TIDAL_COVER_URL = "https://resources.tidal.com/images/{uuid}/{width}x{height}.jpg"
TIDAL_COVER_URL = (
"https://resources.tidal.com/images/{uuid}/{width}x{height}.jpg"
)
possibles = (80, 160, 320, 640, 1280)
assert size in possibles, f"size must be in {possibles}"
return TIDAL_COVER_URL.format(
@@ -246,8 +289,6 @@ class SRUtil:
width=size,
)
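
The elided `format` arguments presumably fill in the UUID plus the validated size for both dimensions; a worked instance of the template:

    TIDAL_COVER_URL = "https://resources.tidal.com/images/{uuid}/{width}x{height}.jpg"
    print(TIDAL_COVER_URL.format(uuid="0a1b2c3d", width=640, height=640))
    # https://resources.tidal.com/images/0a1b2c3d/640x640.jpg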
def combine_album_track_metadata(
self, album_json: dict | None, track_json: dict
) -> dict:
@@ -288,10 +329,14 @@ class SRUtil:
"peak": track_json.get("peak"),
"lyrics": track_json.get("lyrics"),
"track_copyright": track_json.get("copyright"),
"cover_id": track_json.get("album", {}).get("cover") or album_json.get("cover"),
"cover_id": track_json.get("album", {}).get("cover")
or album_json.get("cover"),
"cover_url": (
f"https://resources.tidal.com/images/{track_json.get('album', {}).get('cover', album_json.get('cover'))}/1280x1280.jpg"
if (track_json.get("album", {}).get("cover") or album_json.get("cover"))
if (
track_json.get("album", {}).get("cover")
or album_json.get("cover")
)
else None
),
}
@@ -299,7 +344,6 @@ class SRUtil:
return combined
def combine_album_with_all_tracks(
self, album_json: dict[str, Any]
) -> list[dict[str, Any]]:
@@ -309,7 +353,9 @@ class SRUtil:
for t in album_json.get("tracks", [])
]
async def get_artists_by_name(self, artist_name: str, group: bool = False) -> Optional[list]:
async def get_artists_by_name(
self, artist_name: str, group: bool = False
) -> Optional[list]:
"""Get artist(s) by name.
Args:
@@ -324,7 +370,12 @@ class SRUtil:
delay = 1.0
for attempt in range(max_retries):
try:
artists = await self._safe_api_call(self.streamrip_client.search, media_type="artist", query=artist_name, retries=3)
artists = await self._safe_api_call(
self.streamrip_client.search,
media_type="artist",
query=artist_name,
retries=3,
)
break
except Exception as e:
msg = str(e)
@@ -344,7 +395,9 @@ class SRUtil:
artists_page = artists[0] if len(artists) > 0 else {}
else:
artists_page = artists
artists_items = artists_page.get("items", []) if isinstance(artists_page, dict) else []
artists_items = (
artists_page.get("items", []) if isinstance(artists_page, dict) else []
)
if not artists_items:
return None
artists_out = [
@@ -365,13 +418,19 @@ class SRUtil:
async def get_albums_by_artist_id(self, artist_id: int) -> Optional[list | dict]:
"""Get albums by artist ID. Retry login only on authentication failure. Rate limit and retry on 400/429."""
import asyncio
artist_id_str: str = str(artist_id)
albums_out: list[dict] = []
max_retries = 4
delay = 1.0
for attempt in range(max_retries):
try:
metadata = await self._safe_api_call(self.streamrip_client.get_metadata, artist_id_str, "artist", retries=3)
metadata = await self._safe_api_call(
self.streamrip_client.get_metadata,
artist_id_str,
"artist",
retries=3,
)
break
except Exception as e:
msg = str(e)
@@ -397,10 +456,10 @@ class SRUtil:
if "title" in album and "id" in album and "artists" in album
]
return albums_out
async def get_album_by_name(self, artist: str, album: str) -> Optional[dict]:
"""Get album by artist and album name using artist ID and fuzzy matching. Try first 8 chars, then 12 if no match. Notify on success."""
# Notification moved to add_cover_art.py as requested
for trunc in (8, 12):
search_artist = artist[:trunc]
artists = await self.get_artists_by_name(search_artist)
@@ -429,15 +488,22 @@ class SRUtil:
if best_album and best_album_score >= 85:
return best_album
return None
async def get_cover_by_album_id(self, album_id: int, size: int = 640) -> Optional[str]:
async def get_cover_by_album_id(
self, album_id: int, size: int = 640
) -> Optional[str]:
"""Get cover URL by album ID. Retry login only on authentication failure."""
if size not in [80, 160, 320, 640, 1280]:
return None
album_id_str: str = str(album_id)
for attempt in range(2):
try:
metadata = await self._safe_api_call(self.streamrip_client.get_metadata, item_id=album_id_str, media_type="album", retries=2)
metadata = await self._safe_api_call(
self.streamrip_client.get_metadata,
item_id=album_id_str,
media_type="album",
retries=2,
)
break
except Exception:
if attempt == 1:
@@ -452,7 +518,6 @@ class SRUtil:
cover_url = self._get_tidal_cover_url(cover_id, size)
return cover_url
async def get_tracks_by_album_id(
self, album_id: int, quality: str = "FLAC"
) -> Optional[list | dict]:
@@ -464,7 +529,12 @@ class SRUtil:
"""
album_id_str = str(album_id)
try:
metadata = await self._safe_api_call(self.streamrip_client.get_metadata, item_id=album_id_str, media_type="album", retries=2)
metadata = await self._safe_api_call(
self.streamrip_client.get_metadata,
item_id=album_id_str,
media_type="album",
retries=2,
)
except Exception as e:
logging.warning("get_tracks_by_album_id failed: %s", e)
return None
@@ -486,7 +556,9 @@ class SRUtil:
]
return tracks_out
async def get_tracks_by_artist_song(self, artist: str, song: str, n: int = 0) -> Optional[list]:
async def get_tracks_by_artist_song(
self, artist: str, song: str, n: int = 0
) -> Optional[list]:
"""Get track by artist and song name
Args:
artist (str): The name of the artist.
@@ -496,9 +568,18 @@ class SRUtil:
TODO: Reimplement using StreamRip
"""
try:
search_res = await self._safe_api_call(self.streamrip_client.search, media_type="track", query=f"{artist} - {song}", retries=3)
search_res = await self._safe_api_call(
self.streamrip_client.search,
media_type="track",
query=f"{artist} - {song}",
retries=3,
)
logging.critical("Result: %s", search_res)
return search_res[0].get('items') if search_res and isinstance(search_res, list) else []
return (
search_res[0].get("items")
if search_res and isinstance(search_res, list)
else []
)
except Exception as e:
traceback.print_exc()
logging.critical("Search Exception: %s", str(e))
@@ -529,10 +610,15 @@ class SRUtil:
quality_int = 1
track_id_str: str = str(track_id)
# Ensure client is logged in via safe call when needed inside _safe_api_call
try:
logging.critical("Using quality_int: %s", quality_int)
track = await self._safe_api_call(self.streamrip_client.get_downloadable, track_id=track_id_str, quality=quality_int, retries=3)
track = await self._safe_api_call(
self.streamrip_client.get_downloadable,
track_id=track_id_str,
quality=quality_int,
retries=3,
)
except Exception as e:
logging.warning("get_stream_url_by_track_id failed: %s", e)
return None
@@ -557,7 +643,7 @@ class SRUtil:
metadata = await self.rate_limited_request(
self.streamrip_client.get_metadata, str(track_id), "track"
)
album_id = metadata.get("album", {}).get("id")
album_metadata = None
@@ -567,7 +653,11 @@ class SRUtil:
album_metadata = self.METADATA_ALBUM_CACHE[album_id]
else:
album_metadata = await self.rate_limited_request(
lambda i, t: self._safe_api_call(self.streamrip_client.get_metadata, i, t, retries=2), album_id, "album"
lambda i, t: self._safe_api_call(
self.streamrip_client.get_metadata, i, t, retries=2
),
album_id,
"album",
)
if not album_metadata:
return None
@@ -611,11 +701,12 @@ class SRUtil:
self.MAX_METADATA_RETRIES,
)
# Raise a specific exception so callers can react (e.g. notify)
raise MetadataFetchError(f"Metadata fetch failed permanently for track {track_id} after {self.MAX_METADATA_RETRIES} attempts: {e}")
raise MetadataFetchError(
f"Metadata fetch failed permanently for track {track_id} after {self.MAX_METADATA_RETRIES} attempts: {e}"
)
# If we reach here without returning, raise a generic metadata error
raise MetadataFetchError(f"Metadata fetch failed for track {track_id}")
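
Because permanent failure now surfaces as `MetadataFetchError` rather than a bare `Exception`, callers can react specifically, e.g. with the Discord notifications used in the worker. A minimal sketch of a call site (the method name is illustrative):

    try:
        md = await sr.get_metadata_by_track_id(track_id)  # assumed name of the method above
    except MetadataFetchError as e:
        send_log_to_discord(str(e), "ERROR", target)  # notify, then degrade gracefully
        md = None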
async def download(self, track_id: int, quality: str = "LOSSLESS") -> bool | str:
"""Download track
Args: