- Changed API key validation from `if not _key in self.constants.API_KEYS` to `if _key not in self.constants.API_KEYS` for better readability.

Enhance RadioUtil playlist handling and deduplication

- Added checks to ensure playlists are initialized and not empty.
- Improved deduplication logic to prevent modifying the original playlist during iteration.
- Added logging for duplicate removal and playlist population.

Add cover art handling in rip_background.py

- Implemented functionality to attach album art if provided in metadata.
- Added error handling for cover art download failures.

Introduce unique filename handling in rip_background.py

- Added an `ensure_unique_filename_in_dir` function to prevent overwriting files with the same name.

Refactor SRUtil for improved error handling and metadata fetching

- Introduced `MetadataFetchError` for better error management during metadata retrieval.
- Implemented `_safe_api_call` for resilient API calls with retry logic.
- Enhanced `get_artists_by_name` to optionally group results by artist name.
- Updated various methods to use the new error handling and retry mechanisms.
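The SRUtil changes live in utils/sr_wrapper.py and are not part of the diff below, which covers only rip_background.py. As a rough illustration of the retry pattern described above, here is a minimal sketch assuming a callable-wrapping helper; the real `_safe_api_call` signature, retry budget, and exception taxonomy may differ:

```python
import asyncio
import logging
from typing import Any, Awaitable, Callable, Optional


class MetadataFetchError(Exception):
    """Raised when metadata could not be fetched after all retries."""


async def _safe_api_call(
    call: Callable[[], Awaitable[Any]],
    retries: int = 3,
    backoff: float = 1.0,
) -> Any:
    """Run `call`, retrying transient failures with exponential backoff.

    Sketch only: the retry count and backoff curve are assumptions,
    not the actual utils/sr_wrapper.py implementation.
    """
    last_exc: Optional[Exception] = None
    for attempt in range(1, retries + 1):
        try:
            return await call()
        except Exception as exc:  # the real helper likely catches narrower errors
            last_exc = exc
            logging.warning("API call failed (attempt %d/%d): %s", attempt, retries, exc)
            if attempt < retries:
                await asyncio.sleep(backoff * (2 ** (attempt - 1)))
    # Out of retries: surface a permanent failure for callers to handle.
    raise MetadataFetchError(f"API call failed after {retries} attempts") from last_exc
```

A helper of this shape is what lets `bulk_download` below treat `MetadataFetchError` as a permanent per-track failure instead of retrying forever.

The RadioUtil deduplication fix is likewise in another file. The safe pattern it describes, building a new list rather than removing entries from the playlist while iterating over it, looks roughly like this (`dedupe_playlist` and the artist/song identity key are hypothetical, not RadioUtil's actual names):

```python
import logging


def dedupe_playlist(playlist: list) -> list:
    """Return a deduplicated copy of `playlist` without mutating it."""
    seen = set()
    deduped = []
    for track in playlist:
        # Identity key is an assumption; RadioUtil may compare other fields.
        key = (track.get("artist", "").lower(), track.get("song", "").lower())
        if key in seen:
            logging.info("Removing duplicate track: %r", key)
            continue
        seen.add(key)
        deduped.append(track)
    return deduped
```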
@@ -12,9 +12,9 @@ from typing import Optional
 from urllib.parse import urlparse, unquote
 import aiohttp
 from datetime import datetime, timezone
-from mediafile import MediaFile # type: ignore[import]
+from mediafile import MediaFile, Image, ImageType # type: ignore[import]
 from rq import get_current_job
-from utils.sr_wrapper import SRUtil
+from utils.sr_wrapper import SRUtil, MetadataFetchError
 from dotenv import load_dotenv
 import re
 
@@ -126,6 +126,30 @@ def tag_with_mediafile(file_path: str, meta: dict):
             pass
     if release_date_obj:
         f.date = release_date_obj
+    # Attach album art if provided in meta (synchronous fallback)
+    try:
+        cover_bytes = meta.get("cover_bytes")
+        cover_url = None
+        if not cover_bytes:
+            cover_url = meta.get("cover_art_url") or meta.get("cover_url")
+
+        if not cover_bytes and cover_url:
+            try:
+                import requests
+                resp = requests.get(cover_url, timeout=10)
+                resp.raise_for_status()
+                cover_bytes = resp.content
+            except Exception:
+                cover_bytes = None
+
+        if cover_bytes:
+            try:
+                img = Image(cover_bytes, desc=None, type=ImageType.front)
+                f.images = [img]
+            except Exception:
+                pass
+    except Exception:
+        pass
     f.save()
 
 
@@ -154,6 +178,35 @@ def ensure_unique_path(p: Path) -> Path:
         counter += 1
 
 
+def ensure_unique_filename_in_dir(parent: Path, filename: str) -> Path:
+    """Return a Path in `parent` with a unique filename.
+
+    Handles multi-part extensions like `.tar.gz` so names become
+    `Name (2).tar.gz` instead of `Name.tar (2).tar.gz`.
+    """
+    parent.mkdir(parents=True, exist_ok=True)
+    # special-case .tar.gz
+    if filename.lower().endswith(".tar.gz"):
+        ext = ".tar.gz"
+        base = filename[:-len(ext)]
+    else:
+        p = Path(filename)
+        ext = p.suffix
+        base = p.stem
+
+    existing = {f.name for f in parent.iterdir() if f.is_file()}
+    candidate = f"{base}{ext}"
+    if candidate not in existing:
+        return parent / candidate
+
+    counter = 2
+    while True:
+        candidate = f"{base} ({counter}){ext}"
+        if candidate not in existing:
+            return parent / candidate
+        counter += 1
+
+
 # ---------- bulk_download ----------
 def bulk_download(track_list: list, quality: str = "FLAC"):
     """
@@ -197,6 +250,19 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
     (ROOT_DIR / "completed").mkdir(parents=True, exist_ok=True)
 
     async with aiohttp.ClientSession(headers=HEADERS) as session:
+        # Set up a one-time rate-limit callback to notify on the first 429 seen by SRUtil
+        async def _rate_limit_notify(exc: Exception):
+            try:
+                send_log_to_discord(f"Rate limit observed while fetching metadata: {exc}", "WARNING", target)
+            except Exception:
+                pass
+
+        # attach callback and reset notified flag for this job run
+        try:
+            sr.on_rate_limit = _rate_limit_notify
+            sr._rate_limit_notified = False
+        except Exception:
+            pass
         total = len(track_list or [])
         for i, track_id in enumerate(track_list or []):
             track_info = {"track_id": str(track_id), "status": "Pending", "file_path": None, "error": None, "attempts": 0}
@@ -208,6 +274,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                track_info["attempts"] = attempt
 
                try:
+                   sr.get_cover_by_album_id
                    url = await sr.get_stream_url_by_track_id(track_id, quality)
                    if not url:
                        raise RuntimeError("No stream URL")
@@ -223,7 +290,23 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                            async for chunk in resp.content.iter_chunked(64 * 1024):
                                f.write(chunk)
 
-                   md = await sr.get_metadata_by_track_id(track_id) or {}
+                   try:
+                       md = await sr.get_metadata_by_track_id(track_id) or {}
+                   except MetadataFetchError as me:
+                       # Permanent metadata failure — notify and continue (mark track failed)
+                       msg = f"Metadata permanently failed for track {track_id}: {me}"
+                       try:
+                           send_log_to_discord(msg, "ERROR", target)
+                       except Exception:
+                           pass
+                       track_info["status"] = "Failed"
+                       track_info["error"] = str(me)
+                       per_track_meta.append(track_info)
+                       if job:
+                           job.meta["tracks"] = per_track_meta
+                           job.meta["progress"] = int(((i + 1) / total) * 100)
+                           job.save_meta()
+                       break
                    artist_raw = md.get("artist") or "Unknown Artist"
                    album_raw = md.get("album") or "Unknown Album"
                    title_raw = md.get("title") or f"Track {track_id}"
@@ -237,8 +320,104 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                    album_dir.mkdir(parents=True, exist_ok=True)
                    final_file = ensure_unique_path(album_dir / f"{title}{ext}")
 
-                   tag_with_mediafile(str(tmp_file), md)
+                   # Move file into final location first (tags will be updated on moved file)
                    tmp_file.rename(final_file)
+
+                   # Try to fetch cover art via SRUtil (use album_id from metadata)
+                   try:
+                       album_field = md.get("album")
+                       album_id = md.get("album_id") or (album_field.get("id") if isinstance(album_field, dict) else None)
+                   except Exception:
+                       album_id = None
+
+                   if album_id:
+                       try:
+                           cover_url = await sr.get_cover_by_album_id(album_id, size=640)
+                       except Exception:
+                           cover_url = None
+                   else:
+                       cover_url = md.get("cover_url")
+
+                   # Embed tags + artwork using music_tag if available, falling back to mediafile tagging
+                   embedded = False
+                   try:
+                       if cover_url:
+                           try:
+                               timeout = aiohttp.ClientTimeout(total=15)
+                               async with session.get(cover_url, timeout=timeout) as img_resp:
+                                   if img_resp.status == 200:
+                                       img_bytes = await img_resp.read()
+                                   else:
+                                       img_bytes = None
+                                       # Notify Discord about failed cover download (HTTP error)
+                                       try:
+                                           send_log_to_discord(
+                                               f"Cover download HTTP `{img_resp.status}` for track `{track_id} album_id={album_id} url={cover_url} artist={artist} album={album}`",
+                                               "WARNING",
+                                               target,
+                                           )
+                                       except Exception:
+                                           pass
+                           except Exception as e:
+                               img_bytes = None
+                               # Notify Discord about exception during cover download
+                               try:
+                                   send_log_to_discord(
+                                       f"Cover download exception for track `{track_id} album_id={album_id} url={cover_url} artist={artist} album={album}`: `{e}`",
+                                       "WARNING",
+                                       target,
+                                   )
+                               except Exception:
+                                   pass
+                       else:
+                           img_bytes = None
+
+                       # Prefer music_tag if available (keeps compatibility with add_cover_art.py)
+                       try:
+                           from music_tag import load_file as mt_load_file # type: ignore
+                           try:
+                               mf = mt_load_file(str(final_file))
+                               # set basic tags
+                               if md.get('title'):
+                                   mf['title'] = md.get('title')
+                               if md.get('artist'):
+                                   mf['artist'] = md.get('artist')
+                               if md.get('album'):
+                                   mf['album'] = md.get('album')
+                               tracknum = md.get('track_number')
+                               if tracknum is not None:
+                                   try:
+                                       mf['tracknumber'] = int(tracknum)
+                                   except Exception:
+                                       pass
+                               if img_bytes:
+                                   mf['artwork'] = img_bytes
+                               mf.save()
+                               embedded = True
+                           except Exception:
+                               embedded = False
+                       except Exception:
+                           embedded = False
+
+                       # If music_tag not available or failed, fallback to mediafile tagging
+                       if not embedded:
+                           # If we had a cover_url but no bytes, log a warning to Discord
+                           try:
+                               if cover_url and not img_bytes:
+                                   send_log_to_discord(
+                                       f"Cover art not available for track {track_id} album_id={album_id} url={cover_url}",
+                                       "WARNING",
+                                       target,
+                                   )
+                           except Exception:
+                               pass
+                           tag_with_mediafile(str(final_file), md)
+                   except Exception:
+                       # Ensure at least the basic tags are written
+                       try:
+                           tag_with_mediafile(str(final_file), md)
+                       except Exception:
+                           pass
                    tmp_file = None
 
                    track_info["status"] = "Success"
@@ -298,8 +477,16 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
             artist = "Unknown Artist"
         artist_counts[artist] = artist_counts.get(artist, 0) + 1
     top_artist = sorted(artist_counts.items(), key=lambda kv: (-kv[1], kv[0]))[0][0] if artist_counts else "Unknown Artist"
-    combined_artist = sanitize_filename(top_artist)
-    staged_tarball = staging_root / f"{combined_artist}.tar.gz"
+    # Prefer `job.meta['target']` when provided by the enqueuer. Fall back to the top artist.
+    target_name = None
+    try:
+        if job and job.meta:
+            target_name = job.meta.get("target")
+    except Exception:
+        target_name = None
+
+    base_label = sanitize_filename(target_name) if target_name else sanitize_filename(top_artist)
+    staged_tarball = staging_root / f"{base_label}.tar.gz"
 
     counter = 1
     base_name = staged_tarball.stem
@@ -307,8 +494,10 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
         counter += 1
         staged_tarball = staging_root / f"{base_name} ({counter}).tar.gz"
 
-    final_tarball = ROOT_DIR / "completed" / quality / staged_tarball.name
-    final_tarball.parent.mkdir(parents=True, exist_ok=True)
+    final_dir = ROOT_DIR / "completed" / quality
+    final_dir.mkdir(parents=True, exist_ok=True)
+    # Ensure we don't overwrite an existing final tarball. Preserve `.tar.gz` style.
+    final_tarball = ensure_unique_filename_in_dir(final_dir, staged_tarball.name)
 
     if job:
         job.meta["status"] = "Compressing"
@@ -317,7 +506,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
     logging.info("Creating tarball: %s", staged_tarball)
     await discord_notify(DISCORD_WEBHOOK,
                          title=f"Compressing: Job {job_id}",
-                         description=f"Creating tarball (`{len(track_list)}` track(s)).\nStaging path: {staged_tarball}",
+                         description=f"Creating tarball: `{len(all_final_files)}` track(s).\nStaging path: {staged_tarball}",
                          color=0xFFA500,
                          target=target)
     try:
@@ -366,10 +555,12 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
         job.save_meta()
 
     # Job completed Discord message
+    completed = len(all_final_files)
+    failed = (len(track_list) - completed)
     await discord_notify(
         DISCORD_WEBHOOK,
         title=f"Job Completed: {job_id}",
-        description=f"Processed `{len(track_list)}` track(s). Tarball: `{final_tarball}`",
+        description=f"Processed `{len(track_list)}` track(s).\nCompleted: `{completed}`\nFailed: `{failed}`\nTarball: `{final_tarball}`",
         target=target,
         color=0x00FF00
     )
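For reference, the `ensure_unique_filename_in_dir` helper added above preserves multi-part extensions when disambiguating names. A quick usage sketch (the directory and filename are hypothetical, and the function is assumed importable from the module in this diff):

```python
from pathlib import Path

# With "Artist.tar.gz" already present in ./downloads:
target = ensure_unique_filename_in_dir(Path("downloads"), "Artist.tar.gz")
print(target.name)  # "Artist (2).tar.gz", not "Artist.tar (2).tar.gz"
```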