- Changed API key validation from `if not _key in self.constants.API_KEYS` to `if _key not in self.constants.API_KEYS` for better readability.

Enhance RadioUtil playlist handling and deduplication
- Added checks to ensure playlists are initialized and not empty.
- Improved deduplication logic to prevent modifying the original playlist during iteration.
- Added logging for duplicate removal and playlist population.

Add cover art handling in rip_background.py
- Implemented functionality to attach album art if provided in metadata.
- Added error handling for cover art download failures.

Introduce unique filename handling in rip_background.py
- Added `ensure_unique_filename_in_dir` function to prevent overwriting files with the same name.

Refactor SRUtil for improved error handling and metadata fetching
- Introduced `MetadataFetchError` for better error management during metadata retrieval.
- Implemented `_safe_api_call` for resilient API calls with retry logic.
- Enhanced `get_artists_by_name` to optionally group results by artist name.
- Updated various methods to utilize the new error handling and retry mechanisms.
.gitignore (vendored)
@@ -29,5 +29,7 @@ up.py
job_review.py
check_missing.py
**/auth/*
test/db_stats.py
test/report/*
.gitignore
.env
@@ -22,7 +22,7 @@ from fastapi import (
    HTTPException,
    Depends)
from fastapi_throttle import RateLimiter
-from fastapi.responses import RedirectResponse, JSONResponse
+from fastapi.responses import RedirectResponse, JSONResponse, FileResponse


class Radio(FastAPI):
    """Radio Endpoints"""
@@ -274,9 +274,8 @@ class Radio(FastAPI):
                    track_id=track_id
                )
                if not album_art:
-                   return RedirectResponse(
-                       url="https://codey.lol/images/radio_art_default.jpg",
-                       status_code=302,
+                   return FileResponse(
+                       path="/var/www/codey.lol/new/public/images/radio_art_default.jpg",
                    )
                return Response(content=album_art, media_type="image/png")
            except Exception as e:
@@ -80,6 +80,9 @@ class RIP(FastAPI):
        tracks_in = job.meta.get("tracks_in")
        tracks_out = len(job.meta.get("tracks", []))
+       # `utils/rip_background.py` sets per-track status to 'Success' or 'Failed'
+       # so check for 'success' case-insensitively and count matches.
+       succeeded_tracks = len([t for t in job.meta.get("tracks", []) if str(t.get("status", "")).lower() == "success"])

        return {
            "id": job.id,
@@ -90,7 +93,7 @@ class RIP(FastAPI):
            "started_at": job.started_at,
            "ended_at": job.ended_at,
            "progress": progress,
-           "tracks": f"{tracks_out} / {tracks_in}"
+           "tracks": f"{succeeded_tracks} / {tracks_in}"
            if isinstance(tracks_in, int)
            else tracks_out,
            "target": job.meta.get("target"),
@@ -101,7 +104,10 @@ class RIP(FastAPI):
        self, artist: str, request: Request, user=Depends(get_current_user)
    ) -> Response:
        """Get artists by name"""
-       artists = await self.trip_util.get_artists_by_name(artist)
+       # support optional grouping to return one primary per display name
+       # with `alternatives` for disambiguation (use ?group=true)
+       group = bool(request.query_params.get("group", False))
+       artists = await self.trip_util.get_artists_by_name(artist, group=group)
        if not artists:
            return Response(status_code=404, content="Not found")
        return JSONResponse(content=artists)
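For reference, a minimal sketch of how a client might call the updated endpoint with grouping enabled. The host, route path, and response handling are assumptions for illustration; only the handler body is visible in this diff:

    import asyncio
    import aiohttp

    async def fetch_artists(artist: str):
        # Hypothetical URL; the real route prefix is not shown in the diff.
        url = f"https://api.example.com/rip/artists/{artist}"
        async with aiohttp.ClientSession() as session:
            # ?group=true asks for one primary entry per display name,
            # with an `alternatives` list for disambiguation.
            async with session.get(url, params={"group": "true"}) as resp:
                if resp.status == 404:
                    return None
                return await resp.json()

    asyncio.run(fetch_artists("Ren"))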
@@ -176,7 +182,7 @@ class RIP(FastAPI):
        job_timeout=14400,
        failure_ttl=86400,
        result_ttl=-1,
-       retry=Retry(max=1, interval=[30]),
+       # retry=Retry(max=1, interval=[30]),
        meta={
            "progress": 0,
            "status": "Queued",
File diff suppressed because it is too large
@@ -10,13 +10,16 @@ logger.setLevel(logging.CRITICAL)
async def main():
    sr = SRUtil()
    artist, album = "Kadavar - The Sacrament Of Sin".split(" - ")
-   search_res = await sr.get_album_by_name(artist[:8], album)
-   logging.critical("Search result: %s", search_res)
-   album = search_res
-   _cover = await sr.get_cover_by_album_id(album.get('id'), 640)
-   # cover = sr._get_tidal_cover_url(album.get('cover'), 640)
-   logging.critical("Result: %s, Cover: %s", album, _cover)
+   artist_search = await sr.get_artists_by_name("Ren")
+   # logging.critical("Artist search: %s", artist_search)
+   res = [dict(x) for x in artist_search if x.get('popularity', 0) and x.get('artist').lower() == 'ren']
+   logging.critical("Results: %s", res)
+   # search_res = await sr.get_album_by_name(artist[:8], album)
+   # logging.critical("Search result: %s", search_res)
+   # album = search_res
+   # _cover = await sr.get_cover_by_album_id(album.get('id'), 640)
+   # # cover = sr._get_tidal_cover_url(album.get('cover'), 640)
+   # logging.critical("Result: %s, Cover: %s", album, _cover)
    return
util.py
@@ -40,7 +40,7 @@ class Utilities:
        _key: str = key.split("Bearer ", maxsplit=1)[1].strip()

-       if not _key in self.constants.API_KEYS:
+       if _key not in self.constants.API_KEYS:
            return False

        if req_type == 2:
@@ -35,6 +35,7 @@ class RadioUtil:
        self.gpt = gpt.GPT(self.constants)
        self.ls_uri: str = self.constants.LS_URI
        self.redis_client = redis.Redis(password=private.REDIS_PW)
+       self.DEDUPE_PLAYLISTS: bool = True
        self.sqlite_exts: list[str] = [
            "/home/kyle/api/solibs/spellfix1.cpython-311-x86_64-linux-gnu.so"
        ]
@@ -392,41 +393,69 @@ class RadioUtil:
        for playlist in self.playlists:
            playlist_redis_key: str = f"playlist:{playlist}"
            _playlist = await self.redis_client.json().get(playlist_redis_key)  # type: ignore
+           # Ensure we always have a list to work with
+           if not _playlist:
+               logging.warning("No playlist found in redis for %s, skipping", playlist)
+               self.active_playlist[playlist] = []
+               continue
+
+           # Make sure playlist key exists
            if playlist not in self.active_playlist.keys():
                self.active_playlist[playlist] = []
-           random.shuffle(_playlist)
-           self.active_playlist[playlist] = [
-               {

+           # Shuffle a copy so we don't mutate the underlying redis object
+           try:
+               shuffled = list(_playlist)
+               random.shuffle(shuffled)
+           except Exception:
+               shuffled = _playlist
+
+           # Build a fresh list rather than modifying in-place (prevents duplication)
+           built: list[dict] = []
+           for r in shuffled:
+               try:
+                   item = {
                        "uuid": str(uuid().hex),
-                       "id": r["id"],
-                       "artist": double_space.sub(" ", r["artist"]).strip(),
-                       "song": double_space.sub(" ", r["song"]).strip(),
-                       "album": double_space.sub(" ", r["album"]).strip(),
-                       "genre": r["genre"] if r["genre"] else "Not Found",
-                       "artistsong": double_space.sub(
-                           " ", r["artistdashsong"]
-                       ).strip(),
-                       "file_path": r["file_path"],
-                       "duration": r["duration"],
-               } for r in _playlist
-               if r not in self.active_playlist[playlist]
-           ]
+                       "id": r.get("id"),
+                       "artist": double_space.sub(" ", (r.get("artist") or "")).strip(),
+                       "song": double_space.sub(" ", (r.get("song") or "")).strip(),
+                       "album": double_space.sub(" ", (r.get("album") or "")).strip(),
+                       "genre": r.get("genre") if r.get("genre") else "Not Found",
+                       "artistsong": double_space.sub(" ", (r.get("artistdashsong") or "")).strip(),
+                       "file_path": r.get("file_path"),
+                       "duration": r.get("duration"),
+                   }
+                   built.append(item)
+               except Exception:
+                   logging.debug("Skipping malformed playlist entry for %s: %s", playlist, r)

+           self.active_playlist[playlist] = built
+           logging.info(
+               "Populated playlist: %s with %s items",
+               playlist, len(self.active_playlist[playlist]),
+           )

            """Dedupe"""
-           logging.info("Removing duplicate tracks...")
-           dedupe_processed = []
+           if self.DEDUPE_PLAYLISTS:
+               logging.info("Removing duplicate tracks (by file_path only)...")
+               dedupe_processed: set[str] = set()
+               deduped_list: list[dict] = []
                for item in self.active_playlist[playlist]:
-                   artistsongabc: str = non_alnum.sub("", item.get("artistsong", ""))
-                   if not artistsongabc:
-                       logging.info("Missing artistsong: %s", item)
+                   fp = item.get("file_path")
+                   if not fp:
+                       # If no file_path available, skip the item (can't dedupe reliably)
+                       logging.info("Skipping item without file_path during dedupe: %s", item)
                        continue
-                   if artistsongabc in dedupe_processed:
-                       self.active_playlist[playlist].remove(item)
-                   dedupe_processed.append(artistsongabc)
+                   key = fp

+                   if key in dedupe_processed:
+                       continue
+                   dedupe_processed.add(key)
+                   deduped_list.append(item)

+               self.active_playlist[playlist] = deduped_list
+           else:
+               logging.warning("Dupe removal disabled")

            logging.info(
                "Duplicates for playlist: %s removed. New playlist size: %s",
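The rewritten dedupe pass above builds a fresh list instead of calling `remove()` on the list being iterated, which is what previously skipped entries. A minimal standalone sketch of the same pattern, with invented track dicts:

    def dedupe_by_file_path(items: list[dict]) -> list[dict]:
        # Removing from a list while iterating it skips the element after
        # each removal; collecting survivors into a new list avoids that.
        seen: set[str] = set()
        out: list[dict] = []
        for item in items:
            fp = item.get("file_path")
            if not fp:
                continue  # no reliable dedupe key
            if fp in seen:
                continue
            seen.add(fp)
            out.append(item)
        return out

    tracks = [{"file_path": "a.flac"}, {"file_path": "a.flac"}, {"file_path": "b.flac"}]
    assert [t["file_path"] for t in dedupe_by_file_path(tracks)] == ["a.flac", "b.flac"]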
@@ -12,9 +12,9 @@ from typing import Optional
from urllib.parse import urlparse, unquote
import aiohttp
from datetime import datetime, timezone
-from mediafile import MediaFile  # type: ignore[import]
+from mediafile import MediaFile, Image, ImageType  # type: ignore[import]
from rq import get_current_job
-from utils.sr_wrapper import SRUtil
+from utils.sr_wrapper import SRUtil, MetadataFetchError
from dotenv import load_dotenv
import re
@@ -126,6 +126,30 @@ def tag_with_mediafile(file_path: str, meta: dict):
            pass
    if release_date_obj:
        f.date = release_date_obj
+   # Attach album art if provided in meta (synchronous fallback)
+   try:
+       cover_bytes = meta.get("cover_bytes")
+       cover_url = None
+       if not cover_bytes:
+           cover_url = meta.get("cover_art_url") or meta.get("cover_url")
+
+       if not cover_bytes and cover_url:
+           try:
+               import requests
+               resp = requests.get(cover_url, timeout=10)
+               resp.raise_for_status()
+               cover_bytes = resp.content
+           except Exception:
+               cover_bytes = None
+
+       if cover_bytes:
+           try:
+               img = Image(cover_bytes, desc=None, type=ImageType.front)
+               f.images = [img]
+           except Exception:
+               pass
+   except Exception:
+       pass
    f.save()
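A sketch of how the updated `tag_with_mediafile` consumes the new metadata fields (the file path and values are invented; the function reads `cover_bytes` first, then falls back to `cover_art_url`/`cover_url`):

    meta = {
        "title": "Some Song",
        "artist": "Some Artist",
        "album": "Some Album",
        # Option 1: raw image bytes, used directly
        # "cover_bytes": open("cover.jpg", "rb").read(),
        # Option 2: a URL, fetched synchronously with requests as a fallback
        "cover_art_url": "https://example.com/cover.jpg",
    }
    tag_with_mediafile("/tmp/example.flac", meta)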
@@ -154,6 +178,35 @@ def ensure_unique_path(p: Path) -> Path:
        counter += 1


+def ensure_unique_filename_in_dir(parent: Path, filename: str) -> Path:
+    """Return a Path in `parent` with a unique filename.
+
+    Handles multi-part extensions like `.tar.gz` so names become
+    `Name (2).tar.gz` instead of `Name.tar (2).tar.gz`.
+    """
+    parent.mkdir(parents=True, exist_ok=True)
+    # special-case .tar.gz
+    if filename.lower().endswith(".tar.gz"):
+        ext = ".tar.gz"
+        base = filename[:-len(ext)]
+    else:
+        p = Path(filename)
+        ext = p.suffix
+        base = p.stem
+
+    existing = {f.name for f in parent.iterdir() if f.is_file()}
+    candidate = f"{base}{ext}"
+    if candidate not in existing:
+        return parent / candidate
+
+    counter = 2
+    while True:
+        candidate = f"{base} ({counter}){ext}"
+        if candidate not in existing:
+            return parent / candidate
+        counter += 1


# ---------- bulk_download ----------
def bulk_download(track_list: list, quality: str = "FLAC"):
    """
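A usage sketch for `ensure_unique_filename_in_dir`, using a temporary directory to show the multi-part extension handling:

    import tempfile
    from pathlib import Path

    with tempfile.TemporaryDirectory() as d:
        parent = Path(d)
        (parent / "Artist.tar.gz").touch()
        p = ensure_unique_filename_in_dir(parent, "Artist.tar.gz")
        print(p.name)  # "Artist (2).tar.gz", not "Artist.tar (2).gz"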
@@ -197,6 +250,19 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
    (ROOT_DIR / "completed").mkdir(parents=True, exist_ok=True)

    async with aiohttp.ClientSession(headers=HEADERS) as session:
+       # Set up a one-time rate-limit callback to notify on the first 429 seen by SRUtil
+       async def _rate_limit_notify(exc: Exception):
+           try:
+               send_log_to_discord(f"Rate limit observed while fetching metadata: {exc}", "WARNING", target)
+           except Exception:
+               pass
+
+       # attach callback and reset notified flag for this job run
+       try:
+           sr.on_rate_limit = _rate_limit_notify
+           sr._rate_limit_notified = False
+       except Exception:
+           pass
        total = len(track_list or [])
        for i, track_id in enumerate(track_list or []):
            track_info = {"track_id": str(track_id), "status": "Pending", "file_path": None, "error": None, "attempts": 0}
@@ -208,6 +274,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                track_info["attempts"] = attempt

                try:
+                   sr.get_cover_by_album_id
                    url = await sr.get_stream_url_by_track_id(track_id, quality)
                    if not url:
                        raise RuntimeError("No stream URL")
@@ -223,7 +290,23 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                    async for chunk in resp.content.iter_chunked(64 * 1024):
                        f.write(chunk)

-                   md = await sr.get_metadata_by_track_id(track_id) or {}
+                   try:
+                       md = await sr.get_metadata_by_track_id(track_id) or {}
+                   except MetadataFetchError as me:
+                       # Permanent metadata failure — notify and continue (mark track failed)
+                       msg = f"Metadata permanently failed for track {track_id}: {me}"
+                       try:
+                           send_log_to_discord(msg, "ERROR", target)
+                       except Exception:
+                           pass
+                       track_info["status"] = "Failed"
+                       track_info["error"] = str(me)
+                       per_track_meta.append(track_info)
+                       if job:
+                           job.meta["tracks"] = per_track_meta
+                           job.meta["progress"] = int(((i + 1) / total) * 100)
+                           job.save_meta()
+                       break
                    artist_raw = md.get("artist") or "Unknown Artist"
                    album_raw = md.get("album") or "Unknown Album"
                    title_raw = md.get("title") or f"Track {track_id}"
@@ -237,8 +320,104 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                    album_dir.mkdir(parents=True, exist_ok=True)
                    final_file = ensure_unique_path(album_dir / f"{title}{ext}")

-                   tag_with_mediafile(str(tmp_file), md)
+                   # Move file into final location first (tags will be updated on moved file)
                    tmp_file.rename(final_file)

+                   # Try to fetch cover art via SRUtil (use album_id from metadata)
+                   try:
+                       album_field = md.get("album")
+                       album_id = md.get("album_id") or (album_field.get("id") if isinstance(album_field, dict) else None)
+                   except Exception:
+                       album_id = None
+
+                   if album_id:
+                       try:
+                           cover_url = await sr.get_cover_by_album_id(album_id, size=640)
+                       except Exception:
+                           cover_url = None
+                   else:
+                       cover_url = md.get("cover_url")
+
+                   # Embed tags + artwork using music_tag if available, falling back to mediafile tagging
+                   embedded = False
+                   try:
+                       if cover_url:
+                           try:
+                               timeout = aiohttp.ClientTimeout(total=15)
+                               async with session.get(cover_url, timeout=timeout) as img_resp:
+                                   if img_resp.status == 200:
+                                       img_bytes = await img_resp.read()
+                                   else:
+                                       img_bytes = None
+                                       # Notify Discord about failed cover download (HTTP error)
+                                       try:
+                                           send_log_to_discord(
+                                               f"Cover download HTTP `{img_resp.status}` for track `{track_id} album_id={album_id} url={cover_url} artist={artist} album={album}`",
+                                               "WARNING",
+                                               target,
+                                           )
+                                       except Exception:
+                                           pass
+                           except Exception as e:
+                               img_bytes = None
+                               # Notify Discord about exception during cover download
+                               try:
+                                   send_log_to_discord(
+                                       f"Cover download exception for track `{track_id} album_id={album_id} url={cover_url} artist={artist} album={album}`: `{e}`",
+                                       "WARNING",
+                                       target,
+                                   )
+                               except Exception:
+                                   pass
+                       else:
+                           img_bytes = None
+
+                       # Prefer music_tag if available (keeps compatibility with add_cover_art.py)
+                       try:
+                           from music_tag import load_file as mt_load_file  # type: ignore
+                           try:
+                               mf = mt_load_file(str(final_file))
+                               # set basic tags
+                               if md.get('title'):
+                                   mf['title'] = md.get('title')
+                               if md.get('artist'):
+                                   mf['artist'] = md.get('artist')
+                               if md.get('album'):
+                                   mf['album'] = md.get('album')
+                               tracknum = md.get('track_number')
+                               if tracknum is not None:
+                                   try:
+                                       mf['tracknumber'] = int(tracknum)
+                                   except Exception:
+                                       pass
+                               if img_bytes:
+                                   mf['artwork'] = img_bytes
+                               mf.save()
+                               embedded = True
+                           except Exception:
+                               embedded = False
+                       except Exception:
+                           embedded = False
+
+                       # If music_tag not available or failed, fallback to mediafile tagging
+                       if not embedded:
+                           # If we had a cover_url but no bytes, log a warning to Discord
+                           try:
+                               if cover_url and not img_bytes:
+                                   send_log_to_discord(
+                                       f"Cover art not available for track {track_id} album_id={album_id} url={cover_url}",
+                                       "WARNING",
+                                       target,
+                                   )
+                           except Exception:
+                               pass
+                           tag_with_mediafile(str(final_file), md)
+                   except Exception:
+                       # Ensure at least the basic tags are written
+                       try:
+                           tag_with_mediafile(str(final_file), md)
+                       except Exception:
+                           pass
                    tmp_file = None

                    track_info["status"] = "Success"
@@ -298,8 +477,16 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                artist = "Unknown Artist"
            artist_counts[artist] = artist_counts.get(artist, 0) + 1
        top_artist = sorted(artist_counts.items(), key=lambda kv: (-kv[1], kv[0]))[0][0] if artist_counts else "Unknown Artist"
-       combined_artist = sanitize_filename(top_artist)
-       staged_tarball = staging_root / f"{combined_artist}.tar.gz"
+       # Prefer `job.meta['target']` when provided by the enqueuer. Fall back to the top artist.
+       target_name = None
+       try:
+           if job and job.meta:
+               target_name = job.meta.get("target")
+       except Exception:
+           target_name = None
+
+       base_label = sanitize_filename(target_name) if target_name else sanitize_filename(top_artist)
+       staged_tarball = staging_root / f"{base_label}.tar.gz"

        counter = 1
        base_name = staged_tarball.stem
@@ -307,8 +494,10 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
            counter += 1
            staged_tarball = staging_root / f"{base_name} ({counter}).tar.gz"

-       final_tarball = ROOT_DIR / "completed" / quality / staged_tarball.name
-       final_tarball.parent.mkdir(parents=True, exist_ok=True)
+       final_dir = ROOT_DIR / "completed" / quality
+       final_dir.mkdir(parents=True, exist_ok=True)
+       # Ensure we don't overwrite an existing final tarball. Preserve `.tar.gz` style.
+       final_tarball = ensure_unique_filename_in_dir(final_dir, staged_tarball.name)

        if job:
            job.meta["status"] = "Compressing"
@@ -317,7 +506,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
        logging.info("Creating tarball: %s", staged_tarball)
        await discord_notify(DISCORD_WEBHOOK,
            title=f"Compressing: Job {job_id}",
-           description=f"Creating tarball (`{len(track_list)}` track(s)).\nStaging path: {staged_tarball}",
+           description=f"Creating tarball: `{len(all_final_files)}` track(s).\nStaging path: {staged_tarball}",
            color=0xFFA500,
            target=target)
        try:
@@ -366,10 +555,12 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
            job.save_meta()

        # Job completed Discord message
+       completed = len(all_final_files)
+       failed = (len(track_list) - completed)
        await discord_notify(
            DISCORD_WEBHOOK,
            title=f"Job Completed: {job_id}",
-           description=f"Processed `{len(track_list)}` track(s). Tarball: `{final_tarball}`",
+           description=f"Processed `{len(track_list)}` track(s).\nCompleted: `{completed}`\nFailed: `{failed}`\nTarball: `{final_tarball}`",
            target=target,
            color=0x00FF00
        )
@@ -1,18 +1,9 @@
-from typing import Optional, Any
+from typing import Optional, Any, Callable
from uuid import uuid4
from urllib.parse import urlparse
import hashlib
import traceback
import logging
-# Suppress all logging output from this module and its children
-for name in [__name__, "utils.sr_wrapper"]:
-    logger = logging.getLogger(name)
-    logger.setLevel(logging.CRITICAL)
-    logger.propagate = False
-    for handler in logger.handlers:
-        handler.setLevel(logging.CRITICAL)
-# Also set the root logger to CRITICAL as a last resort (may affect global logging)
-logging.getLogger().setLevel(logging.CRITICAL)
import random
import asyncio
import os
@@ -24,6 +15,21 @@ from dotenv import load_dotenv
from rapidfuzz import fuzz


+class MetadataFetchError(Exception):
+    """Raised when metadata fetch permanently fails after retries."""
+
+
+# Suppress all logging output from this module and its children
+for name in [__name__, "utils.sr_wrapper"]:
+    logger = logging.getLogger(name)
+    logger.setLevel(logging.CRITICAL)
+    logger.propagate = False
+    for handler in logger.handlers:
+        handler.setLevel(logging.CRITICAL)
+# Also set the root logger to CRITICAL as a last resort (may affect global logging)
+logging.getLogger().setLevel(logging.CRITICAL)


load_dotenv()
@@ -65,6 +71,10 @@ class SRUtil:
        self.MAX_METADATA_RETRIES = 5
        self.METADATA_ALBUM_CACHE: dict[str, dict] = {}
        self.RETRY_DELAY = 1.0  # seconds between retries
+       # Callback invoked when a 429 is first observed. Signature: (Exception) -> None or async
+       self.on_rate_limit: Optional[Callable[[Exception], Any]] = None
+       # Internal flag to avoid repeated notifications for the same runtime
+       self._rate_limit_notified = False

    async def rate_limited_request(self, func, *args, **kwargs):
        async with self.METADATA_SEMAPHORE:
@@ -73,9 +83,70 @@ class SRUtil:
            if elapsed < self.METADATA_RATE_LIMIT:
                await asyncio.sleep(self.METADATA_RATE_LIMIT - elapsed)
            result = await func(*args, **kwargs)
-           self.last_request_time = time.time()
+           self.LAST_METADATA_REQUEST = time.time()
            return result

+   async def _safe_api_call(self, func, *args, retries: int = 2, backoff: float = 0.5, **kwargs):
+       """Call an async API function with resilient retry behavior.
+
+       - On AttributeError: attempt a `login()` once and retry.
+       - On connection-related errors (aiohttp.ClientError, OSError, Timeout):
+         attempt a `login()` and retry up to `retries` times.
+       - On 400/429 responses (message contains '400' or '429'): retry with backoff
+         without triggering login (to avoid excessive logins).
+
+       Returns the result or raises the last exception.
+       """
+       last_exc: Optional[Exception] = None
+       for attempt in range(retries):
+           try:
+               return await func(*args, **kwargs)
+           except AttributeError as e:
+               # Probably missing/closed client internals: try re-login once
+               last_exc = e
+               try:
+                   await self.streamrip_client.login()
+               except Exception:
+                   pass
+               continue
+           except Exception as e:
+               last_exc = e
+               msg = str(e)
+               # Treat 400/429 as transient rate-limit/server responses — retry without login
+               if ("400" in msg or "429" in msg) and attempt < retries - 1:
+                   # Notify on the first observed 429 (if a callback is set)
+                   try:
+                       if "429" in msg and not self._rate_limit_notified and self.on_rate_limit:
+                           self._rate_limit_notified = True
+                           try:
+                               if asyncio.iscoroutinefunction(self.on_rate_limit):
+                                   asyncio.create_task(self.on_rate_limit(e))
+                               else:
+                                   loop = asyncio.get_running_loop()
+                                   loop.run_in_executor(None, self.on_rate_limit, e)
+                           except Exception:
+                               pass
+                   except Exception:
+                       pass
+                   await asyncio.sleep(backoff * (2 ** attempt))
+                   continue
+
+               # Connection related errors — try to re-login then retry
+               if isinstance(e, (aiohttp.ClientError, OSError, ConnectionError, asyncio.TimeoutError)) or "Connection" in msg or "closed" in msg.lower():
+                   try:
+                       await self.streamrip_client.login()
+                   except Exception:
+                       pass
+                   if attempt < retries - 1:
+                       await asyncio.sleep(backoff * (2 ** attempt))
+                       continue
+
+       # Unhandled / permanent error: re-raise after loop ends
+       # If we reach here, raise the last exception
+       if last_exc:
+           raise last_exc
+       return None

    def is_fuzzy_match(self, expected, actual, threshold=80):
        if not expected or not actual:
            return False
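The call sites below all funnel through `_safe_api_call`; a hedged sketch of the calling convention inside an SRUtil method (the item id is invented for illustration):

    # Retries transient 400/429 with exponential backoff (0.5s, 1s, ...),
    # re-logins on AttributeError/connection errors, re-raises permanent failures.
    metadata = await self._safe_api_call(
        self.streamrip_client.get_metadata,
        item_id="12345",
        media_type="album",
        retries=3,
        backoff=0.5,
    )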
@@ -95,6 +166,65 @@ class SRUtil:
            deduped[norm] = entry
        return list(deduped.values())

+   def group_artists_by_name(self, entries: list[dict], query: Optional[str] = None) -> list[dict]:
+       """
+       Group artist entries by normalized display name and pick a primary candidate per name.
+
+       Returns a list of dicts where each dict contains the primary candidate plus
+       an `alternatives` list for other artists that share the same display name.
+
+       Scoring/selection policy:
+         - If `query` is provided, prefer an exact case-insensitive match.
+         - Otherwise prefer the entry with highest fuzzy match to `query`.
+         - Use `popularity` as a tiebreaker.
+
+       This keeps a single line in an autocomplete dropdown while preserving the
+       alternate choices (IDs) so the UI can show a submenu or a secondary picker.
+       """
+       buckets: dict[str, list[dict]] = {}
+       for e in entries:
+           name = e.get("artist", "")
+           norm = name.strip().lower()
+           buckets.setdefault(norm, []).append(e)
+
+       out: list[dict] = []
+       for norm, items in buckets.items():
+           if len(items) == 1:
+               primary = items[0]
+               alternatives: list[dict] = []
+           else:
+               # Score each item
+               scored = []
+               for it in items:
+                   score = 0.0
+                   if query:
+                       try:
+                           if it.get("artist", "").strip().lower() == query.strip().lower():
+                               score += 1000.0
+                           else:
+                               score += float(fuzz.token_set_ratio(query, it.get("artist", "")))
+                       except Exception:
+                           score += 0.0
+                   # add small weight for popularity if present
+                   pop = it.get("popularity") or 0
+                   try:
+                       score += float(pop) / 100.0
+                   except Exception:
+                       pass
+                   scored.append((score, it))
+               scored.sort(key=lambda x: x[0], reverse=True)
+               primary = scored[0][1]
+               alternatives = [it for _, it in scored[1:]]
+
+           out.append({
+               "artist": primary.get("artist"),
+               "id": primary.get("id"),
+               "popularity": primary.get("popularity"),
+               "alternatives": alternatives,
+           })
+
+       return out

    def format_duration(self, seconds):
        if not seconds:
            return None
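Illustrative input and output for `group_artists_by_name` (entries invented; `Ren` and `REN` share a normalized name, so the more popular exact match becomes primary):

    entries = [
        {"artist": "Ren", "id": 1, "popularity": 80},
        {"artist": "REN", "id": 2, "popularity": 10},
        {"artist": "Ren Thomas", "id": 3, "popularity": 40},
    ]
    grouped = sr.group_artists_by_name(entries, query="Ren")
    # [
    #   {"artist": "Ren", "id": 1, "popularity": 80,
    #    "alternatives": [{"artist": "REN", "id": 2, "popularity": 10}]},
    #   {"artist": "Ren Thomas", "id": 3, "popularity": 40, "alternatives": []},
    # ]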
@@ -179,22 +309,23 @@ class SRUtil:
            for t in album_json.get("tracks", [])
        ]

-   async def get_artists_by_name(self, artist_name: str) -> Optional[list]:
-       """Get artist(s) by name. Retry login only on authentication failure. Rate limit and retry on 400/429."""
-       import asyncio
+   async def get_artists_by_name(self, artist_name: str, group: bool = False) -> Optional[list]:
+       """Get artist(s) by name.
+
+       Args:
+           artist_name: query string to search for.
+           group: if True return grouped results (one primary per display name with
+               `alternatives` list). If False return raw search items (legacy shape).
+
+       Retry login only on authentication failure. Rate limit and retry on 400/429.
+       """
        artists_out: list[dict] = []
        max_retries = 4
        delay = 1.0
        for attempt in range(max_retries):
            try:
-               artists = await self.streamrip_client.search(
-                   media_type="artist", query=artist_name
-               )
+               artists = await self._safe_api_call(self.streamrip_client.search, media_type="artist", query=artist_name, retries=3)
                break
            except AttributeError:
                await self.streamrip_client.login()
                if attempt == max_retries - 1:
                    return None
            except Exception as e:
                msg = str(e)
                if ("400" in msg or "429" in msg) and attempt < max_retries - 1:
@@ -205,18 +336,30 @@ class SRUtil:
                    return None
                else:
                    return None
-       artists = artists[0].get("items", [])
+       # `artists` can be None or a list of result pages — guard accordingly
+       if not artists:
+           return None
+       # If the client returned paged results (list), pick first page dict
+       if isinstance(artists, list):
+           artists_page = artists[0] if len(artists) > 0 else {}
+       else:
+           artists_page = artists
+       artists_items = artists_page.get("items", []) if isinstance(artists_page, dict) else []
+       if not artists_items:
+           return None
        artists_out = [
            {
                "artist": res["name"],
                "id": res["id"],
                "popularity": res.get("popularity", 0),
            }
-           for res in artists
+           for res in artists_items
            if "name" in res and "id" in res
        ]
        artists_out = self.dedupe_by_key("artist", artists_out)  # Remove duplicates

+       if group:
+           return self.group_artists_by_name(artists_out, query=artist_name)
+
        return artists_out

    async def get_albums_by_artist_id(self, artist_id: int) -> Optional[list | dict]:
@@ -228,14 +371,8 @@ class SRUtil:
        delay = 1.0
        for attempt in range(max_retries):
            try:
-               metadata = await self.streamrip_client.get_metadata(
-                   item_id=artist_id_str, media_type="artist"
-               )
+               metadata = await self._safe_api_call(self.streamrip_client.get_metadata, artist_id_str, "artist", retries=3)
                break
            except AttributeError:
                await self.streamrip_client.login()
                if attempt == max_retries - 1:
                    return None
            except Exception as e:
                msg = str(e)
                if ("400" in msg or "429" in msg) and attempt < max_retries - 1:
@@ -300,12 +437,9 @@ class SRUtil:
        album_id_str: str = str(album_id)
        for attempt in range(2):
            try:
-               metadata = await self.streamrip_client.get_metadata(
-                   item_id=album_id_str, media_type="album"
-               )
+               metadata = await self._safe_api_call(self.streamrip_client.get_metadata, item_id=album_id_str, media_type="album", retries=2)
                break
            except AttributeError:
                await self.streamrip_client.login()
            except Exception:
                if attempt == 1:
                    return None
                else:
@@ -329,10 +463,11 @@ class SRUtil:
            Optional[list[dict]]: List of tracks or None if not found.
        """
        album_id_str = str(album_id)
-       await self.streamrip_client.login()
-       metadata = await self.streamrip_client.get_metadata(
-           item_id=album_id_str, media_type="album"
-       )
+       try:
+           metadata = await self._safe_api_call(self.streamrip_client.get_metadata, item_id=album_id_str, media_type="album", retries=2)
+       except Exception as e:
+           logging.warning("get_tracks_by_album_id failed: %s", e)
+           return None
        if not metadata:
            logging.warning("No metadata found for album ID: %s", album_id)
            return None
@@ -360,21 +495,16 @@ class SRUtil:
            Optional[dict]: The track details or None if not found.
            TODO: Reimplement using StreamRip
        """
-       if not self.streamrip_client.logged_in:
-           await self.streamrip_client.login()
        try:
-           search_res = await self.streamrip_client.search(media_type="track",
-               query=f"{artist} - {song}",
-           )
+           search_res = await self._safe_api_call(self.streamrip_client.search, media_type="track", query=f"{artist} - {song}", retries=3)
            logging.critical("Result: %s", search_res)
-           return search_res[0].get('items')
+           return search_res[0].get('items') if search_res and isinstance(search_res, list) else []
        except Exception as e:
            traceback.print_exc()
            logging.critical("Search Exception: %s", str(e))
            if n < 3:
                n += 1
                return await self.get_tracks_by_artist_song(artist, song, n)
        finally:
-           return []
+           # return []
@@ -399,18 +529,13 @@ class SRUtil:
            quality_int = 1
        track_id_str: str = str(track_id)

-       await self.streamrip_client.login()
-
+       # Ensure client is logged in via safe call when needed inside _safe_api_call
        try:
            logging.critical("Using quality_int: %s", quality_int)
-           track = await self.streamrip_client.get_downloadable(
-               track_id=track_id_str, quality=quality_int
-           )
-       except AttributeError:
-           await self.streamrip_client.login()
-           track = await self.streamrip_client.get_downloadable(
-               track_id=track_id_str, quality=quality_int
-           )
+           track = await self._safe_api_call(self.streamrip_client.get_downloadable, track_id=track_id_str, quality=quality_int, retries=3)
+       except Exception as e:
+           logging.warning("get_stream_url_by_track_id failed: %s", e)
+           return None
        if not track:
            logging.warning("No track found for ID: %s", track_id)
            return None
@@ -427,8 +552,7 @@ class SRUtil:
        """
        for attempt in range(1, self.MAX_METADATA_RETRIES + 1):
            try:
-               await self.streamrip_client.login()
-
+               await self._safe_api_call(self.streamrip_client.login, retries=1)
                # Track metadata
                metadata = await self.rate_limited_request(
                    self.streamrip_client.get_metadata, str(track_id), "track"
@@ -443,7 +567,7 @@ class SRUtil:
                    album_metadata = self.METADATA_ALBUM_CACHE[album_id]
                else:
                    album_metadata = await self.rate_limited_request(
-                       self.streamrip_client.get_metadata, album_id, "album"
+                       lambda i, t: self._safe_api_call(self.streamrip_client.get_metadata, i, t, retries=2), album_id, "album"
                    )
                    if not album_metadata:
                        return None
@@ -456,6 +580,9 @@ class SRUtil:
                    album_metadata, metadata
                )

+               # Include album id so callers can fetch cover art if desired
+               combined_metadata["album_id"] = album_id
+
                logging.info(
                    "Combined metadata for track ID %s (attempt %d): %s",
                    track_id,
@@ -483,7 +610,10 @@ class SRUtil:
                    track_id,
                    self.MAX_METADATA_RETRIES,
                )
-               return None
+               # Raise a specific exception so callers can react (e.g. notify)
+               raise MetadataFetchError(f"Metadata fetch failed permanently for track {track_id} after {self.MAX_METADATA_RETRIES} attempts: {e}")
+       # If we reach here without returning, raise a generic metadata error
+       raise MetadataFetchError(f"Metadata fetch failed for track {track_id}")

    async def download(self, track_id: int, quality: str = "LOSSLESS") -> bool | str:
@@ -495,7 +625,7 @@ class SRUtil:
            bool
        """
        try:
-           await self.streamrip_client.login()
+           await self._safe_api_call(self.streamrip_client.login, retries=1)
            track_url = await self.get_stream_url_by_track_id(track_id)
            if not track_url:
                return False
@@ -507,6 +637,12 @@ class SRUtil:
                f"{self.streamrip_config.session.downloads.folder}/{unique}"
            )
            dl_path = f"{dl_folder_path}/{track_id}.{parsed_url_ext}"
+           # ensure download folder exists
+           try:
+               os.makedirs(dl_folder_path, exist_ok=True)
+           except Exception:
+               pass

            async with aiohttp.ClientSession() as session:
                async with session.get(
                    track_url, headers={}, timeout=aiohttp.ClientTimeout(total=60)