formatting

2025-08-21 15:08:13 -04:00
parent 22eaa2260e
commit dd8d07b2f0
3 changed files with 62 additions and 39 deletions

File 1 of 3: the RIP(FastAPI) router

```diff
@@ -9,9 +9,12 @@ from rq import Queue, Retry
 from rq.job import Job
 from rq.job import JobStatus
 from rq.registry import (
-    StartedJobRegistry, DeferredJobRegistry,
-    FinishedJobRegistry, FailedJobRegistry,
-    ScheduledJobRegistry)
+    StartedJobRegistry,
+    DeferredJobRegistry,
+    FinishedJobRegistry,
+    FailedJobRegistry,
+    ScheduledJobRegistry,
+)
 from utils.rip_background import bulk_download
 from lyric_search.sources import private
 from typing import Literal
```
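For context, these are RQ's standard per-queue job registries. A minimal sketch of how they can be queried, assuming a local Redis and a queue named "default" (both assumptions, not taken from this commit):

```python
# Minimal sketch: listing job IDs tracked by RQ's registries.
# The Redis URL and queue name are assumptions.
from redis import Redis
from rq import Queue
from rq.registry import (
    StartedJobRegistry,
    DeferredJobRegistry,
    FinishedJobRegistry,
    FailedJobRegistry,
    ScheduledJobRegistry,
)

redis_conn = Redis.from_url("redis://localhost:6379/0")
queue = Queue("default", connection=redis_conn)

for registry_cls in (
    StartedJobRegistry,
    DeferredJobRegistry,
    FinishedJobRegistry,
    FailedJobRegistry,
    ScheduledJobRegistry,
):
    registry = registry_cls(queue=queue)
    # get_job_ids() returns the job IDs currently held by that registry.
    print(registry_cls.__name__, registry.get_job_ids())
```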
```diff
@@ -87,7 +90,9 @@ class RIP(FastAPI):
             "started_at": job.started_at,
             "ended_at": job.ended_at,
             "progress": progress,
-            "tracks": f"{tracks_out} / {tracks_in}" if isinstance(tracks_in, int) else tracks_out,
+            "tracks": f"{tracks_out} / {tracks_in}"
+            if isinstance(tracks_in, int)
+            else tracks_out,
             "target": job.meta.get("target"),
             "quality": job.meta.get("quality", "Unknown"),
         }
```
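A quick worked illustration of the wrapped conditional, with made-up values for tracks_out and tracks_in:

```python
# Made-up values; tracks_in comes from job.meta and may be absent (None).
tracks_out = 7

tracks_in = 10
print(f"{tracks_out} / {tracks_in}" if isinstance(tracks_in, int) else tracks_out)  # 7 / 10

tracks_in = None
print(f"{tracks_out} / {tracks_in}" if isinstance(tracks_in, int) else tracks_out)  # 7
```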
```diff
@@ -111,8 +116,11 @@ class RIP(FastAPI):
         return JSONResponse(content=albums)
 
     async def tracks_by_album_id_handler(
-        self, album_id: int, request: Request, user=Depends(get_current_user),
-        quality: str = "FLAC"
+        self,
+        album_id: int,
+        request: Request,
+        user=Depends(get_current_user),
+        quality: str = "FLAC",
     ) -> Response:
         """Get tracks by album id"""
         tracks = await self.trip_util.get_tracks_by_album_id(album_id, quality)
```
```diff
@@ -131,7 +139,11 @@ class RIP(FastAPI):
         return JSONResponse(content=tracks)
 
     async def track_by_id_handler(
-        self, track_id: int, quality: str, request: Request, user=Depends(get_current_user)
+        self,
+        track_id: int,
+        quality: str,
+        request: Request,
+        user=Depends(get_current_user),
     ) -> Response:
         """Get track by ID"""
         track = await self.trip_util.get_stream_url_by_track_id(track_id, quality)
```
```diff
@@ -157,19 +169,21 @@ class RIP(FastAPI):
         target = data.target
         job = self.task_queue.enqueue(
             bulk_download,
-            args=(track_ids, data.quality,),
+            args=(
+                track_ids,
+                data.quality,
+            ),
             job_timeout=14400,
             failure_ttl=86400,
             result_ttl=-1,
             retry=Retry(max=1, interval=[30]),
             meta={
-                'progress': 0,
-                'status': 'queued',
-                'target': target,
-                'tracks_in': len(track_ids),
-                'quality': data.quality,
-            }
+                "progress": 0,
+                "status": "queued",
+                "target": target,
+                "tracks_in": len(track_ids),
+                "quality": data.quality,
+            },
         )
         self.redis_conn.lpush("enqueued_job_ids", job.id)
         return JSONResponse(
```
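For context, a minimal standalone sketch of the enqueue pattern used here; the Redis URL, queue name, and the stand-in job function are assumptions, while the timeouts, Retry policy, and meta keys mirror the diff:

```python
# Minimal sketch of enqueueing a bulk_download-style job with RQ.
# Redis URL, queue name, and the stand-in function are assumptions.
from redis import Redis
from rq import Queue, Retry


def bulk_download(track_ids, quality="FLAC"):
    # Stand-in for utils.rip_background.bulk_download.
    return len(track_ids)


redis_conn = Redis.from_url("redis://localhost:6379/0")
task_queue = Queue("default", connection=redis_conn)

track_ids = [101, 102, 103]
job = task_queue.enqueue(
    bulk_download,
    args=(
        track_ids,
        "FLAC",
    ),
    job_timeout=14400,  # 4 hours
    failure_ttl=86400,  # keep failed jobs for a day
    result_ttl=-1,  # keep results indefinitely
    retry=Retry(max=1, interval=[30]),
    meta={
        "progress": 0,
        "status": "queued",
        "tracks_in": len(track_ids),
        "quality": "FLAC",
    },
)
redis_conn.lpush("enqueued_job_ids", job.id)
print(job.id, job.get_status())
```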
```diff
@@ -181,7 +195,9 @@ class RIP(FastAPI):
             }
         )
 
-    async def job_status_handler(self, job_id: str, request: Request, user=Depends(get_current_user)):
+    async def job_status_handler(
+        self, job_id: str, request: Request, user=Depends(get_current_user)
+    ):
         """Get status and result of a single job"""
         job = None
```
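A minimal sketch of what a status lookup along these lines can do with RQ; Job.fetch, refresh, get_status, and meta are RQ API, while the None-on-missing behaviour and the returned keys beyond those in the diff are assumptions:

```python
# Minimal sketch: resolving a job_id to status + meta with RQ.
# Assumes a redis_conn as in the earlier sketch.
from rq.job import Job
from rq.exceptions import NoSuchJobError


def job_status(job_id: str, redis_conn) -> dict | None:
    try:
        job = Job.fetch(job_id, connection=redis_conn)
    except NoSuchJobError:
        return None  # caller can translate this into a 404
    job.refresh()  # pick up the worker's latest save_meta()
    return {
        "id": job.id,
        "status": job.get_status(),
        "progress": job.meta.get("progress"),
        "tracks": job.meta.get("tracks"),
        "target": job.meta.get("target"),
        "quality": job.meta.get("quality", "Unknown"),
    }
```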

File 2 of 3: the bulk_download worker module (utils/rip_background)

```diff
@@ -40,6 +40,7 @@ sr = SRUtil()
 
+
 # ---------- Helpers ----------
 def cleanup_empty_dirs(root: Path):
     """
     Recursively remove any directories under root that contain no files
```

```diff
@@ -80,6 +81,7 @@ def ensure_unique_path(p: Path) -> Path:
     short_id = uuid.uuid4().hex[:8]
     return parent / f"{stem}_{short_id}{suffix}"
 
+
 # ---------- Job ----------
 def bulk_download(track_list: list, quality: str = "FLAC"):
     """
```
```diff
@@ -96,7 +98,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
     if job:
         try:
             job.meta["track_ids"] = [str(t) for t in (track_list or [])]
-            job.meta["tracks"] = [] # will hold per-track dicts
+            job.meta["tracks"] = []  # will hold per-track dicts
             job.meta["progress"] = 0
             job.meta["tarball"] = None
             job.meta["status"] = "started"
```
```diff
@@ -105,9 +107,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
             logging.warning("Failed to init job.meta: %s", e)
 
     async def process_tracks():
-        per_track_meta = [] # list of per-track dicts (JSON-safe)
-        all_final_files = [] # list[Path]
-        all_artists = set() # set[str]
+        per_track_meta = []  # list of per-track dicts (JSON-safe)
+        all_final_files = []  # list[Path]
+        all_artists = set()  # set[str]
 
         (ROOT_DIR / "completed").mkdir(parents=True, exist_ok=True)
```
```diff
@@ -121,10 +123,10 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
         for i, track_id in enumerate(track_list or []):
             track_info = {
                 "track_id": str(track_id),
-                "status": "pending", # pending | success | failed
-                "file_path": None, # str | None
-                "error": None, # str | None
-                "attempts": 0, # int
+                "status": "pending",  # pending | success | failed
+                "file_path": None,  # str | None
+                "error": None,  # str | None
+                "attempts": 0,  # int
             }
 
             attempt = 0
```
```diff
@@ -157,12 +159,12 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                     # 4) Metadata from SR (prefer API over tags)
                     md = await sr.get_metadata_by_track_id(track_id) or {}
                     artist_raw = md.get("artist") or "Unknown Artist"
                     album_raw = md.get("album") or "Unknown Album"
                     title_raw = md.get("song") or f"Track {track_id}"
 
                     artist = sanitize_filename(artist_raw)
                     album = sanitize_filename(album_raw)
                     title = sanitize_filename(title_raw)
 
                     all_artists.add(artist)
```
```diff
@@ -186,7 +188,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                     break  # success; exit retry loop
                 except Exception as e:
-                    logging.error("Track %s attempt %s failed: %s", track_id, attempt, e)
+                    logging.error(
+                        "Track %s attempt %s failed: %s", track_id, attempt, e
+                    )
                     track_info["error"] = str(e)
                     if attempt >= MAX_RETRIES:
                         track_info["status"] = "failed"
```
```diff
@@ -207,7 +211,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                     job.meta["tracks"] = per_track_meta
                     job.save_meta()
                 except Exception as e:
-                    logging.warning("Failed to update job.meta after track %s: %s", track_id, e)
+                    logging.warning(
+                        "Failed to update job.meta after track %s: %s", track_id, e
+                    )
 
             # Throttle between tracks
             await asyncio.sleep(random.uniform(THROTTLE_MIN, THROTTLE_MAX))
```
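A minimal sketch of the per-track bookkeeping in this part of the loop: record the track, update progress, persist meta, then throttle. The percentage formula and the THROTTLE_MIN/THROTTLE_MAX values are assumptions; the meta keys and sleep call mirror the diff:

```python
# Sketch: per-track progress update plus throttling between downloads.
# job may be None outside a worker; the throttle bounds are assumptions.
import asyncio
import logging
import random

THROTTLE_MIN = 1.0
THROTTLE_MAX = 3.0


async def finish_track(job, per_track_meta: list, track_info: dict, i: int, total: int):
    per_track_meta.append(track_info)
    if job:
        try:
            job.meta["progress"] = int((i + 1) / max(total, 1) * 100)  # assumed formula
            job.meta["tracks"] = per_track_meta
            job.save_meta()
        except Exception as e:
            logging.warning(
                "Failed to update job.meta after track %s: %s", track_info["track_id"], e
            )
    # Throttle between tracks
    await asyncio.sleep(random.uniform(THROTTLE_MIN, THROTTLE_MAX))
```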
```diff
@@ -234,9 +240,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
            artist_counts[artist] = artist_counts.get(artist, 0) + 1
 
         if artist_counts:
-            top_artist = sorted(
-                artist_counts.items(), key=lambda kv: (-kv[1], kv[0])
-            )[0][0]
+            top_artist = sorted(artist_counts.items(), key=lambda kv: (-kv[1], kv[0]))[
+                0
+            ][0]
         else:
             top_artist = "Unknown Artist"
```

File 3 of 3: SRUtil

```diff
@@ -134,8 +134,9 @@ class SRUtil:
         logging.debug("Retrieved albums: %s", albums_out)
         return albums_out
 
-    async def get_tracks_by_album_id(self, album_id: int,
-                                     quality: str = "FLAC") -> Optional[list | dict]:
+    async def get_tracks_by_album_id(
+        self, album_id: int, quality: str = "FLAC"
+    ) -> Optional[list | dict]:
         """Get tracks by album ID
         Args:
             album_id (int): The ID of the album.
```
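A minimal sketch of calling the reformatted method; the import path and album id are assumptions, and the None check mirrors the Optional return type:

```python
# The import path and album id are assumptions; only the method signature
# comes from the diff above.
import asyncio

from utils.sr_wrapper import SRUtil  # assumed module path for SRUtil


async def main():
    sr = SRUtil()
    tracks = await sr.get_tracks_by_album_id(123456, quality="FLAC")
    if tracks is None:
        print("Album lookup failed")
    else:
        print(f"Got {len(tracks)} track entries")


asyncio.run(main())
```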