formatting

2025-08-21 15:08:13 -04:00
parent 22eaa2260e
commit dd8d07b2f0
3 changed files with 62 additions and 39 deletions
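Note: the diff below is pure reformatting. Long signatures and call sites are exploded one argument per line, single quotes become double quotes, and inline comments get two spaces before the "#". This is consistent with Black's default style, although the commit message does not name the tool. A minimal sketch of reproducing the style, assuming Black is the formatter in use (an assumption, not confirmed by this commit; running "black ." over the repo would be the CLI equivalent):

    import black

    source = (
        "from rq.registry import (StartedJobRegistry, DeferredJobRegistry, "
        "FinishedJobRegistry, FailedJobRegistry, ScheduledJobRegistry)\n"
    )
    # The logical line exceeds Black's default 88-character limit, so Black
    # explodes it into one name per line with a trailing comma, as in this diff.
    print(black.format_str(source, mode=black.Mode()))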

View File

@@ -9,9 +9,12 @@ from rq import Queue, Retry
 from rq.job import Job
 from rq.job import JobStatus
 from rq.registry import (
-    StartedJobRegistry, DeferredJobRegistry,
-    FinishedJobRegistry, FailedJobRegistry,
-    ScheduledJobRegistry)
+    StartedJobRegistry,
+    DeferredJobRegistry,
+    FinishedJobRegistry,
+    FailedJobRegistry,
+    ScheduledJobRegistry,
+)
 from utils.rip_background import bulk_download
 from lyric_search.sources import private
 from typing import Literal
@@ -87,7 +90,9 @@ class RIP(FastAPI):
"started_at": job.started_at, "started_at": job.started_at,
"ended_at": job.ended_at, "ended_at": job.ended_at,
"progress": progress, "progress": progress,
"tracks": f"{tracks_out} / {tracks_in}" if isinstance(tracks_in, int) else tracks_out, "tracks": f"{tracks_out} / {tracks_in}"
if isinstance(tracks_in, int)
else tracks_out,
"target": job.meta.get("target"), "target": job.meta.get("target"),
"quality": job.meta.get("quality", "Unknown"), "quality": job.meta.get("quality", "Unknown"),
} }
@@ -111,8 +116,11 @@ class RIP(FastAPI):
         return JSONResponse(content=albums)

     async def tracks_by_album_id_handler(
-        self, album_id: int, request: Request, user=Depends(get_current_user),
-        quality: str = "FLAC"
+        self,
+        album_id: int,
+        request: Request,
+        user=Depends(get_current_user),
+        quality: str = "FLAC",
     ) -> Response:
         """Get tracks by album id"""
         tracks = await self.trip_util.get_tracks_by_album_id(album_id, quality)
@@ -131,7 +139,11 @@ class RIP(FastAPI):
         return JSONResponse(content=tracks)

     async def track_by_id_handler(
-        self, track_id: int, quality: str, request: Request, user=Depends(get_current_user)
+        self,
+        track_id: int,
+        quality: str,
+        request: Request,
+        user=Depends(get_current_user),
     ) -> Response:
         """Get track by ID"""
         track = await self.trip_util.get_stream_url_by_track_id(track_id, quality)
@@ -157,19 +169,21 @@ class RIP(FastAPI):
         target = data.target
         job = self.task_queue.enqueue(
             bulk_download,
-            args=(track_ids, data.quality,),
+            args=(
+                track_ids,
+                data.quality,
+            ),
             job_timeout=14400,
             failure_ttl=86400,
             result_ttl=-1,
             retry=Retry(max=1, interval=[30]),
             meta={
-                'progress': 0,
-                'status': 'queued',
-                'target': target,
-                'tracks_in': len(track_ids),
-                'quality': data.quality,
-            }
+                "progress": 0,
+                "status": "queued",
+                "target": target,
+                "tracks_in": len(track_ids),
+                "quality": data.quality,
+            },
         )
         self.redis_conn.lpush("enqueued_job_ids", job.id)
         return JSONResponse(
@@ -181,7 +195,9 @@ class RIP(FastAPI):
             }
         )

-    async def job_status_handler(self, job_id: str, request: Request, user=Depends(get_current_user)):
+    async def job_status_handler(
+        self, job_id: str, request: Request, user=Depends(get_current_user)
+    ):
         """Get status and result of a single job"""

         job = None
@@ -209,7 +225,7 @@ class RIP(FastAPI):
return JSONResponse({"error": "Job not found"}, status_code=404) return JSONResponse({"error": "Job not found"}, status_code=404)
return self._format_job(job) return self._format_job(job)
async def job_list_handler(self, request: Request, user=Depends(get_current_user)): async def job_list_handler(self, request: Request, user=Depends(get_current_user)):
"""List all jobs across all registries (queued, started, finished, failed, etc).""" """List all jobs across all registries (queued, started, finished, failed, etc)."""
jobs_info = [] jobs_info = []
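The first file only reflows the RQ enqueue call and the handler signatures; behaviour is unchanged. For reference, a minimal sketch of the round trip those handlers rely on, enqueueing with meta and later fetching the job by id to read status and progress. The names here (redis_conn, download) are illustrative placeholders, not taken from this repo, and in a real project the task function must live in an importable module so workers can resolve it:

    from redis import Redis
    from rq import Queue, Retry
    from rq.job import Job

    redis_conn = Redis()
    queue = Queue("default", connection=redis_conn)

    def download(track_ids, quality):
        # Placeholder task body.
        return {"count": len(track_ids), "quality": quality}

    job = queue.enqueue(
        download,
        args=([1, 2, 3], "FLAC"),
        retry=Retry(max=1, interval=[30]),
        meta={"progress": 0, "status": "queued"},
    )

    # Later, e.g. in a status endpoint: fetch by id and read status plus meta.
    fetched = Job.fetch(job.id, connection=redis_conn)
    print(fetched.get_status(), fetched.meta.get("progress"))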

View File

@@ -14,7 +14,7 @@ from rq import get_current_job
 from utils.sr_wrapper import SRUtil

 # ---------- Config ----------
 ROOT_DIR = Path("/storage/music2")
 MAX_RETRIES = 3
 THROTTLE_MIN = 0.3
 THROTTLE_MAX = 1.0
@@ -40,9 +40,10 @@ sr = SRUtil()
 # ---------- Helpers ----------
 def cleanup_empty_dirs(root: Path):
     """
     Recursively remove any directories under root that contain no files
     (empty or only empty subdirectories).
     """
     for dirpath, dirnames, filenames in os.walk(root, topdown=False):
@@ -80,6 +81,7 @@ def ensure_unique_path(p: Path) -> Path:
     short_id = uuid.uuid4().hex[:8]
     return parent / f"{stem}_{short_id}{suffix}"

 # ---------- Job ----------
 def bulk_download(track_list: list, quality: str = "FLAC"):
     """
@@ -96,7 +98,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
     if job:
         try:
             job.meta["track_ids"] = [str(t) for t in (track_list or [])]
-            job.meta["tracks"] = [] # will hold per-track dicts
+            job.meta["tracks"] = []  # will hold per-track dicts
             job.meta["progress"] = 0
             job.meta["tarball"] = None
             job.meta["status"] = "started"
@@ -105,9 +107,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
logging.warning("Failed to init job.meta: %s", e) logging.warning("Failed to init job.meta: %s", e)
async def process_tracks(): async def process_tracks():
per_track_meta = [] # list of per-track dicts (JSON-safe) per_track_meta = [] # list of per-track dicts (JSON-safe)
all_final_files = [] # list[Path] all_final_files = [] # list[Path]
all_artists = set() # set[str] all_artists = set() # set[str]
(ROOT_DIR / "completed").mkdir(parents=True, exist_ok=True) (ROOT_DIR / "completed").mkdir(parents=True, exist_ok=True)
@@ -121,10 +123,10 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
         for i, track_id in enumerate(track_list or []):
             track_info = {
                 "track_id": str(track_id),
-                "status": "pending", # pending | success | failed
-                "file_path": None, # str | None
-                "error": None, # str | None
-                "attempts": 0, # int
+                "status": "pending",  # pending | success | failed
+                "file_path": None,  # str | None
+                "error": None,  # str | None
+                "attempts": 0,  # int
             }

             attempt = 0
@@ -157,12 +159,12 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                     # 4) Metadata from SR (prefer API over tags)
                     md = await sr.get_metadata_by_track_id(track_id) or {}

                     artist_raw = md.get("artist") or "Unknown Artist"
                     album_raw = md.get("album") or "Unknown Album"
                     title_raw = md.get("song") or f"Track {track_id}"

                     artist = sanitize_filename(artist_raw)
                     album = sanitize_filename(album_raw)
                     title = sanitize_filename(title_raw)

                     all_artists.add(artist)
@@ -186,7 +188,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                     break  # success; exit retry loop

                 except Exception as e:
-                    logging.error("Track %s attempt %s failed: %s", track_id, attempt, e)
+                    logging.error(
+                        "Track %s attempt %s failed: %s", track_id, attempt, e
+                    )
                     track_info["error"] = str(e)
                     if attempt >= MAX_RETRIES:
                         track_info["status"] = "failed"
@@ -207,7 +211,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
job.meta["tracks"] = per_track_meta job.meta["tracks"] = per_track_meta
job.save_meta() job.save_meta()
except Exception as e: except Exception as e:
logging.warning("Failed to update job.meta after track %s: %s", track_id, e) logging.warning(
"Failed to update job.meta after track %s: %s", track_id, e
)
# Throttle between tracks # Throttle between tracks
await asyncio.sleep(random.uniform(THROTTLE_MIN, THROTTLE_MAX)) await asyncio.sleep(random.uniform(THROTTLE_MIN, THROTTLE_MAX))
@@ -234,9 +240,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
             artist_counts[artist] = artist_counts.get(artist, 0) + 1

         if artist_counts:
-            top_artist = sorted(
-                artist_counts.items(), key=lambda kv: (-kv[1], kv[0])
-            )[0][0]
+            top_artist = sorted(artist_counts.items(), key=lambda kv: (-kv[1], kv[0]))[
+                0
+            ][0]
         else:
             top_artist = "Unknown Artist"
@@ -253,7 +259,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
job.meta["status"] = "compressing" job.meta["status"] = "compressing"
job.save_meta() job.save_meta()
except Exception: except Exception:
pass pass
logging.info("Creating tarball: %s", staged_tarball) logging.info("Creating tarball: %s", staged_tarball)

View File

@@ -134,8 +134,9 @@ class SRUtil:
logging.debug("Retrieved albums: %s", albums_out) logging.debug("Retrieved albums: %s", albums_out)
return albums_out return albums_out
async def get_tracks_by_album_id(self, album_id: int, async def get_tracks_by_album_id(
quality: str = "FLAC") -> Optional[list | dict]: self, album_id: int, quality: str = "FLAC"
) -> Optional[list | dict]:
"""Get tracks by album ID """Get tracks by album ID
Args: Args:
album_id (int): The ID of the album. album_id (int): The ID of the album.
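The third file only reflows the get_tracks_by_album_id signature, so callers are unaffected. A minimal usage sketch, assuming a no-argument SRUtil() as constructed in the second file; the album id 12345 is a placeholder, not a value from this commit:

    import asyncio
    from utils.sr_wrapper import SRUtil

    async def main():
        sr = SRUtil()
        tracks = await sr.get_tracks_by_album_id(12345, quality="FLAC")
        print(tracks)

    asyncio.run(main())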