formatting / RQ tuning

2025-08-15 13:39:27 -04:00
parent 93050ec6cf
commit 0cd4a71db2
2 changed files with 18 additions and 11 deletions


@@ -73,7 +73,7 @@ def bulk_download(track_list: list):
while attempt < MAX_RETRIES:
attempt += 1
try:
-# 1 Get track URL
+# Get track URL
url = await sr.get_stream_url_by_track_id(track_id)
if not url:
logging.critical(
@@ -84,7 +84,7 @@ def bulk_download(track_list: list):
)
continue
-# 2 Download file (chunked)
+# Download file (chunked)
parsed = urlparse(url)
ext = Path(unquote(parsed.path)).suffix or ".mp3"
tmp_file = Path(f"/tmp/{track_id}{ext}")
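The next hunk streams the response body to disk in 64 KiB chunks via aiohttp's iter_chunked. A minimal standalone sketch of that download step, assuming an aiohttp ClientSession; the download_to helper name is illustrative and not part of this commit:

import aiohttp
from pathlib import Path

async def download_to(url: str, dest: Path, chunk_size: int = 64 * 1024) -> Path:
    # Stream the body to disk in fixed-size chunks rather than buffering
    # the whole file in memory (assumed pattern, mirroring the iter_chunked
    # loop in the next hunk).
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            resp.raise_for_status()
            with dest.open("wb") as f:
                async for chunk in resp.content.iter_chunked(chunk_size):
                    f.write(chunk)
    return dest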
@@ -95,7 +95,7 @@ def bulk_download(track_list: list):
async for chunk in resp.content.iter_chunked(64 * 1024):
f.write(chunk)
-# 3 Extract metadata
+# Extract metadata
metadata = await sr.get_metadata_by_track_id(track_id)
if not metadata:
logging.critical(
@@ -109,13 +109,13 @@ def bulk_download(track_list: list):
logging.critical("Got metadata: %s/%s/%s", artist, album, title)
-# 4 Organize path
+# Organize path
final_dir = ROOT_DIR / artist / album
final_dir.mkdir(parents=True, exist_ok=True)
final_file = final_dir / f"{title}{ext}"
tmp_file.rename(final_file)
-# 5 Track per-track info
+# Track per-track info
track_info.update(
{"status": "success", "file_path": str(final_file)}
)
@@ -134,17 +134,17 @@ def bulk_download(track_list: list):
random.uniform(THROTTLE_MIN, THROTTLE_MAX)
)
-# 6 Update RQ job meta
+# Update RQ job meta
per_track_meta.append(track_info)
if job:
job.meta["progress"] = int((i + 1) / total * 100)
job.meta["tracks"] = per_track_meta
job.save_meta()
-# 7 Throttle between downloads
+# Throttle between downloads
await asyncio.sleep(random.uniform(THROTTLE_MIN, THROTTLE_MAX))
-# 8 Create per-artist tarballs
+# Create per-artist tarballs
tarballs = []
for artist, files in artist_files.items():
short_id = uuid.uuid4().hex[:8]
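The job.meta fields shown in the last hunk ("progress", "tracks") are how per-track progress gets surfaced through RQ. A minimal sketch of how a caller might poll them from outside the worker, assuming the Redis connection and job id are available; poll_progress is a hypothetical helper, not part of this commit:

from redis import Redis
from rq.job import Job

def poll_progress(job_id: str, redis_conn: Redis) -> dict:
    # Re-fetch the job and refresh it so meta reflects the worker's most
    # recent save_meta() call while the download is still running.
    job = Job.fetch(job_id, connection=redis_conn)
    job.refresh()
    return {
        "status": job.get_status(),
        "progress": job.meta.get("progress", 0),
        "tracks": job.meta.get("tracks", []),
    }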