misc
@@ -14,7 +14,7 @@ from rq import get_current_job
 from utils.sr_wrapper import SRUtil
 
 # ---------- Config ----------
-ROOT_DIR = Path("/storage/music2")  # change to your music folder
+ROOT_DIR = Path("/storage/music2")
 MAX_RETRIES = 3
 THROTTLE_MIN = 0.3
 THROTTLE_MAX = 1.0
@@ -56,26 +56,21 @@ def sanitize_filename(name: str) -> str:
 
 def ensure_unique_path(p: Path) -> Path:
-    """If path exists, append ' (n)' before extension."""
-    if not p.exists():
-        return p
+    """Always append a short UUID fragment before the extension."""
     stem, suffix = p.stem, p.suffix
     parent = p.parent
-    n = 2
-    while True:
-        candidate = parent / f"{stem} ({n}){suffix}"
-        if not candidate.exists():
-            return candidate
-        n += 1
+
+    short_id = uuid.uuid4().hex[:8]
+    return parent / f"{stem}_{short_id}{suffix}"
 
 
 # ---------- Job ----------
-def bulk_download(track_list: list, target: str):
+def bulk_download(track_list: list):
     """
     RQ job:
       - fetches stream URLs
       - downloads with retries + throttling
       - uses SR metadata to name/organize files
-      - creates ONE tarball for all tracks, with all artist names in the filename
+      - creates ONE tarball for all tracks
       - returns [tarball_path]
     """
     job = get_current_job()
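The new helper is worth calling out: it trades the filesystem-probing loop for a name that is unique by construction. A standalone sketch of the replacement (imports added for context):

import uuid
from pathlib import Path

def ensure_unique_path(p: Path) -> Path:
    """Always append a short UUID fragment before the extension."""
    short_id = uuid.uuid4().hex[:8]  # 8 hex chars; collision odds are negligible here
    return p.parent / f"{p.stem}_{short_id}{p.suffix}"

# Path("/storage/music2/track.flac") -> /storage/music2/track_3f9c2a1b.flac (fragment varies)

Unlike the old " (n)" probe loop, this never calls exists(), so two workers writing the same track name concurrently can no longer race between the existence check and the write.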
@@ -87,7 +82,7 @@ def bulk_download(track_list: list, target: str):
             job.meta["tracks"] = []  # will hold per-track dicts
             job.meta["progress"] = 0
             job.meta["tarball"] = None
-            job.meta["target"] = target
+            job.meta["status"] = "started"
             job.save_meta()
         except Exception as e:
             logging.warning("Failed to init job.meta: %s", e)
@@ -102,6 +97,9 @@ def bulk_download(track_list: list, target: str):
         async with aiohttp.ClientSession(headers=HEADERS) as session:
             total = len(track_list or [])
             logging.critical("Total tracks to process: %s", total)
+            if job:
+                job.meta["progress"] = 0
+                job.save_meta()
 
             for i, track_id in enumerate(track_list or []):
                 track_info = {
@@ -164,6 +162,10 @@ def bulk_download(track_list: list, target: str):
                     track_info["file_path"] = str(final_file)
                     track_info["error"] = None
                     all_final_files.append(final_file)
+
+                    if job:
+                        job.meta["progress"] = int(((i + 1) / total) * 100)
+                        job.save_meta()
                     break  # success; exit retry loop
 
                 except Exception as e:
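Moving the progress write into the success path follows the usual RQ pattern for reporting progress from inside a job. A minimal sketch of that pattern (the job body here is hypothetical):

from rq import get_current_job

def example_job(items: list):
    job = get_current_job()  # None when called directly, outside a worker
    total = max(len(items), 1)
    for i, item in enumerate(items):
        ...  # process item
        if job:
            job.meta["progress"] = int(((i + 1) / total) * 100)
            job.save_meta()  # persists meta to Redis so pollers can read it

A client polls by refreshing the job (job.refresh()) and reading job.meta.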
@@ -186,7 +188,6 @@ def bulk_download(track_list: list, target: str):
             if job:
                 try:
                     job.meta["tracks"] = per_track_meta
-                    job.meta["progress"] = int(((i + 1) / max(total, 1)) * 100)
                     job.save_meta()
                 except Exception as e:
                     logging.warning("Failed to update job.meta after track %s: %s", track_id, e)
@@ -194,7 +195,7 @@ def bulk_download(track_list: list, target: str):
             # Throttle between tracks
             await asyncio.sleep(random.uniform(THROTTLE_MIN, THROTTLE_MAX))
 
-    # ---- Single combined tarball for all tracks ----
+    # ---- Single combined tarball for all tracks ----
     if not all_final_files:
         if job:
             try:
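The retry loop itself sits between these hunks and is not shown; a sketch of the shape the docstring describes (download_one is a hypothetical coroutine, constants from the config above):

import asyncio
import random

MAX_RETRIES = 3
THROTTLE_MIN, THROTTLE_MAX = 0.3, 1.0

async def fetch_with_retries(download_one, track_id):
    last_exc = None
    for attempt in range(1, MAX_RETRIES + 1):
        try:
            return await download_one(track_id)
        except Exception as exc:
            last_exc = exc
            # Jittered backoff: grows per attempt, never a fixed cadence
            await asyncio.sleep(attempt * random.uniform(THROTTLE_MIN, THROTTLE_MAX))
    raise last_exc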
@@ -230,26 +231,41 @@ def bulk_download(track_list: list, target: str):
         final_tarball = ROOT_DIR / "completed" / staged_tarball.name
         final_tarball.parent.mkdir(parents=True, exist_ok=True)
 
-        with tarfile.open(staged_tarball, "w:gz") as tar:
-            # Update job status → compressing
-            if job:
-                try:
-                    job.meta["status"] = "compressing"
-                    job.save_meta()
-                except Exception:
-                    pass
-
-            logging.info("Creating tarball: %s", staged_tarball)
-            for f in all_final_files:
-                try:
-                    arcname = f.relative_to(ROOT_DIR)
-                except ValueError:
-                    arcname = f.name
-                tar.add(f, arcname=str(arcname))
-                try:
-                    os.remove(f)
-                except Exception:
-                    pass
+        if job:
+            try:
+                job.meta["status"] = "compressing"
+                job.save_meta()
+            except Exception:
+                pass
+
+        logging.info("Creating tarball: %s", staged_tarball)
+
+        # Run blocking tar creation in background thread
+        def _create_tar_sync():
+            with tarfile.open(staged_tarball, "w:gz") as tar:
+                for f in all_final_files:
+                    try:
+                        arcname = f.relative_to(ROOT_DIR)
+                    except ValueError:
+                        arcname = f.name
+                    tar.add(f, arcname=str(arcname))
+                    try:
+                        os.remove(f)
+                    except Exception:
+                        pass
+
+        await asyncio.to_thread(_create_tar_sync)
+
+        # sanity check
+        if not staged_tarball.exists():
+            logging.error("Tarball was not created: %s", staged_tarball)
+            if job:
+                try:
+                    job.meta["status"] = "compress_failed"
+                    job.save_meta()
+                except Exception:
+                    pass
+            return []
 
         logging.critical("Tarball created: %s", staged_tarball)
 
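The substantive change in this hunk is that gzip compression, which is blocking, no longer runs on the event loop. The pattern in isolation (names assumed to mirror the diff):

import asyncio
import tarfile
from pathlib import Path

async def tar_paths(archive: Path, files: list[Path], root: Path) -> None:
    def _sync() -> None:
        with tarfile.open(archive, "w:gz") as tar:
            for f in files:
                try:
                    arcname = f.relative_to(root)  # keep folder layout under root
                except ValueError:
                    arcname = Path(f.name)         # file lives outside root
                tar.add(f, arcname=str(arcname))
    # asyncio.to_thread (Python 3.9+) runs _sync in the default thread pool,
    # so progress polling and other coroutines keep running during compression
    await asyncio.to_thread(_sync)

Without this, a multi-gigabyte w:gz archive would stall every other coroutine in the worker for the duration of the compression.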
@@ -260,32 +276,11 @@ def bulk_download(track_list: list, target: str):
         shutil.move(str(staged_tarball), str(final_tarball))
 
         logging.critical("Tarball finalized: %s", final_tarball)
 
-        # Cleanup empty dirs (unchanged)
-        to_check = set()
-        for p in all_final_files:
-            if p.parent:
-                to_check.add(p.parent)
-            if p.parent and p.parent.parent:
-                to_check.add(p.parent.parent)
-
-        for d in sorted(to_check, key=lambda p: len(p.parts), reverse=True):
-            if d.is_dir():
-                try:
-                    next(d.iterdir())
-                except StopIteration:
-                    shutil.rmtree(d, ignore_errors=True)
-                except Exception:
-                    pass
-
-        # Update job status → done
         if job:
-            try:
-                job.meta["tarball"] = str(final_tarball)
-                job.meta["progress"] = 100
-                job.meta["status"] = "done"
-                job.save_meta()
-            except Exception as e:
-                logging.warning("Failed to write final status to job.meta: %s", e)
+            job.meta["tarball"] = str(final_tarball)
+            job.meta["progress"] = 100
+            job.meta["status"] = "completed"
+            job.save_meta()
 
         return [str(final_tarball)]
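The stage-then-move step deserves a note: writing into a staging path and only moving into completed/ at the end means anything watching completed/ never observes a partially written tarball. Sketched with the directory names from the diff:

import shutil
from pathlib import Path

def finalize(staged_tarball: Path, completed_dir: Path) -> Path:
    completed_dir.mkdir(parents=True, exist_ok=True)
    final_tarball = completed_dir / staged_tarball.name
    # On the same filesystem this is an atomic rename. Across filesystems
    # shutil.move degrades to copy + delete, which is not atomic -- keep
    # staging and completed/ on one mount if consumers poll the directory.
    shutil.move(str(staged_tarball), str(final_tarball))
    return final_tarball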
@@ -294,5 +289,10 @@ def bulk_download(track_list: list, target: str):
     asyncio.set_event_loop(loop)
     try:
         return loop.run_until_complete(process_tracks())
+    except Exception as e:
+        if job:
+            job.meta["status"] = "failed"
+            job.save_meta()
+        logging.critical("Exception: %s", str(e))
     finally:
         loop.close()
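Finally, the new except branch records the failure in job.meta before the loop closes. Note it does not re-raise, so RQ itself still sees the job as finished; the failure is only visible through job.meta["status"]. The wrapper pattern, sketched standalone (make_coro stands in for process_tracks in the diff):

import asyncio
import logging
from rq import get_current_job

def run_async_job(make_coro):
    # make_coro: zero-argument callable returning the coroutine to run
    job = get_current_job()
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        return loop.run_until_complete(make_coro())
    except Exception as e:
        if job:
            job.meta["status"] = "failed"
            job.save_meta()
        logging.critical("Exception: %s", str(e))
    finally:
        loop.close()

asyncio.run(make_coro()) would be the shorter modern spelling; the explicit loop keeps behavior identical to the existing code.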