commit e0f64f6773
parent 81f79dea1e
Date:   2025-08-20 15:58:07 -04:00

2 changed files with 93 additions and 65 deletions


@@ -7,6 +7,7 @@ from auth.deps import get_current_user
 from redis import Redis
 from rq import Queue
 from rq.job import Job
+from rq.job import JobStatus
 from utils.rip_background import bulk_download
 from lyric_search.sources import private
 from pydantic import BaseModel
@@ -125,16 +126,24 @@ class RIP(FastAPI):
         target = data.target
         job = self.task_queue.enqueue(
             bulk_download,
-            args=(track_ids, target),
+            args=(track_ids,),
             job_timeout=14400,
             failure_ttl=86400,
             result_ttl=86400,
+            meta={
+                'progress': 0,
+                'status': 'queued',
+                'target': target,
+                'tracks_in': len(track_ids),
+            }
         )
         self.redis_conn.lpush("enqueued_job_ids", job.id)
         return JSONResponse(
             content={
                 "job_id": job.id,
                 "status": "queued",
+                "target": job.meta.get("target", None)
             }
         )
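
With `target` dropped from `args`, the worker has to recover it from the job's own metadata. A minimal sketch of that pattern on the worker side, assuming `bulk_download` runs under an RQ worker (`get_current_job` and `save_meta` are real RQ API; the loop body is hypothetical):

    from rq import get_current_job

    def bulk_download(track_ids):
        job = get_current_job()              # the Job this function runs inside
        target = job.meta.get("target")      # stored at enqueue time via meta={...}
        done = []
        for i, track_id in enumerate(track_ids, start=1):
            # ... fetch track_id and write it under `target` (hypothetical) ...
            done.append(track_id)
            job.meta["tracks"] = done        # what the status handlers read back
            job.meta["progress"] = int(i * 100 / len(track_ids))
            job.save_meta()                  # persist meta updates to Redis
        return done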
@@ -146,16 +155,25 @@ class RIP(FastAPI):
             job = Job.fetch(job_id, connection=self.redis_conn)
         except Exception:
             return JSONResponse({"error": "Job not found"}, status_code=404)
+        job_status: str|JobStatus = job.get_status()
+        job_progress = job.meta.get("progress", 0)
+        job_tarball = job.meta.get("tarball")
+        if job_progress == 100 and not job_tarball:
+            job_status = "compressing"
+        tracks_out = len(job.meta.get("tracks", []))
+        tracks_in = job.meta.get("tracks_in", None)
         return {
             "id": job.id,
-            "status": job.get_status(),
+            "status": job_status,
             "result": job.result,
             "enqueued_at": job.enqueued_at,
             "started_at": job.started_at,
             "ended_at": job.ended_at,
-            "progress": job.meta.get("progress"),
+            "progress": job_progress,
             "target": job.meta.get("target"),
+            "tracks": f"{tracks_out} / {tracks_in}" if isinstance(tracks_in, int) else tracks_out,
+            "tarball": job_tarball,
         }

     async def job_list_handler(self, request: Request, user=Depends(get_current_user)):
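
Note that "compressing" is synthesized by the handler, not an RQ job state: progress has reached 100 but the worker has not yet written a `tarball` key into meta. A rough client-side polling sketch against this endpoint, assuming it is mounted at `/job/{job_id}` with bearer auth (both hypothetical here):

    import time
    import requests

    def wait_for_tarball(base_url: str, job_id: str, token: str) -> str:
        while True:
            resp = requests.get(
                f"{base_url}/job/{job_id}",
                headers={"Authorization": f"Bearer {token}"},
            )
            resp.raise_for_status()
            info = resp.json()
            if info["status"] == "failed":
                raise RuntimeError(f"job {job_id} failed")
            if info.get("tarball"):          # set once compression finishes
                return info["tarball"]
            print(f'{info["status"]}: {info["progress"]}% ({info.get("tracks")})')
            time.sleep(5)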
@@ -164,18 +182,23 @@ class RIP(FastAPI):
         # Jobs still in the queue (pending)
         for job in self.task_queue.jobs:
+            status: str|JobStatus = job.get_status()
+            job_progress = job.meta.get("progress", 0)
+            tarball = job.meta.get("tarball")
+            if job_progress == 100 and not tarball:
+                status = "compressing"
             jobs_info.append(
                 {
                     "id": job.id,
-                    "status": job.get_status(), # queued
+                    "status": status, # queued
                     "result": job.result,
                     "enqueued_at": job.enqueued_at,
-                    "progress": job.meta.get("progress", None),
+                    "progress": job.meta.get("progress", 0),
                     "target": job.meta.get("target", None)
                 }
             )

-        # Started/running jobs tracked via enqueued_job_ids
+        # Running jobs tracked via enqueued_job_ids
         job_ids = self.redis_conn.lrange("enqueued_job_ids", 0, -1)
         for jid_bytes in job_ids:  # type: ignore
             jid = jid_bytes.decode()
@@ -184,19 +207,24 @@ class RIP(FastAPI):
             except Exception:
                 continue  # job may have completed and expired
-            status = job.get_status()
-            if status in ("started", "queued", "finished"):
+            job_status: str|JobStatus = job.get_status()
+            job_progress = job.meta.get("progress", 0)
+            if job_progress == 100 and not job.meta.get("tarball"):
+                job_status = "compressing"
+            if job_status in ("started", "queued", "finished", "failed", "compressing"):
                 # avoid duplicates for jobs already in task_queue.jobs
                 if not any(j["id"] == job.id for j in jobs_info):
+                    tracks_in = job.meta.get("tracks_in", None)
+                    tracks_out = len(job.meta.get("tracks", []))
                     jobs_info.append(
                         {
                             "id": job.id,
-                            "status": status,
+                            "status": job_status,
                             "result": job.result,
                             "tarball": job.meta.get("tarball", None),
                             "enqueued_at": job.enqueued_at,
-                            "progress": job.meta.get("progress", None),
-                            "tracks": job.meta.get("tracks", None),
+                            "progress": job.meta.get("progress", 0),
+                            "tracks": f"{tracks_out} / {tracks_in}" if isinstance(tracks_in, int) else tracks_out,
                             "target": job.meta.get("target", None),
                         }
                     )
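
One side effect the bare `except Exception: continue` above leaves open: ids of jobs that have expired stay in `enqueued_job_ids` forever and are re-fetched on every listing. A possible cleanup, not part of this commit, assuming rq's `NoSuchJobError` and redis-py's `lrem`:

    from rq.exceptions import NoSuchJobError

    for jid_bytes in self.redis_conn.lrange("enqueued_job_ids", 0, -1):
        jid = jid_bytes.decode()
        try:
            job = Job.fetch(jid, connection=self.redis_conn)
        except NoSuchJobError:
            # drop the stale id so the list does not grow without bound
            self.redis_conn.lrem("enqueued_job_ids", 0, jid)
            continue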