RIP: capitalize RQ job statuses in related endpoints, order job list, other: minor/typing

2025-08-23 08:20:32 -04:00
parent a8d089c0fe
commit a11748775e
4 changed files with 59 additions and 19 deletions
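
Background for the status change (not part of the diff): RQ's JobStatus is a str-based enum whose values are lowercase ("queued", "started", "finished", "failed", "deferred"), which is why the endpoints previously surfaced lowercase statuses. A minimal sketch, assuming only that rq is installed:

    from rq.job import JobStatus

    # JobStatus members compare equal to their lowercase string values,
    # so str.title() is enough to produce the capitalized labels.
    assert JobStatus.QUEUED == "queued"
    assert JobStatus.QUEUED.title() == "Queued"

    # Statuses stored as plain strings in job.meta behave the same way.
    assert "compressing".title() == "Compressing"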

View File

@@ -3,11 +3,18 @@ import time
import os
import json
import random
-from typing import Optional, Annotated
+from typing import Any, Optional, Annotated
from fastapi import FastAPI, Request, UploadFile, Response, HTTPException, Form, Depends
from fastapi_throttle import RateLimiter
from fastapi.responses import JSONResponse
-import redis.asyncio as redis
+import redis
+from rq import Queue
+from rq.registry import (
+    StartedJobRegistry,
+    FinishedJobRegistry,
+    FailedJobRegistry,
+    DeferredJobRegistry
+)
from lyric_search.sources import private, cache as LyricsCache, redis_cache
@@ -22,7 +29,7 @@ class Misc(FastAPI):
        self.constants = constants
        self.lyr_cache = LyricsCache.Cache()
        self.redis_cache = redis_cache.RedisCache()
-        self.redis_client = redis.Redis(password=private.REDIS_PW)
+        self.redis_client: Any = redis.Redis(password=private.REDIS_PW)
        self.radio = radio
        self.activity_image: Optional[bytes] = None
        self.nos_json_path: str = os.path.join(
@@ -35,6 +42,7 @@ class Misc(FastAPI):
"widget/sqlite": self.homepage_sqlite_widget,
"widget/lyrics": self.homepage_lyrics_widget,
"widget/radio": self.homepage_radio_widget,
"widget/rq": self.homepage_rq_widget,
"misc/get_activity_image": self.get_activity_image,
"misc/no": self.no,
}
@@ -141,14 +149,14 @@ class Misc(FastAPI):
"""
# Measure response time w/ test lyric search
time_start: float = time.time() # Start time for response_time
test_lyrics_result = await self.redis_client.ft().search( # noqa: F841
test_lyrics_result = self.redis_client.ft().search( # noqa: F841
"@artist: test @song: test"
)
time_end: float = time.time()
# End response time test
total_keys = await self.redis_client.dbsize()
total_keys = self.redis_client.dbsize()
response_time: float = time_end - time_start
index_info = await self.redis_client.ft().info()
index_info = self.redis_client.ft().info()
indexed_lyrics: int = index_info.get("num_docs")
return JSONResponse(
content={
@@ -158,6 +166,30 @@ class Misc(FastAPI):
"sessions": -1,
}
)
async def homepage_rq_widget(self) -> JSONResponse:
"""
Homepage RQ Widget Handler
"""
queue_name = "dls"
queue = Queue(queue_name, self.redis_client)
queued = queue.count
started = StartedJobRegistry(queue_name, connection=self.redis_client).count
failed = FailedJobRegistry(queue_name, connection=self.redis_client).count
finished = FinishedJobRegistry(queue_name, connection=self.redis_client).count
deferred = DeferredJobRegistry(queue_name, connection=self.redis_client).count
return JSONResponse(
content={
queue_name: {
"queued": queued,
"started": started,
"failed": failed,
"finished": finished,
"deferred": deferred,
}
}
)
async def homepage_sqlite_widget(self) -> JSONResponse:
"""

View File

@@ -76,14 +76,14 @@ class RIP(FastAPI):
        job_status: str | JobStatus = job.get_status()
        progress = job.meta.get("progress", 0)
        if progress == 100 and not job.meta.get("tarball"):
-            job_status = "compressing"
+            job_status = "Compressing"
        tracks_in = job.meta.get("tracks_in")
        tracks_out = len(job.meta.get("tracks", []))
        return {
            "id": job.id,
-            "status": job_status,
+            "status": job_status.title(),
            "result": job.result,
            "tarball": job.meta.get("tarball"),
            "enqueued_at": job.enqueued_at,
@@ -179,7 +179,7 @@ class RIP(FastAPI):
            retry=Retry(max=1, interval=[30]),
            meta={
                "progress": 0,
-                "status": "queued",
+                "status": "Queued",
                "target": target,
                "tracks_in": len(track_ids),
                "quality": data.quality,
@@ -189,7 +189,7 @@ class RIP(FastAPI):
        return JSONResponse(
            content={
                "job_id": job.id,
-                "status": "queued",
+                "status": "Queued",
                "target": job.meta.get("target", None),
                "quality": job.meta.get("quality", "Unknown"),
            }
@@ -267,4 +267,10 @@ class RIP(FastAPI):
            except Exception:
                continue
+        # ---- Sort newest first ----
+        def job_sort_key(job):
+            return job.get("ended_at") or job.get("started_at") or job.get("enqueued_at") or 0
+
+        jobs_info.sort(key=job_sort_key, reverse=True)
        return {"jobs": jobs_info}