2025-08-07 11:47:57 -04:00
|
|
|
import logging
|
|
|
|
from fastapi import FastAPI, Request, Response, Depends
|
|
|
|
from fastapi_throttle import RateLimiter
|
|
|
|
from fastapi.responses import JSONResponse
|
2025-08-11 14:05:20 -04:00
|
|
|
from utils.sr_wrapper import SRUtil
|
2025-08-09 07:48:07 -04:00
|
|
|
from auth.deps import get_current_user
|
2025-08-15 13:31:15 -04:00
|
|
|
from redis import Redis
|
|
|
|
from rq import Queue
|
|
|
|
from rq.job import Job
|
|
|
|
from utils.rip_background import bulk_download
|
|
|
|
from lyric_search.sources import private
|
|
|
|
from pydantic import BaseModel
|
|
|
|
|
2025-08-15 13:39:27 -04:00
|
|
|
|
2025-08-15 13:31:15 -04:00
|
|
|
class ValidBulkFetchRequest(BaseModel):
    """Request body for POST /trip/bulk_fetch: the SR track IDs to download."""

    # Track identifiers handed to the background bulk_download job.
    track_ids: list[int]
|
2025-08-07 11:47:57 -04:00
|
|
|
|
2025-08-09 07:48:07 -04:00
|
|
|
|
2025-08-07 11:47:57 -04:00
|
|
|
class RIP(FastAPI):
    """
    Ripping Endpoints

    Registers the trip/* API routes on the injected FastAPI app, proxies
    metadata/stream lookups through SRUtil, and offloads bulk downloads to
    an RQ queue ("dls") backed by a local Redis instance.

    NOTE(review): this class subclasses FastAPI but never calls
    super().__init__() and registers routes on the injected `app` instead of
    itself — it is effectively a plain route container; confirm the FastAPI
    base class is actually intended.
    """

    def __init__(self, app: FastAPI, my_util, constants) -> None:
        """Wire up Redis, the RQ task queue, and all trip/* routes.

        Args:
            app: Application the routes are registered on.
            my_util: Shared utility object (kept for parity with other routers).
            constants: Shared constants object.
        """
        self.app: FastAPI = app
        self.util = my_util
        # Helper that talks to the SR backend for every lookup below.
        self.trip_util = SRUtil()
        self.constants = constants
        self.redis_conn = Redis(
            host="localhost",
            port=6379,
            db=0,
            password=private.REDIS_PW,
        )
        # Keep results and failures for 24h so job status stays pollable.
        self.task_queue = Queue(
            "dls",
            connection=self.redis_conn,
            default_result_ttl=86400,
            default_failure_ttl=86400,
        )
        # Route table: path -> handler. bulk_fetch is the only POST (see loop).
        self.endpoints: dict = {
            "trip/get_artists_by_name": self.artists_by_name_handler,
            "trip/get_albums_by_artist_id/{artist_id:path}": self.albums_by_artist_id_handler,
            "trip/get_tracks_by_artist_song": self.tracks_by_artist_song_handler,
            "trip/get_tracks_by_album_id/{album_id:path}": self.tracks_by_album_id_handler,
            "trip/get_track_by_id/{track_id:path}": self.track_by_id_handler,
            "trip/bulk_fetch": self.bulk_fetch_handler,
            "trip/job/{job_id:path}": self.job_status_handler,
            "trip/jobs/list": self.job_list_handler,
        }

        for endpoint, handler in self.endpoints.items():
            # Throttle every endpoint: at most 8 requests per 2 seconds.
            dependencies = [Depends(RateLimiter(times=8, seconds=2))]
            app.add_api_route(
                f"/{endpoint}",
                handler,
                # bulk_fetch accepts a JSON body, so it is the only POST route.
                methods=["GET"] if endpoint != "trip/bulk_fetch" else ["POST"],
                include_in_schema=True,
                dependencies=dependencies,
            )

    async def artists_by_name_handler(
        self, artist: str, request: Request, user=Depends(get_current_user)
    ) -> Response:
        """Get artists by name."""
        artists = await self.trip_util.get_artists_by_name(artist)
        if not artists:
            return Response(status_code=404, content="Not found")
        return JSONResponse(content=artists)

    async def albums_by_artist_id_handler(
        self, artist_id: int, request: Request, user=Depends(get_current_user)
    ) -> Response:
        """Get albums by artist ID."""
        albums = await self.trip_util.get_albums_by_artist_id(artist_id)
        if not albums:
            return Response(status_code=404, content="Not found")
        return JSONResponse(content=albums)

    async def tracks_by_album_id_handler(
        self, album_id: int, request: Request, user=Depends(get_current_user)
    ) -> Response:
        """Get tracks by album ID."""
        tracks = await self.trip_util.get_tracks_by_album_id(album_id)
        if not tracks:
            # FIX: body was "Not Found"; normalized to match every other 404.
            return Response(status_code=404, content="Not found")
        return JSONResponse(content=tracks)

    async def tracks_by_artist_song_handler(
        self, artist: str, song: str, request: Request, user=Depends(get_current_user)
    ) -> Response:
        """Get tracks by artist and song name."""
        # FIX: routine search logging was emitted at CRITICAL severity.
        logging.info("Searching for tracks by artist: %s, song: %s", artist, song)
        tracks = await self.trip_util.get_tracks_by_artist_song(artist, song)
        if not tracks:
            return Response(status_code=404, content="Not found")
        return JSONResponse(content=tracks)

    async def track_by_id_handler(
        self, track_id: int, request: Request, user=Depends(get_current_user)
    ) -> Response:
        """Get a track's stream URL by track ID."""
        track = await self.trip_util.get_stream_url_by_track_id(track_id)
        if not track:
            return Response(status_code=404, content="Not found")
        return JSONResponse(content={"stream_url": track})

    async def bulk_fetch_handler(
        self,
        data: ValidBulkFetchRequest,
        request: Request,
        user=Depends(get_current_user),
    ) -> Response:
        """Enqueue a background bulk download for a list of track IDs.

        Returns the RQ job id so the caller can poll /trip/job/{job_id}.
        """
        if not data or not data.track_ids:
            # FIX: this error path previously returned HTTP 200; the body
            # shape is unchanged so clients keying on "err" still work.
            return JSONResponse(
                status_code=400,
                content={
                    "err": True,
                    "errorText": "Invalid data",
                },
            )
        track_ids = data.track_ids
        job = self.task_queue.enqueue(
            bulk_download,
            args=(track_ids,),
            timeout=3600,
            failure_ttl=86400,
            result_ttl=86400,
        )
        # Track IDs ourselves so /trip/jobs/list can still find jobs after
        # they leave the queued state (the RQ queue only lists pending jobs).
        self.redis_conn.lpush("enqueued_job_ids", job.id)
        return JSONResponse(
            content={
                "job_id": job.id,
                "status": "queued",
            }
        )

    async def job_status_handler(
        self, job_id: str, request: Request, user=Depends(get_current_user)
    ):
        """Get status and result of a single job."""
        try:
            job = Job.fetch(job_id, connection=self.redis_conn)
        except Exception:
            # Job never existed, or its result/failure TTL has expired.
            return JSONResponse({"error": "Job not found"}, status_code=404)

        return {
            "id": job.id,
            "status": job.get_status(),
            "result": job.result,
            "enqueued_at": job.enqueued_at,
            "started_at": job.started_at,
            "ended_at": job.ended_at,
        }

    async def job_list_handler(self, request: Request, user=Depends(get_current_user)):
        """List all jobs in the queue (queued + finished, if result_ttl allows)"""
        jobs_info = []

        # Jobs still in the queue (pending)
        for job in self.task_queue.jobs:
            jobs_info.append(
                {
                    "id": job.id,
                    "status": job.get_status(),  # queued
                    "result": job.result,
                    "enqueued_at": job.enqueued_at,
                    "progress": job.meta.get("progress", None),
                }
            )

        # Started/running jobs tracked via enqueued_job_ids
        job_ids = self.redis_conn.lrange("enqueued_job_ids", 0, -1)
        for jid_bytes in job_ids:  # type: ignore
            jid = jid_bytes.decode()
            try:
                job = Job.fetch(jid, connection=self.redis_conn)
            except Exception:
                continue  # job may have completed and expired

            status = job.get_status()
            if status in ("started", "queued", "finished"):
                # avoid duplicates for jobs already in task_queue.jobs
                if not any(j["id"] == job.id for j in jobs_info):
                    jobs_info.append(
                        {
                            "id": job.id,
                            "status": status,
                            "result": job.result,
                            "enqueued_at": job.enqueued_at,
                            "progress": job.meta.get("progress", None),
                            "tracks": job.meta.get("track_list", None),
                        }
                    )

        return {"jobs": jobs_info}
|