misc: add RQ queue default_timeout, switch enqueue to job_timeout, sanitize filename metadata, and store the raw track list in job meta
@@ -35,6 +35,7 @@ class RIP(FastAPI):
         self.task_queue = Queue(
             "dls",
             connection=self.redis_conn,
+            default_timeout=3600,
             default_result_ttl=86400,
             default_failure_ttl=86400,
         )
@@ -123,7 +124,7 @@ class RIP(FastAPI):
         job = self.task_queue.enqueue(
             bulk_download,
             args=(track_ids,),
-            timeout=3600,
+            job_timeout=3600,
             failure_ttl=86400,
             result_ttl=86400,
         )
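Note on the two timeout changes above: in RQ 1.0+ the per-job limit passed to enqueue() is spelled job_timeout (the pre-1.0 timeout= keyword is no longer one of enqueue()'s recognized options), while Queue(default_timeout=...) sets the fallback for jobs that do not pass their own. A minimal sketch of the call pattern, assuming a local Redis and RQ >= 1.0; time.sleep stands in for bulk_download so the job stays importable by a worker:

    from time import sleep

    from redis import Redis
    from rq import Queue

    # Queue-level fallback timeout, mirroring the change in the first hunk.
    queue = Queue("dls", connection=Redis(), default_timeout=3600)

    job = queue.enqueue(
        sleep,
        args=(1,),
        job_timeout=3600,   # hard limit for this job; overrides default_timeout
        result_ttl=86400,   # keep the return value for a day
        failure_ttl=86400,  # keep failed-job info for a day
    )
    print(job.id, job.get_status())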
|
@@ -4,6 +4,7 @@ import random
 import os
 import tarfile
 import uuid
+import re
 import shutil
 from pathlib import Path
 from urllib.parse import urlparse, unquote
@@ -39,6 +40,23 @@ HEADERS = {
 sr = SRUtil()
 
 
+import re
+
+def sanitize_filename(name: str) -> str:
+    """
+    Remove or replace characters not allowed in filenames.
+    Also trims whitespace and collapses consecutive spaces.
+    """
+    # Replace slashes/backslashes with a dash
+    name = name.replace("/", "-").replace("\\", "-")
+    # Remove illegal characters for most OSes
+    name = re.sub(r'[<>:"|?*\x00-\x1F]', "", name)
+    # Strip leading/trailing spaces and dots
+    name = name.strip().strip(".")
+    # Collapse multiple spaces into one
+    name = re.sub(r"\s+", " ", name)
+    return name or "Unknown"
+
 def bulk_download(track_list: list):
     """
     Full RQ-compatible bulk download job with:
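For reference, the helper added above behaves like this (the inputs are contrived examples):

    >>> sanitize_filename("AC/DC: Back in Black?")   # slashes and reserved chars
    'AC-DC Back in Black'
    >>> sanitize_filename("  ...  ")                 # nothing survives -> fallback
    'Unknown'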
@@ -103,9 +121,9 @@ def bulk_download(track_list: list):
                 track_id,
             )
             continue
-        artist = metadata.get("artist", "Unknown Artist")
-        album = metadata.get("album", "Unknown Album")
-        title = metadata.get("song", "Unknown Song")
+        artist = sanitize_filename(metadata.get("artist", "Unknown Artist"))
+        album = sanitize_filename(metadata.get("album", "Unknown Album"))
+        title = sanitize_filename(metadata.get("song", "Unknown Song"))
 
         logging.critical("Got metadata: %s/%s/%s", artist, album, title)
 
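Sanitizing each tag separately matters because the values are later joined into a filesystem path; a hypothetical illustration (the staging root and tag value are made up, and sanitize_filename is the helper added above):

    from pathlib import Path

    root = Path("/tmp/staging")  # made-up staging directory
    # A raw "/" in a tag silently adds a directory level; the sanitized form does not.
    print(root / "AC/DC" / "song.flac")                     # /tmp/staging/AC/DC/song.flac
    print(root / sanitize_filename("AC/DC") / "song.flac")  # /tmp/staging/AC-DC/song.flac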
@@ -138,7 +156,7 @@ def bulk_download(track_list: list):
         per_track_meta.append(track_info)
         if job:
             job.meta["progress"] = int((i + 1) / total * 100)
-            job.meta["tracks"] = per_track_meta
+            job.meta["tracks"] = track_list
             job.save_meta()
 
         # Throttle between downloads
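With progress and the track list kept in job.meta, the web process can poll the job from its side of Redis; a minimal sketch, assuming the same Redis instance and a placeholder job id:

    from redis import Redis
    from rq.job import Job

    # "some-job-id" is a placeholder for the id returned by enqueue().
    job = Job.fetch("some-job-id", connection=Redis())
    job.refresh()  # re-read the job hash; useful when polling in a loop
    print(job.get_status(), job.meta.get("progress"), job.meta.get("tracks"))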
|