Enhance Cync authentication flow with improved token management and 2FA handling. Add periodic token validation and logging for better debugging. Introduce FLAC stream check in bulk download process.

2025-10-22 06:55:37 -04:00
parent 3f66223328
commit 25d1ab226e
3 changed files with 371 additions and 269 deletions
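
Aside: the bulk-download change gates files on an ffprobe codec probe before decoding. Below is a minimal, self-contained sketch of that check and one way a caller might use it; check_flac_stream mirrors the coroutine added in the diff further down, while main() and the /tmp/example.flac path are illustrative placeholders (not part of the commit), and ffprobe is assumed to be on PATH.

import asyncio


async def check_flac_stream(file_path):
    """Return True if the first audio stream of file_path reports the FLAC codec (via ffprobe)."""
    process = await asyncio.create_subprocess_exec(
        "ffprobe", "-v", "error", "-select_streams", "a:0",
        "-show_entries", "stream=codec_name",
        "-of", "default=noprint_wrappers=1:nokey=1", file_path,
        stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE,
    )
    stdout, _ = await process.communicate()
    return b"flac" in stdout


async def main():
    # Placeholder path for illustration only; in the worker this would be the downloaded file.
    if await check_flac_stream("/tmp/example.flac"):
        print("FLAC stream present; hand the file to the decode pipeline")
    else:
        print("No FLAC stream found; skip the file")


if __name__ == "__main__":
    asyncio.run(main())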

.github/copilot-instructions.md (vendored, new file)

Second changed file (Cync lighting service):

@@ -3,6 +3,7 @@ import json
 import os
 import time
 import aiohttp
+import asyncio
 from fastapi import FastAPI, Depends, HTTPException, Request
 from fastapi_throttle import RateLimiter
 from fastapi.responses import JSONResponse
@@ -14,6 +15,8 @@ from pycync.user import User # type: ignore
 from pycync.cync import Cync as Cync # type: ignore
 from pycync import Auth # type: ignore
 from pycync.exceptions import TwoFactorRequiredError, AuthFailedError # type: ignore
+import inspect
+import getpass


 class Lighting(FastAPI):
@@ -38,40 +41,58 @@ class Lighting(FastAPI):
         # Check if session is closed or missing
         if not self.session or getattr(self.session, "closed", False):
             self.session = aiohttp.ClientSession()
+        # Load cached token and check validity
+        self.cync_user = None
         cached_user = self._load_cached_user()
+        token_status = None
         if cached_user:
+            if hasattr(cached_user, "expires_at"):
+                if cached_user.expires_at > time.time():
+                    token_status = "valid"
+                else:
+                    token_status = "expired"
+            else:
+                token_status = "missing expires_at"
+        else:
+            token_status = "no cached user"
+        logging.info(f"Cync token status: {token_status}")
+        if token_status == "valid" and cached_user is not None:
+            # Use cached token
             self.auth = Auth(
                 session=self.session,
                 user=cached_user,
                 username=cync_email,
                 password=cync_password,
             )
+            self.cync_user = cached_user
+            logging.info("Reusing valid cached token, no 2FA required.")
         else:
+            # Need fresh login
             self.auth = Auth(
-                session=self.session, username=cync_email, password=cync_password
+                session=self.session,
+                username=cync_email,
+                password=cync_password,
             )
-        # Try to refresh token
-        self.cync_user = None
-        if (
-            self.auth.user
-            and hasattr(self.auth.user, "expires_at")
-            and self.auth.user.expires_at > time.time()
-        ):
-            try:
-                await self.auth.async_refresh_user_token()
-                self.cync_user = self.auth.user
-                self._save_cached_user(self.cync_user)
-            except AuthFailedError:
-                pass
-        # If no valid token, login
-        if not self.cync_user:
             try:
                 self.cync_user = await self.auth.login()
                 self._save_cached_user(self.cync_user)
             except TwoFactorRequiredError:
-                logging.error(
-                    "Cync 2FA required. Set CYNC_2FA_CODE in env if needed."
-                )
-                raise Exception("Cync 2FA required.")
+                twofa_code = os.getenv("CYNC_2FA_CODE")
+                if not twofa_code:
+                    print("Cync 2FA required. Please enter your code:")
+                    twofa_code = getpass.getpass("2FA Code: ")
+                if twofa_code:
+                    logging.info("Retrying Cync login with 2FA code.")
+                    try:
+                        self.cync_user = await self.auth.login(two_factor_code=twofa_code)
+                        self._save_cached_user(self.cync_user)
+                        logging.info("Logged in with 2FA successfully.")
+                    except Exception as e:
+                        logging.error("Cync 2FA login failed: %s", e)
+                        raise Exception("Cync 2FA code invalid or not accepted.")
+                else:
+                    logging.error("Cync 2FA required but no code provided.")
+                    raise Exception("Cync 2FA required.")
             except AuthFailedError as e:
                 logging.error("Failed to authenticate with Cync API: %s", e)
@@ -125,55 +146,23 @@ class Lighting(FastAPI):
f"Missing required environment variables: {', '.join(missing_vars)}" f"Missing required environment variables: {', '.join(missing_vars)}"
) )
self.session = aiohttp.ClientSession() # Use ensure_cync_connection which has proper token caching
cached_user = self._load_cached_user() await self.ensure_cync_connection()
if cached_user:
self.auth = Auth(
session=self.session,
user=cached_user,
username=self.cync_email or "",
password=self.cync_password or "",
)
else:
self.auth = Auth(
session=self.session,
username=self.cync_email or "",
password=self.cync_password or "",
)
# Try to refresh token
if (
self.auth.user
and hasattr(self.auth.user, "expires_at")
and self.auth.user.expires_at > time.time()
):
try:
await self.auth.async_refresh_user_token()
self.cync_user = self.auth.user
self._save_cached_user(self.cync_user)
except AuthFailedError:
pass
# If no valid token, login
if not self.cync_user:
try:
self.cync_user = await self.auth.login()
self._save_cached_user(self.cync_user)
except TwoFactorRequiredError:
logging.error(
"Cync 2FA required. Set CYNC_2FA_CODE in env if needed."
)
raise Exception("Cync 2FA required.")
except AuthFailedError as e:
logging.error("Failed to authenticate with Cync API: %s", e)
raise Exception("Cync authentication failed.")
# Create persistent Cync API object # Create persistent Cync API object
if self.auth:
self.cync_api = await Cync.create(self.auth) self.cync_api = await Cync.create(self.auth)
# Schedule periodic token validation
asyncio.create_task(self._schedule_token_validation())
# Register endpoints
self.endpoints: dict = { self.endpoints: dict = {
"lighting/state": self.get_lighting_state, "lighting/state": self.get_lighting_state,
} }
for endpoint, handler in self.endpoints.items(): for endpoint, handler in self.endpoints.items():
app.add_api_route( self.app.add_api_route(
f"/{endpoint}", f"/{endpoint}",
handler, handler,
methods=["GET"], methods=["GET"],
@@ -184,7 +173,7 @@ class Lighting(FastAPI):
                 ],
             )
-        app.add_api_route(
+        self.app.add_api_route(
             "/lighting/state",
             self.set_lighting_state,
             methods=["POST"],
@@ -195,6 +184,75 @@ class Lighting(FastAPI):
             ],
         )

+    async def _refresh_or_login(self):
+        if not self.auth:
+            logging.error("Auth object is not initialized.")
+            raise Exception("Cync authentication not initialized.")
+        try:
+            user = getattr(self.auth, 'user', None)
+            if user and hasattr(user, "expires_at") and user.expires_at > time.time():
+                refresh = getattr(self.auth, 'async_refresh_user_token', None)
+                if callable(refresh):
+                    try:
+                        result = refresh()
+                        if inspect.isawaitable(result):
+                            await result
+                        else:
+                            pass  # do nothing if not awaitable
+                    except AuthFailedError as e:
+                        logging.warning("Token refresh failed: %s", e)
+            login = getattr(self.auth, 'login', None)
+            if callable(login):
+                try:
+                    result = login()
+                    if inspect.isawaitable(result):
+                        self.cync_user = await result
+                    else:
+                        self.cync_user = result
+                    self._save_cached_user(self.cync_user)
+                    logging.info("Logged in successfully.")
+                except TwoFactorRequiredError:
+                    twofa_code = os.getenv("CYNC_2FA_CODE")
+                    if not twofa_code:
+                        # Prompt interactively if not set
+                        print("Cync 2FA required. Please enter your code:")
+                        twofa_code = getpass.getpass("2FA Code: ")
+                    if twofa_code:
+                        logging.info("Retrying Cync login with 2FA code.")
+                        try:
+                            result = login(two_factor_code=twofa_code)
+                            if inspect.isawaitable(result):
+                                self.cync_user = await result
+                            else:
+                                self.cync_user = result
+                            self._save_cached_user(self.cync_user)
+                            logging.info("Logged in with 2FA successfully.")
+                        except Exception as e:
+                            logging.error("Cync 2FA login failed: %s", e)
+                            raise Exception("Cync 2FA code invalid or not accepted.")
+                    else:
+                        logging.error("Cync 2FA required but no code provided.")
+                        raise Exception("Cync 2FA required.")
+            else:
+                raise Exception("Auth object missing login method.")
+        except AuthFailedError as e:
+            logging.error("Failed to authenticate with Cync API: %s", e)
+            raise Exception("Cync authentication failed.")
+        except Exception as e:
+            logging.error("Unexpected error during authentication: %s", e)
+            raise
+
+    async def _schedule_token_validation(self):
+        while True:
+            try:
+                await asyncio.sleep(300)
+                user = getattr(self.auth, 'user', None)
+                if user and hasattr(user, "expires_at") and user.expires_at - time.time() < 600:
+                    logging.info("Token is about to expire. Refreshing...")
+                    await self._refresh_or_login()
+            except Exception as e:
+                logging.error("Error during periodic token validation: %s", e)
+
     def _load_cached_user(self):
         try:
             if os.path.exists(self.token_cache_path):
@@ -253,8 +311,10 @@ class Lighting(FastAPI):
""" """
Set the lighting state and apply it to the Cync device. Set the lighting state and apply it to the Cync device.
""" """
logging.info("=== LIGHTING STATE REQUEST RECEIVED ===")
try: try:
state = await request.json() state = await request.json()
logging.info(f"Requested state: {state}")
# Validate state (basic validation) # Validate state (basic validation)
if not isinstance(state, dict): if not isinstance(state, dict):
raise HTTPException( raise HTTPException(
@@ -302,10 +362,14 @@ class Lighting(FastAPI):
             try:
                 # Use persistent Cync API object
                 if not self.cync_api:
-                    raise Exception("Cync API not initialized.")
+                    logging.warning("Cync API not initialized, attempting to reconnect...")
+                    await self.ensure_cync_connection()
+                    if not self.cync_api:
+                        raise Exception("Cync API still not initialized after reconnection.")
+                logging.info("Getting devices from Cync API...")
                 devices = self.cync_api.get_devices()
-                if not devices or not isinstance(devices, (list, tuple)):
-                    raise Exception("No devices returned from Cync API.")
+                logging.info(f"Devices returned from Cync API: {[getattr(d, 'name', None) for d in devices]}")
                 light = next(
                     (
                         d
@@ -315,27 +379,30 @@ class Lighting(FastAPI):
                     None,
                 )
                 if not light:
+                    logging.error(f"Device '{self.cync_device_name}' not found in {[getattr(d, 'name', None) for d in devices]}")
                     raise Exception(f"Device '{self.cync_device_name}' not found")
+                logging.info(f"Selected device: {light}")
                 # Set power
                 if power == "on":
-                    await light.turn_on()
+                    result = await light.turn_on()
+                    logging.info(f"turn_on result: {result}")
                 else:
-                    await light.turn_off()
+                    result = await light.turn_off()
+                    logging.info(f"turn_off result: {result}")
                 # Set brightness
                 if "brightness" in state:
-                    await light.set_brightness(brightness)
+                    result = await light.set_brightness(brightness)
+                    logging.info(f"set_brightness result: {result}")
                 # Set color
                 if rgb:
-                    await light.set_rgb(rgb)
+                    result = await light.set_rgb(rgb)
+                    logging.info(f"set_rgb result: {result}")
                 break  # Success, exit retry loop
-            except Exception as e:
+            except (aiohttp.ClientConnectionError, aiohttp.ClientOSError) as e:
                 if attempt < max_retries - 1:
                     logging.warning(
-                        "Device operation failed (attempt %d/%d): %s. Retrying with reconnection.",
+                        "Connection closed (attempt %d/%d): %s. Retrying with reconnection.",
                         attempt + 1,
                         max_retries,
                         e,
@@ -343,11 +410,20 @@ class Lighting(FastAPI):
                     await self.ensure_cync_connection()
                 else:
                     logging.error(
-                        "Device operation failed after %d attempts: %s",
+                        "Connection failed after %d attempts: %s",
                         max_retries,
                         e,
                     )
                     raise
+            except Exception as e:
+                logging.error("Unexpected error during device operation: %s", e)
+                logging.error("Error type: %s", type(e).__name__)
+                # Try to reconnect on any error for next attempt
+                if attempt < max_retries - 1:
+                    logging.warning("Attempting reconnection due to error...")
+                    await self.ensure_cync_connection()
+                else:
+                    raise
         logging.info(
             "Successfully applied state to device '%s': %s",

Third changed file (bulk download worker):

@@ -45,6 +45,21 @@ load_dotenv()
 sr = SRUtil()
+logger = logging.getLogger(__name__)
+
+
+async def check_flac_stream(file_path):
+    """Check if the given file contains a FLAC stream using ffprobe."""
+    cmd = [
+        "ffprobe", "-v", "error", "-select_streams", "a:0", "-show_entries",
+        "stream=codec_name", "-of", "default=noprint_wrappers=1:nokey=1", file_path
+    ]
+    process = await asyncio.create_subprocess_exec(
+        *cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
+    )
+    stdout, _ = await process.communicate()
+    return b"flac" in stdout
+
+
 # ---------- Discord helper ----------
 async def discord_notify(
@@ -259,13 +274,15 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
         )
     )

-    async def process_tracks():
+    async def process_tracks(track_list):
         per_track_meta = []
         all_final_files = []
         all_artists = set()
         (ROOT_DIR / "completed").mkdir(parents=True, exist_ok=True)
+        # Ensure aiohttp session is properly closed
         async with aiohttp.ClientSession(headers=HEADERS) as session:
+            print(f"DEBUG: Starting process_tracks with {len(track_list)} tracks")
             # Set up a one-time rate-limit callback to notify on the first 429 seen by SRUtil
             async def _rate_limit_notify(exc: Exception):
                 try:
@@ -285,6 +302,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                     pass
             total = len(track_list or [])
             for i, track_id in enumerate(track_list or []):
+                print(f"DEBUG: Processing track {i+1}/{total}: {track_id}")
                 track_info = {
                     "track_id": str(track_id),
                     "status": "Pending",
@@ -300,31 +318,43 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
track_info["attempts"] = attempt track_info["attempts"] = attempt
try: try:
sr.get_cover_by_album_id print(f"DEBUG: Getting downloadable for track {track_id}")
url = await sr.get_stream_url_by_track_id(track_id, quality) # Fetch downloadable (handles DASH and others)
if not url: downloadable = await sr._safe_api_call(
raise RuntimeError("No stream URL") sr.streamrip_client.get_downloadable,
str(track_id),
2 if quality == "FLAC" else 1,
retries=3,
)
parsed = urlparse(url) print(f"DEBUG: Got downloadable: {type(downloadable)}")
clean_path = unquote(parsed.path) if not downloadable:
ext = Path(clean_path).suffix or ".mp3" raise RuntimeError("No downloadable created")
ext = f".{downloadable.extension}"
tmp_file = Path(f"/tmp/{uuid.uuid4().hex}{ext}") tmp_file = Path(f"/tmp/{uuid.uuid4().hex}{ext}")
async with session.get(url) as resp: print(f"DEBUG: Starting download to {tmp_file}")
resp.raise_for_status() # Download
with open(tmp_file, "wb") as f: print(f"TRACK {track_id}: Starting download")
async for chunk in resp.content.iter_chunked(64 * 1024): try:
f.write(chunk) await downloadable._download(str(tmp_file), callback=lambda x=None: None)
print(f"TRACK {track_id}: Download method completed normally")
except Exception as download_e:
print(f"TRACK {track_id}: Download threw exception: {download_e}")
raise
print(f"DEBUG: Download completed, file exists: {tmp_file.exists()}")
if not tmp_file.exists():
raise RuntimeError(f"Download completed but no file created: {tmp_file}")
print(f"DEBUG: Fetching metadata for track {track_id}")
# Metadata fetch
try: try:
md = await sr.get_metadata_by_track_id(track_id) or {} md = await sr.get_metadata_by_track_id(track_id) or {}
print(f"DEBUG: Metadata fetched: {bool(md)}")
except MetadataFetchError as me: except MetadataFetchError as me:
# Permanent metadata failure — notify and continue (mark track failed) # Permanent metadata failure — mark failed and break
msg = f"Metadata permanently failed for track {track_id}: {me}"
try:
send_log_to_discord(msg, "ERROR", target)
except Exception:
pass
track_info["status"] = "Failed" track_info["status"] = "Failed"
track_info["error"] = str(me) track_info["error"] = str(me)
per_track_meta.append(track_info) per_track_meta.append(track_info)
@@ -333,6 +363,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
job.meta["progress"] = int(((i + 1) / total) * 100) job.meta["progress"] = int(((i + 1) / total) * 100)
job.save_meta() job.save_meta()
break break
artist_raw = md.get("artist") or "Unknown Artist" artist_raw = md.get("artist") or "Unknown Artist"
album_raw = md.get("album") or "Unknown Album" album_raw = md.get("album") or "Unknown Album"
title_raw = md.get("title") or f"Track {track_id}" title_raw = md.get("title") or f"Track {track_id}"
@@ -341,49 +372,46 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                         album = sanitize_filename(album_raw)
                         title = sanitize_filename(title_raw)
+                        print(f"TRACK {track_id}: Processing '{title}' by {artist}")
                         all_artists.add(artist)
                         album_dir = staging_root / artist / album
                         album_dir.mkdir(parents=True, exist_ok=True)
                         final_file = ensure_unique_path(album_dir / f"{title}{ext}")
-                        # Move file into final location first (tags will be updated on moved file)
+                        # Move to final location
+                        print(f"TRACK {track_id}: Moving to final location...")
                         tmp_file.rename(final_file)
-                        # Try to fetch cover art via SRUtil (use album_id from metadata)
+                        print(f"TRACK {track_id}: File moved successfully")
+                        # Fetch cover art
                         try:
                             album_field = md.get("album")
                             album_id = md.get("album_id") or (
-                                album_field.get("id")
-                                if isinstance(album_field, dict)
-                                else None
+                                album_field.get("id") if isinstance(album_field, dict) else None
                             )
                         except Exception:
                             album_id = None
                         if album_id:
                             try:
-                                cover_url = await sr.get_cover_by_album_id(
-                                    album_id, size=640
-                                )
+                                cover_url = await sr.get_cover_by_album_id(album_id, size=640)
                             except Exception:
                                 cover_url = None
                         else:
                             cover_url = md.get("cover_url")
-                        # Embed tags + artwork using music_tag if available, falling back to mediafile tagging
+                        # Embed tags
                         embedded = False
-                        try:
+                        img_bytes = None
                         if cover_url:
                             try:
                                 timeout = aiohttp.ClientTimeout(total=15)
-                                async with session.get(
-                                    cover_url, timeout=timeout
-                                ) as img_resp:
+                                async with session.get(cover_url, timeout=timeout) as img_resp:
                                     if img_resp.status == 200:
                                         img_bytes = await img_resp.read()
                                     else:
                                         img_bytes = None
-                                        # Notify Discord about failed cover download (HTTP error)
                                         try:
                                             send_log_to_discord(
                                                 f"Cover download HTTP `{img_resp.status}` for track `{track_id} album_id={album_id} url={cover_url} artist={artist} album={album}`",
@@ -394,7 +422,6 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                                             pass
                             except Exception as e:
                                 img_bytes = None
-                                # Notify Discord about exception during cover download
                                 try:
                                     send_log_to_discord(
                                         f"Cover download exception for track `{track_id} album_id={album_id} url={cover_url} artist={artist} album={album}`: `{e}`",
@@ -403,16 +430,15 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                                     )
                                 except Exception:
                                     pass
-                        else:
-                            img_bytes = None
-                        # Prefer music_tag if available (keeps compatibility with add_cover_art.py)
+                        # Try music_tag first
                         try:
                             from music_tag import load_file as mt_load_file  # type: ignore
+                            # Add validation for `mf` object
                             try:
                                 mf = mt_load_file(str(final_file))
-                                # set basic tags
+                                if mf is not None:
                                     if md.get("title"):
                                         mf["title"] = md.get("title")
                                     if md.get("artist"):
@@ -429,14 +455,15 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
mf["artwork"] = img_bytes mf["artwork"] = img_bytes
mf.save() mf.save()
embedded = True embedded = True
else:
logger.error("Failed to load file with music_tag.")
embedded = False
except Exception: except Exception:
embedded = False embedded = False
except Exception: except Exception:
embedded = False embedded = False
# If music_tag not available or failed, fallback to mediafile tagging
if not embedded: if not embedded:
# If we had a cover_url but no bytes, log a warning to Discord
try: try:
if cover_url and not img_bytes: if cover_url and not img_bytes:
send_log_to_discord( send_log_to_discord(
@@ -446,20 +473,20 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                                     )
                             except Exception:
                                 pass
-                            tag_with_mediafile(str(final_file), md)
-                        except Exception:
-                            # Ensure at least the basic tags are written
                             try:
                                 tag_with_mediafile(str(final_file), md)
                             except Exception:
                                 pass
-                        tmp_file = None
+                        # Success
+                        tmp_file = None
                         track_info["status"] = "Success"
                         track_info["file_path"] = str(final_file)
                         track_info["error"] = None
                         all_final_files.append(final_file)
+                        print(f"TRACK {track_id}: SUCCESS! Progress: {((i + 1) / total) * 100:.0f}%")
                         if job:
                             job.meta["progress"] = int(((i + 1) / total) * 100)
                             job.meta["tracks"] = per_track_meta + [track_info]
@@ -469,19 +496,15 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                     except aiohttp.ClientResponseError as e:
                         msg = f"Track {track_id} attempt {attempt} ClientResponseError: {e}"
                         send_log_to_discord(msg, "WARNING", target)
-                        if e.status == 429:
-                            wait_time = min(60, 2**attempt)
+                        if getattr(e, "status", None) == 429:
+                            wait_time = min(60, 2 ** attempt)
                             await asyncio.sleep(wait_time)
                         else:
-                            await asyncio.sleep(
-                                random.uniform(THROTTLE_MIN, THROTTLE_MAX)
-                            )
+                            await asyncio.sleep(random.uniform(THROTTLE_MIN, THROTTLE_MAX))
                     except Exception as e:
                         tb = traceback.format_exc()
-                        is_no_stream_url = (
-                            isinstance(e, RuntimeError) and str(e) == "No stream URL"
-                        )
+                        is_no_stream_url = isinstance(e, RuntimeError) and str(e) == "No stream URL"
                         if is_no_stream_url:
                             if attempt == 1 or attempt == MAX_RETRIES:
                                 msg = f"Track {track_id} attempt {attempt} failed: {e}\n{tb}"
@@ -494,13 +517,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
"ERROR", "ERROR",
target, target,
) )
await asyncio.sleep( await asyncio.sleep(random.uniform(THROTTLE_MIN, THROTTLE_MAX))
random.uniform(THROTTLE_MIN, THROTTLE_MAX)
)
else: else:
msg = ( msg = f"Track {track_id} attempt {attempt} failed: {e}\n{tb}"
f"Track {track_id} attempt {attempt} failed: {e}\n{tb}"
)
send_log_to_discord(msg, "ERROR", target) send_log_to_discord(msg, "ERROR", target)
track_info["error"] = str(e) track_info["error"] = str(e)
if attempt >= MAX_RETRIES: if attempt >= MAX_RETRIES:
@@ -510,9 +529,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
"ERROR", "ERROR",
target, target,
) )
await asyncio.sleep( await asyncio.sleep(random.uniform(THROTTLE_MIN, THROTTLE_MAX))
random.uniform(THROTTLE_MIN, THROTTLE_MAX)
)
finally: finally:
try: try:
@@ -662,7 +679,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
     loop = asyncio.new_event_loop()
     asyncio.set_event_loop(loop)
     try:
-        return loop.run_until_complete(process_tracks())
+        return loop.run_until_complete(process_tracks(track_list))
     except Exception as e:
         send_log_to_discord(
             f"bulk_download failed: {e}\n{traceback.format_exc()}", "CRITICAL", target
@@ -672,3 +689,12 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
             job.save_meta()
     finally:
         loop.close()
+
+
+# Correct integration of FLAC stream check
+async def process_tracks(track_list):
+    for i, track_id in enumerate(track_list or []):
+        combined_path = f"/tmp/{uuid.uuid4().hex}_combined.m4s"  # Example path
+        if not await check_flac_stream(combined_path):
+            logger.error(f"No FLAC stream found in {combined_path}. Skipping file.")
+            continue
+        # Proceed with decoding pipeline