@@ -127,9 +127,7 @@ class MemeUtil:
         db_conn.row_factory = sqlite3.Row
         rows_per_page: int = 10
         offset: int = (page - 1) * rows_per_page
-        query: str = (
-            "SELECT id, timestamp FROM memes ORDER BY timestamp DESC LIMIT 10 OFFSET ?"
-        )
+        query: str = "SELECT id, timestamp FROM memes ORDER BY timestamp DESC LIMIT 10 OFFSET ?"
         async with await db_conn.execute(query, (offset,)) as db_cursor:
             results = await db_cursor.fetchall()
             for result in results:
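
Note on the MemeUtil hunk: the collapsed query still hardcodes LIMIT 10 even though rows_per_page is a variable. A minimal standalone sketch of the same LIMIT/OFFSET paging, binding both parameters instead (aiosqlite assumed, since the surrounding code awaits db_conn.execute):

import sqlite3
import aiosqlite

async def fetch_meme_page(db_path: str, page: int, rows_per_page: int = 10):
    # page is 1-indexed, so page 1 starts at offset 0
    offset = (page - 1) * rows_per_page
    query = (
        "SELECT id, timestamp FROM memes "
        "ORDER BY timestamp DESC LIMIT ? OFFSET ?"
    )
    async with aiosqlite.connect(db_path) as db_conn:
        db_conn.row_factory = sqlite3.Row
        async with db_conn.execute(query, (rows_per_page, offset)) as db_cursor:
            return await db_cursor.fetchall()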
@@ -5,6 +5,7 @@ import datetime
 import os
 import random
 import asyncio
 import subprocess
 from uuid import uuid4 as uuid
 from typing import Union, Optional, Iterable
+from aiohttp import ClientSession, ClientTimeout
@@ -391,6 +392,39 @@ class RadioUtil:
             traceback.print_exc()
             return "Not Found"
 
+    async def _restart_liquidsoap_when_ready(self) -> None:
+        """Poll server until responsive, then restart Liquidsoap."""
+        max_attempts = 60
+        for attempt in range(max_attempts):
+            try:
+                async with ClientSession() as session:
+                    async with session.get(
+                        "http://127.0.0.1:52111/",
+                        timeout=ClientTimeout(total=3),
+                    ) as resp:
+                        logging.debug("Server check attempt %d: status %d", attempt + 1, resp.status)
+                        if resp.status < 500:
+                            logging.info("Server is ready (attempt %d)", attempt + 1)
+                            break
+            except Exception as e:
+                logging.debug("Server check attempt %d failed: %s", attempt + 1, str(e))
+            await asyncio.sleep(1)
+        else:
+            logging.warning("Server readiness check timed out, restarting Liquidsoap anyway")
+
+        try:
+            logging.info("Restarting Liquidsoap...")
+            subprocess.Popen(
+                ["./restart.sh"],
+                cwd="/home/kyle/ls",
+                stdout=subprocess.DEVNULL,
+                stderr=subprocess.DEVNULL,
+                start_new_session=True,
+            )
+            logging.info("Liquidsoap restart initiated")
+        except Exception as e:
+            logging.error("Error starting Liquidsoap restart: %s", str(e))
+
     async def load_playlists(self) -> None:
         """Load Playlists"""
         try:
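
The readiness poll above leans on Python's for/else: the else clause fires only when the loop exhausts max_attempts without hitting break, which is exactly the timeout case. A minimal sketch of the pattern, with a hypothetical async check() standing in for the HTTP probe:

import asyncio

async def wait_until_ready(check, max_attempts: int = 60) -> bool:
    ready = False
    for _ in range(max_attempts):
        try:
            if await check():  # hypothetical probe standing in for the HTTP GET
                ready = True
                break
        except Exception:
            pass  # a failed probe just means "not ready yet"
        await asyncio.sleep(1)
    else:
        # for/else: runs only when the loop finished every iteration
        # without hitting `break`, i.e. the timeout case.
        ready = False
    return ready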
@@ -487,10 +521,8 @@ class RadioUtil:
 
             """Loading Complete"""
             self.playlists_loaded = True
-            # Request skip from LS to bring streams current
-            for playlist in self.playlists:
-                logging.info("Skipping: %s", playlist)
-                await self._ls_skip(playlist)
+            # Restart Liquidsoap once server is responsive (fire and forget)
+            asyncio.create_task(self._restart_liquidsoap_when_ready())
         except Exception as e:
             logging.info("Playlist load failed: %s", str(e))
             traceback.print_exc()
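
The create_task call is deliberately fire-and-forget, but asyncio only keeps weak references to tasks, so a task with no other reference can in principle be garbage-collected before it finishes. If that ever bites, the usual fix (a sketch, not part of this commit) is to hold a strong reference until the task completes:

import asyncio

_background_tasks = set()

def spawn(coro):
    task = asyncio.create_task(coro)
    _background_tasks.add(task)                        # strong reference
    task.add_done_callback(_background_tasks.discard)  # drop it when done
    return task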
@@ -9,7 +9,6 @@ import subprocess
 import shutil
 from pathlib import Path
 from typing import Optional
 from urllib.parse import urlparse, unquote
 import aiohttp
-from datetime import datetime, timezone
 from mediafile import MediaFile, Image, ImageType  # type: ignore[import]
@@ -20,9 +19,9 @@ import re
 
 # ---------- Config ----------
 ROOT_DIR = Path("/storage/music2")
-MAX_RETRIES = 5
-THROTTLE_MIN = 1.0
-THROTTLE_MAX = 3.5
+MAX_RETRIES = 4
+THROTTLE_MIN = 0.0
+THROTTLE_MAX = 0.0
 DISCORD_WEBHOOK = os.getenv("TRIP_WEBHOOK_URI", "").strip()
 
 HEADERS = {
@@ -36,10 +35,7 @@ HEADERS = {
     "Connection": "keep-alive",
 }
 
-logging.basicConfig(
-    level=logging.DEBUG,
-    format="%(asctime)s [%(levelname)s] %(name)s: %(message)s",
-)
+# Logging is configured in base.py - don't override here
 
 load_dotenv()
 
@@ -288,8 +284,8 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
     all_artists = set()
     (ROOT_DIR / "completed").mkdir(parents=True, exist_ok=True)
 
-    # Ensure aiohttp session is properly closed
-    async with aiohttp.ClientSession(headers=HEADERS) as session:
+    session = aiohttp.ClientSession(headers=HEADERS)
+    try:
         print(f"DEBUG: Starting process_tracks with {len(track_list)} tracks")
 
         # Set up a one-time rate-limit callback to notify on the first 429 seen by SRUtil
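
Replacing async with by a manually closed session is what makes the 5xx handling further down possible: those branches close the current session and rebind the name to a fresh one, which an async with block would fight (its __aexit__ closes only the original object). Reduced to a sketch:

import aiohttp

async def run(headers: dict):
    session = aiohttp.ClientSession(headers=headers)
    try:
        # ... downloads run here; on a 5xx the handler closes the old
        # session and rebinds the name to a fresh one, e.g.:
        await session.close()
        session = aiohttp.ClientSession(headers=headers)
    finally:
        # closes whichever session object is current on exit
        await session.close()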
@@ -314,13 +310,57 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                 print(f"DEBUG: Processing track {i + 1}/{total}: {track_id}")
                 track_info = {
                     "track_id": str(track_id),
                     "title": None,
                     "artist": None,
                     "status": "Pending",
+                    "file_path": None,
+                    "filename": None,
                     "error": None,
+                    "attempts": 0,
                 }
                 attempt = 0
+
+                # Fetch metadata FIRST to check if track is available before attempting download
+                md = None
+                try:
+                    print(f"DEBUG: Fetching metadata for track {track_id}")
+                    md = await sr.get_metadata_by_track_id(track_id) or {}
+                    print(f"DEBUG: Metadata fetched: {bool(md)}")
+
+                    # Check if track is streamable
+                    if md and not md.get("streamable", True):
+                        print(f"TRACK {track_id}: Not streamable, skipping")
+                        track_info["status"] = "Failed"
+                        track_info["error"] = "Track not streamable"
+                        track_info["title"] = md.get("title") or f"Track {track_id}"
+                        track_info["artist"] = md.get("artist") or "Unknown Artist"
+                        per_track_meta.append(track_info)
+                        if job:
+                            job.meta["tracks"] = per_track_meta
+                            job.meta["progress"] = int(((i + 1) / total) * 100)
+                            job.save_meta()
+                        continue  # Skip to next track
+
+                except MetadataFetchError as me:
+                    # Permanent metadata failure — mark failed and skip
+                    print(f"TRACK {track_id}: Metadata fetch failed permanently: {me}")
+                    track_info["status"] = "Failed"
+                    track_info["error"] = str(me)
+                    track_info["title"] = f"Track {track_id}"
+                    track_info["artist"] = "Unknown Artist"
+                    per_track_meta.append(track_info)
+                    if job:
+                        job.meta["tracks"] = per_track_meta
+                        job.meta["progress"] = int(((i + 1) / total) * 100)
+                        job.save_meta()
+                    continue  # Skip to next track
+                except Exception as meta_err:
+                    # Non-permanent error - will retry during download attempts
+                    print(
+                        f"TRACK {track_id}: Metadata prefetch failed (will retry): {meta_err}"
+                    )
+                    md = None
 
                 while attempt < MAX_RETRIES:
                     tmp_file = None
                     attempt += 1
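
One subtlety in the prefetch above: md.get("streamable", True) defaults to True, so a track is skipped only when metadata explicitly marks it unstreamable; absent or partial metadata still falls through to the download attempts:

>>> {"streamable": False}.get("streamable", True)
False
>>> {}.get("streamable", True)  # key missing: assume streamable
True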
@@ -367,21 +407,13 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                                 f"Download completed but no file created: {tmp_file}"
                             )
 
-                        print(f"DEBUG: Fetching metadata for track {track_id}")
-                        # Metadata fetch
-                        try:
-                            md = await sr.get_metadata_by_track_id(track_id) or {}
-                            print(f"DEBUG: Metadata fetched: {bool(md)}")
-                        except MetadataFetchError as me:
-                            # Permanent metadata failure — mark failed and break
-                            track_info["status"] = "Failed"
-                            track_info["error"] = str(me)
-                            per_track_meta.append(track_info)
-                            if job:
-                                job.meta["tracks"] = per_track_meta
-                                job.meta["progress"] = int(((i + 1) / total) * 100)
-                                job.save_meta()
-                            break
+                        # If we didn't get metadata earlier, try again now
+                        if not md:
+                            print(f"DEBUG: Re-fetching metadata for track {track_id}")
+                            try:
+                                md = await sr.get_metadata_by_track_id(track_id) or {}
+                            except Exception:
+                                md = {}
 
                         artist_raw = md.get("artist") or "Unknown Artist"
                         album_raw = md.get("album") or "Unknown Album"
@@ -391,6 +423,10 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                         album = sanitize_filename(album_raw)
                         title = sanitize_filename(title_raw)
 
+                        # Populate track_info fields so job meta contains the user-visible data
+                        track_info["title"] = title
+                        track_info["artist"] = artist
+
                         print(f"TRACK {track_id}: Processing '{title}' by {artist}")
 
                         all_artists.add(artist)
@@ -400,7 +436,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
 
                         # Move to final location
                         print(f"TRACK {track_id}: Moving to final location...")
-                        tmp_file.rename(final_file)
+                        shutil.move(str(tmp_file), str(final_file))
                         print(f"TRACK {track_id}: File moved successfully")
 
                         # Fetch cover art
@@ -507,6 +543,10 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                         tmp_file = None
                         track_info["status"] = "Success"
                         track_info["file_path"] = str(final_file)
+                        try:
+                            track_info["filename"] = final_file.name
+                        except Exception:
+                            track_info["filename"] = None
                         track_info["error"] = None
                         all_final_files.append(final_file)
 
@@ -514,6 +554,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                             f"TRACK {track_id}: SUCCESS! Progress: {((i + 1) / total) * 100:.0f}%"
                         )
 
+                        # Throttle after successful download to avoid hitting server too quickly
+                        await asyncio.sleep(random.uniform(THROTTLE_MIN, THROTTLE_MAX))
+
                         if job:
                             job.meta["progress"] = int(((i + 1) / total) * 100)
                             job.meta["tracks"] = per_track_meta + [track_info]
@@ -523,9 +566,34 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                     except aiohttp.ClientResponseError as e:
                         msg = f"Track {track_id} attempt {attempt} ClientResponseError: {e}"
                         send_log_to_discord(msg, "WARNING", target)
+                        # If 429, backoff as before. If 5xx, recreate session and refresh Tidal client.
                         if getattr(e, "status", None) == 429:
                             wait_time = min(60, 2**attempt)
                             await asyncio.sleep(wait_time)
+                        elif 500 <= getattr(e, "status", 0) < 600:
+                            # Recreate local aiohttp session on 5xx errors
+                            try:
+                                await session.close()
+                            except Exception:
+                                pass
+                            session = aiohttp.ClientSession(headers=HEADERS)
+                            # Also force a fresh Tidal login in case the upstream session is stale
+                            try:
+                                await sr._force_fresh_login()
+                                send_log_to_discord(
+                                    f"Refreshed Tidal session after 5xx error on track {track_id}",
+                                    "WARNING",
+                                    target,
+                                )
+                            except Exception as login_err:
+                                send_log_to_discord(
+                                    f"Failed to refresh Tidal session: {login_err}",
+                                    "ERROR",
+                                    target,
+                                )
+                            await asyncio.sleep(
+                                random.uniform(THROTTLE_MIN, THROTTLE_MAX)
+                            )
                         else:
                             await asyncio.sleep(
                                 random.uniform(THROTTLE_MIN, THROTTLE_MAX)
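
For reference, since attempt is incremented at the top of the retry loop (so the first failure sees attempt == 1), the 429 branch's min(60, 2**attempt) yields doubling waits that cap at one minute:

>>> [min(60, 2**attempt) for attempt in range(1, 8)]
[2, 4, 8, 16, 32, 60, 60]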
@@ -533,10 +601,74 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
 
                     except Exception as e:
                         tb = traceback.format_exc()
+                        err_str = str(e).lower()
                         is_no_stream_url = (
                             isinstance(e, RuntimeError) and str(e) == "No stream URL"
                         )
-                        if is_no_stream_url:
+                        # Check if this is a 5xx error from the server (may appear in error message)
+                        is_5xx_error = any(
+                            code in err_str for code in ("500", "502", "503", "504")
+                        )
+                        # Check for permanent failures that should NOT be retried
+                        is_not_found = any(
+                            phrase in err_str
+                            for phrase in (
+                                "track not found",
+                                "not found",
+                                "404",
+                                "does not exist",
+                                "no longer available",
+                                "asset is not ready",
+                            )
+                        )
+
+                        if is_not_found:
+                            # Permanent failure - do not retry
+                            msg = (
+                                f"Track {track_id} not found/unavailable, skipping: {e}"
+                            )
+                            print(msg)
+                            send_log_to_discord(msg, "WARNING", target)
+                            track_info["status"] = "Failed"
+                            track_info["error"] = str(e)
+                            break  # Exit retry loop immediately
+                        elif is_5xx_error:
+                            msg = (
+                                f"Track {track_id} attempt {attempt} server error: {e}"
+                            )
+                            send_log_to_discord(msg, "WARNING", target)
+                            track_info["error"] = err_str
+                            # Recreate local aiohttp session
+                            try:
+                                await session.close()
+                            except Exception:
+                                pass
+                            session = aiohttp.ClientSession(headers=HEADERS)
+                            # Force a fresh Tidal login
+                            try:
+                                await sr._force_fresh_login()
+                                send_log_to_discord(
+                                    f"Refreshed Tidal session after 5xx error on track {track_id}",
+                                    "WARNING",
+                                    target,
+                                )
+                            except Exception as login_err:
+                                send_log_to_discord(
+                                    f"Failed to refresh Tidal session: {login_err}",
+                                    "ERROR",
+                                    target,
+                                )
+                            if attempt >= MAX_RETRIES:
+                                track_info["status"] = "Failed"
+                                send_log_to_discord(
+                                    f"Track {track_id} failed after {attempt} attempts (5xx)",
+                                    "ERROR",
+                                    target,
+                                )
+                            await asyncio.sleep(
+                                random.uniform(THROTTLE_MIN, THROTTLE_MAX)
+                            )
+                        elif is_no_stream_url:
                             if attempt == 1 or attempt == MAX_RETRIES:
                                 msg = f"Track {track_id} attempt {attempt} failed: {e}\n{tb}"
                                 send_log_to_discord(msg, "ERROR", target)
@@ -575,8 +707,22 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                         except Exception:
                             pass
 
+                # Ensure placeholders and filename for the job metadata
+                track_info["title"] = track_info.get("title") or f"Track {track_id}"
+                track_info["artist"] = track_info.get("artist") or "Unknown Artist"
+                if track_info.get("file_path") and not track_info.get("filename"):
+                    try:
+                        track_info["filename"] = Path(track_info["file_path"]).name
+                    except Exception:
+                        track_info["filename"] = None
                 per_track_meta.append(track_info)
 
+    finally:
+        try:
+            await session.close()
+        except Exception:
+            pass
+
     if not all_final_files:
         if job:
             job.meta["tarball"] = None
@@ -624,7 +770,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
         counter += 1
         staged_tarball = staging_root / f"{base_name} ({counter}).tar.gz"
 
-    final_dir = ROOT_DIR / "completed" / quality
+    final_dir = Path("/storage/music/TRIP")
     final_dir.mkdir(parents=True, exist_ok=True)
     # Ensure we don't overwrite an existing final tarball. Preserve `.tar.gz` style.
     final_tarball = ensure_unique_filename_in_dir(final_dir, staged_tarball.name)
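
ensure_unique_filename_in_dir itself is not shown in this diff. Given the comment about preserving the .tar.gz style, a hypothetical reimplementation of its contract might look like this (names and behavior are assumptions, not the repo's code):

from pathlib import Path

def ensure_unique_filename_in_dir(directory: Path, name: str) -> Path:
    candidate = directory / name
    # Treat all suffixes as one unit so "x.tar.gz" becomes "x (1).tar.gz",
    # not "x.tar (1).gz". Naive for stems that themselves contain dots.
    suffixes = "".join(candidate.suffixes)
    stem = candidate.name[: len(candidate.name) - len(suffixes)]
    counter = 1
    while candidate.exists():
        candidate = directory / f"{stem} ({counter}){suffixes}"
        counter += 1
    return candidate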
@@ -677,6 +823,14 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                     os.remove(f)
                 except Exception:
                     pass
+    except Exception as e:
+        send_log_to_discord(f"Tar creation failed: {e}", "ERROR", target)
+        if job:
+            job.meta["status"] = "compress_failed"
+            job.save_meta()
+        # Do not proceed further if tarball creation failed
+        await asyncio.sleep(0.1)
+        return []
 
     if not staged_tarball.exists():
         send_log_to_discord(
@@ -711,6 +865,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
             color=0x00FF00,
         )
 
+    # Always log the final tarball path for debugging
+    logging.info("Job %s finished, tarball: %s", job_id, final_tarball)
+
     return [str(final_tarball)]
 
 loop = asyncio.new_event_loop()
@@ -1081,6 +1081,27 @@ class SRUtil:
                 return combined_metadata
 
             except Exception as e:
+                err_str = str(e).lower()
+                # If this is a permanent not found error, abort retries immediately
+                if any(
+                    phrase in err_str
+                    for phrase in [
+                        "track not found",
+                        "not found",
+                        "404",
+                        "does not exist",
+                        "no longer available",
+                        "asset is not ready",
+                    ]
+                ):
+                    logging.error(
+                        "Metadata fetch permanent failure for track %s: %s (not retrying)",
+                        track_id,
+                        str(e),
+                    )
+                    raise MetadataFetchError(
+                        f"Metadata fetch failed permanently for track {track_id}: {e}"
+                    )
                 # Exponential backoff with jitter for 429 or other errors
                 delay = self.RETRY_DELAY * (2 ** (attempt - 1)) + random.uniform(0, 0.5)
                 if attempt < self.MAX_METADATA_RETRIES:
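
The non-permanent path keeps the existing backoff: the delay doubles from RETRY_DELAY each attempt, plus up to half a second of random jitter so concurrent workers do not retry in lockstep. Assuming RETRY_DELAY = 1.0 (the real value is an SRUtil attribute), the base delays are:

>>> RETRY_DELAY = 1.0
>>> [RETRY_DELAY * (2 ** (attempt - 1)) for attempt in range(1, 5)]
[1.0, 2.0, 4.0, 8.0]

with random.uniform(0, 0.5) added on top of each.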
@@ -1179,6 +1200,47 @@ class SRUtil:
         if not tracks:
             return None
 
+        # Prefer exact title matches first (highest confidence)
+        exact_title_matches = []
+        for t in tracks:
+            found_title = t.get("title")
+            if found_title and found_title.strip().lower() == song.strip().lower():
+                exact_title_matches.append(t)
+        if exact_title_matches:
+            logging.info(f"SR: {len(exact_title_matches)} exact title matches found")
+            tracks = exact_title_matches
+        else:
+            # Prefer tracks that match artist/title fuzzily
+            filtered_by_metadata = []
+            for t in tracks:
+                found_artist = (
+                    t.get("artist", {}).get("name")
+                    if isinstance(t.get("artist"), dict)
+                    else t.get("artist")
+                )
+                found_album = (
+                    t.get("album", {}).get("title") if t.get("album") else None
+                )
+                found_title = t.get("title")
+                try:
+                    if self.is_metadata_match(
+                        artist, album, song, found_artist, found_album, found_title
+                    ):
+                        filtered_by_metadata.append(t)
+                except Exception:
+                    # On any error, skip strict metadata matching for this candidate
+                    continue
+
+            if filtered_by_metadata:
+                logging.info(
+                    f"SR: {len(filtered_by_metadata)} candidates after metadata filtering"
+                )
+                tracks = filtered_by_metadata
+            else:
+                logging.info(
+                    "SR: No candidates passed metadata match filter; falling back to search results"
+                )
+
         # If duration provided, select the track with closest duration match
         if duration is not None:
             tracks_with_diff = [
@@ -1195,7 +1257,88 @@ class SRUtil:
             best_track = tracks[0]
 
         track_id = best_track.get("id")
-        logging.info(f"SR: Using track ID {track_id}")
+        # Ensure the selected candidate reasonably matches expected metadata
+        selected_artist = (
+            best_track.get("artist", {}).get("name")
+            if isinstance(best_track.get("artist"), dict)
+            else best_track.get("artist")
+        )
+        selected_title = best_track.get("title")
+        if not self.is_metadata_match(
+            artist,
+            album,
+            song,
+            selected_artist,
+            best_track.get("album", {}).get("title")
+            if best_track.get("album")
+            else None,
+            selected_title,
+        ):
+            # Try to find another candidate that does match metadata
+            logging.warning(
+                "SR: Selected candidate failed metadata check: id=%s artist=%s title=%s; searching for better match",
+                track_id,
+                selected_artist,
+                selected_title,
+            )
+            found_better = None
+            for candidate in tracks:
+                cand_artist = (
+                    candidate.get("artist", {}).get("name")
+                    if isinstance(candidate.get("artist"), dict)
+                    else candidate.get("artist")
+                )
+                cand_title = candidate.get("title")
+                if self.is_metadata_match(
+                    artist,
+                    album,
+                    song,
+                    cand_artist,
+                    candidate.get("album", {}).get("title")
+                    if candidate.get("album")
+                    else None,
+                    cand_title,
+                ):
+                    found_better = candidate
+                    break
+            if found_better:
+                logging.warning(
+                    "SR: Switching to better candidate id=%s artist=%s title=%s",
+                    found_better.get("id"),
+                    (
+                        found_better.get("artist", {}).get("name")
+                        if isinstance(found_better.get("artist"), dict)
+                        else found_better.get("artist")
+                    ),
+                    found_better.get("title"),
+                )
+                best_track = found_better
+                track_id = best_track.get("id")
+            else:
+                # No matching candidate passed metadata checks; log candidates and abort
+                logging.warning(
+                    "SR: No candidates passed metadata checks for %s - %s; candidates: %s",
+                    artist,
+                    song,
+                    [
+                        {
+                            "id": t.get("id"),
+                            "artist": (
+                                t.get("artist", {}).get("name")
+                                if isinstance(t.get("artist"), dict)
+                                else t.get("artist")
+                            ),
+                            "title": t.get("title"),
+                            "duration": t.get("duration"),
+                        }
+                        for t in tracks[:10]
+                    ],
+                )
+                return None
+
+        logging.info(
+            f"SR: Using track ID {track_id} (artist={best_track.get('artist')}, title={best_track.get('title')})"
+        )
         if not track_id:
             return None
 