commit f6d4ed57f3 (parent a57173b90a)
Date: 2025-09-09 15:50:13 -04:00
5 changed files with 225 additions and 89 deletions


@@ -1,8 +1,9 @@
-from typing import Optional
+from typing import Optional, Any
 from uuid import uuid4
 from urllib.parse import urlparse
 import hashlib
 import logging
+import asyncio
 import os
 import aiohttp
 from streamrip.client import TidalClient  # type: ignore
@@ -43,6 +44,8 @@ class SRUtil:
         )
         self.streamrip_config
         self.streamrip_client = TidalClient(self.streamrip_config)
+        self.MAX_METADATA_RETRIES = 5
+        self.RETRY_DELAY = 1.0  # seconds between retries

     def dedupe_by_key(self, key: str, entries: list[dict]) -> list[dict]:
         deduped = {}
@@ -58,6 +61,64 @@ class SRUtil:
         m, s = divmod(seconds, 60)
         return f"{m}:{s:02}"

+    def combine_album_track_metadata(
+        self, album_json: dict[str, Any], track_json: dict[str, Any]
+    ) -> dict[str, Any]:
+        """
+        Combine album-level and track-level metadata into a unified tag dictionary.
+
+        If track_json comes from album_json['tracks'], it will override album-level values where relevant.
+        """
+        # Album-level
+        combined = {
+            "album": album_json.get("title"),
+            "album_artist": album_json.get("artist", {}).get("name"),
+            "release_date": album_json.get("releaseDate"),
+            "album_type": album_json.get("type"),
+            "total_tracks": album_json.get("numberOfTracks"),
+            "upc": album_json.get("upc"),
+            "album_copyright": album_json.get("copyright"),
+            "album_cover_id": album_json.get("cover"),
+            "album_cover_url": f"https://resources.tidal.com/images/{album_json.get('cover')}/1280x1280.jpg"
+            if album_json.get("cover")
+            else None,
+        }
+
+        # Track-level (overrides or adds to album info)
+        combined.update(
+            {
+                "title": track_json.get("title"),
+                "artist": track_json.get("artist", {}).get("name"),
+                "artists": [a.get("name") for a in track_json.get("artists", [])],
+                "track_number": track_json.get("trackNumber"),
+                "disc_number": track_json.get("volumeNumber"),
+                "duration": track_json.get("duration"),
+                "isrc": track_json.get("isrc"),
+                "bpm": track_json.get("bpm"),
+                "explicit": track_json.get("explicit"),
+                "replaygain": track_json.get("replayGain"),
+                "peak": track_json.get("peak"),
+                "lyrics": track_json.get("lyrics"),
+                "track_copyright": track_json.get("copyright"),
+                "cover_id": track_json.get("album", {}).get(
+                    "cover", album_json.get("cover")
+                ),
+                "cover_url": f"https://resources.tidal.com/images/{track_json.get('album', {}).get('cover', album_json.get('cover'))}/1280x1280.jpg"
+                if (track_json.get("album", {}).get("cover") or album_json.get("cover"))
+                else None,
+            }
+        )
+        return combined
+
+    def combine_album_with_all_tracks(
+        self, album_json: dict[str, Any]
+    ) -> list[dict[str, Any]]:
+        """Return a list of combined metadata dicts for all tracks in an album JSON."""
+        return [
+            self.combine_album_track_metadata(album_json, t)
+            for t in album_json.get("tracks", [])
+        ]
+
     async def get_artists_by_name(self, artist_name: str) -> Optional[list]:
         """Get artist(s) by name.
         Args:
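A minimal usage sketch for the two new combine helpers (illustrative only, not part of the commit): the sample dicts below are invented and merely mirror the Tidal-style keys the helpers read, the cover ID is hypothetical, and SRUtil() is assumed to be constructible with no arguments.

# Illustrative sketch, not part of the commit.
sr = SRUtil()  # assumption: no constructor arguments required
album_json = {
    "title": "Example Album",
    "artist": {"name": "Example Artist"},
    "cover": "abc-123",  # hypothetical Tidal cover ID
    "numberOfTracks": 2,
    "tracks": [
        {"title": "Track One", "trackNumber": 1, "artist": {"name": "Example Artist"}},
        {"title": "Track Two", "trackNumber": 2, "artist": {"name": "Example Artist"}},
    ],
}

# Album JSON + one track JSON -> one flat tag dict; track values win where both exist.
tags = sr.combine_album_track_metadata(album_json, album_json["tracks"][0])
print(tags["album"], "-", tags["title"])  # Example Album - Track One
print(tags["album_cover_url"])            # https://resources.tidal.com/images/abc-123/1280x1280.jpg

# Album JSON + every entry in album_json["tracks"] -> list of tag dicts.
all_tags = sr.combine_album_with_all_tracks(album_json)
assert len(all_tags) == 2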
@@ -220,19 +281,47 @@ class SRUtil:
         return stream_url

     async def get_metadata_by_track_id(self, track_id: int) -> Optional[dict]:
-        try:
-            await self.streamrip_client.login()
-            metadata = await self.streamrip_client.get_metadata(str(track_id), "track")
-            return {
-                "artist": metadata.get("artist", {}).get("name", "Unknown Artist"),
-                "album": metadata.get("album", {}).get("title", "Unknown Album"),
-                "song": metadata.get("title", uuid4()),
-            }
-        except Exception as e:
-            logging.critical(
-                "Get metadata for %s failed, Exception: %s", track_id, str(e)
-            )
-            return None
+        """
+        Fetch track + album metadata with retries.
+        Returns combined metadata dict or None after exhausting retries.
+        """
+        for attempt in range(1, self.MAX_METADATA_RETRIES + 1):
+            try:
+                await self.streamrip_client.login()
+                metadata = await self.streamrip_client.get_metadata(
+                    str(track_id), "track"
+                )
+                album_id = metadata.get("album", {}).get("id")
+                album_metadata = await self.streamrip_client.get_metadata(
+                    album_id, "album"
+                )
+                combined_metadata: dict = self.combine_album_track_metadata(
+                    album_metadata, metadata
+                )
+                logging.info(
+                    "Combined metadata for track ID %s (attempt %d): %s",
+                    track_id,
+                    attempt,
+                    combined_metadata,
+                )
+                return combined_metadata
+            except Exception as e:
+                logging.warning(
+                    "Metadata fetch failed for track %s (attempt %d/%d): %s",
+                    track_id,
+                    attempt,
+                    self.MAX_METADATA_RETRIES,
+                    str(e),
+                )
+                if attempt < self.MAX_METADATA_RETRIES:
+                    await asyncio.sleep(self.RETRY_DELAY)
+                else:
+                    logging.error(
+                        "Metadata fetch failed permanently for track %s after %d attempts",
+                        track_id,
+                        self.MAX_METADATA_RETRIES,
+                    )
+                    return None
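# Editor's sketch (not part of this commit): one way a caller might consume the
# retry-aware fetch above. The track ID is invented, and SRUtil() is assumed to
# be constructible with no arguments.
async def _example_tag_track(sr: "SRUtil", track_id: int) -> None:
    tags = await sr.get_metadata_by_track_id(track_id)
    if tags is None:
        # None means every one of the MAX_METADATA_RETRIES attempts failed,
        # spaced RETRY_DELAY seconds apart; nothing to tag.
        return
    print(tags.get("artist"), "-", tags.get("title"))

# asyncio.run(_example_tag_track(SRUtil(), 123456789))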

     async def download(self, track_id: int, quality: str = "LOSSLESS") -> bool | str:
         """Download track