diff --git a/base.py b/base.py
index ffa3562..d29b4d4 100644
--- a/base.py
+++ b/base.py
@@ -22,8 +22,8 @@ app = FastAPI(
     contact={"name": "codey"},
     redirect_slashes=False,
     loop=loop,
-    docs_url="/docs", # Swagger UI (default)
-    redoc_url="/redoc", # ReDoc UI (default, but explicitly set)
+    docs_url="/docs",  # Swagger UI (default)
+    redoc_url="/redoc",  # ReDoc UI (default, but explicitly set)
 )
 
 constants = importlib.import_module("constants").Constants()
@@ -46,13 +46,12 @@ app.add_middleware(
     allow_headers=["*"],
 )  # type: ignore
+
 
 # Add Scalar API documentation endpoint (before blacklist routes)
 @app.get("/scalar", include_in_schema=False)
 def scalar_docs():
-    return get_scalar_api_reference(
-        openapi_url="/openapi.json",
-        title="codey.lol API"
-    )
+    return get_scalar_api_reference(openapi_url="/openapi.json", title="codey.lol API")
+
 
 """
 Blacklisted routes
@@ -73,7 +72,9 @@ def base_head():
 def disallow_get_any(request: Request, var: Any = None):
     path = request.path_params["path"]
     allowed_paths = ["widget", "misc/no", "docs", "redoc", "scalar", "openapi.json"]
-    logging.info(f"Checking path: {path}, allowed: {path in allowed_paths or path.split('/', maxsplit=1)[0] in allowed_paths}")
+    logging.info(
+        f"Checking path: {path}, allowed: {path in allowed_paths or path.split('/', maxsplit=1)[0] in allowed_paths}"
+    )
     if not (
         isinstance(path, str)
         and (path.split("/", maxsplit=1)[0] in allowed_paths or path in allowed_paths)
diff --git a/endpoints/radio.py b/endpoints/radio.py
index de6d9e2..f25cfbc 100644
--- a/endpoints/radio.py
+++ b/endpoints/radio.py
@@ -364,7 +364,7 @@ class Radio(FastAPI):
             ret_obj: dict = {**self.radio_util.now_playing[station]}
             ret_obj["station"] = station
             try:
-                ret_obj["elapsed"] = int(time.time()) - ret_obj["start"]
+                ret_obj["elapsed"] = int(time.time()) - ret_obj["start"] if ret_obj["start"] else 0
             except KeyError:
                 traceback.print_exc()
                 ret_obj["elapsed"] = 0
diff --git a/utils/radio_util.py b/utils/radio_util.py
index 5913898..f91f7f2 100644
--- a/utils/radio_util.py
+++ b/utils/radio_util.py
@@ -339,7 +339,10 @@ class RadioUtil:
         time_start: float = time.time()
         artist_genre: dict[str, str] = {}
         query: str = (
-            "SELECT genre FROM artist_genre WHERE artist LIKE ? COLLATE NOCASE"
+            "SELECT REPLACE(GROUP_CONCAT(DISTINCT g.name), ',', ', ') AS genre FROM artists a "
+            "JOIN artist_genres ag ON a.id = ag.artist_id "
+            "JOIN genres g ON ag.genre_id = g.id "
+            "WHERE a.name LIKE ? COLLATE NOCASE"
         )
         with sqlite3.connect(self.artist_genre_db_path) as _db:
             _db.row_factory = sqlite3.Row
@@ -347,7 +350,7 @@ class RadioUtil:
                 params: tuple[str] = (f"%%{artist}%%",)
                 _cursor = _db.execute(query, params)
                 res = _cursor.fetchone()
-                if not res:
+                if not res or not res["genre"]:
                     artist_genre[artist] = "N/A"
                     continue
                 artist_genre[artist] = res["genre"]
@@ -367,14 +370,17 @@ class RadioUtil:
         try:
             artist = artist.strip()
             query: str = (
-                "SELECT genre FROM artist_genre WHERE artist LIKE ? COLLATE NOCASE"
+                "SELECT REPLACE(GROUP_CONCAT(DISTINCT g.name), ',', ', ') AS genre FROM artists a "
+                "JOIN artist_genres ag ON a.id = ag.artist_id "
+                "JOIN genres g ON ag.genre_id = g.id "
+                "WHERE a.name LIKE ? COLLATE NOCASE"
             )
             params: tuple[str] = (artist,)
             with sqlite3.connect(self.playback_db_path, timeout=2) as _db:
                 _db.row_factory = sqlite3.Row
                 _cursor = _db.execute(query, params)
                 res = _cursor.fetchone()
-                if not res:
+                if not res or not res["genre"]:
                     return "Not Found"  # Exception suppressed
                     # raise RadioException(
                     #     f"Could not locate {artist} in artist_genre_map db."
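
Note on the rewritten genre lookup in utils/radio_util.py above: the new query presumes a normalized layout with artists, genres, and an artist_genres join table. The table and column names are taken from the query itself; the schema and sample data in the sketch below are illustrative only, not the project's actual migration. Because an aggregate query with no GROUP BY always returns one row, the genre column is simply NULL when no artist matches, which is why the callers now check not res["genre"] in addition to not res:

    # Illustrative sketch: exercising the new genre query against an in-memory DB.
    # Table and column names come from the query above; everything else is assumed.
    import sqlite3

    schema = """
    CREATE TABLE artists (id INTEGER PRIMARY KEY, name TEXT NOT NULL);
    CREATE TABLE genres (id INTEGER PRIMARY KEY, name TEXT NOT NULL);
    CREATE TABLE artist_genres (
        artist_id INTEGER REFERENCES artists (id),
        genre_id INTEGER REFERENCES genres (id)
    );
    """

    query = (
        "SELECT REPLACE(GROUP_CONCAT(DISTINCT g.name), ',', ', ') AS genre FROM artists a "
        "JOIN artist_genres ag ON a.id = ag.artist_id "
        "JOIN genres g ON ag.genre_id = g.id "
        "WHERE a.name LIKE ? COLLATE NOCASE"
    )

    with sqlite3.connect(":memory:") as db:
        db.row_factory = sqlite3.Row
        db.executescript(schema)
        db.execute("INSERT INTO artists (id, name) VALUES (1, 'Gojira')")
        db.executemany(
            "INSERT INTO genres (id, name) VALUES (?, ?)",
            [(1, "Progressive Metal"), (2, "Death Metal")],
        )
        db.executemany(
            "INSERT INTO artist_genres (artist_id, genre_id) VALUES (?, ?)",
            [(1, 1), (1, 2)],
        )
        row = db.execute(query, ("gojira",)).fetchone()
        print(row["genre"])  # e.g. "Progressive Metal, Death Metal" (concat order not guaranteed)
        row = db.execute(query, ("nobody",)).fetchone()
        print(row is None, row["genre"])  # False None: a row still comes back, genre is NULL
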
@@ -491,7 +497,7 @@ class RadioUtil:
     def cache_album_art(self, track_id: int, file_path: str) -> None:
         """
-        Cache Album Art to SQLite DB
+        Cache Album Art to SQLite DB - IMPROVED VERSION
 
         Args:
             track_id (int): Track ID to update
             file_path (str): Path to file, for artwork extraction
@@ -499,30 +505,92 @@ class RadioUtil:
             None
         """
         try:
-            logging.info(
-                "cache_album_art: Attempting to store album art for track_id: %s",
-                track_id,
-            )
-            tagger = music_tag.load_file(file_path)
-            album_art = tagger["artwork"].first.data if tagger else None
-            with sqlite3.connect(self.album_art_db_path, timeout=2) as db_conn:
-                db_cursor = db_conn.execute(
-                    "INSERT OR IGNORE INTO album_art (track_id, album_art) VALUES(?, ?)",
-                    (
-                        track_id,
-                        album_art,
-                    ),
-                )
-                if isinstance(db_cursor.lastrowid, int):
-                    db_conn.commit()
-                else:
-                    logging.debug(
-                        "No row inserted for track_id: %s w/ file_path: %s",
-                        track_id,
-                        file_path,
-                    )
+            # Validate file exists first
+            if not os.path.exists(file_path):
+                logging.warning("cache_album_art: File not found: %s", file_path)
+                return
+
+            logging.info("cache_album_art: Attempting to store album art for track_id: %s", track_id)
+
+            # Check if artwork already exists to avoid duplicates
+            with sqlite3.connect(self.album_art_db_path, timeout=5) as db_conn:
+                db_conn.row_factory = sqlite3.Row
+                cursor = db_conn.execute("SELECT track_id FROM album_art WHERE track_id = ?", (track_id,))
+                if cursor.fetchone():
+                    logging.debug("cache_album_art: Track %s already has album art", track_id)
+                    return
+
+            # Load file with better error handling
+            try:
+                tagger = music_tag.load_file(file_path)
+            except Exception as e:
+                logging.warning("cache_album_art: Failed to load file %s: %s", file_path, e)
+                return
+
+            # Extract artwork with validation
+            album_art = None
+            try:
+                if not tagger:
+                    logging.debug("cache_album_art: No tagger available for track %s", track_id)
+                    return
+
+                artwork_field = tagger["artwork"]
+                if artwork_field and hasattr(artwork_field, 'first') and artwork_field.first:
+                    first_artwork = artwork_field.first
+                    if hasattr(first_artwork, 'data') and first_artwork.data:
+                        potential_art = first_artwork.data
+
+                        # Validate artwork data
+                        if isinstance(potential_art, bytes) and len(potential_art) > 100:
+                            # Check if it looks like valid image data
+                            if (potential_art.startswith(b'\xff\xd8') or  # JPEG
+                                    potential_art.startswith(b'\x89PNG') or  # PNG
+                                    potential_art.startswith(b'GIF87a') or  # GIF87a
+                                    potential_art.startswith(b'GIF89a') or  # GIF89a
+                                    potential_art.startswith(b'RIFF')):  # WEBP/other RIFF
+                                album_art = potential_art
+                                logging.debug("cache_album_art: Found valid artwork (%s bytes)", len(album_art))
+                            else:
+                                logging.warning("cache_album_art: Invalid artwork format for track %s - not caching", track_id)
+                                return
+                        else:
+                            logging.debug("cache_album_art: No valid artwork data for track %s", track_id)
+                            return
+                    else:
+                        logging.debug("cache_album_art: No artwork data available for track %s", track_id)
+                        return
+                else:
+                    logging.debug("cache_album_art: No artwork field for track %s", track_id)
+                    return
+
+            except Exception as e:
+                logging.warning("cache_album_art: Error extracting artwork for track %s: %s", track_id, e)
+                return
+
+            # Only proceed if we have valid artwork
+            if not album_art:
+                logging.debug("cache_album_art: No valid artwork to cache for track %s", track_id)
+                return
+
+            # Insert into database
+            try:
+                with sqlite3.connect(self.album_art_db_path, timeout=5) as db_conn:
+                    cursor = db_conn.execute(
+                        "INSERT OR IGNORE INTO album_art (track_id, album_art) VALUES (?, ?)",
+                        (track_id, album_art),
+                    )
+
+                    if cursor.rowcount == 1:
+                        db_conn.commit()
+                        logging.info("cache_album_art: Successfully cached %s bytes for track %s", len(album_art), track_id)
+                    else:
+                        logging.debug("cache_album_art: No row inserted for track_id: %s (may already exist)", track_id)
+
+            except Exception as e:
+                logging.error("cache_album_art: Database error for track %s: %s", track_id, e)
+
         except Exception as e:
-            logging.debug("cache_album_art Exception: %s", str(e))
+            logging.error("cache_album_art: Unexpected error for track %s: %s", track_id, e)
             traceback.print_exc()
 
     def get_album_art(self, track_id: int) -> Optional[bytes]:
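
Note on the artwork validation in cache_album_art: the rewritten method now gates on a minimum payload size and on image magic bytes before writing anything to the album_art table. The sketch below isolates that check as a standalone helper; looks_like_image is illustrative only, not a function in the codebase. Also worth noting: the new code calls os.path.exists, so utils/radio_util.py needs an os import, which is not visible in this hunk.

    # Illustrative sketch of the artwork validation performed above: a minimum
    # size plus a magic-byte check. looks_like_image is a hypothetical helper.
    IMAGE_SIGNATURES = (
        b"\xff\xd8",  # JPEG
        b"\x89PNG",   # PNG
        b"GIF87a",    # GIF87a
        b"GIF89a",    # GIF89a
        b"RIFF",      # WEBP and other RIFF containers (loose match)
    )

    def looks_like_image(data: object, min_size: int = 100) -> bool:
        """Return True when data is a plausibly valid image payload."""
        return (
            isinstance(data, bytes)
            and len(data) > min_size
            # bytes.startswith accepts a tuple of prefixes, so the five
            # separate checks in the diff collapse into one call here.
            and data.startswith(IMAGE_SIGNATURES)
        )

    assert looks_like_image(b"\x89PNG" + b"\x00" * 200)
    assert not looks_like_image(b"not an image at all")
    assert not looks_like_image(None)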