import asyncio
import datetime
import logging
import os
import random
import sqlite3
import time
import traceback
from typing import Iterable, Optional, Union
from uuid import uuid4 as uuid

import music_tag  # type: ignore
import regex
import redis.asyncio as redis
from aiohttp import ClientSession, ClientTimeout
from rapidfuzz import fuzz
from redis.commands.json.path import Path  # noqa
from redis.commands.search.field import TextField  # noqa
from redis.commands.search.indexDefinition import IndexDefinition, IndexType  # noqa
from redis.commands.search.query import Query  # noqa
from regex import Pattern

import gpt
from endpoints.constructors import RadioException
from lyric_search.sources import private

# Collapses runs of 2+ whitespace characters into a single space.
double_space: Pattern = regex.compile(r"\s{2,}")
# Strips every non-alphanumeric character (used for loose substring matching).
non_alnum: Pattern = regex.compile(r"[^a-zA-Z0-9]")


class RadioUtil:
    """
    Radio Utils
    """

    def __init__(self, constants, loop) -> None:
        self.constants = constants
        self.loop = loop
        self.gpt = gpt.GPT(self.constants)
        self.ls_uri: str = self.constants.LS_URI
        self.redis_client = redis.Redis(password=private.REDIS_PW)
        # When True, load_playlists() removes duplicate tracks (by file_path).
        self.DEDUPE_PLAYLISTS: bool = True
        # SQLite loadable extensions (spellfix1 provides editdist3() used by search_db).
        self.sqlite_exts: list[str] = [
            "/home/kyle/api/solibs/spellfix1.cpython-311-x86_64-linux-gnu.so"
        ]
        self.playback_db_path: str = os.path.join(
            "/usr/local/share", "sqlite_dbs", "track_file_map.db"
        )
        self.artist_genre_db_path: str = os.path.join(
            "/usr/local/share", "sqlite_dbs", "artist_genre_map.db"
        )
        self.album_art_db_path: str = os.path.join(
            "/usr/local/share", "sqlite_dbs", "track_album_art.db"
        )
        self.db_queries = {
            'main': self.constants.RADIO_DB_QUERY,
            'rap': self.constants.RADIO_DB_QUERY_RAP,
            'pop': self.constants.RADIO_DB_QUERY_POP,
            # 'classical': self.constants.RADIO_DB_QUERY_CLASSICAL,
            'rock': self.constants.RADIO_DB_QUERY_ROCK,
            'electronic': self.constants.RADIO_DB_QUERY_ELECTRONIC,
        }
        # Genre allow-list for the "main" playlist; empty = no filtering.
        # (Entries intentionally kept commented for quick re-enabling.)
        self.playback_genres: list[str] = [
            # "metal",
            # # "hip hop",
            # "metalcore",
            # "deathcore",
            # "edm",
            # "electronic",
            # "post-hardcore",
            # "post hardcore",
            # # "hard rock",
            # # "rock",
            # # # "ska",
            # # "post punk",
            # # "post-punk",
            # # "pop punk",
            # # "pop-punk",
        ]
        self.playlists: list = [
            "main",
            "rock",
            "rap",
            "electronic",
            "pop",
        ]
        # station name -> ordered list of track dicts (play queue)
        self.active_playlist: dict[str, list[dict]] = {}
        self.playlists_loaded: bool = False
        # station name -> currently-playing track metadata
        self.now_playing: dict[str, dict] = {
            playlist: {
                "artist": "N/A",
                "song": "N/A",
                "album": "N/A",
                "genre": "N/A",
                "artistsong": "N/A - N/A",
                "duration": 0,
                "start": 0,
                "end": 0,
                "file_path": None,
                "id": None,
            }
            for playlist in self.playlists
        }
        # Outbound Discord-style webhooks; lastRun is used for throttling.
        self.webhooks: dict = {
            "gpt": {
                "hook": self.constants.GPT_WEBHOOK,
                "lastRun": None,
            },
            "sfm": {
                "hook": self.constants.SFM_WEBHOOK,
                "lastRun": None,
            },
        }

    def duration_conv(self, s: Union[int, float]) -> str:
        """
        Convert duration given in seconds to hours, minutes, and seconds (h:m:s)

        Args:
            s (Union[int, float]): seconds to convert
        Returns:
            str
        """
        # Drop the fractional-seconds part that timedelta renders after ".".
        return str(datetime.timedelta(seconds=s)).split(".", maxsplit=1)[0]

    def trackdb_typeahead(self, query: str) -> Optional[list[str]]:
        """
        Query track db for typeahead

        Args:
            query (str): The search query
        Returns:
            Optional[list[str]]
        """
        if not query:
            return None
        with sqlite3.connect(self.playback_db_path, timeout=1) as _db:
            _db.row_factory = sqlite3.Row
            db_query: str = """SELECT DISTINCT(LOWER(TRIM(artist) || " - " || TRIM(song))),\
                (TRIM(artist) || " - " || TRIM(song)) as artistsong FROM tracks WHERE\
                artistsong LIKE ? LIMIT 30"""
            db_params: tuple[str] = (f"%{query}%",)
            _cursor = _db.execute(db_query, db_params)
            result: Iterable[sqlite3.Row] = _cursor.fetchall()
            out_result = [str(r["artistsong"]) for r in result]
            return out_result

    def datatables_search(self, filter: str, station: str = "main") -> Optional[list[dict]]:
        """DataTables Search

        Args:
            filter (str): The filter query to fuzzy match with
            station (str): Station playlist to search (default "main")

        Returns:
            list[dict]: List of matching playlist items (if any are found)
        """
        filter = filter.strip().lower()
        matched: list[dict] = []
        for item in self.active_playlist[station]:
            artist: str = item.get("artist", "")
            song: str = item.get("song", "")
            artistsong: str = item.get("artistsong", "")
            album: str = item.get("album", "")
            if not artist or not song or not artistsong:
                continue
            # Cheap pass first: alnum-only substring match.
            if non_alnum.sub("", filter) in non_alnum.sub("", artistsong).lower():
                matched.append(item)
                continue
            # Fuzzy pass: any field scoring >= 85 counts as a hit.
            if (
                fuzz.ratio(filter, artist) >= 85
                or fuzz.ratio(filter, song) >= 85
                or fuzz.ratio(filter, album) >= 85
            ):
                matched.append(item)
        return matched

    def search_db(
        self,
        artistsong: Optional[str] = None,
        artist: Optional[str] = None,
        song: Optional[str] = None,
        station: str = "main",
    ) -> bool:
        """
        Search for track, add it up next in play queue if found

        Args:
            artistsong (Optional[str]): Artist - Song combo to search [ignored if artist/song are specified]
            artist (Optional[str]): Artist to search (ignored if artistsong is specified)
            song (Optional[str]): Song to search (ignored if artistsong is specified)
            station (str): Station queue to insert into (default "main")
        Returns:
            bool
        """
        if artistsong and (artist or song):
            raise RadioException("Cannot search using combination provided")
        if not artistsong and (not artist or not song):
            raise RadioException("No query provided")
        try:
            # editdist3() comes from the spellfix1 extension loaded below.
            search_query: str = (
                'SELECT id, artist, song, (artist || " - " || song) AS artistsong, album, file_path, duration FROM tracks'
                ' WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))'
                ' <= 410 ORDER BY editdist3((lower(artist) || " " || lower(song)), ?) ASC LIMIT 1'
            )
            if artistsong:
                artistsong_split: list = artistsong.split(" - ", maxsplit=1)
                (search_artist, search_song) = tuple(artistsong_split)
            else:
                search_artist = artist
                search_song = song
            if not artistsong:
                artistsong = f"{search_artist} - {search_song}"
            if not search_artist or not search_song or not artistsong:
                raise RadioException("No query provided")
            search_params = (
                search_artist.lower(),
                search_song.lower(),
                artistsong.lower(),
            )
            with sqlite3.connect(self.playback_db_path, timeout=2) as db_conn:
                db_conn.enable_load_extension(True)
                for ext in self.sqlite_exts:
                    db_conn.load_extension(ext)
                db_conn.row_factory = sqlite3.Row
                db_cursor = db_conn.execute(search_query, search_params)
                result: Optional[sqlite3.Row | bool] = db_cursor.fetchone()
                if not result or not isinstance(result, sqlite3.Row):
                    return False
                push_obj: dict = {
                    "id": result["id"],
                    "uuid": str(uuid().hex),
                    "artist": double_space.sub(" ", result["artist"].strip()),
                    "song": double_space.sub(" ", result["song"].strip()),
                    "artistsong": result["artistsong"].strip(),
                    "album": result["album"].strip() if result["album"] else "N/A",
                    "genre": self.get_genre(
                        double_space.sub(" ", result["artist"].strip())
                    ),
                    "file_path": result["file_path"],
                    "duration": result["duration"],
                }
                # Insert at the front: plays next.
                self.active_playlist[station].insert(0, push_obj)
            return True
        except Exception as e:
            logging.critical("search_db:: Search error occurred: %s", str(e))
            traceback.print_exc()
            return False

    def add_genre(self, artist: str, genre: str) -> bool:
        """
        Add artist/genre pairing to DB

        Args:
            artist (str)
            genre (str)
        Returns:
            bool
        """
        try:
            with sqlite3.connect(self.artist_genre_db_path, timeout=2) as _db:
                query: str = (
                    "INSERT OR IGNORE INTO artist_genre (artist, genre) VALUES(?, ?)"
                )
                params: tuple[str, str] = (artist, genre)
                res = _db.execute(query, params)
                if isinstance(res.lastrowid, int):
                    logging.debug(
                        "Query executed successfully for %s/%s, committing",
                        artist,
                        genre,
                    )
                    _db.commit()
                    return True
                logging.debug(
                    "Failed to store artist/genre pair: %s/%s (res: %s)",
                    artist,
                    genre,
                    res,
                )
                return False
        except Exception as e:
            logging.info(
                "Failed to store artist/genre pair: %s/%s (%s)", artist, genre, str(e)
            )
            traceback.print_exc()
            return False

    def add_genres(self, pairs: list[dict[str, str]]) -> bool:
        """
        (BATCH) Add artist/genre pairings to DB
        Expects list of dicts comprised of artist name (key), genre (value)

        Args:
            pairs (list[dict[str, str]]): Pairs of artist/genres to add, list of dicts
        Returns:
            bool
        """
        try:
            added_rows: int = 0
            artist = None
            genre = None
            with sqlite3.connect(self.artist_genre_db_path, timeout=2) as _db:
                for pair in pairs:
                    try:
                        # BUGFIX: `artist, genre = pair` unpacked the dict's *keys*
                        # (and raised ValueError for a single-entry dict); iterate
                        # items() to get the documented {artist: genre} pairings.
                        for artist, genre in pair.items():
                            query: str = "INSERT OR IGNORE INTO artist_genre (artist, genre) VALUES(?, ?)"
                            params: tuple[str, str] = (artist, genre)
                            res = _db.execute(query, params)
                            if isinstance(res.lastrowid, int):
                                logging.debug(
                                    "add_genres: Query executed successfully for %s/%s",
                                    artist,
                                    genre,
                                )
                                added_rows += 1
                            else:
                                logging.debug(
                                    "Failed to store artist/genre pair: %s/%s (res: %s)",
                                    artist,
                                    genre,
                                    res,
                                )
                    except Exception as e:
                        logging.info(
                            "Failed to store artist/genre pair: %s/%s (%s)",
                            artist,
                            genre,
                            str(e),
                        )
                        continue
                if added_rows:
                    logging.info("add_genres: Committing %s rows", added_rows)
                    _db.commit()
                    return True
                logging.info("add_genres: Failed (No rows added)")
                return False
        except Exception as e:
            logging.info("Failed to store artist/genre pairs: %s", str(e))
            traceback.print_exc()
            return False

    def get_genres(self, input_artists: list[str]) -> dict:
        """
        Retrieve genres for given list of artists
        Batch equivalent of get_genre

        Args:
            input_artists (list): The artists to query
        Returns:
            dict[str, str]
        """
        time_start: float = time.time()
        artist_genre: dict[str, str] = {}
        query: str = (
            "SELECT REPLACE(GROUP_CONCAT(DISTINCT g.name), ',', ', ') AS genre FROM artists a "
            "JOIN artist_genres ag ON a.id = ag.artist_id "
            "JOIN genres g ON ag.genre_id = g.id "
            "WHERE a.name LIKE ? COLLATE NOCASE"
        )
        with sqlite3.connect(self.artist_genre_db_path) as _db:
            _db.row_factory = sqlite3.Row
            for artist in input_artists:
                # Single % is the SQL LIKE wildcard (original "%%" matched identically
                # but was redundant — no %-formatting happens on a parameter).
                params: tuple[str] = (f"%{artist}%",)
                _cursor = _db.execute(query, params)
                res = _cursor.fetchone()
                if not res or not res["genre"]:
                    artist_genre[artist] = "N/A"
                    continue
                artist_genre[artist] = res["genre"]
        time_end: float = time.time()
        logging.info("Time taken: %s", time_end - time_start)
        return artist_genre

    def get_genre(self, artist: str) -> str:
        """
        Retrieve Genre for given Artist

        Args:
            artist (str): The artist to query
        Returns:
            str
        """
        try:
            artist = artist.strip()
            query: str = (
                "SELECT REPLACE(GROUP_CONCAT(DISTINCT g.name), ',', ', ') AS genre FROM artists a "
                "JOIN artist_genres ag ON a.id = ag.artist_id "
                "JOIN genres g ON ag.genre_id = g.id "
                "WHERE a.name LIKE ? COLLATE NOCASE"
            )
            params: tuple[str] = (artist,)
            # NOTE(review): this queries playback_db_path while get_genres() uses
            # artist_genre_db_path for the same tables — confirm which DB is intended.
            with sqlite3.connect(self.playback_db_path, timeout=2) as _db:
                _db.row_factory = sqlite3.Row
                _cursor = _db.execute(query, params)
                res = _cursor.fetchone()
                if not res or not res["genre"]:
                    return "Not Found"  # Exception suppressed
                    # raise RadioException(
                    #     f"Could not locate {artist} in artist_genre_map db."
                    # )
                return res["genre"]
        except Exception as e:
            logging.info("Failed to look up genre for artist: %s (%s)", artist, str(e))
            traceback.print_exc()
            return "Not Found"

    async def load_playlists(self) -> None:
        """Load Playlists"""
        try:
            logging.info("Loading playlists...")
            if isinstance(self.active_playlist, dict):
                self.active_playlist.clear()
            for playlist in self.playlists:
                playlist_redis_key: str = f"playlist:{playlist}"
                _playlist = await self.redis_client.json().get(playlist_redis_key)  # type: ignore
                # Ensure we always have a list to work with
                if not _playlist:
                    logging.warning("No playlist found in redis for %s, skipping", playlist)
                    self.active_playlist[playlist] = []
                    continue
                # Make sure playlist key exists
                if playlist not in self.active_playlist.keys():
                    self.active_playlist[playlist] = []
                # Shuffle a copy so we don't mutate the underlying redis object
                try:
                    shuffled = list(_playlist)
                    random.shuffle(shuffled)
                except Exception:
                    shuffled = _playlist
                # Build a fresh list rather than modifying in-place (prevents duplication)
                built: list[dict] = []
                for r in shuffled:
                    try:
                        item = {
                            "uuid": str(uuid().hex),
                            "id": r.get("id"),
                            "artist": double_space.sub(" ", (r.get("artist") or "")).strip(),
                            "song": double_space.sub(" ", (r.get("song") or "")).strip(),
                            "album": double_space.sub(" ", (r.get("album") or "")).strip(),
                            "genre": r.get("genre") if r.get("genre") else "Not Found",
                            "artistsong": double_space.sub(" ", (r.get("artistdashsong") or "")).strip(),
                            "file_path": r.get("file_path"),
                            "duration": r.get("duration"),
                        }
                        built.append(item)
                    except Exception:
                        logging.debug("Skipping malformed playlist entry for %s: %s", playlist, r)
                self.active_playlist[playlist] = built
                logging.info(
                    "Populated playlist: %s with %s items",
                    playlist,
                    len(self.active_playlist[playlist]),
                )

                """Dedupe"""
                if self.DEDUPE_PLAYLISTS:
                    logging.info("Removing duplicate tracks (by file_path only)...")
                    dedupe_processed: set[str] = set()
                    deduped_list: list[dict] = []
                    for item in self.active_playlist[playlist]:
                        fp = item.get("file_path")
                        if not fp:
                            # If no file_path available, skip the item (can't dedupe reliably)
                            logging.info("Skipping item without file_path during dedupe: %s", item)
                            continue
                        key = fp
                        if key in dedupe_processed:
                            continue
                        dedupe_processed.add(key)
                        deduped_list.append(item)
                    self.active_playlist[playlist] = deduped_list
                else:
                    logging.warning("Dupe removal disabled")
                logging.info(
                    "Duplicates for playlist: %s removed. New playlist size: %s",
                    playlist,
                    len(self.active_playlist[playlist]),
                )

                # Optional genre allow-list filtering (main station only).
                if playlist == 'main' and self.playback_genres:
                    new_playlist: list[dict] = []
                    logging.info("Limiting playback genres")
                    for item in self.active_playlist[playlist]:
                        item_genres = item.get("genre", "").strip().lower()
                        # Check if any genre matches and item isn't already in new_playlist
                        if any(genre.strip().lower() in item_genres for genre in self.playback_genres):
                            if item not in new_playlist:
                                new_playlist.append(item)
                    self.active_playlist[playlist] = new_playlist
                    logging.info(
                        "%s items for playlist: %s remain for playback after filtering",
                        playlist,
                        len(self.active_playlist[playlist]),
                    )

            """Loading Complete"""
            # Request skip from LS to bring streams current
            for playlist in self.playlists:
                logging.info("Skipping: %s", playlist)
                await self._ls_skip(playlist)
            self.playlists_loaded = True
        except Exception as e:
            logging.info("Playlist load failed: %s", str(e))
            traceback.print_exc()

    def cache_album_art(self, track_id: int, file_path: str) -> None:
        """
        Cache Album Art to SQLite DB - IMPROVED VERSION

        Args:
            track_id (int): Track ID to update
            file_path (str): Path to file, for artwork extraction
        Returns:
            None
        """
        try:
            # Validate file exists first
            if not os.path.exists(file_path):
                logging.warning("cache_album_art: File not found: %s", file_path)
                return

            logging.info("cache_album_art: Attempting to store album art for track_id: %s", track_id)

            # Check if artwork already exists to avoid duplicates
            with sqlite3.connect(self.album_art_db_path, timeout=5) as db_conn:
                db_conn.row_factory = sqlite3.Row
                cursor = db_conn.execute("SELECT track_id FROM album_art WHERE track_id = ?", (track_id,))
                if cursor.fetchone():
                    logging.debug("cache_album_art: Track %s already has album art", track_id)
                    return

            # Load file with better error handling
            try:
                tagger = music_tag.load_file(file_path)
            except Exception as e:
                logging.warning("cache_album_art: Failed to load file %s: %s", file_path, e)
                return

            # Extract artwork with validation
            album_art = None
            try:
                if not tagger:
                    logging.debug("cache_album_art: No tagger available for track %s", track_id)
                    return
                artwork_field = tagger["artwork"]
                if artwork_field and hasattr(artwork_field, 'first') and artwork_field.first:
                    first_artwork = artwork_field.first
                    if hasattr(first_artwork, 'data') and first_artwork.data:
                        potential_art = first_artwork.data
                        # Validate artwork data
                        if isinstance(potential_art, bytes) and len(potential_art) > 100:
                            # Check if it looks like valid image data (magic bytes)
                            if (potential_art.startswith(b'\xff\xd8') or   # JPEG
                                    potential_art.startswith(b'\x89PNG') or  # PNG
                                    potential_art.startswith(b'GIF87a') or   # GIF87a
                                    potential_art.startswith(b'GIF89a') or   # GIF89a
                                    potential_art.startswith(b'RIFF')):      # WEBP/other RIFF
                                album_art = potential_art
                                logging.debug("cache_album_art: Found valid artwork (%s bytes)", len(album_art))
                            else:
                                logging.warning("cache_album_art: Invalid artwork format for track %s - not caching", track_id)
                                return
                        else:
                            logging.debug("cache_album_art: No valid artwork data for track %s", track_id)
                            return
                    else:
                        logging.debug("cache_album_art: No artwork data available for track %s", track_id)
                        return
                else:
                    logging.debug("cache_album_art: No artwork field for track %s", track_id)
                    return
            except Exception as e:
                logging.warning("cache_album_art: Error extracting artwork for track %s: %s", track_id, e)
                return

            # Only proceed if we have valid artwork
            if not album_art:
                logging.debug("cache_album_art: No valid artwork to cache for track %s", track_id)
                return

            # Insert into database
            try:
                with sqlite3.connect(self.album_art_db_path, timeout=5) as db_conn:
                    cursor = db_conn.execute(
                        "INSERT OR IGNORE INTO album_art (track_id, album_art) VALUES (?, ?)",
                        (track_id, album_art)
                    )
                    if cursor.rowcount == 1:
                        db_conn.commit()
                        logging.info("cache_album_art: Successfully cached %s bytes for track %s", len(album_art), track_id)
                    else:
                        logging.debug("cache_album_art: No row inserted for track_id: %s (may already exist)", track_id)
            except Exception as e:
                logging.error("cache_album_art: Database error for track %s: %s", track_id, e)
        except Exception as e:
            logging.error("cache_album_art: Unexpected error for track %s: %s", track_id, e)
            traceback.print_exc()

    def get_album_art(self, track_id: int) -> Optional[bytes]:
        """
        Get Album Art

        Args:
            track_id (int): Track ID to query
        Returns:
            Optional[bytes]
        """
        try:
            with sqlite3.connect(self.album_art_db_path, timeout=2) as db_conn:
                db_conn.row_factory = sqlite3.Row
                query: str = "SELECT album_art FROM album_art WHERE track_id = ?"
                query_params: tuple[int] = (track_id,)
                db_cursor = db_conn.execute(query, query_params)
                result: Optional[Union[sqlite3.Row, bool]] = db_cursor.fetchone()
                if not result or not isinstance(result, sqlite3.Row):
                    return None
                return result["album_art"]
        except Exception as e:
            logging.debug("get_album_art Exception: %s", str(e))
            traceback.print_exc()
            return None

    def get_queue_item_by_uuid(self, _uuid: str, station: str = "main") -> Optional[tuple[int, dict]]:
        """
        Get queue item by UUID

        Args:
            _uuid: The UUID to search
            station (str): Station queue to search (default "main")
        Returns:
            Optional[tuple[int, dict]]: (index, item) if found, else None
        """
        for x, item in enumerate(self.active_playlist[station]):
            if item.get("uuid") == _uuid:
                return (x, item)
        return None

    async def _ls_skip(self, station: str = "main") -> bool:
        """
        Ask LiquidSoap server to skip to the next track

        Args:
            station (str): default "main"
        Returns:
            bool
        """
        try:
            async with ClientSession() as session:
                async with session.post(
                    f"{self.ls_uri}/next",
                    data=station,
                    timeout=ClientTimeout(connect=2, sock_read=2),
                ) as request:
                    request.raise_for_status()
                    text: Optional[str] = await request.text()
                    return isinstance(text, str) and text.startswith("OK")
        except Exception as e:
            logging.critical("Skip failed: %s", str(e))
        return False  # failsafe

    async def get_ai_song_info(self, artist: str, song: str) -> Optional[str]:
        """
        Get AI Song Info

        Args:
            artist (str)
            song (str)
        Returns:
            Optional[str]
        """
        # BUGFIX: prompt previously began " am going to listen" (missing "I").
        prompt: str = f"I am going to listen to {song} by {artist}."
        response: Optional[str] = await self.gpt.get_completion(prompt)
        if not response:
            logging.critical("No response received from GPT?")
            return None
        return response

    async def webhook_song_change(self, track: dict, station: str = "main") -> None:
        """
        Handles Song Change Outbounds (Webhooks)

        Args:
            track (dict)
            station (str): default "main"
        Returns:
            None
        """
        try:
            """TEMP - ONLY MAIN"""
            if not station == "main":
                return
            return  # Temp disable global
            # First, send track info
            """
            TODO:
                Review friendly_track_start and friendly_track_end, not currently in use
            """
            # friendly_track_start: str = time.strftime(
            #     "%Y-%m-%d %H:%M:%S", time.localtime(track["start"])
            # )
            # friendly_track_end: str = time.strftime(
            #     "%Y-%m-%d %H:%M:%S", time.localtime(track["end"])
            # )
            hook_data: dict = {
                "username": "serious.FM",
                "embeds": [
                    {
                        "title": f"Now Playing on {station.title()}",
                        "description": f"## {track['song']}\nby\n## {track['artist']}",
                        "color": 0x30C56F,
                        "thumbnail": {
                            "url": f"https://api.codey.lol/radio/album_art?track_id={track['id']}&{int(time.time())}",
                        },
                        "fields": [
                            {
                                "name": "Duration",
                                "value": self.duration_conv(track["duration"]),
                                "inline": True,
                            },
                            {
                                "name": "Genre",
                                "value": (
                                    track["genre"] if track["genre"] else "Unknown"
                                ),
                                "inline": True,
                            },
                            {
                                "name": "Filetype",
                                "value": track["file_path"].rsplit(".", maxsplit=1)[1],
                                "inline": True,
                            },
                            {
                                "name": "Higher Res",
                                "value": "[stream/icecast](https://stream.codey.lol/sfm.ogg) | [web player](https://codey.lol/radio)",
                                "inline": True,
                            },
                            {
                                "name": "Album",
                                "value": (
                                    track["album"]
                                    if "album" in track.keys()
                                    else "Unknown"
                                ),
                            },
                        ],
                    }
                ],
            }

            now: float = time.time()
            _sfm: dict = self.webhooks["sfm"]
            if _sfm:
                sfm_hook: str = _sfm.get("hook", "")
                sfm_hook_lastRun: Optional[float] = _sfm.get("lastRun", 0.0)
                if sfm_hook_lastRun and ((now - sfm_hook_lastRun) < 5):
                    logging.info("SFM Webhook: Throttled!")
                    return
                async with ClientSession() as session:
                    async with await session.post(
                        sfm_hook,
                        json=hook_data,
                        timeout=ClientTimeout(connect=5, sock_read=5),
                        headers={
                            "content-type": "application/json; charset=utf-8",
                        },
                    ) as request:
                        request.raise_for_status()
                # BUGFIX: lastRun was never updated, so the 5-second throttle
                # above could never trigger. Record the send time.
                _sfm["lastRun"] = now

            # Next, AI feedback (for main stream only)
            """
            TEMP. DISABLED
            """
            # if station == "main":
            #     ai_response: Optional[str] = await self.get_ai_song_info(
            #         track["artist"], track["song"]
            #     )
            #     if not ai_response:
            #         return
            #     hook_data = {
            #         "username": "GPT",
            #         "embeds": [
            #             {
            #                 "title": "AI Feedback",
            #                 "color": 0x35D0FF,
            #                 "description": ai_response.strip(),
            #             }
            #         ],
            #     }
            #     ai_hook: str = self.webhooks["gpt"].get("hook")
            #     async with ClientSession() as session:
            #         async with await session.post(
            #             ai_hook,
            #             json=hook_data,
            #             timeout=ClientTimeout(connect=5, sock_read=5),
            #             headers={
            #                 "content-type": "application/json; charset=utf-8",
            #             },
            #         ) as request:
            #             request.raise_for_status()
        except Exception as e:
            logging.info("Webhook error occurred: %s", str(e))
            traceback.print_exc()