misc/migration related

2025-06-08 08:53:18 -04:00
parent 68408c4796
commit 4cdd6d0c99
13 changed files with 90 additions and 28 deletions
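
Note: the bulk of this commit repoints SQLite database paths from /mnt/data/share to /usr/local/share (plus radio queue peek/pop support, extra scrape headers, a Genius proxy, and a meme PNG-conversion write-back). The diff only changes the code paths; a hedged sketch of the corresponding one-time data move, assuming the databases simply need to be copied to the new location, might be:

# Hypothetical one-off helper, not part of this commit: copy the SQLite DBs
# from the old share location to the new one before deploying the code change.
import shutil
from pathlib import Path

OLD_ROOT = Path("/mnt/data/share/sqlite_dbs")   # old location (removed lines)
NEW_ROOT = Path("/usr/local/share/sqlite_dbs")  # new location (added lines)

def migrate_sqlite_dbs() -> None:
    NEW_ROOT.mkdir(parents=True, exist_ok=True)
    for db_file in OLD_ROOT.glob("*.db"):
        target = NEW_ROOT / db_file.name
        if not target.exists():            # don't clobber already-migrated files
            shutil.copy2(db_file, target)  # copy with metadata; clean up originals later

if __name__ == "__main__":
    migrate_sqlite_dbs()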

.gitignore

@@ -14,4 +14,7 @@ artist_genre_tag.py
 uv.lock
 pyproject.toml
 mypy.ini
 .python-version
+get_next_track.py
+endpoints/radio.py
+utils/radio_util.py

@@ -118,7 +118,5 @@ async def on_start():
 app.add_event_handler("startup", on_start)
 redis = redis_cache.RedisCache()
 loop.create_task(redis.create_index())

@@ -235,10 +235,12 @@ class ValidRadioNextRequest(BaseModel):
     """
     - **key**: API Key
     - **skipTo**: UUID to skip to [optional]
+    - **pop**: Whether to pop track [optional, defaults to False]
     """
     key: str
     skipTo: Optional[str] = None
+    pop: Optional[bool] = False

 class ValidRadioReshuffleRequest(ValidRadioNextRequest):

@@ -20,7 +20,7 @@ class CacheUtils:
     def __init__(self) -> None:
         self.lyrics_db_path: LiteralString = os.path.join(
-            "/mnt/data/share", "sqlite_dbs", "cached_lyrics.db"
+            "/usr/local/share", "sqlite_dbs", "cached_lyrics.db"
         )

     async def check_typeahead(self, query: str) -> Optional[list[str]]:

@@ -25,7 +25,7 @@ class Misc(FastAPI):
         self.radio = radio
         self.activity_image: Optional[bytes] = None
         self.nos_json_path: str = os.path.join(
-            "/", "mnt", "data", "share", "naas", "reasons.json"
+            "/", "usr", "local", "share", "naas", "reasons.json"
         )
         self.nos: list[str] = []
         self.last_5_nos: list[str] = []

@@ -103,8 +103,8 @@ class Radio(FastAPI):
                     "err": True,
                     "errorText": "General failure.",
                 },
             )
             raise e  # Re-raise HTTPException

     async def radio_reshuffle(
         self, data: ValidRadioReshuffleRequest, request: Request
@@ -128,12 +128,12 @@ class Radio(FastAPI):
         Get current play queue (paged, 20 results per page)
         """
         search: Optional[str] = None
         draw: int = 0
         if isinstance(data, ValidRadioQueueRequest):
             search = data.search
             draw = data.draw
             start: int = int(data.start)
             end: int = start + 20
         else:
             start: int = 0
             end: int = 20
@@ -280,6 +280,7 @@ class Radio(FastAPI):
         (Track will be removed from the queue in the process.)
         - **key**: API key
         - **skipTo**: Optional UUID to skip to
+        - **pop**: Whether to pop the item from the queue when returning, or simply peek (for LiquidSoap prefetch compatibility)
         """
         if not self.util.check_key(path=request.url.path, req_type=4, key=data.key):
             raise HTTPException(status_code=403, detail="Unauthorized")
@@ -297,7 +298,14 @@ class Radio(FastAPI):
                     "errorText": "General failure occurred, prompting playlist reload.",
                 },
             )
-        next = self.radio_util.active_playlist.pop(0)
+        logging.info("Radio get next!! Pop: %s", data.pop)
+        if data.pop:
+            next = self.radio_util.active_playlist.pop(0)
+        else:
+            next = self.radio_util.active_playlist[0]
         if not isinstance(next, dict):
             logging.critical("next is of type: %s, reloading playlist...", type(next))
             await self.on_start()
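
Note: the new pop flag above lets a prefetching consumer (e.g. LiquidSoap) peek at active_playlist[0] without consuming it, then remove the track only when playback actually advances. A minimal client sketch; the endpoint path, base URL, and API key below are placeholders, not taken from this diff:

# Illustrative client only; endpoint path, base URL, and key are assumptions.
import asyncio
import httpx

API_BASE = "http://localhost:8000"  # assumed local instance
API_KEY = "example-key"             # placeholder

async def prefetch_then_advance() -> None:
    async with httpx.AsyncClient() as client:
        # Peek: pop=False returns the head of the queue but leaves it in place.
        peek = await client.post(f"{API_BASE}/radio/next",
                                 json={"key": API_KEY, "pop": False})
        print("Up next:", peek.json())

        # Pop: pop=True removes that same track (the previous default behaviour).
        now = await client.post(f"{API_BASE}/radio/next",
                                json={"key": API_KEY, "pop": True})
        print("Now playing:", now.json())

if __name__ == "__main__":
    asyncio.run(prefetch_then_advance())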

@@ -41,7 +41,7 @@ class RandMsg(FastAPI):
         match db_rand_selected:
             case 0:
                 randmsg_db_path: Union[str, LiteralString] = os.path.join(
-                    "/mnt/data/share", "sqlite_dbs", "qajoke.db"
+                    "/usr/local/share", "sqlite_dbs", "qajoke.db"
                 )  # For qajoke db
                 db_query: str = (
                     "SELECT id, ('<b>Q:</b> ' || question || '<br/><b>A:</b> ' \
@@ -50,7 +50,7 @@ class RandMsg(FastAPI):
                 title_attr = "QA Joke DB"
             case 1 | 9:
                 randmsg_db_path = os.path.join(
-                    "/mnt/data/share", "sqlite_dbs", "randmsg.db"
+                    "/usr/local/share", "sqlite_dbs", "randmsg.db"
                 )  # For randmsg db
                 db_query = "SELECT id, msg FROM msgs WHERE \
                     LENGTH(msg) <= 180 ORDER BY RANDOM() LIMIT 1"  # For randmsg db
@@ -59,28 +59,28 @@ class RandMsg(FastAPI):
                 title_attr = "Random Msg DB"
             case 2:
                 randmsg_db_path = os.path.join(
-                    "/mnt/data/share", "sqlite_dbs", "trump.db"
+                    "/usr/local/share", "sqlite_dbs", "trump.db"
                 )  # For Trump Tweet DB
                 db_query = "SELECT id, content FROM tweets \
                     ORDER BY RANDOM() LIMIT 1"  # For Trump Tweet DB
                 title_attr = "Trump Tweet DB"
             case 3:
                 randmsg_db_path = os.path.join(
-                    "/mnt/data/share", "sqlite_dbs", "philo.db"
+                    "/usr/local/share", "sqlite_dbs", "philo.db"
                 )  # For Philo DB
                 db_query = "SELECT id, (content || '<br> - ' || speaker) FROM quotes \
                     ORDER BY RANDOM() LIMIT 1"  # For Philo DB
                 title_attr = "Philosophical Quotes DB"
             case 4:
                 randmsg_db_path = os.path.join(
-                    "/mnt/data/share", "sqlite_dbs", "hate.db"
+                    "/usr/local/share", "sqlite_dbs", "hate.db"
                 )  # For Hate DB
                 db_query = """SELECT id, ("<font color='#FF0000'>" || comment) FROM hate_speech \
                     WHERE length(comment) <= 180 ORDER BY RANDOM() LIMIT 1"""
                 title_attr = "Hate Speech DB"
             case 5:
                 randmsg_db_path = os.path.join(
-                    "/mnt/data/share", "sqlite_dbs", "rjokes.db"
+                    "/usr/local/share", "sqlite_dbs", "rjokes.db"
                 )  # r/jokes DB
                 db_query = """SELECT id, (title || "<br>" || body) FROM jokes \
                     WHERE score >= 10000 ORDER BY RANDOM() LIMIT 1"""

@@ -61,15 +61,15 @@ class Transcriptions(FastAPI):
         match show_id:
             case 0:
-                db_path = os.path.join("/mnt/data/share", "sqlite_dbs", "sp.db")
+                db_path = os.path.join("/usr/local/share", "sqlite_dbs", "sp.db")
                 db_query = """SELECT DISTINCT(("S" || Season || "E" || Episode || " " || Title)), ID FROM SP_DAT ORDER BY Season, Episode"""
                 show_title = "South Park"
             case 1:
-                db_path = os.path.join("/mnt/data/share", "sqlite_dbs", "futur.db")
+                db_path = os.path.join("/usr/local/share", "sqlite_dbs", "futur.db")
                 db_query = """SELECT DISTINCT(("S" || EP_S || "E" || EP_EP || " " || EP_TITLE)), EP_ID FROM clean_dialog ORDER BY EP_S, EP_EP"""
                 show_title = "Futurama"
             case 2:
-                db_path = os.path.join("/mnt/data/share", "sqlite_dbs", "parks.db")
+                db_path = os.path.join("/usr/local/share", "sqlite_dbs", "parks.db")
                 db_query = """SELECT DISTINCT(("S" || EP_S || "E" || EP_EP || " " || EP_TITLE)), EP_ID FROM clean_dialog ORDER BY EP_S, EP_EP"""
                 show_title = "Parks And Rec"
             case _:
@@ -111,14 +111,14 @@ class Transcriptions(FastAPI):
         match show_id:
             case 0:
                 db_path: Union[str, LiteralString] = os.path.join(
-                    "/mnt/data/share", "sqlite_dbs", "sp.db"
+                    "/usr/local/share", "sqlite_dbs", "sp.db"
                 )
                 db_query: str = """SELECT ("S" || Season || "E" || Episode || " " || Title), Character, Line FROM SP_DAT WHERE ID = ?"""
             case 1:
-                db_path = os.path.join("/mnt/data/share", "sqlite_dbs", "futur.db")
+                db_path = os.path.join("/usr/local/share", "sqlite_dbs", "futur.db")
                 db_query = """SELECT ("S" || EP_S || "E" || EP_EP || " " || EP_TITLE || "<br><em>Opener: " || EP_OPENER || "</em>"), EP_LINE_SPEAKER, EP_LINE FROM clean_dialog WHERE EP_ID = ? ORDER BY LINE_ID ASC"""
             case 2:
-                db_path = os.path.join("/mnt/data/share", "sqlite_dbs", "parks.db")
+                db_path = os.path.join("/usr/local/share", "sqlite_dbs", "parks.db")
                 db_query = """SELECT ("S" || EP_S || "E" || EP_EP || " " || EP_TITLE), EP_LINE_SPEAKER, EP_LINE FROM clean_dialog WHERE EP_ID = ? ORDER BY id ASC"""
             case _:

@@ -22,7 +22,7 @@ class Cache:
     def __init__(self) -> None:
         self.cache_db: Union[str, LiteralString] = os.path.join(
-            "/mnt/data/share", "sqlite_dbs", "cached_lyrics.db"
+            "/usr/local/share", "sqlite_dbs", "cached_lyrics.db"
         )
         self.redis_cache = redis_cache.RedisCache()
         self.notifier = notifier.DiscordNotifier()

@@ -1,4 +1,7 @@
 SCRAPE_HEADERS: dict[str, str] = {
     "accept": "*/*",
+    "Accept-Encoding": "gzip, deflate, br, zstd",
+    "Accept-Language": "en-US,en;q=0.5",
+    "Connection": "keep-alive",
     "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:130.0) Gecko/20100101 Firefox/130.0",
 }

@@ -57,6 +57,7 @@ class Genius:
                 timeout=self.timeout,
                 headers=self.headers,
                 verify_ssl=False,
+                proxy=private.GENIUS_PROXY,
             ) as request:
                 request.raise_for_status()
                 text: Optional[str] = await request.text()
@@ -112,6 +113,7 @@ class Genius:
                 timeout=self.timeout,
                 headers=self.headers,
                 verify_ssl=False,
+                proxy=private.GENIUS_PROXY,
             ) as scrape_request:
                 scrape_request.raise_for_status()
                 scrape_text: Optional[str] = await scrape_request.text()
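
Note: both Genius requests above now route through private.GENIUS_PROXY. A minimal sketch of the same aiohttp call shape, with placeholder proxy and target URLs standing in for the private values:

# Sketch only: the proxy URL, target URL, and headers are placeholders.
import asyncio
from aiohttp import ClientSession, ClientTimeout

PROXY_URL = "http://proxy.example:8080"  # stands in for private.GENIUS_PROXY

async def fetch(url: str) -> str:
    async with ClientSession() as session:
        async with session.get(
            url,
            timeout=ClientTimeout(total=10),
            headers={"User-Agent": "Mozilla/5.0"},
            verify_ssl=False,   # mirrors the existing calls in this file
            proxy=PROXY_URL,    # the kwarg added by this commit
        ) as request:
            request.raise_for_status()
            return await request.text()

if __name__ == "__main__":
    print(asyncio.run(fetch("https://genius.com"))[:200])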

@@ -1,6 +1,7 @@
 import os
 import logging
 import io
+import traceback
 import math
 from typing import Optional
 import aiosqlite as sqlite3
@@ -14,7 +15,7 @@ class MemeUtil:
     def __init__(self, constants) -> None:
         self.constants = constants
-        self.meme_db_path = os.path.join("/mnt/data/share", "sqlite_dbs", "meme.db")
+        self.meme_db_path = os.path.join("/usr/local/share", "sqlite_dbs", "meme.db")

     def is_png(self, buffer: bytes | io.BytesIO) -> bool:
         """
@@ -72,8 +73,17 @@ class MemeUtil:
                 buffer = io.BytesIO(result["image"])
                 is_png = self.is_png(buffer)
                 if not is_png:
-                    logging.debug("Converting %s, not detected as PNG", meme_id)
+                    logging.debug(
+                        "Converting meme_id: %s, not detected as PNG", meme_id
+                    )
                     ret_image = self.convert_to_png(buffer)
+                    converted = await self.replace_with_converted_png(
+                        meme_id, ret_image
+                    )
+                    if converted:
+                        logging.info("Converted meme_id: %s", meme_id)
+                    else:
+                        logging.info("Failed to convert meme_id: %s", meme_id)
                 else:
                     ret_image = result["image"]
             return ret_image
@@ -148,3 +158,34 @@ class MemeUtil:
             return None
         pages = math.ceil(count / rows_per_page)
         return pages
+
+    async def replace_with_converted_png(self, meme_id: int, meme_image: bytes) -> bool:
+        """
+        Replace stored image with converted PNG
+
+        Args:
+            meme_id (int)
+            meme_image (bytes)
+        Returns:
+            bool
+        """
+        update_query: str = "UPDATE memes SET image = ?, file_ext = 'PNG' WHERE id = ?"
+        params: tuple = (
+            meme_image,
+            meme_id,
+        )
+        try:
+            async with sqlite3.connect(self.meme_db_path, timeout=5) as db_conn:
+                update = await db_conn.execute_insert(update_query, params)
+                if not update:
+                    logging.info(
+                        "replace_with_converted_png: Failed -> Update: %s\nFor meme_id: %s",
+                        update,
+                        meme_id,
+                    )
+                    return False
+                else:
+                    return True
+        except Exception as e:
+            logging.info("replace_with_converted_png: %s", str(e))
+            traceback.print_exc()
+            return False
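
Note: for comparison, a minimal alternative sketch of the same update (an illustration of intent, not the committed code): it confirms a row actually changed via cursor.rowcount and commits explicitly rather than inferring success from execute_insert's return value.

# Alternative sketch, not the committed code: check affected rows and commit explicitly.
import aiosqlite

async def replace_with_converted_png(db_path: str, meme_id: int, meme_image: bytes) -> bool:
    query = "UPDATE memes SET image = ?, file_ext = 'PNG' WHERE id = ?"
    async with aiosqlite.connect(db_path, timeout=5) as db_conn:
        cursor = await db_conn.execute(query, (meme_image, meme_id))
        await db_conn.commit()        # persist the UPDATE
        return cursor.rowcount == 1   # exactly one meme row should have changed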

@@ -41,13 +41,13 @@ class RadioUtil:
             "/home/kyle/api/solibs/spellfix1.cpython-311-x86_64-linux-gnu.so"
         ]
         self.active_playlist_path: str = os.path.join(
-            "/mnt/data/share", "sqlite_dbs", "track_file_map.db"
+            "/usr/local/share", "sqlite_dbs", "track_file_map.db"
         )
         self.artist_genre_db_path: str = os.path.join(
-            "/mnt/data/share", "sqlite_dbs", "artist_genre_map.db"
+            "/usr/local/share", "sqlite_dbs", "artist_genre_map.db"
         )
         self.album_art_db_path: str = os.path.join(
-            "/mnt/data/share", "sqlite_dbs", "track_album_art.db"
+            "/usr/local/share", "sqlite_dbs", "track_album_art.db"
         )
         self.playback_genres: list[str] = [
             # "post-hardcore",
@@ -57,6 +57,11 @@ class RadioUtil:
             # "edm",
             # "electronic",
             # "hard rock",
+            # "ska",
+            # "post punk",
+            # "post-punk",
+            # "pop punk",
+            # "pop-punk",
         ]
         self.active_playlist: list[dict] = []
         self.playlist_loaded: bool = False