formatting/add meme endpoints

codey 2025-05-17 08:07:38 -04:00
parent d944a32c62
commit 2caa482a0d
10 changed files with 135 additions and 31 deletions

@@ -93,6 +93,7 @@ routes: dict = {
             app, util, constants, loop
         ),
         "mgr": importlib.import_module("endpoints.mgr.mgr_test").Mgr(app, util, constants),
+        "meme": importlib.import_module("endpoints.meme").Meme(app, util, constants),
     }
     # Misc endpoint depends on radio endpoint instance

@@ -85,9 +85,7 @@ class KarmaDB:
             "INSERT INTO karma(keyword, score, last_change) VALUES(?, ?, ?)"
         )
         friendly_flag: str = "++" if not flag else "--"
-        audit_message: str = (
-            f"{granter} adjusted karma for {keyword} @ {datetime.datetime.now().isoformat()}: {friendly_flag}"
-        )
+        audit_message: str = f"{granter} adjusted karma for {keyword} @ {datetime.datetime.now().isoformat()}: {friendly_flag}"
         audit_query: str = (
             "INSERT INTO karma_audit(impacted_keyword, comment) VALUES(?, ?)"
         )

@@ -115,7 +115,7 @@ class LyricSearch(FastAPI):
         if data.src.upper() not in self.acceptable_request_sources:
             await self.notifier.send(
-                f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}",
+                f"ERROR @ {__file__.rsplit('/', maxsplit=1)[-1]}",
                 f"Unknown request source: {data.src}",
             )
             return JSONResponse(

endpoints/meme.py (new file, 37 lines)

import logging
from fastapi import FastAPI, Request, Response
from fastapi.responses import JSONResponse
from utils.meme_util import MemeUtil


class Meme(FastAPI):
    """
    Meme Endpoints
    """

    def __init__(self, app: FastAPI, my_util, constants) -> None:
        self.app: FastAPI = app
        self.util = my_util
        self.meme_util = MemeUtil(constants)
        self.constants = constants
        self.endpoints: dict = {
            "memes/get_meme/{id:path}": self.get_meme_by_id,
            "memes/list_memes": self.list_memes,
        }

        for endpoint, handler in self.endpoints.items():
            app.add_api_route(
                f"/{endpoint}", handler, methods=["GET"], include_in_schema=True
            )

    async def get_meme_by_id(self, id: int, request: Request) -> Response:
        """Get meme (image) by id"""
        meme_image = await self.meme_util.get_meme_by_id(id)
        if not meme_image:
            return Response(status_code=404, content="Not found")
        return Response(content=meme_image, media_type="image/png")

    async def list_memes(self, page: int, request: Request) -> Response:
        """List memes (paginated)"""
        meme_list = await self.meme_util.list_memes(page)
        return JSONResponse(content={"memes": meme_list})
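For reference, a minimal sketch of how the two new routes could be exercised once the app is running; the base URL, port, and meme id below are assumptions for illustration, not part of this commit:

import requests  # any HTTP client works; requests is used here only as an example

BASE_URL = "http://localhost:8000"  # hypothetical local deployment

# /memes/list_memes takes `page` as a query parameter (10 rows per page, newest first)
listing = requests.get(f"{BASE_URL}/memes/list_memes", params={"page": 1})
print(listing.json()["memes"])

# /memes/get_meme/{id} returns raw PNG bytes, or 404 if the id is unknown
image = requests.get(f"{BASE_URL}/memes/get_meme/1")  # id 1 is a placeholder
if image.status_code == 200:
    with open("meme_1.png", "wb") as f:
        f.write(image.content)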

@@ -113,9 +113,7 @@ class Transcriptions(FastAPI):
                 db_path: Union[str, LiteralString] = os.path.join(
                     "/usr/local/share", "sqlite_dbs", "sp.db"
                 )
-                db_query: str = (
-                    """SELECT ("S" || Season || "E" || Episode || " " || Title), Character, Line FROM SP_DAT WHERE ID = ?"""
-                )
+                db_query: str = """SELECT ("S" || Season || "E" || Episode || " " || Title), Character, Line FROM SP_DAT WHERE ID = ?"""
             case 1:
                 db_path = os.path.join("/usr/local/share", "sqlite_dbs", "futur.db")
                 db_query = """SELECT ("S" || EP_S || "E" || EP_EP || " " || EP_TITLE || "<br><em>Opener: " || EP_OPENER || "</em>"), EP_LINE_SPEAKER, EP_LINE FROM clean_dialog WHERE EP_ID = ? ORDER BY LINE_ID ASC"""

@@ -89,10 +89,8 @@ class Cache:
         logging.debug(
             "Checking whether %s is already stored", artistsong.replace("\n", " - ")
         )
-        check_query: str = (
-            'SELECT id, artist, song FROM lyrics WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))\
-            <= 410 ORDER BY editdist3((lower(artist) || " " || lower(song)), ?) ASC LIMIT 1'
-        )
+        check_query: str = 'SELECT id, artist, song FROM lyrics WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))\
+        <= 410 ORDER BY editdist3((lower(artist) || " " || lower(song)), ?) ASC LIMIT 1'
         artistsong_split = artistsong.split("\n", maxsplit=1)
         artist = artistsong_split[0].lower()
         song = artistsong_split[1].lower()
@ -213,8 +211,10 @@ class Cache:
lyrics = regex.sub(r"(<br>|\n|\r\n)", " / ", lyr_result.lyrics.strip()) lyrics = regex.sub(r"(<br>|\n|\r\n)", " / ", lyr_result.lyrics.strip())
lyrics = regex.sub(r"\s{2,}", " ", lyrics) lyrics = regex.sub(r"\s{2,}", " ", lyrics)
insert_query = "INSERT INTO lyrics (src, date_retrieved, artist, song, artistsong, confidence, lyrics)\ insert_query = (
"INSERT INTO lyrics (src, date_retrieved, artist, song, artistsong, confidence, lyrics)\
VALUES(?, ?, ?, ?, ?, ?, ?)" VALUES(?, ?, ?, ?, ?, ?, ?)"
)
params = ( params = (
lyr_result.src, lyr_result.src,
time.time(), time.time(),
@ -258,10 +258,8 @@ class Cache:
if artist == "!" and song == "!": if artist == "!" and song == "!":
random_search = True random_search = True
search_query: str = ( search_query: str = "SELECT id, artist, song, lyrics, src, confidence\
"SELECT id, artist, song, lyrics, src, confidence\
FROM lyrics ORDER BY RANDOM() LIMIT 1" FROM lyrics ORDER BY RANDOM() LIMIT 1"
)
logging.info("Searching %s - %s on %s", artist, song, self.label) logging.info("Searching %s - %s on %s", artist, song, self.label)
@ -320,11 +318,9 @@ class Cache:
self.cache_pre_query self.cache_pre_query
) as _db_cursor: ) as _db_cursor:
if not random_search: if not random_search:
search_query: str = ( search_query: str = 'SELECT id, artist, song, lyrics, src, confidence FROM lyrics\
'SELECT id, artist, song, lyrics, src, confidence FROM lyrics\
WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))\ WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))\
<= 410 ORDER BY editdist3((lower(artist) || " " || lower(song)), ?) ASC LIMIT 10' <= 410 ORDER BY editdist3((lower(artist) || " " || lower(song)), ?) ASC LIMIT 10'
)
search_params: tuple = ( search_params: tuple = (
artist.strip(), artist.strip(),
song.strip(), song.strip(),

@@ -8,9 +8,7 @@ import re
 from typing import Optional
 from aiohttp import ClientTimeout, ClientSession
 from bs4 import BeautifulSoup, ResultSet  # type: ignore
-from tenacity import (
-    retry, stop_after_attempt, wait_fixed
-)
+from tenacity import retry, stop_after_attempt, wait_fixed
 import html as htm
 from . import private, common, cache, redis_cache
 from lyric_search import utils
@@ -58,6 +56,7 @@ class Genius:
                 f"{self.genius_search_url}{search_term}",
                 timeout=self.timeout,
                 headers=self.headers,
+                verify_ssl=False,
             ) as request:
                 request.raise_for_status()
                 text: Optional[str] = await request.text()
@@ -109,7 +108,10 @@ class Genius:
                 scrape_url: str = f"{self.genius_url}{scrape_stub[1:]}"
                 async with client.get(
-                    scrape_url, timeout=self.timeout, headers=self.headers
+                    scrape_url,
+                    timeout=self.timeout,
+                    headers=self.headers,
+                    verify_ssl=False,
                 ) as scrape_request:
                     scrape_request.raise_for_status()
                     scrape_text: Optional[str] = await scrape_request.text()

@@ -6,9 +6,7 @@ import traceback
 import logging
 from typing import Optional, Union
 from aiohttp import ClientTimeout, ClientSession
-from tenacity import (
-    retry, stop_after_attempt, wait_fixed
-)
+from tenacity import retry, stop_after_attempt, wait_fixed
 from lyric_search import utils
 from lyric_search.constructors import LyricsResult
 from . import common, cache, redis_cache

utils/meme_util.py (new file, 77 lines)

import os
import io
from typing import Optional
import aiosqlite as sqlite3
from PIL import Image


class MemeUtil:
    """
    Meme Utils
    """

    def __init__(self, constants) -> None:
        self.constants = constants
        self.meme_db_path = os.path.join("/usr/local/share", "sqlite_dbs", "meme.db")

    def convert_to_png(self, in_buffer: io.BytesIO) -> bytes:
        in_buffer.seek(0)
        with Image.open(in_buffer) as im:
            if im.format == "PNG":
                raise ValueError("Already a PNG")
            out_buffer = io.BytesIO()
            im.save(out_buffer, format="PNG")
            out_buffer.seek(0)
            return out_buffer.read()

    async def get_meme_by_id(self, meme_id: int) -> Optional[bytes]:
        """
        Get meme by id
        Args:
            meme_id (int)
        Returns:
            Optional[bytes]
        """
        ret_image: Optional[bytes] = None
        buffer: Optional[io.BytesIO] = None
        async with sqlite3.connect(self.meme_db_path, timeout=5) as db_conn:
            db_conn.row_factory = sqlite3.Row
            query: str = "SELECT image FROM memes WHERE id = ? LIMIT 1"
            async with await db_conn.execute(query, (meme_id,)) as db_cursor:
                result = await db_cursor.fetchone()
                if not result:
                    return None
                buffer = io.BytesIO(result["image"])
                with Image.open(buffer) as im:
                    if im.format != "PNG":
                        ret_image = self.convert_to_png(buffer)
                    else:
                        ret_image = result["image"]
                return ret_image

    async def list_memes(self, page: int) -> Optional[list]:
        """
        List memes (paginated)
        Args:
            page (int)
        Returns:
            list
        """
        out_result: list = []
        async with sqlite3.connect(self.meme_db_path, timeout=5) as db_conn:
            db_conn.row_factory = sqlite3.Row
            rows_per_page: int = 10
            offset: int = (page - 1) * rows_per_page
            query: str = "SELECT id, timestamp FROM memes ORDER BY timestamp DESC LIMIT 10 OFFSET ?"
            async with await db_conn.execute(query, (offset,)) as db_cursor:
                results = await db_cursor.fetchall()
                for result in results:
                    result_id = result["id"]
                    result_timestamp = result["timestamp"]
                    out_result.append(
                        {
                            "id": result_id,
                            "timestamp": result_timestamp,
                        }
                    )
        return out_result
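MemeUtil reads from /usr/local/share/sqlite_dbs/meme.db, but the commit does not include the schema. Judging from the queries above, a memes table along these lines would satisfy both lookups; the column types and the seeding step below are assumptions for illustration only:

import sqlite3
import time

# Hypothetical schema inferred from the SELECTs in MemeUtil; the real DDL may differ.
with sqlite3.connect("/usr/local/share/sqlite_dbs/meme.db") as conn:
    conn.execute(
        "CREATE TABLE IF NOT EXISTS memes ("
        "id INTEGER PRIMARY KEY AUTOINCREMENT, "
        "timestamp INTEGER NOT NULL, "
        "image BLOB NOT NULL)"
    )
    # Seed one row so /memes/list_memes and /memes/get_meme/1 have data to return.
    with open("example.png", "rb") as f:  # placeholder image path
        conn.execute(
            "INSERT INTO memes (timestamp, image) VALUES (?, ?)",
            (int(time.time()), f.read()),
        )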

@@ -56,6 +56,7 @@ class RadioUtil:
             "deathcore",
             "edm",
             "electronic",
+            "hard rock",
         ]
         self.active_playlist: list[dict] = []
         self.playlist_loaded: bool = False
@@ -160,11 +161,9 @@ class RadioUtil:
         if not artistsong and (not artist or not song):
             raise RadioException("No query provided")
         try:
-            search_query: str = (
-                'SELECT id, artist, song, (artist || " - " || song) AS artistsong, album, file_path, duration FROM tracks\
-                WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))\
-                <= 410 ORDER BY editdist3((lower(artist) || " " || lower(song)), ?) ASC LIMIT 1'
-            )
+            search_query: str = 'SELECT id, artist, song, (artist || " - " || song) AS artistsong, album, file_path, duration FROM tracks\
+            WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))\
+            <= 410 ORDER BY editdist3((lower(artist) || " " || lower(song)), ?) ASC LIMIT 1'
             if artistsong:
                 artistsong_split: list = artistsong.split(" - ", maxsplit=1)
                 (search_artist, search_song) = tuple(artistsong_split)
@ -256,9 +255,7 @@ class RadioUtil:
for pair in pairs: for pair in pairs:
try: try:
artist, genre = pair artist, genre = pair
query: str = ( query: str = "INSERT OR IGNORE INTO artist_genre (artist, genre) VALUES(?, ?)"
"INSERT OR IGNORE INTO artist_genre (artist, genre) VALUES(?, ?)"
)
params: tuple[str, str] = (artist, genre) params: tuple[str, str] = (artist, genre)
res = _db.execute(query, params) res = _db.execute(query, params)
if isinstance(res.lastrowid, int): if isinstance(res.lastrowid, int):
@ -376,7 +373,7 @@ class RadioUtil:
dedupe_processed.append(artistsongabc) dedupe_processed.append(artistsongabc)
logging.info( logging.info(
"Duplicates removed." "New playlist size: %s", "Duplicates removed.New playlist size: %s",
len(self.active_playlist), len(self.active_playlist),
) )