formatting/add meme endpoints
commit 2caa482a0d
parent d944a32c62
base.py (1 line changed)
@@ -93,6 +93,7 @@ routes: dict = {
        app, util, constants, loop
    ),
    "mgr": importlib.import_module("endpoints.mgr.mgr_test").Mgr(app, util, constants),
    "meme": importlib.import_module("endpoints.meme").Meme(app, util, constants),
}

# Misc endpoint depends on radio endpoint instance
@@ -85,9 +85,7 @@ class KarmaDB:
            "INSERT INTO karma(keyword, score, last_change) VALUES(?, ?, ?)"
        )
        friendly_flag: str = "++" if not flag else "--"
        audit_message: str = (
            f"{granter} adjusted karma for {keyword} @ {datetime.datetime.now().isoformat()}: {friendly_flag}"
        )
        audit_message: str = f"{granter} adjusted karma for {keyword} @ {datetime.datetime.now().isoformat()}: {friendly_flag}"
        audit_query: str = (
            "INSERT INTO karma_audit(impacted_keyword, comment) VALUES(?, ?)"
        )
@@ -115,7 +115,7 @@ class LyricSearch(FastAPI):

        if data.src.upper() not in self.acceptable_request_sources:
            await self.notifier.send(
                f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}",
                f"ERROR @ {__file__.rsplit('/', maxsplit=1)[-1]}",
                f"Unknown request source: {data.src}",
            )
            return JSONResponse(
endpoints/meme.py (new file, 37 lines)
@@ -0,0 +1,37 @@
import logging
from fastapi import FastAPI, Request, Response
from fastapi.responses import JSONResponse
from utils.meme_util import MemeUtil


class Meme(FastAPI):
    """
    Meme Endpoints
    """

    def __init__(self, app: FastAPI, my_util, constants) -> None:
        self.app: FastAPI = app
        self.util = my_util
        self.meme_util = MemeUtil(constants)
        self.constants = constants
        self.endpoints: dict = {
            "memes/get_meme/{id:path}": self.get_meme_by_id,
            "memes/list_memes": self.list_memes,
        }

        for endpoint, handler in self.endpoints.items():
            app.add_api_route(
                f"/{endpoint}", handler, methods=["GET"], include_in_schema=True
            )

    async def get_meme_by_id(self, id: int, request: Request) -> Response:
        """Get meme (image) by id"""
        meme_image = await self.meme_util.get_meme_by_id(id)
        if not meme_image:
            return Response(status_code=404, content="Not found")
        return Response(content=meme_image, media_type="image/png")

    async def list_memes(self, page: int, request: Request) -> Response:
        """List memes (paginated)"""
        meme_list = await self.meme_util.list_memes(page)
        return JSONResponse(content={"memes": meme_list})
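For reference, a minimal client sketch that exercises the two routes registered above. Only the route paths and the page/id parameters come from the diff; the base URL, port, and the example meme id are assumptions to adapt to your deployment.

# Hedged usage sketch (not part of the commit); assumes the API listens on
# http://localhost:8000 and that a meme with id 1 exists.
import asyncio
from aiohttp import ClientSession


async def demo() -> None:
    async with ClientSession() as session:
        # Paginated listing: returns {"memes": [{"id": ..., "timestamp": ...}, ...]}
        async with session.get(
            "http://localhost:8000/memes/list_memes", params={"page": 1}
        ) as resp:
            print(await resp.json())

        # Single meme: the handler always responds with media_type="image/png"
        async with session.get("http://localhost:8000/memes/get_meme/1") as resp:
            print(resp.status, len(await resp.read()))


asyncio.run(demo())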
@@ -113,9 +113,7 @@ class Transcriptions(FastAPI):
                db_path: Union[str, LiteralString] = os.path.join(
                    "/usr/local/share", "sqlite_dbs", "sp.db"
                )
                db_query: str = (
                    """SELECT ("S" || Season || "E" || Episode || " " || Title), Character, Line FROM SP_DAT WHERE ID = ?"""
                )
                db_query: str = """SELECT ("S" || Season || "E" || Episode || " " || Title), Character, Line FROM SP_DAT WHERE ID = ?"""
            case 1:
                db_path = os.path.join("/usr/local/share", "sqlite_dbs", "futur.db")
                db_query = """SELECT ("S" || EP_S || "E" || EP_EP || " " || EP_TITLE || "<br><em>Opener: " || EP_OPENER || "</em>"), EP_LINE_SPEAKER, EP_LINE FROM clean_dialog WHERE EP_ID = ? ORDER BY LINE_ID ASC"""
@@ -89,10 +89,8 @@ class Cache:
        logging.debug(
            "Checking whether %s is already stored", artistsong.replace("\n", " - ")
        )
        check_query: str = (
            'SELECT id, artist, song FROM lyrics WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))\
        check_query: str = 'SELECT id, artist, song FROM lyrics WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))\
            <= 410 ORDER BY editdist3((lower(artist) || " " || lower(song)), ?) ASC LIMIT 1'
        )
        artistsong_split = artistsong.split("\n", maxsplit=1)
        artist = artistsong_split[0].lower()
        song = artistsong_split[1].lower()
@@ -213,8 +211,10 @@ class Cache:
            lyrics = regex.sub(r"(<br>|\n|\r\n)", " / ", lyr_result.lyrics.strip())
            lyrics = regex.sub(r"\s{2,}", " ", lyrics)

            insert_query = "INSERT INTO lyrics (src, date_retrieved, artist, song, artistsong, confidence, lyrics)\
            insert_query = (
                "INSERT INTO lyrics (src, date_retrieved, artist, song, artistsong, confidence, lyrics)\
                VALUES(?, ?, ?, ?, ?, ?, ?)"
            )
            params = (
                lyr_result.src,
                time.time(),
@@ -258,10 +258,8 @@ class Cache:

        if artist == "!" and song == "!":
            random_search = True
            search_query: str = (
                "SELECT id, artist, song, lyrics, src, confidence\
            search_query: str = "SELECT id, artist, song, lyrics, src, confidence\
                FROM lyrics ORDER BY RANDOM() LIMIT 1"
            )

        logging.info("Searching %s - %s on %s", artist, song, self.label)

@@ -320,11 +318,9 @@ class Cache:
                self.cache_pre_query
            ) as _db_cursor:
                if not random_search:
                    search_query: str = (
                        'SELECT id, artist, song, lyrics, src, confidence FROM lyrics\
                    search_query: str = 'SELECT id, artist, song, lyrics, src, confidence FROM lyrics\
                        WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))\
                        <= 410 ORDER BY editdist3((lower(artist) || " " || lower(song)), ?) ASC LIMIT 10'
                    )
                    search_params: tuple = (
                        artist.strip(),
                        song.strip(),
@@ -8,9 +8,7 @@ import re
from typing import Optional
from aiohttp import ClientTimeout, ClientSession
from bs4 import BeautifulSoup, ResultSet  # type: ignore
from tenacity import (
    retry, stop_after_attempt, wait_fixed
)
from tenacity import retry, stop_after_attempt, wait_fixed
import html as htm
from . import private, common, cache, redis_cache
from lyric_search import utils
@@ -58,6 +56,7 @@ class Genius:
                f"{self.genius_search_url}{search_term}",
                timeout=self.timeout,
                headers=self.headers,
                verify_ssl=False,
            ) as request:
                request.raise_for_status()
                text: Optional[str] = await request.text()
@@ -109,7 +108,10 @@ class Genius:
                scrape_url: str = f"{self.genius_url}{scrape_stub[1:]}"

                async with client.get(
                    scrape_url, timeout=self.timeout, headers=self.headers
                    scrape_url,
                    timeout=self.timeout,
                    headers=self.headers,
                    verify_ssl=False,
                ) as scrape_request:
                    scrape_request.raise_for_status()
                    scrape_text: Optional[str] = await scrape_request.text()
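A side note on the verify_ssl=False arguments added above: aiohttp treats the per-request verify_ssl flag as deprecated in favor of the single ssl parameter, so an equivalent spelling would look roughly like the sketch below (names taken from the diff; ssl=False is the only substitution).

# Sketch of the non-deprecated form; behavior is the same
# (certificate verification disabled for this request).
async with client.get(
    scrape_url,
    timeout=self.timeout,
    headers=self.headers,
    ssl=False,  # replaces verify_ssl=False
) as scrape_request:
    scrape_request.raise_for_status()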
@@ -6,9 +6,7 @@ import traceback
import logging
from typing import Optional, Union
from aiohttp import ClientTimeout, ClientSession
from tenacity import (
    retry, stop_after_attempt, wait_fixed
)
from tenacity import retry, stop_after_attempt, wait_fixed
from lyric_search import utils
from lyric_search.constructors import LyricsResult
from . import common, cache, redis_cache
utils/meme_util.py (new file, 77 lines)
@@ -0,0 +1,77 @@
import os
import io
from typing import Optional
import aiosqlite as sqlite3
from PIL import Image


class MemeUtil:
    """
    Meme Utils
    """

    def __init__(self, constants) -> None:
        self.constants = constants
        self.meme_db_path = os.path.join("/usr/local/share", "sqlite_dbs", "meme.db")

    def convert_to_png(self, in_buffer: io.BytesIO) -> bytes:
        in_buffer.seek(0)
        with Image.open(in_buffer) as im:
            if im.format == "PNG":
                raise ValueError("Already a PNG")
            out_buffer = io.BytesIO()
            im.save(out_buffer, format="PNG")
            out_buffer.seek(0)
            return out_buffer.read()

    async def get_meme_by_id(self, meme_id: int) -> Optional[bytes]:
        """
        Get meme by id
        Args:
            meme_id (int)
        Returns:
            Optional[bytes]
        """
        ret_image: Optional[bytes] = None
        buffer: Optional[io.BytesIO] = None
        async with sqlite3.connect(self.meme_db_path, timeout=5) as db_conn:
            db_conn.row_factory = sqlite3.Row
            query: str = "SELECT image FROM memes WHERE id = ? LIMIT 1"
            async with await db_conn.execute(query, (meme_id,)) as db_cursor:
                result = await db_cursor.fetchone()
                if not result:
                    return None
                buffer = io.BytesIO(result["image"])
                with Image.open(buffer) as im:
                    if im.format != "PNG":
                        ret_image = self.convert_to_png(buffer)
                    else:
                        ret_image = result["image"]
                return ret_image

    async def list_memes(self, page: int) -> Optional[list]:
        """
        List memes (paginated)
        Args:
            page (int)
        Returns:
            Optional[list]
        """
        out_result: list = []
        async with sqlite3.connect(self.meme_db_path, timeout=5) as db_conn:
            db_conn.row_factory = sqlite3.Row
            rows_per_page: int = 10
            offset: int = (page - 1) * rows_per_page
            query: str = "SELECT id, timestamp FROM memes ORDER BY timestamp DESC LIMIT 10 OFFSET ?"
            async with await db_conn.execute(query, (offset,)) as db_cursor:
                results = await db_cursor.fetchall()
                for result in results:
                    result_id = result["id"]
                    result_timestamp = result["timestamp"]
                    out_result.append(
                        {
                            "id": result_id,
                            "timestamp": result_timestamp,
                        }
                    )
                return out_result
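For orientation, the queries in MemeUtil imply roughly the following meme.db layout. This is inferred from the SELECT statements above, not taken from the repository, so column types and constraints are assumptions.

# One-off sketch to create a compatible local meme.db for testing (assumed schema).
import sqlite3

with sqlite3.connect("meme.db") as conn:
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS memes (
            id INTEGER PRIMARY KEY AUTOINCREMENT,  -- looked up via WHERE id = ?
            timestamp INTEGER,                     -- drives ORDER BY timestamp DESC
            image BLOB                             -- raw image bytes, converted to PNG on read
        )
        """
    )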
@@ -56,6 +56,7 @@ class RadioUtil:
            "deathcore",
            "edm",
            "electronic",
            "hard rock",
        ]
        self.active_playlist: list[dict] = []
        self.playlist_loaded: bool = False
@@ -160,11 +161,9 @@ class RadioUtil:
        if not artistsong and (not artist or not song):
            raise RadioException("No query provided")
        try:
            search_query: str = (
                'SELECT id, artist, song, (artist || " - " || song) AS artistsong, album, file_path, duration FROM tracks\
            search_query: str = 'SELECT id, artist, song, (artist || " - " || song) AS artistsong, album, file_path, duration FROM tracks\
                WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))\
                <= 410 ORDER BY editdist3((lower(artist) || " " || lower(song)), ?) ASC LIMIT 1'
            )
            if artistsong:
                artistsong_split: list = artistsong.split(" - ", maxsplit=1)
                (search_artist, search_song) = tuple(artistsong_split)
@@ -256,9 +255,7 @@ class RadioUtil:
            for pair in pairs:
                try:
                    artist, genre = pair
                    query: str = (
                        "INSERT OR IGNORE INTO artist_genre (artist, genre) VALUES(?, ?)"
                    )
                    query: str = "INSERT OR IGNORE INTO artist_genre (artist, genre) VALUES(?, ?)"
                    params: tuple[str, str] = (artist, genre)
                    res = _db.execute(query, params)
                    if isinstance(res.lastrowid, int):
@@ -376,7 +373,7 @@ class RadioUtil:
                    dedupe_processed.append(artistsongabc)

        logging.info(
            "Duplicates removed." "New playlist size: %s",
            "Duplicates removed. New playlist size: %s",
            len(self.active_playlist),
        )
