codey 2025-01-23 13:02:03 -05:00
parent 2df8250ba2
commit e55485e7e8
11 changed files with 37 additions and 22 deletions

View File

@@ -49,6 +49,10 @@ Blacklisted routes
def disallow_get():
return util.get_blocked_response()
@app.head("/")
def base_head():
return
@app.get("/{path}")
def disallow_get_any(request: Request, var: Any = None): # pylint: disable=unused-argument
path = request.path_params['path']
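
The first hunk adds a bare HEAD handler for the root path next to the existing catch-all GET blocker, so HEAD probes against "/" no longer fall through to the blocked-route response. A minimal sketch of that pattern, assuming FastAPI; the 403 Response here is only a stand-in for the repo's util.get_blocked_response():

from fastapi import FastAPI, Request, Response

app = FastAPI()

@app.head("/")
def base_head():
    # bare 200 so HEAD probes against the root no longer hit the catch-all blocker
    return

@app.get("/{path}")
def disallow_get_any(request: Request):
    # stand-in for util.get_blocked_response(); every unknown GET path is refused
    path = request.path_params['path']
    return Response(content=f"Blocked: {path}", status_code=403)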

View File

@@ -76,7 +76,7 @@ class AI(FastAPI):
}
async with ClientSession() as session:
async with session.post(data.hook, json=hook_data,
async with await session.post(data.hook, json=hook_data,
timeout=ClientTimeout(connect=5, sock_read=5), headers={
'content-type': 'application/json; charset=utf-8',}) as request:
request.raise_for_status()
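
This hunk, and the matching ones in the XC and LRCLib files further down, switches from entering the request context manager directly to awaiting session.post() first and entering the returned ClientResponse. Both forms are valid aiohttp usage; a minimal sketch of the new form, with a hypothetical hook URL and payload:

import asyncio
from aiohttp import ClientSession, ClientTimeout

async def fire_hook(hook_url: str, hook_data: dict) -> None:
    # hook_url and hook_data are illustrative; the route builds them from the incoming request
    async with ClientSession() as session:
        async with await session.post(hook_url, json=hook_data,
                                      timeout=ClientTimeout(connect=5, sock_read=5),
                                      headers={'content-type': 'application/json; charset=utf-8'}) as request:
            request.raise_for_status()

# asyncio.run(fire_hook("https://example.invalid/hook", {"status": "ok"}))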

View File

@@ -45,7 +45,7 @@ class KarmaDB:
async def get_karma(self, keyword: str) -> int | dict:
"""Get Karma Value for Keyword"""
async with sqlite3.connect(self.db_path, timeout=2) as db_conn:
async with db_conn.execute("SELECT score FROM karma WHERE keyword LIKE ? LIMIT 1", (keyword,)) as db_cursor:
async with await db_conn.execute("SELECT score FROM karma WHERE keyword LIKE ? LIMIT 1", (keyword,)) as db_cursor:
try:
(score,) = await db_cursor.fetchone()
return score
@@ -59,7 +59,7 @@ class KarmaDB:
"""Get Top n=10 Karma Entries"""
try:
async with sqlite3.connect(self.db_path, timeout=2) as db_conn:
async with db_conn.execute("SELECT keyword, score FROM karma ORDER BY score DESC LIMIT ?", (n,)) as db_cursor:
async with await db_conn.execute("SELECT keyword, score FROM karma ORDER BY score DESC LIMIT ?", (n,)) as db_cursor:
return await db_cursor.fetchall()
except:
traceback.print_exc()
@@ -81,17 +81,17 @@ class KarmaDB:
logging.debug("Audit message: %s{audit_message}\nKeyword: %s{keyword}")
async with sqlite3.connect(self.db_path, timeout=2) as db_conn:
async with db_conn.execute(audit_query, (keyword, audit_message,)) as db_cursor:
async with await db_conn.execute(audit_query, (keyword, audit_message,)) as db_cursor:
await db_conn.commit()
await db_cursor.close()
async with db_conn.execute(query, (now, keyword,)) as db_cursor:
async with await db_conn.execute(query, (now, keyword,)) as db_cursor:
if db_cursor.rowcount:
await db_conn.commit()
return True
if db_cursor.rowcount < 1: # Keyword does not already exist
await db_cursor.close()
new_val = 1 if not flag else -1
async with db_conn.execute(new_keyword_query, (keyword, new_val, now,)) as db_cursor:
async with await db_conn.execute(new_keyword_query, (keyword, new_val, now,)) as db_cursor:
if db_cursor.rowcount >= 1:
await db_conn.commit()
return True
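
The KarmaDB hunks, and the analogous ones in the cache, random-message, and transcription modules below, all make the same change: the cursor is obtained by awaiting execute() and then used as an async context manager. That only works when the sqlite3 name is bound to an async driver; a minimal sketch assuming aiosqlite is imported under that alias, with a hypothetical database path:

import asyncio
import aiosqlite as sqlite3  # assumption: the project aliases aiosqlite to the sqlite3 name

async def get_karma(db_path: str, keyword: str) -> int | None:
    async with sqlite3.connect(db_path, timeout=2) as db_conn:
        # awaiting execute() yields the cursor, which is itself an async context manager
        async with await db_conn.execute(
                "SELECT score FROM karma WHERE keyword LIKE ? LIMIT 1",
                (keyword,)) as db_cursor:
            row = await db_cursor.fetchone()
            return row[0] if row else None

# asyncio.run(get_karma("karma.db", "python"))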

View File

@@ -81,7 +81,7 @@ class CacheUtils:
else:
query = "SELECT distinct(song) FROM lyrics WHERE artist LIKE ? AND song LIKE ? LIMIT 15"
query_params = (f"%{pre_query}%", f"%{s}%",)
async with db_conn.execute(query, query_params) as db_cursor:
async with await db_conn.execute(query, query_params) as db_cursor:
return await db_cursor.fetchall()
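
Worth noting in the CacheUtils hunk: the LIKE wildcards are wrapped around the user-supplied terms in the parameter tuple rather than interpolated into the SQL text, so the driver still escapes the input. A short sketch of that query, with db_conn assumed to be an open aiosqlite connection as in the previous sketch:

async def check_song(db_conn, pre_query: str, s: str) -> list:
    # the % wildcards live in the bound parameters; the SQL text itself stays static
    query = "SELECT DISTINCT(song) FROM lyrics WHERE artist LIKE ? AND song LIKE ? LIMIT 15"
    query_params = (f"%{pre_query}%", f"%{s}%")
    async with await db_conn.execute(query, query_params) as db_cursor:
        return await db_cursor.fetchall()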

View File

@@ -126,7 +126,6 @@ class Misc(FastAPI):
dict
"""
counts = await self.redis_cache.get_found_counts()
logging.info("Got counts: %s - type: %s", counts, type(counts))
return counts
async def homepage_radio_widget(self) -> dict:

View File

@@ -107,7 +107,7 @@ class RandMsg(FastAPI):
title_attr = "Donnies DB"
async with sqlite3.connect(database=randmsg_db_path, timeout=1) as _db:
async with _db.execute(db_query) as _cursor:
async with await _db.execute(db_query) as _cursor:
result = await _cursor.fetchone()
(result_id, result_msg) = result
result_msg = result_msg.strip()

View File

@@ -82,7 +82,7 @@ class Transcriptions(FastAPI):
}
await self.glob_state.increment_counter('transcript_list_requests')
async with sqlite3.connect(database=db_path, timeout=1) as _db:
async with _db.execute(db_query) as _cursor:
async with await _db.execute(db_query) as _cursor:
result = await _cursor.fetchall()
return {
"show_title": show_title,
@@ -120,7 +120,7 @@ class Transcriptions(FastAPI):
await self.glob_state.increment_counter('transcript_requests')
async with sqlite3.connect(database=db_path, timeout=1) as _db:
params = (episode_id,)
async with _db.execute(db_query, params) as _cursor:
async with await _db.execute(db_query, params) as _cursor:
result = await _cursor.fetchall()
first_result = result[0]
return {

View File

@@ -77,8 +77,8 @@ class XC(FastAPI):
}
bot_api_url = f'http://{BID_ADDR_MAP[bid]}/'
async with ClientSession() as session:
async with session.post(f"{bot_api_url}{cmd}", json=cmd_data, headers={
async with ClientSession() as session:
async with await session.post(f"{bot_api_url}{cmd}", json=cmd_data, headers={
'Content-Type': 'application/json; charset=utf-8'
}, timeout=ClientTimeout(connect=5, sock_read=5)) as request:
response = await request.json()

View File

@@ -3,12 +3,12 @@
from typing import Optional
from lyric_search_new.constructors import LyricsResult
from lyric_search_new import notifier
import sys
import logging
import traceback
sys.path.insert(1,'..')
from . import cache
from . import genius
from . import lrclib
from . import cache, redis_cache, genius, lrclib
logger = logging.getLogger()
logger.setLevel(logging.INFO)
@@ -22,6 +22,8 @@ class Aggregate:
if not exclude_methods:
exclude_methods: list = []
self.exclude_methods = exclude_methods
self.redis_cache = redis_cache.RedisCache()
self.notifier = notifier.DiscordNotifier()
async def search(self, artist: str, song: str, plain: bool = True) -> Optional[LyricsResult]:
"""
@@ -63,4 +65,15 @@ Aggregate:
if search_result:
break
logging.info("%s: NOT FOUND!", source.label)
if not search_result:
logging.info("%s - %s: all sources exhausted, not found.",
artist, song)
try:
await self.redis_cache.increment_found_count("failed")
except Exception as e:
traceback.print_exc()
logging.info("Could not increment redis failed counter: %s",
str(e))
self.notifier.send(f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}",
f"Could not increment redis failed counter: {str(e)}")
return search_result
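
The aggregate changes wire two new collaborators into the constructor (redis_cache.RedisCache and notifier.DiscordNotifier) and add a miss path: when every lyric source is exhausted, a "failed" counter is incremented in Redis, and any error while doing so is logged and reported to Discord. A condensed, self-contained sketch of that miss path; record_miss is a hypothetical helper name, as the diff runs this logic inline at the end of search() using the instances built in __init__:

import logging
import traceback

async def record_miss(redis_cache, notifier, artist: str, song: str) -> None:
    # hypothetical helper; mirrors the block the diff appends to Aggregate.search()
    logging.info("%s - %s: all sources exhausted, not found.", artist, song)
    try:
        await redis_cache.increment_found_count("failed")
    except Exception as e:
        traceback.print_exc()
        logging.info("Could not increment redis failed counter: %s", str(e))
        notifier.send("ERROR", f"Could not increment redis failed counter: {e}")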

View File

@@ -53,7 +53,7 @@ class LRCLib:
input_track: str = f"{artist} - {song}"
returned_lyrics: str = ''
async with ClientSession() as client:
async with client.get(self.lrclib_url,
async with await client.get(self.lrclib_url,
params = {
'artist_name': artist,
'track_name': song,

View File

@@ -97,19 +97,18 @@ class RedisCache:
async def get_found_counts(self) -> dict:
"""
Get found counts for all sources
Get found counts for all sources (and failed count)
Args:
None
Returns:
dict: In the form {'source': count, 'source2': count, ...}
"""
try:
sources: list = ["cache", "lrclib", "genius"]
sources: list = ["cache", "lrclib", "genius", "failed"]
counts: dict = {}
for src in sources:
src_found_count = await self.redis_client.get(f"returned:{src}")
counts[src] = src_found_count
logging.info("Returning: %s", counts)
return counts
except Exception as e:
await self.notifier.send(f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}", f"{str(e)}")
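
get_found_counts() now folds the new "failed" source into the same returned:{src} counter scheme and drops the extra logging call. A stand-alone sketch of reading those counters with redis.asyncio, assuming the keys hold plain integer strings and a default local Redis instance:

import asyncio
import redis.asyncio as redis

async def get_found_counts() -> dict:
    # assumes counters live under returned:<source> as plain integer strings
    client = redis.Redis()
    sources = ["cache", "lrclib", "genius", "failed"]
    counts = {}
    for src in sources:
        raw = await client.get(f"returned:{src}")
        counts[src] = int(raw) if raw is not None else 0
    await client.aclose()  # redis-py 5.x; older versions use close()
    return counts

# asyncio.run(get_found_counts())
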
@@ -204,11 +203,11 @@ RedisCache:
jsonset = await self.redis_client.json().set(newkey, Path.root_path(),
redis_mapping)
if not jsonset:
raise RedisException(f"Failed to store {lyr_result.artist} - {lyr_result.song} (SQLite id: {sqlite_id}):\n{jsonset}")
raise RedisException(f"Failed to store {lyr_result.artist} - {lyr_result.song} (SQLite id: {sqlite_id}) to redis:\n{jsonset}")
logging.info("Stored %s - %s (related SQLite Row ID: %s) to %s",
lyr_result.artist, lyr_result.song, sqlite_id, newkey)
await self.notifier.send("INFO",
f"Stored {lyr_result.artist} - {lyr_result.song} (related SQLite Row ID: {sqlite_id}) to {newkey}")
f"Stored {lyr_result.artist} - {lyr_result.song} (related SQLite Row ID: {sqlite_id}) to redis: {newkey}")
except Exception as e:
await self.notifier.send(f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}",
f"Failed to store {lyr_result.artist} - {lyr_result.song}\