various
commit e55485e7e8
parent 2df8250ba2

base.py
@@ -49,6 +49,10 @@ Blacklisted routes
 def disallow_get():
     return util.get_blocked_response()

+@app.head("/")
+def base_head():
+    return
+
 @app.get("/{path}")
 def disallow_get_any(request: Request, var: Any = None): # pylint: disable=unused-argument
     path = request.path_params['path']
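For context, a minimal sketch of the pattern this hunk adds, assuming a plain FastAPI app (the responses below are illustrative, not taken from the repo): a dedicated HEAD handler on "/" lets health checks and HEAD probes succeed while every other GET path still receives the blocked response.

    # Minimal sketch, not the repo's code: HEAD handler plus catch-all GET.
    from fastapi import FastAPI, Request, Response

    app = FastAPI()

    @app.head("/")
    def base_head() -> Response:
        # HEAD probes (load balancers, uptime checks) get an empty 200.
        return Response(status_code=200)

    @app.get("/{path}")
    def disallow_get_any(request: Request, path: str) -> Response:
        # Every other GET path is refused; the real app delegates to a util helper.
        return Response(content="Blocked.", status_code=403)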
@@ -76,7 +76,7 @@ class AI(FastAPI):
         }

         async with ClientSession() as session:
-            async with session.post(data.hook, json=hook_data,
+            async with await session.post(data.hook, json=hook_data,
                 timeout=ClientTimeout(connect=5, sock_read=5), headers={
                     'content-type': 'application/json; charset=utf-8',}) as request:
                 request.raise_for_status()
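Several hunks in this commit change `async with x(...)` to `async with await x(...)`. That spelling only works when the awaited call returns an object that is itself an async context manager (aiohttp's ClientResponse and aiosqlite's cursor both are, in current versions). A self-contained sketch of that shape, with made-up names, purely to illustrate why both forms can be accepted:

    # Illustrative only: a handle that is both awaitable and an async context
    # manager, mirroring what aiohttp's session.post(...) provides.
    import asyncio

    class FakeResponse:
        async def __aenter__(self):
            return self          # entering just hands back the response

        async def __aexit__(self, *exc):
            return False         # release/cleanup would happen here

    class FakeRequestCM:
        def __await__(self):
            async def _run():
                return FakeResponse()   # awaiting performs the "request"
            return _run().__await__()

        async def __aenter__(self):
            return await self           # `async with post(...)` awaits for you

        async def __aexit__(self, *exc):
            return False

    def post():
        return FakeRequestCM()

    async def main():
        async with post() as r1:          # original spelling
            assert isinstance(r1, FakeResponse)
        async with await post() as r2:    # spelling used in this commit
            assert isinstance(r2, FakeResponse)

    asyncio.run(main())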
@@ -45,7 +45,7 @@ class KarmaDB:
     async def get_karma(self, keyword: str) -> int | dict:
         """Get Karma Value for Keyword"""
         async with sqlite3.connect(self.db_path, timeout=2) as db_conn:
-            async with db_conn.execute("SELECT score FROM karma WHERE keyword LIKE ? LIMIT 1", (keyword,)) as db_cursor:
+            async with await db_conn.execute("SELECT score FROM karma WHERE keyword LIKE ? LIMIT 1", (keyword,)) as db_cursor:
                 try:
                     (score,) = await db_cursor.fetchone()
                     return score
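The `sqlite3` name in these hunks is clearly an async driver (it is used with `async with` and awaited fetches); assuming it is aiosqlite imported under that alias, a minimal sketch of the same read pattern with the library spelled out:

    # Sketch assuming aiosqlite; the table/column names come from the diff itself.
    import asyncio
    import aiosqlite

    async def get_karma(db_path: str, keyword: str) -> int | None:
        async with aiosqlite.connect(db_path, timeout=2) as db_conn:
            # execute() returns an object usable directly with `async with`,
            # or it can be awaited first to obtain the cursor.
            async with db_conn.execute(
                "SELECT score FROM karma WHERE keyword LIKE ? LIMIT 1", (keyword,)
            ) as db_cursor:
                row = await db_cursor.fetchone()
                return row[0] if row else None

    # asyncio.run(get_karma("karma.db", "python"))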
@@ -59,7 +59,7 @@ class KarmaDB:
         """Get Top n=10 Karma Entries"""
         try:
             async with sqlite3.connect(self.db_path, timeout=2) as db_conn:
-                async with db_conn.execute("SELECT keyword, score FROM karma ORDER BY score DESC LIMIT ?", (n,)) as db_cursor:
+                async with await db_conn.execute("SELECT keyword, score FROM karma ORDER BY score DESC LIMIT ?", (n,)) as db_cursor:
                     return await db_cursor.fetchall()
         except:
             traceback.print_exc()
@@ -81,17 +81,17 @@ class KarmaDB:
         logging.debug("Audit message: %s{audit_message}\nKeyword: %s{keyword}")

         async with sqlite3.connect(self.db_path, timeout=2) as db_conn:
-            async with db_conn.execute(audit_query, (keyword, audit_message,)) as db_cursor:
+            async with await db_conn.execute(audit_query, (keyword, audit_message,)) as db_cursor:
                 await db_conn.commit()
                 await db_cursor.close()
-            async with db_conn.execute(query, (now, keyword,)) as db_cursor:
+            async with await db_conn.execute(query, (now, keyword,)) as db_cursor:
                 if db_cursor.rowcount:
                     await db_conn.commit()
                     return True
                 if db_cursor.rowcount < 1: # Keyword does not already exist
                     await db_cursor.close()
                     new_val = 1 if not flag else -1
-                    async with db_conn.execute(new_keyword_query, (keyword, new_val, now,)) as db_cursor:
+                    async with await db_conn.execute(new_keyword_query, (keyword, new_val, now,)) as db_cursor:
                         if db_cursor.rowcount >= 1:
                             await db_conn.commit()
                             return True
@@ -81,7 +81,7 @@ class CacheUtils:
             else:
                 query = "SELECT distinct(song) FROM lyrics WHERE artist LIKE ? AND song LIKE ? LIMIT 15"
                 query_params = (f"%{pre_query}%", f"%{s}%",)
-            async with db_conn.execute(query, query_params) as db_cursor:
+            async with await db_conn.execute(query, query_params) as db_cursor:
                 return await db_cursor.fetchall()


@@ -126,7 +126,6 @@ class Misc(FastAPI):
             dict
         """
         counts = await self.redis_cache.get_found_counts()
-        logging.info("Got counts: %s - type: %s", counts, type(counts))
         return counts

     async def homepage_radio_widget(self) -> dict:
@@ -107,7 +107,7 @@ class RandMsg(FastAPI):
                 title_attr = "Donnies DB"

         async with sqlite3.connect(database=randmsg_db_path, timeout=1) as _db:
-            async with _db.execute(db_query) as _cursor:
+            async with await _db.execute(db_query) as _cursor:
                 result = await _cursor.fetchone()
                 (result_id, result_msg) = result
                 result_msg = result_msg.strip()
@@ -82,7 +82,7 @@ class Transcriptions(FastAPI):
                 }
         await self.glob_state.increment_counter('transcript_list_requests')
         async with sqlite3.connect(database=db_path, timeout=1) as _db:
-            async with _db.execute(db_query) as _cursor:
+            async with await _db.execute(db_query) as _cursor:
                 result = await _cursor.fetchall()
                 return {
                     "show_title": show_title,
@@ -120,7 +120,7 @@ class Transcriptions(FastAPI):
         await self.glob_state.increment_counter('transcript_requests')
         async with sqlite3.connect(database=db_path, timeout=1) as _db:
             params = (episode_id,)
-            async with _db.execute(db_query, params) as _cursor:
+            async with await _db.execute(db_query, params) as _cursor:
                 result = await _cursor.fetchall()
                 first_result = result[0]
                 return {
@@ -77,8 +77,8 @@ class XC(FastAPI):
         }

         bot_api_url = f'http://{BID_ADDR_MAP[bid]}/'
         async with ClientSession() as session:
-            async with session.post(f"{bot_api_url}{cmd}", json=cmd_data, headers={
+            async with await session.post(f"{bot_api_url}{cmd}", json=cmd_data, headers={
                     'Content-Type': 'application/json; charset=utf-8'
                 }, timeout=ClientTimeout(connect=5, sock_read=5)) as request:
                 response = await request.json()
@@ -3,12 +3,12 @@

 from typing import Optional
 from lyric_search_new.constructors import LyricsResult
+from lyric_search_new import notifier
 import sys
 import logging
+import traceback
 sys.path.insert(1,'..')
-from . import cache
-from . import genius
-from . import lrclib
+from . import cache, redis_cache, genius, lrclib

 logger = logging.getLogger()
 logger.setLevel(logging.INFO)
@@ -22,6 +22,8 @@ class Aggregate:
         if not exclude_methods:
             exclude_methods: list = []
         self.exclude_methods = exclude_methods
+        self.redis_cache = redis_cache.RedisCache()
+        self.notifier = notifier.DiscordNotifier()

     async def search(self, artist: str, song: str, plain: bool = True) -> Optional[LyricsResult]:
         """
@@ -63,4 +65,15 @@ class Aggregate:
             if search_result:
                 break
             logging.info("%s: NOT FOUND!", source.label)
+        if not search_result:
+            logging.info("%s - %s: all sources exhausted, not found.",
+                         artist, song)
+            try:
+                await self.redis_cache.increment_found_count("failed")
+            except Exception as e:
+                traceback.print_exc()
+                logging.info("Could not increment redis failed counter: %s",
+                             str(e))
+                self.notifier.send(f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}",
+                                   f"Could not increment redis failed counter: {str(e)}")
         return search_result
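A compact sketch of the aggregation shape this hunk completes, with placeholder source objects and a placeholder counter (the names here are illustrative, not the repo's): try each source in order, stop on the first hit, and record a "failed" count when every source comes up empty.

    # Illustrative sketch only; "sources" and "CountingCache" stand in for the
    # repo's real lyric sources and its Redis-backed counter.
    import logging
    from typing import Optional

    class CountingCache:
        def __init__(self) -> None:
            self.counts: dict[str, int] = {}

        async def increment_found_count(self, src: str) -> None:
            self.counts[src] = self.counts.get(src, 0) + 1

    async def aggregate_search(sources, counter: CountingCache,
                               artist: str, song: str) -> Optional[str]:
        result: Optional[str] = None
        for source in sources:
            result = await source.search(artist, song)
            if result:
                break
            logging.info("%s: NOT FOUND!", source.label)
        if not result:
            logging.info("%s - %s: all sources exhausted, not found.", artist, song)
            await counter.increment_found_count("failed")
        return result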
@@ -53,7 +53,7 @@ class LRCLib:
         input_track: str = f"{artist} - {song}"
         returned_lyrics: str = ''
         async with ClientSession() as client:
-            async with client.get(self.lrclib_url,
+            async with await client.get(self.lrclib_url,
                 params = {
                     'artist_name': artist,
                     'track_name': song,
@@ -97,19 +97,18 @@ class RedisCache:

     async def get_found_counts(self) -> dict:
         """
-        Get found counts for all sources
+        Get found counts for all sources (and failed count)
         Args:
             None
         Returns:
             dict: In the form {'source': count, 'source2': count, ...}
         """
         try:
-            sources: list = ["cache", "lrclib", "genius"]
+            sources: list = ["cache", "lrclib", "genius", "failed"]
             counts: dict = {}
             for src in sources:
                 src_found_count = await self.redis_client.get(f"returned:{src}")
                 counts[src] = src_found_count
-            logging.info("Returning: %s", counts)
             return counts
         except Exception as e:
             await self.notifier.send(f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}", f"{str(e)}")
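For reference, a hedged sketch of how those `returned:<source>` counters might be read and bumped with redis-py's asyncio client; the key scheme comes from the diff, while the client setup and the decode option are assumptions:

    # Sketch using redis.asyncio; key names mirror the diff, everything else is assumed.
    import asyncio
    import redis.asyncio as redis

    async def get_found_counts(client: redis.Redis) -> dict:
        sources = ["cache", "lrclib", "genius", "failed"]
        counts: dict = {}
        for src in sources:
            # GET returns None (or bytes/str depending on decode_responses) if unset.
            counts[src] = await client.get(f"returned:{src}")
        return counts

    async def increment_found_count(client: redis.Redis, src: str) -> None:
        # INCR creates the key at 1 if it does not exist yet.
        await client.incr(f"returned:{src}")

    # Example:
    # client = redis.Redis(decode_responses=True)
    # asyncio.run(increment_found_count(client, "failed"))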
@@ -204,11 +203,11 @@ class RedisCache:
             jsonset = await self.redis_client.json().set(newkey, Path.root_path(),
                                                          redis_mapping)
             if not jsonset:
-                raise RedisException(f"Failed to store {lyr_result.artist} - {lyr_result.song} (SQLite id: {sqlite_id}):\n{jsonset}")
+                raise RedisException(f"Failed to store {lyr_result.artist} - {lyr_result.song} (SQLite id: {sqlite_id}) to redis:\n{jsonset}")
             logging.info("Stored %s - %s (related SQLite Row ID: %s) to %s",
                          lyr_result.artist, lyr_result.song, sqlite_id, newkey)
             await self.notifier.send("INFO",
-                                     f"Stored {lyr_result.artist} - {lyr_result.song} (related SQLite Row ID: {sqlite_id}) to {newkey}")
+                                     f"Stored {lyr_result.artist} - {lyr_result.song} (related SQLite Row ID: {sqlite_id}) to redis: {newkey}")
         except Exception as e:
             await self.notifier.send(f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}",
                                      f"Failed to store {lyr_result.artist} - {lyr_result.song}\
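The `redis_client.json().set(newkey, Path.root_path(), ...)` call is redis-py's RedisJSON interface, which requires a Redis server with the RedisJSON module loaded. A minimal hedged sketch of storing and reading back a mapping that way; the key and payload below are invented for illustration:

    # Sketch of redis-py's JSON commands; needs a Redis instance with RedisJSON.
    import asyncio
    import redis.asyncio as redis
    from redis.commands.json.path import Path

    async def store_lyrics(client: redis.Redis) -> None:
        newkey = "lyrics:example:artist:song"      # invented key for illustration
        redis_mapping = {"artist": "Artist", "song": "Song", "lyrics": "..."}
        # JSON.SET returns a truthy value on success, which the diff checks.
        jsonset = await client.json().set(newkey, Path.root_path(), redis_mapping)
        if not jsonset:
            raise RuntimeError(f"Failed to store {newkey} to redis")
        stored = await client.json().get(newkey)
        print(stored)

    # asyncio.run(store_lyrics(redis.Redis(decode_responses=True)))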