codey 2025-01-22 06:38:40 -05:00
parent 38dbddd297
commit 4b16a4a265
5 changed files with 64 additions and 11 deletions

View File

@@ -10,7 +10,7 @@ from redis.commands.search.query import Query
 from redis.commands.search.indexDefinition import IndexDefinition, IndexType
 from redis.commands.search.field import TextField, TagField
 from redis.commands.json.path import Path
-from lyric_search_new.sources import private, cache as LyricsCache
+from lyric_search_new.sources import private, cache as LyricsCache, redis_cache

 class Misc(FastAPI):
     """Misc Endpoints"""
@@ -20,10 +20,12 @@ class Misc(FastAPI):
         self.constants = constants
         self.glob_state = glob_state
         self.lyr_cache = LyricsCache.Cache()
+        self.redis_cache = redis_cache.RedisCache()
         self.redis_client = redis.Redis(password=private.REDIS_PW)
         self.endpoints = {
             "widget/redis": self.homepage_redis_widget,
             "widget/sqlite": self.homepage_sqlite_widget,
+            "widget/lyrics": self.homepage_lyrics_widget,
         }

         for endpoint, handler in self.endpoints.items():
@@ -32,7 +34,7 @@ class Misc(FastAPI):
     async def homepage_redis_widget(self) -> dict:
         """
         /widget/redis/
-        Homepage Widget Handler
+        Homepage Redis Widget Handler
         Args:
             None
         Returns:
@@ -61,7 +63,7 @@ class Misc(FastAPI):
     async def homepage_sqlite_widget(self) -> dict:
         """
         /widget/sqlite/
-        Homepage Widget Handler
+        Homepage SQLite Widget Handler
         Args:
             None
         Returns:
@@ -74,4 +76,17 @@ class Misc(FastAPI):
             'storedRows': row_count,
             'distinctArtists': distinct_artists,
             'lyricsLength': lyrics_length,
         }
+
+    async def homepage_lyrics_widget(self) -> dict:
+        """
+        /widget/lyrics/
+        Homepage Lyrics Widget Handler
+        Args:
+            None
+        Returns:
+            dict
+        """
+        counts = await self.redis_cache.get_found_counts()
+        logging.info("Got counts: %s - type: %s", counts, type(counts))
+        return counts
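The new /widget/lyrics route simply surfaces RedisCache.get_found_counts() (added later in this commit) over HTTP. A rough sketch of how a client might consume it; the base URL, port, and the way self.endpoints is wired into FastAPI routes are assumptions, as that wiring is not shown in this diff:

import asyncio
import httpx

async def main() -> None:
    # Hypothetical host/port; the actual deployment is not part of this commit.
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        resp = await client.get("/widget/lyrics")
        # Expected shape, per get_found_counts(): {"cache": ..., "lrclib": ..., "genius": ...}
        print(resp.json())

asyncio.run(main())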

View File

@@ -264,7 +264,8 @@ class Cache:
                    matched.time = time_diff
                    logging.info("Found %s on redis cache, skipping SQLite...",
                                 f"{artist} - {song}")
+                   await self.redis_cache.increment_found_count(self.label)
                    return matched

        """SQLite: Fallback"""
@@ -302,6 +303,7 @@ class Cache:
                time_end: float = time.time()
                time_diff: float = time_end - time_start
                matched.time = time_diff
+               await self.redis_cache.increment_found_count(self.label)
                return matched
        except:
            traceback.print_exc()

View File

@@ -10,9 +10,7 @@ from typing import Optional
 from aiohttp import ClientTimeout, ClientSession
 from bs4 import BeautifulSoup, ResultSet
 import html as htm
-from . import private
-from . import common
-from . import cache
+from . import private, common, cache, redis_cache
 from lyric_search_new import utils
 from lyric_search_new.constructors import LyricsResult
@@ -36,6 +34,7 @@ class Genius:
         self.datautils = utils.DataUtils()
         self.matcher = utils.TrackMatcher()
         self.cache = cache.Cache()
+        self.redis_cache = redis_cache.RedisCache()

     # pylint: disable=unused-argument
     async def search(self, artist: str, song: str, **kwargs) -> Optional[LyricsResult]:
@@ -123,6 +122,7 @@ class Genius:
                lyrics=returned_lyrics,
                confidence=confidence,
                time=time_diff)
+            await self.redis_cache.increment_found_count(self.label)
            await self.cache.store(matched)
            return matched

View File

@@ -10,8 +10,7 @@ from typing import Optional
 from aiohttp import ClientTimeout, ClientSession
 from lyric_search_new import utils
 from lyric_search_new.constructors import LyricsResult
-from . import common
-from . import cache
+from . import common, cache, redis_cache

 logger = logging.getLogger()
 log_level = logging.getLevelName(logger.level)
@@ -31,6 +30,7 @@ class LRCLib:
         self.datautils = utils.DataUtils()
         self.matcher = utils.TrackMatcher()
         self.cache = cache.Cache()
+        self.redis_cache = redis_cache.RedisCache()

     async def search(self, artist: str, song: str, plain: bool = True) -> Optional[LyricsResult]:
         """
@@ -121,6 +121,7 @@ class LRCLib:
                lyrics=returned_lyrics if plain else lrc_obj,
                confidence=confidence,
                time=time_diff)
+            await self.redis_cache.increment_found_count(self.label)
            await self.cache.store(matched)
            return matched
        except:

View File

@@ -78,7 +78,42 @@ class RedisCache:
         if fuzzy:
             artist = " ".join([f"(%{artist_word}%)" for artist_word in artist.split(" ")])
             song = " ".join([f"(%{song_word}%)" for song_word in song.split(" ")])
         return (artist, song)
+
+    async def increment_found_count(self, src: str) -> None:
+        """
+        Increment the found count for a source
+        Args:
+            src (str): The source to increment
+        Returns:
+            None
+        """
+        try:
+            src = src.strip().lower()
+            await self.redis_client.incr(f"returned:{src}")
+        except Exception as e:
+            await self.notifier.send(f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}", f"{str(e)}")
+            traceback.print_exc()
+
+    async def get_found_counts(self) -> dict:
+        """
+        Get found counts for all sources
+        Args:
+            None
+        Returns:
+            dict: In the form {'source': count, 'source2': count, ...}
+        """
+        try:
+            sources: list = ["cache", "lrclib", "genius"]
+            counts: dict = {}
+            for src in sources:
+                src_found_count = await self.redis_client.get(f"returned:{src}")
+                counts[src] = src_found_count
+            logging.info("Returning: %s", counts)
+            return counts
+        except Exception as e:
+            await self.notifier.send(f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}", f"{str(e)}")
+            traceback.print_exc()
+
     async def search(self, **kwargs) -> list[tuple]:
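Taken together, the two new RedisCache helpers implement a simple per-source hit counter: INCR creates and atomically bumps an integer under returned:<source>, and GET reads it back. A minimal standalone sketch of that mechanism, assuming redis-py's asyncio client and an unauthenticated local Redis (the real client is constructed elsewhere with private.REDIS_PW):

import asyncio
from redis import asyncio as aioredis

async def main() -> None:
    # decode_responses=True makes GET return str instead of bytes; without it,
    # the values read back by get_found_counts() would be raw bytes.
    client = aioredis.Redis(decode_responses=True)
    await client.incr("returned:genius")       # what increment_found_count("genius") does
    raw = await client.get("returned:genius")  # what get_found_counts() reads back
    print(int(raw or 0))                       # counters come back as strings until cast
    await client.close()

asyncio.run(main())

Keeping one key per source means each increment is a single atomic INCR, so concurrent lookups cannot race on a shared read-modify-write.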