# api/lyric_search/sources/redis_cache.py
#!/usr/bin/env python3.12
import logging
import traceback
import json
import time
2025-01-19 07:01:07 -05:00
import sys
2025-01-20 05:47:09 -05:00
import regex
2025-02-15 21:09:33 -05:00
from regex import Pattern
2025-01-30 19:33:04 -05:00
import asyncio
2025-02-15 21:09:33 -05:00
from typing import Union, Optional
2025-01-19 07:01:07 -05:00
sys.path.insert(1,'..')
from lyric_search import notifier
from lyric_search.constructors import LyricsResult
2025-01-18 13:26:00 -05:00
import redis.asyncio as redis
2025-01-18 13:34:10 -05:00
from redis.commands.search.query import Query
2025-01-18 13:26:00 -05:00
from redis.commands.search.indexDefinition import IndexDefinition, IndexType
from redis.commands.search.field import TextField, TagField
from redis.commands.json.path import Path
2025-01-18 13:26:00 -05:00
from . import private
logger = logging.getLogger()
log_level = logging.getLevelName(logger.level)
class RedisException(Exception):
"""
Redis Exception
"""
class RedisCache:
"""
Redis Cache Methods
"""
2025-01-19 07:09:05 -05:00
def __init__(self) -> None:
2025-01-18 13:34:10 -05:00
self.redis_client = redis.Redis(password=private.REDIS_PW)
2025-01-19 07:01:07 -05:00
self.notifier = notifier.DiscordNotifier()
self.notify_warnings = True
2025-02-15 21:09:33 -05:00
self.regexes: list[Pattern] = [
2025-01-20 05:47:09 -05:00
regex.compile(r'\-'),
regex.compile(r'[^a-zA-Z0-9\s]'),
]
2025-01-30 19:33:04 -05:00
try:
asyncio.get_event_loop().create_task(self.create_index())
2025-02-15 21:09:33 -05:00
except Exception as e:
logging.debug("Failed to create redis create_index task: %s",
str(e))
2025-01-18 13:26:00 -05:00
2025-01-19 07:09:05 -05:00
async def create_index(self) -> None:
2025-01-18 13:26:00 -05:00
"""Create Index"""
try:
schema = (
TextField("$.search_artist", as_name="artist"),
TextField("$.search_song", as_name="song"),
2025-01-18 13:26:00 -05:00
TextField("$.src", as_name="src"),
TextField("$.lyrics", as_name="lyrics")
)
2025-01-18 13:34:10 -05:00
result = await self.redis_client.ft().create_index(
schema, definition=IndexDefinition(prefix=["lyrics:"], index_type=IndexType.JSON))
2025-01-18 13:26:00 -05:00
if str(result) != "OK":
raise RedisException(f"Redis: Failed to create index: {result}")
2025-01-19 07:01:07 -05:00
except Exception as e:
2025-02-15 21:09:33 -05:00
logging.debug("Failed to create redis index: %s",
str(e))
2025-01-20 05:47:09 -05:00
2025-02-15 21:09:33 -05:00
def sanitize_input(self, artist: str, song: str,
fuzzy: Optional[bool] = False) -> tuple[str, str]:
2025-01-20 05:47:09 -05:00
"""
Sanitize artist/song input (convert to redis matchable fuzzy query)
Args:
artist: Input artist
song: Input song
fuzzy: Whether to create fuzzy query str
2025-01-20 05:47:09 -05:00
Returns:
tuple[str, str]: Tuple containing the 2 output strings (artist, song)
"""
artist = self.regexes[0].sub("", artist)
2025-01-20 05:47:09 -05:00
artist = self.regexes[1].sub("", artist).strip()
song = self.regexes[0].sub("", song)
2025-01-20 05:47:09 -05:00
song = self.regexes[1].sub("", song).strip()
if fuzzy:
artist = " ".join([f"(%{artist_word}%)" for artist_word in artist.split(" ")])
song = " ".join([f"(%{song_word}%)" for song_word in song.split(" ")])
2025-01-22 06:38:40 -05:00
return (artist, song)
async def increment_found_count(self, src: str) -> None:
"""
Increment the found count for a source
Args:
src (str): The source to increment
Returns:
None
"""
try:
src = src.strip().lower()
await self.redis_client.incr(f"returned:{src}")
except Exception as e:
await self.notifier.send(f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}", f"{str(e)}")
traceback.print_exc()
async def get_found_counts(self) -> dict:
"""
2025-01-23 13:02:03 -05:00
Get found counts for all sources (and failed count)
2025-01-22 06:38:40 -05:00
Args:
None
Returns:
dict: In the form {'source': count, 'source2': count, ...}
"""
try:
2025-01-23 13:02:03 -05:00
sources: list = ["cache", "lrclib", "genius", "failed"]
2025-02-16 06:53:41 -05:00
counts: dict[str, int] = {}
2025-01-22 06:38:40 -05:00
for src in sources:
src_found_count = await self.redis_client.get(f"returned:{src}")
2025-02-16 06:53:41 -05:00
counts[src] = int(src_found_count) # Redis returns bytes
2025-01-22 06:38:40 -05:00
return counts
except Exception as e:
await self.notifier.send(f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}", f"{str(e)}")
traceback.print_exc()
2025-01-20 05:47:09 -05:00
2025-01-19 13:28:17 -05:00
2025-02-15 21:09:33 -05:00
async def search(self, artist: Optional[str] = None,
song: Optional[str] = None,
lyrics: Optional[str] = None) -> list[tuple]:
2025-01-19 07:05:01 -05:00
"""
Search Redis Cache
Args:
artist (Optional[str]): artist to search
song (Optional[str]): song to search
lyrics (Optional[str]): lyrics to search (optional, used in place of artist/song if provided)
Returns:
list[tuple]: List of redis results, tuple's first value is the redis key, second is the returned data
2025-01-18 13:26:00 -05:00
"""
try:
2025-02-05 20:23:06 -05:00
fuzzy_artist = None
fuzzy_song = None
2025-01-18 14:17:39 -05:00
is_random_search = artist == "!" and song == "!"
2025-01-18 13:26:00 -05:00
if lyrics:
# to code later
raise RedisException("Lyric search not yet implemented")
2025-01-18 14:17:39 -05:00
if not is_random_search:
logging.debug("Redis: Searching normally first")
(artist, song) = self.sanitize_input(artist, song)
logging.debug("Seeking: %s - %s", artist, song)
2025-02-15 21:09:33 -05:00
search_res: Union[dict, list] = await self.redis_client.ft().search(Query(
2025-01-20 05:47:09 -05:00
f"@artist:{artist} @song:{song}"
))
2025-02-15 21:09:33 -05:00
search_res_out: list[tuple] = [(result['id'].split(":",
2025-01-19 07:01:07 -05:00
maxsplit=1)[1], dict(json.loads(result['json'])))
2025-01-18 14:17:39 -05:00
for result in search_res.docs]
if not search_res_out:
logging.debug("Redis: Normal search failed, trying with fuzzy search")
2025-01-26 17:56:04 -05:00
(fuzzy_artist, fuzzy_song) = self.sanitize_input(artist=artist.split(" ")[0:5],
song=song.split(" ")[0:6], fuzzy=True)
search_res = await self.redis_client.ft().search(Query(
f"@artist:{fuzzy_artist} @song:{fuzzy_song}"
))
search_res_out = [(result['id'].split(":",
maxsplit=1)[1], dict(json.loads(result['json'])))
for result in search_res.docs]
2025-01-18 14:17:39 -05:00
else:
2025-02-15 21:09:33 -05:00
random_redis_key: str = await self.redis_client.randomkey()
out_id: str = str(random_redis_key).split(":",
2025-01-18 14:46:05 -05:00
maxsplit=1)[1][:-1]
2025-01-18 14:17:39 -05:00
search_res = await self.redis_client.json().get(random_redis_key)
2025-01-18 14:46:05 -05:00
search_res_out = [(out_id, search_res)]
2025-01-19 07:01:07 -05:00
if not search_res_out and self.notify_warnings:
await self.notifier.send("WARNING", f"Redis cache miss for: \n## *{artist} - {song}*")
2025-01-18 13:26:00 -05:00
return search_res_out
2025-01-19 07:01:07 -05:00
except Exception as e:
2025-01-18 13:26:00 -05:00
traceback.print_exc()
2025-02-05 20:23:06 -05:00
# await self.notifier.send(f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}", f"{str(e)}\nSearch was: {artist} - {song}; fuzzy: {fuzzy_artist} - {fuzzy_song}")
2025-02-15 21:09:33 -05:00
async def redis_store(self, sqlite_id: int,
lyr_result: LyricsResult) -> None:
"""
Store lyrics to redis cache
Args:
sqlite_id (int): the row id of the related SQLite db insertion
lyr_result (LyricsResult): the returned lyrics to cache
Returns:
None
"""
2025-01-19 08:01:20 -05:00
try:
(search_artist, search_song) = self.sanitize_input(lyr_result.artist,
lyr_result.song)
2025-02-15 21:09:33 -05:00
redis_mapping: dict = {
'id': sqlite_id,
'src': lyr_result.src,
'date_retrieved': time.time(),
'artist': lyr_result.artist,
'search_artist': search_artist,
'search_song': search_song,
'search_artistsong': f'{search_artist}\n{search_song}',
'song': lyr_result.song,
'artistsong': f"{lyr_result.artist}\n{lyr_result.song}",
'confidence': lyr_result.confidence,
'lyrics': lyr_result.lyrics,
'tags': '(none)',
'liked': 0,
}
2025-02-15 21:09:33 -05:00
newkey: str = f"lyrics:000{sqlite_id}"
jsonset: bool = await self.redis_client.json().set(newkey, Path.root_path(),
2025-01-19 08:01:20 -05:00
redis_mapping)
if not jsonset:
2025-01-23 13:02:03 -05:00
raise RedisException(f"Failed to store {lyr_result.artist} - {lyr_result.song} (SQLite id: {sqlite_id}) to redis:\n{jsonset}")
2025-01-19 08:01:20 -05:00
logging.info("Stored %s - %s (related SQLite Row ID: %s) to %s",
lyr_result.artist, lyr_result.song, sqlite_id, newkey)
await self.notifier.send("INFO",
2025-01-23 13:02:03 -05:00
f"Stored {lyr_result.artist} - {lyr_result.song} (related SQLite Row ID: {sqlite_id}) to redis: {newkey}")
2025-01-19 08:01:20 -05:00
except Exception as e:
2025-01-20 05:50:14 -05:00
await self.notifier.send(f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}",
2025-01-19 08:01:20 -05:00
f"Failed to store {lyr_result.artist} - {lyr_result.song}\
(SQLite id: {sqlite_id}) to Redis:\n{str(e)}")