# api/lyric_search/sources/redis_cache.py
import logging
import traceback
import json
import time
2025-01-19 07:01:07 -05:00
import sys
2025-01-20 05:47:09 -05:00
import regex
2025-02-15 21:09:33 -05:00
from regex import Pattern
2025-01-30 19:33:04 -05:00
import asyncio
2025-02-15 21:09:33 -05:00
from typing import Union, Optional
sys.path.insert(1, "..")
from lyric_search import notifier
from lyric_search.constructors import LyricsResult
2025-01-18 13:26:00 -05:00
import redis.asyncio as redis
from redis.commands.search.query import Query # type: ignore
from redis.commands.search.indexDefinition import IndexDefinition, IndexType # type: ignore
from redis.commands.search.field import TextField, TagField # type: ignore
from redis.commands.json.path import Path # type: ignore
2025-01-18 13:26:00 -05:00
from . import private
logger = logging.getLogger()
log_level = logging.getLevelName(logger.level)
2025-01-18 13:26:00 -05:00
class RedisException(Exception):
    """Raised for Redis-specific failures (index creation, storage errors, etc.)."""
class RedisCache:
    """
    Redis Cache Methods
    """

    def __init__(self) -> None:
        """
        Set up the Redis client, Discord notifier, and sanitization regexes,
        then schedule index creation on the running event loop (best effort).
        """
        self.redis_client: redis.Redis = redis.Redis(password=private.REDIS_PW)
        self.notifier = notifier.DiscordNotifier()
        # When True, cache misses are reported via the notifier (see search()).
        self.notify_warnings = False
        # Applied in order by sanitize_input():
        #   [0] strips hyphens, [1] strips remaining non-alphanumeric,
        #   non-whitespace characters.
        self.regexes: list[Pattern] = [
            regex.compile(r"\-"),
            regex.compile(r"[^a-zA-Z0-9\s]"),
        ]
        try:
            # asyncio.get_event_loop() is deprecated when no loop is running
            # (Python 3.10+); get_running_loop() is the supported accessor.
            # If no loop is running, the RuntimeError lands in the except
            # below — the same outcome the old code had when create_task
            # failed on a non-running loop.
            asyncio.get_running_loop().create_task(self.create_index())
        except Exception as e:
            logging.debug("Failed to create redis create_index task: %s", str(e))
async def create_index(self) -> None:
2025-01-18 13:26:00 -05:00
"""Create Index"""
try:
schema = (
TextField("$.search_artist", as_name="artist"),
TextField("$.search_song", as_name="song"),
TextField("$.src", as_name="src"),
TextField("$.lyrics", as_name="lyrics"),
)
2025-01-18 13:34:10 -05:00
result = await self.redis_client.ft().create_index(
schema,
definition=IndexDefinition(
prefix=["lyrics:"], index_type=IndexType.JSON
),
)
2025-01-18 13:26:00 -05:00
if str(result) != "OK":
raise RedisException(f"Redis: Failed to create index: {result}")
2025-01-19 07:01:07 -05:00
except Exception as e:
logging.debug("Failed to create redis index: %s", str(e))
def sanitize_input(
self, artist: str, song: str, fuzzy: Optional[bool] = False
) -> tuple[str, str]:
2025-01-20 05:47:09 -05:00
"""
Sanitize artist/song input (convert to redis matchable fuzzy query)
Args:
artist: Input artist
song: Input song
fuzzy: Whether to create fuzzy query str
2025-01-20 05:47:09 -05:00
Returns:
tuple[str, str]: Tuple containing the 2 output strings (artist, song)
"""
artist = self.regexes[0].sub("", artist)
2025-01-20 05:47:09 -05:00
artist = self.regexes[1].sub("", artist).strip()
song = self.regexes[0].sub("", song)
2025-01-20 05:47:09 -05:00
song = self.regexes[1].sub("", song).strip()
if fuzzy:
artist = " ".join(
[f"(%{artist_word}%)" for artist_word in artist.split(" ")]
)
song = " ".join([f"(%{song_word}%)" for song_word in song.split(" ")])
2025-01-22 06:38:40 -05:00
return (artist, song)
2025-01-22 06:38:40 -05:00
async def increment_found_count(self, src: str) -> None:
"""
Increment the found count for a source
Args:
src (str): The source to increment
Returns:
None
"""
try:
src = src.strip().lower()
await self.redis_client.incr(f"returned:{src}")
except Exception as e:
2025-02-21 14:11:39 -05:00
file: str = __file__.rsplit("/", maxsplit=1)[-1]
await self.notifier.send(f"ERROR @ {file}", str(e))
2025-01-22 06:38:40 -05:00
traceback.print_exc()
2025-02-18 06:55:47 -05:00
async def get_found_counts(self) -> Optional[dict]:
2025-01-22 06:38:40 -05:00
"""
2025-01-23 13:02:03 -05:00
Get found counts for all sources (and failed count)
2025-01-22 06:38:40 -05:00
Returns:
dict: In the form {'source': count, 'source2': count, ...}
"""
try:
2025-01-23 13:02:03 -05:00
sources: list = ["cache", "lrclib", "genius", "failed"]
2025-02-16 06:53:41 -05:00
counts: dict[str, int] = {}
2025-01-22 06:38:40 -05:00
for src in sources:
src_found_count = await self.redis_client.get(f"returned:{src}")
counts[src] = int(src_found_count) # Redis returns bytes
2025-01-22 06:38:40 -05:00
return counts
except Exception as e:
2025-02-21 14:11:39 -05:00
file: str = __file__.rsplit("/", maxsplit=1)[-1]
await self.notifier.send(f"ERROR @ {file}", str(e))
2025-01-22 06:38:40 -05:00
traceback.print_exc()
2025-02-18 06:55:47 -05:00
return None
    async def search(
        self,
        artist: Optional[str] = None,
        song: Optional[str] = None,
        lyrics: Optional[str] = None,
    ) -> Optional[list[tuple]]:
        """
        Search Redis Cache

        Args:
            artist (Optional[str]): artist to search
            song (Optional[str]): song to search
            lyrics (Optional[str]): lyrics to search (optional, used in place of artist/song if provided)
        Returns:
            list[tuple]: List of redis results, tuple's first value is the redis key, second is the returned data
        """
        try:
            fuzzy_artist = None
            fuzzy_song = None
            # Sentinel: "!" for both fields requests a random cached entry.
            is_random_search = artist == "!" and song == "!"
            if lyrics:
                # to code later
                raise RedisException("Lyric search not yet implemented")
            if not is_random_search:
                logging.debug("Redis: Searching normally first")
                if not artist or not song:
                    logging.info(
                        "redis_cache:: search failed: No artist or song provided."
                    )
                    return None
                # First pass: exact match on the sanitized artist/song fields.
                (artist, song) = self.sanitize_input(artist, song)
                logging.debug("Seeking: %s - %s", artist, song)
                search_res: Union[dict, list] = await self.redis_client.ft().search(
                    Query(f"@artist:{artist} @song:{song}")  # type: ignore
                )
                # Keys are "lyrics:<id>"; strip the prefix so callers get the
                # bare id alongside the decoded JSON document.
                search_res_out: list[tuple] = [
                    (
                        result["id"].split(":", maxsplit=1)[1],
                        dict(json.loads(result["json"])),
                    )
                    for result in search_res.docs
                ]  # type: ignore
                if not search_res_out:
                    logging.debug(
                        "Redis: Normal search failed, trying with fuzzy search"
                    )
                    # Second pass: fuzzy query over at most the first 5 words
                    # of each field (keeps the RediSearch query bounded).
                    short_artist = " ".join(artist.split(" ")[0:5])
                    short_song = " ".join(song.split(" ")[0:5])
                    (fuzzy_artist, fuzzy_song) = self.sanitize_input(
                        artist=short_artist.strip(), song=short_song.strip(), fuzzy=True
                    )
                    search_res = await self.redis_client.ft().search(
                        Query(  # type: ignore
                            f"@artist:{fuzzy_artist} @song:{fuzzy_song}"
                        )
                    )
                    search_res_out = [
                        (
                            result["id"].split(":", maxsplit=1)[1],
                            dict(json.loads(result["json"])),
                        )
                        for result in search_res.docs
                    ]  # type: ignore
            else:
                # Random search: pull any key and return its JSON document.
                # NOTE(review): randomkey() presumably returns bytes here, so
                # str() yields "b'lyrics:<id>'"; splitting on ":" and dropping
                # the trailing quote with [:-1] extracts <id>. This assumes
                # every key in the DB is a "lyrics:*" key — confirm nothing
                # else shares this Redis DB.
                random_redis_key: str = await self.redis_client.randomkey()
                out_id: str = str(random_redis_key).split(":", maxsplit=1)[1][:-1]
                search_res = await self.redis_client.json().get(random_redis_key)
                search_res_out = [(out_id, search_res)]
            if not search_res_out and self.notify_warnings:
                # Optional Discord alert on cache misses (off by default).
                await self.notifier.send(
                    "WARNING", f"Redis cache miss for: `{artist} - {song}`"
                )
            return search_res_out
        except Exception as e:
            traceback.print_exc()
            # await self.notifier.send(f"ERROR @ {__file__.rsplit("/", maxsplit=1)[-1]}", f"{str(e)}\nSearch was: {artist} - {song}; fuzzy: {fuzzy_artist} - {fuzzy_song}")
            return None
async def redis_store(self, sqlite_id: int, lyr_result: LyricsResult) -> None:
"""
Store lyrics to redis cache
Args:
sqlite_id (int): the row id of the related SQLite db insertion
lyr_result (LyricsResult): the returned lyrics to cache
Returns:
None
"""
2025-01-19 08:01:20 -05:00
try:
(search_artist, search_song) = self.sanitize_input(
lyr_result.artist, lyr_result.song
)
2025-02-15 21:09:33 -05:00
redis_mapping: dict = {
"id": sqlite_id,
"src": lyr_result.src,
"date_retrieved": time.time(),
"artist": lyr_result.artist,
"search_artist": search_artist,
"search_song": search_song,
"search_artistsong": f"{search_artist}\n{search_song}",
"song": lyr_result.song,
"artistsong": f"{lyr_result.artist}\n{lyr_result.song}",
"confidence": lyr_result.confidence,
"lyrics": lyr_result.lyrics,
"tags": "(none)",
"liked": 0,
}
2025-02-15 21:09:33 -05:00
newkey: str = f"lyrics:000{sqlite_id}"
jsonset: bool = await self.redis_client.json().set(
newkey, Path.root_path(), redis_mapping
)
2025-01-19 08:01:20 -05:00
if not jsonset:
raise RedisException(
f"Failed to store {lyr_result.artist} - {lyr_result.song} (SQLite id: {sqlite_id}) to redis:\n{jsonset}"
)
logging.info(
"Stored %s - %s (related SQLite Row ID: %s) to %s",
lyr_result.artist,
lyr_result.song,
sqlite_id,
newkey,
)
await self.notifier.send(
"INFO",
f"Stored `{lyr_result.artist} - {lyr_result.song}` (related SQLite Row ID: `{sqlite_id}`) to redis: `{newkey}`",
)
2025-01-19 08:01:20 -05:00
except Exception as e:
2025-02-21 14:11:39 -05:00
file: str = __file__.rsplit("/", maxsplit=1)[-1]
await self.notifier.send(
f"ERROR @ {file}",
f"Failed to store `{lyr_result.artist} - {lyr_result.song}`\
(SQLite id: `{sqlite_id}`) to Redis:\n`{str(e)}`",
)