diff --git a/base.py b/base.py
index 4812d59..e938c79 100644
--- a/base.py
+++ b/base.py
@@ -1,6 +1,8 @@
#!/usr/bin/env python3.12
import importlib
+import sys
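+# Prepend the current working directory to sys.path so the local "endpoints.*" modules below resolve via importlib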
+sys.path.insert(0, ".")
import logging
import asyncio
from typing import Any
@@ -80,7 +82,6 @@ End Blacklisted Routes
"""
Actionable Routes
"""
-counter_endpoints = importlib.import_module("endpoints.counters").Counters(app, util, constants, glob_state)
randmsg_endpoint = importlib.import_module("endpoints.rand_msg").RandMsg(app, util, constants, glob_state)
transcription_endpoints = importlib.import_module("endpoints.transcriptions").Transcriptions(app, util, constants, glob_state)
ai_endpoints = importlib.import_module("endpoints.ai").AI(app, util, constants, glob_state)
diff --git a/endpoints/counters.py b/endpoints/counters.py
deleted file mode 100644
index 2284cb8..0000000
--- a/endpoints/counters.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python3.12
-
-from fastapi import FastAPI
-from pydantic import BaseModel
-
-class ValidCounterIncrementRequest(BaseModel):
- """
- - **counter**: counter to update
- """
-
- counter: str
- key: str
-
-
-class ValidCounterRetrievalRequest(BaseModel):
- """
- - **counter**: counter to retrieve (if none is provided, all counters are returned)
- """
-
- counter: str = "all"
-
-
-class Counters(FastAPI):
- """Counter Endpoints"""
- def __init__(self, app: FastAPI, util, constants, glob_state): # pylint: disable=super-init-not-called
- self.app = app
- self.util = util
- self.constants = constants
- self.glob_state = glob_state
-
- self.endpoints = {
- "counters/get": self.get_counter_handler,
- # "counters/increment": self.increment_counter_handler
- #tbd
- }
-
- for endpoint, handler in self.endpoints.items():
- app.add_api_route(f"/{endpoint}/", handler, methods=["POST"])
-
- async def get_counter_handler(self, data: ValidCounterRetrievalRequest):
- """
- /counters/get/
- Get current counter value
- """
-
- counter = data.counter
- if not counter == 'all':
- count = await self.glob_state.get_counter(counter)
- else:
- count = await self.glob_state.get_all_counters()
- return {
- 'counter': counter,
- 'count': count
-
- }
-
- # async def increment_counter_handler(self, data: ValidCounterIncrementRequest):
- # """
- # /counters/increment/
- # Increment counter value (requires PUT KEY)
- # """
-
- # return {
-
- # }
diff --git a/endpoints/karma.py b/endpoints/karma.py
index ac7a850..f5e965a 100644
--- a/endpoints/karma.py
+++ b/endpoints/karma.py
@@ -40,7 +40,8 @@ class ValidTopKarmaRequest(BaseModel):
class KarmaDB:
"""Karma DB Util"""
def __init__(self):
- self.db_path = os.path.join("/", "var", "lib", "singerdbs", "karma.db")
+ self.db_path = os.path.join("/", "usr", "local", "share",
+ "sqlite_dbs", "karma.db")
async def get_karma(self, keyword: str) -> int | dict:
"""Get Karma Value for Keyword"""
diff --git a/endpoints/lyric_search.py b/endpoints/lyric_search.py
index 5cc8b45..2de2e6c 100644
--- a/endpoints/lyric_search.py
+++ b/endpoints/lyric_search.py
@@ -67,8 +67,8 @@ class ValidLyricSearchLogRequest(BaseModel):
class CacheUtils:
"""Lyrics Cache DB Utils"""
def __init__(self):
- self.lyrics_db_path = os.path.join("/", "var", "lib",
- "singerdbs", "cached_lyrics.db")
+ self.lyrics_db_path = os.path.join("/", "usr", "local", "share",
+ "sqlite_dbs", "cached_lyrics.db")
async def check_typeahead(self, s: str, pre_query: str | None = None):
"""Check s against artists stored - for typeahead"""
@@ -94,16 +94,13 @@ class LyricSearch(FastAPI):
self.glob_state = glob_state
self.cache_utils = CacheUtils()
self.notifier = notifier.DiscordNotifier()
- self.lyrics_engine = importlib.import_module("lyrics_engine").LyricsEngine()
- self.endpoint_name = "lyric_search"
- self.endpoint2_name = "lyric_cache_list"
self.endpoints = {
"typeahead/artist": self.artist_typeahead_handler,
"typeahead/song": self.song_typeahead_handler,
"lyric_search": self.lyric_search_handler,
- "lyric_cache_list": self.lyric_cache_list_handler,
+ # "lyric_cache_list": self.lyric_cache_list_handler,
}
self.acceptable_request_sources = [
@@ -123,14 +120,14 @@ class LyricSearch(FastAPI):
for endpoint, handler in self.endpoints.items():
app.add_api_route(f"/{endpoint}/", handler, methods=["POST"])
- async def lyric_cache_list_handler(self):
- """
- Get currently cached lyrics entries
- """
- return {
- 'err': False,
- 'data': await self.lyrics_engine.listCacheEntries()
- }
+ # async def lyric_cache_list_handler(self):
+ # """
+ # Get currently cached lyrics entries
+ # """
+ # return {
+ # 'err': False,
+ # 'data': await self.lyrics_engine.listCacheEntries()
+ # }
async def artist_typeahead_handler(self, data: ValidTypeAheadRequest):
"""Artist Type Ahead Handler"""
@@ -158,15 +155,15 @@ class LyricSearch(FastAPI):
typeahead_list = [str(r.get('song')) for r in typeahead_result]
return typeahead_list
- async def lyric_search_log_handler(self, data: ValidLyricSearchLogRequest):
- """Lyric Search Log Handler"""
- include_radio = data.webradio
- await self.glob_state.increment_counter('lyrichistory_requests')
- last_10k_sings = await self.lyrics_engine.getHistory(limit=10000, webradio=include_radio)
- return {
- 'err': False,
- 'history': last_10k_sings
- }
+ # async def lyric_search_log_handler(self, data: ValidLyricSearchLogRequest):
+ # """Lyric Search Log Handler"""
+ # include_radio = data.webradio
+ # await self.glob_state.increment_counter('lyrichistory_requests')
+ # last_10k_sings = await self.lyrics_engine.getHistory(limit=10000, webradio=include_radio)
+ # return {
+ # 'err': False,
+ # 'history': last_10k_sings
+ # }
async def lyric_search_handler(self, data: ValidLyricRequest):
"""
diff --git a/endpoints/rand_msg.py b/endpoints/rand_msg.py
index f2df8ee..1678b5b 100644
--- a/endpoints/rand_msg.py
+++ b/endpoints/rand_msg.py
@@ -41,18 +41,15 @@ class RandMsg(FastAPI):
match db_rand_selected:
case 0:
randmsg_db_path = os.path.join("/",
- "var",
- "lib",
- "singerdbs",
- "qajoke.db") # For qajoke db
+ "usr", "local", "share",
+ "sqlite_dbs", "qajoke.db") # For qajoke db
db_query = "SELECT id, ('Q: ' || question || '
A: ' \
|| answer) FROM jokes ORDER BY RANDOM() LIMIT 1" # For qajoke db
title_attr = "QA Joke DB"
case 1 | 9:
randmsg_db_path = os.path.join("/",
- "var",
- "lib",
- "singerdbs",
+ "usr", "local", "share",
+ "sqlite_dbs",
"randmsg.db") # For randmsg db
db_query = "SELECT id, msg FROM msgs WHERE \
LENGTH(msg) <= 180 ORDER BY RANDOM() LIMIT 1" # For randmsg db
@@ -61,45 +58,40 @@ class RandMsg(FastAPI):
title_attr = "Random Msg DB"
case 2:
randmsg_db_path = os.path.join("/",
- "var",
- "lib",
- "singerdbs",
+ "usr", "local", "share",
+ "sqlite_dbs",
"trump.db") # For Trump Tweet DB
db_query = "SELECT id, content FROM tweets \
ORDER BY RANDOM() LIMIT 1" # For Trump Tweet DB
title_attr = "Trump Tweet DB"
case 3:
randmsg_db_path = os.path.join("/",
- "var",
- "lib",
- "singerdbs",
+ "usr", "local", "share",
+ "sqlite_dbs",
"philo.db") # For Philo DB
db_query = "SELECT id, (content || '
- ' || speaker) FROM quotes \
ORDER BY RANDOM() LIMIT 1" # For Philo DB
title_attr = "Philosophical Quotes DB"
case 4:
randmsg_db_path = os.path.join("/",
- "var",
- "lib",
- "singerdbs",
+ "usr", "local", "share",
+ "sqlite_dbs",
"hate.db") # For Hate DB
db_query = """SELECT id, ("" || comment) FROM hate_speech \
WHERE length(comment) <= 180 ORDER BY RANDOM() LIMIT 1"""
title_attr = "Hate Speech DB"
case 5:
randmsg_db_path = os.path.join("/",
- "var",
- "lib",
- "singerdbs",
+ "usr", "local", "share",
+ "sqlite_dbs",
"rjokes.db") # r/jokes DB
db_query = """SELECT id, (title || "
" || body) FROM jokes \
WHERE score >= 10000 ORDER BY RANDOM() LIMIT 1"""
title_attr = "r/jokes DB"
case 6:
randmsg_db_path = os.path.join("/",
- "var",
- "lib",
- "singerdbs",
+ "usr", "local", "share",
+ "sqlite_dbs",
"donnies.db") # Donnies DB
random.seed()
twilight_or_mice = random.choice(["twilight", "mice"])
diff --git a/endpoints/transcriptions.py b/endpoints/transcriptions.py
index 954c8c9..661d7aa 100644
--- a/endpoints/transcriptions.py
+++ b/endpoints/transcriptions.py
@@ -64,15 +64,18 @@ class Transcriptions(FastAPI):
match show_id:
case 0:
- db_path = os.path.join("/", "var", "lib", "singerdbs", "sp.db")
+ db_path = os.path.join("/", "usr", "local", "share",
+ "sqlite_dbs", "sp.db")
db_query = """SELECT DISTINCT(("S" || Season || "E" || Episode || " " || Title)), ID FROM SP_DAT ORDER BY Season, Episode"""
show_title = "South Park"
case 1:
- db_path = os.path.join("/", "var", "lib", "singerdbs", "futur.db")
+ db_path = os.path.join("/", "usr", "local", "share",
+ "sqlite_dbs", "futur.db")
db_query = """SELECT DISTINCT(("S" || EP_S || "E" || EP_EP || " " || EP_TITLE)), EP_ID FROM clean_dialog ORDER BY EP_S, EP_EP"""
show_title = "Futurama"
case 2:
- db_path = os.path.join("/", "var", "lib", "singerdbs", "parks.db")
+ db_path = os.path.join("/", "usr", "local", "share",
+ "sqlite_dbs", "parks.db")
db_query = """SELECT DISTINCT(("S" || EP_S || "E" || EP_EP || " " || EP_TITLE)), EP_ID FROM clean_dialog ORDER BY EP_S, EP_EP"""
show_title = "Parks And Rec"
case _:
@@ -102,13 +105,16 @@ class Transcriptions(FastAPI):
# pylint: disable=line-too-long
match show_id:
case 0:
- db_path = os.path.join("/", "var", "lib", "singerdbs", "sp.db")
+ db_path = os.path.join("/", "usr", "local", "share",
+ "sqlite_dbs", "sp.db")
db_query = """SELECT ("S" || Season || "E" || Episode || " " || Title), Character, Line FROM SP_DAT WHERE ID = ?"""
case 1:
- db_path = os.path.join("/", "var", "lib", "singerdbs", "futur.db")
+ db_path = os.path.join("/", "usr", "local", "share",
+ "sqlite_dbs", "futur.db")
db_query = """SELECT ("S" || EP_S || "E" || EP_EP || " " || EP_TITLE || "
Opener: " || EP_OPENER || ""), EP_LINE_SPEAKER, EP_LINE FROM clean_dialog WHERE EP_ID = ? ORDER BY LINE_ID ASC"""
case 2:
- db_path = os.path.join("/", "var", "lib", "singerdbs", "parks.db")
+ db_path = os.path.join("/", "usr", "local", "share",
+ "sqlite_dbs", "parks.db")
db_query = """SELECT ("S" || EP_S || "E" || EP_EP || " " || EP_TITLE), EP_LINE_SPEAKER, EP_LINE FROM clean_dialog WHERE EP_ID = ? ORDER BY id ASC"""
case _:
diff --git a/lyric_search/sources/cache.py b/lyric_search/sources/cache.py
index 6ba4667..f4d8bd4 100644
--- a/lyric_search/sources/cache.py
+++ b/lyric_search/sources/cache.py
@@ -23,15 +23,14 @@ log_level = logging.getLevelName(logger.level)
class Cache:
"""Cache Search Module"""
def __init__(self) -> None:
- self.cache_db: str = os.path.join("/", "var",
- "lib", "singerdbs",
- "cached_lyrics.db")
+ self.cache_db: str = os.path.join("/", "usr", "local", "share",
+ "sqlite_dbs", "cached_lyrics.db")
self.redis_cache = redis_cache.RedisCache()
self.notifier = notifier.DiscordNotifier()
self.cache_pre_query: str = "pragma journal_mode = WAL; pragma synchronous = normal;\
pragma temp_store = memory; pragma mmap_size = 30000000000;"
- self.sqlite_exts: list[str] = ['/usr/local/lib/python3.11/dist-packages/spellfix1.cpython-311-x86_64-linux-gnu.so']
+ self.sqlite_exts: list[str] = ['/home/singer/api/solibs/spellfix1.cpython-311-x86_64-linux-gnu.so']
self.label: str = "Cache"
def get_matched(self, matched_candidate: tuple, confidence: int,
diff --git a/state.py b/state.py
index 7dbf79b..bb186ad 100644
--- a/state.py
+++ b/state.py
@@ -15,7 +15,8 @@ class State(FastAPI):
"""Global State for API"""
def __init__(self, app: FastAPI, util, constants): # pylint: disable=unused-argument
super().__init__()
- self.counter_db_path = os.path.join("/", "var", "lib", "singerdbs", "stats.db")
+ self.counter_db_path = os.path.join("/", "usr", "local", "share",
+ "sqlite_dbs", "stats.db")
self.counters = {
str(counter): 0 for counter in constants.AVAILABLE_COUNTERS
}