requests_async

parent 06c2d48818
commit 0b47373127

base.py
@@ -1,6 +1,8 @@
 #!/usr/bin/env python3.12
 
 import importlib
+import sys
+sys.path.insert(0, ".")
 import logging
 import asyncio
 from typing import Any

@@ -80,7 +82,6 @@ End Blacklisted Routes
 """
 Actionable Routes
 """
-counter_endpoints = importlib.import_module("endpoints.counters").Counters(app, util, constants, glob_state)
 randmsg_endpoint = importlib.import_module("endpoints.rand_msg").RandMsg(app, util, constants, glob_state)
 transcription_endpoints = importlib.import_module("endpoints.transcriptions").Transcriptions(app, util, constants, glob_state)
 ai_endpoints = importlib.import_module("endpoints.ai").AI(app, util, constants, glob_state)

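The hunks above keep the importlib-based wiring for the remaining endpoint modules; the added sys.path.insert(0, ".") is what lets the "endpoints.*" packages resolve from the repository root. A minimal sketch of that pattern, assuming each module exposes a class taking (app, util, constants, glob_state); util, constants and glob_state are project objects not shown in this diff, so placeholders stand in for them:

```python
# Sketch of the dynamic endpoint registration pattern kept in base.py.
# Running it requires the repo's endpoints package to be importable.
import importlib
import sys

sys.path.insert(0, ".")  # added by this commit so "endpoints.*" resolves from the repo root

from fastapi import FastAPI

app = FastAPI()
util = constants = glob_state = None  # placeholders for the real project singletons

# Mirrors the modules still registered after the counters endpoint was dropped.
for module_path, class_name in [
    ("endpoints.rand_msg", "RandMsg"),
    ("endpoints.transcriptions", "Transcriptions"),
    ("endpoints.ai", "AI"),
]:
    endpoint_cls = getattr(importlib.import_module(module_path), class_name)
    endpoint_cls(app, util, constants, glob_state)  # each class adds its own routes
```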
@@ -1,65 +0,0 @@
-#!/usr/bin/env python3.12
-
-from fastapi import FastAPI
-from pydantic import BaseModel
-
-class ValidCounterIncrementRequest(BaseModel):
-    """
-    - **counter**: counter to update
-    """
-
-    counter: str
-    key: str
-
-
-class ValidCounterRetrievalRequest(BaseModel):
-    """
-    - **counter**: counter to retrieve (if none is provided, all counters are returned)
-    """
-
-    counter: str = "all"
-
-
-class Counters(FastAPI):
-    """Counter Endpoints"""
-    def __init__(self, app: FastAPI, util, constants, glob_state): # pylint: disable=super-init-not-called
-        self.app = app
-        self.util = util
-        self.constants = constants
-        self.glob_state = glob_state
-
-        self.endpoints = {
-            "counters/get": self.get_counter_handler,
-            # "counters/increment": self.increment_counter_handler
-            #tbd
-        }
-
-        for endpoint, handler in self.endpoints.items():
-            app.add_api_route(f"/{endpoint}/", handler, methods=["POST"])
-
-    async def get_counter_handler(self, data: ValidCounterRetrievalRequest):
-        """
-        /counters/get/
-        Get current counter value
-        """
-
-        counter = data.counter
-        if not counter == 'all':
-            count = await self.glob_state.get_counter(counter)
-        else:
-            count = await self.glob_state.get_all_counters()
-        return {
-            'counter': counter,
-            'count': count
-
-        }
-
-    # async def increment_counter_handler(self, data: ValidCounterIncrementRequest):
-    #     """
-    #     /counters/increment/
-    #     Increment counter value (requires PUT KEY)
-    #     """
-
-    #     return {
-
-    #     }

@@ -40,7 +40,8 @@ class ValidTopKarmaRequest(BaseModel):
 class KarmaDB:
     """Karma DB Util"""
     def __init__(self):
-        self.db_path = os.path.join("/", "var", "lib", "singerdbs", "karma.db")
+        self.db_path = os.path.join("/", "usr", "local", "share",
+                                    "sqlite_dbs", "karma.db")
 
     async def get_karma(self, keyword: str) -> int | dict:
         """Get Karma Value for Keyword"""

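Every file in this commit repeats the same substitution: database files move from /var/lib/singerdbs/ to /usr/local/share/sqlite_dbs/. A small helper like the one below, not part of the commit and purely illustrative, would centralise that base path so a future move needs only one edit:

```python
# Hypothetical helper, not in the diff: one place for the new SQLite base directory.
import os

SQLITE_DB_ROOT = os.path.join("/", "usr", "local", "share", "sqlite_dbs")

def db_path(name: str) -> str:
    """Absolute path of a named database under the new root."""
    return os.path.join(SQLITE_DB_ROOT, name)

assert db_path("karma.db") == "/usr/local/share/sqlite_dbs/karma.db"
```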
@@ -67,8 +67,8 @@ class ValidLyricSearchLogRequest(BaseModel):
 class CacheUtils:
     """Lyrics Cache DB Utils"""
     def __init__(self):
-        self.lyrics_db_path = os.path.join("/", "var", "lib",
-                                           "singerdbs", "cached_lyrics.db")
+        self.lyrics_db_path = os.path.join("/", "usr", "local", "share",
+                                           "sqlite_dbs", "cached_lyrics.db")
 
     async def check_typeahead(self, s: str, pre_query: str | None = None):
         """Check s against artists stored - for typeahead"""

@@ -94,16 +94,13 @@ class LyricSearch(FastAPI):
         self.glob_state = glob_state
         self.cache_utils = CacheUtils()
         self.notifier = notifier.DiscordNotifier()
-        self.lyrics_engine = importlib.import_module("lyrics_engine").LyricsEngine()
 
-        self.endpoint_name = "lyric_search"
-        self.endpoint2_name = "lyric_cache_list"
 
         self.endpoints = {
             "typeahead/artist": self.artist_typeahead_handler,
             "typeahead/song": self.song_typeahead_handler,
             "lyric_search": self.lyric_search_handler,
-            "lyric_cache_list": self.lyric_cache_list_handler,
+            # "lyric_cache_list": self.lyric_cache_list_handler,
         }
 
         self.acceptable_request_sources = [

@@ -123,14 +120,14 @@ class LyricSearch(FastAPI):
         for endpoint, handler in self.endpoints.items():
             app.add_api_route(f"/{endpoint}/", handler, methods=["POST"])
 
-    async def lyric_cache_list_handler(self):
-        """
-        Get currently cached lyrics entries
-        """
-        return {
-            'err': False,
-            'data': await self.lyrics_engine.listCacheEntries()
-        }
+    # async def lyric_cache_list_handler(self):
+    #     """
+    #     Get currently cached lyrics entries
+    #     """
+    #     return {
+    #         'err': False,
+    #         'data': await self.lyrics_engine.listCacheEntries()
+    #     }
 
     async def artist_typeahead_handler(self, data: ValidTypeAheadRequest):
         """Artist Type Ahead Handler"""

@@ -158,15 +155,15 @@ class LyricSearch(FastAPI):
         typeahead_list = [str(r.get('song')) for r in typeahead_result]
         return typeahead_list
 
-    async def lyric_search_log_handler(self, data: ValidLyricSearchLogRequest):
-        """Lyric Search Log Handler"""
-        include_radio = data.webradio
-        await self.glob_state.increment_counter('lyrichistory_requests')
-        last_10k_sings = await self.lyrics_engine.getHistory(limit=10000, webradio=include_radio)
-        return {
-            'err': False,
-            'history': last_10k_sings
-        }
+    # async def lyric_search_log_handler(self, data: ValidLyricSearchLogRequest):
+    #     """Lyric Search Log Handler"""
+    #     include_radio = data.webradio
+    #     await self.glob_state.increment_counter('lyrichistory_requests')
+    #     last_10k_sings = await self.lyrics_engine.getHistory(limit=10000, webradio=include_radio)
+    #     return {
+    #         'err': False,
+    #         'history': last_10k_sings
+    #     }
 
     async def lyric_search_handler(self, data: ValidLyricRequest):
         """

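The typeahead handlers stay active while the log and cache-list handlers are commented out. The diff shows only the check_typeahead signature and the new cached_lyrics.db path, not its query; one plausible shape, assuming aiosqlite and a hypothetical artist column, is:

```python
# Hedged sketch only: aiosqlite, the table name "lyrics" and the "artist" column
# are assumptions, not shown in the diff.
import aiosqlite

LYRICS_DB = "/usr/local/share/sqlite_dbs/cached_lyrics.db"

async def check_typeahead(s: str, pre_query: str | None = None) -> list[str]:
    """Return distinct artists whose names start with the given prefix."""
    query = "SELECT DISTINCT artist FROM lyrics WHERE artist LIKE ? LIMIT 15"
    async with aiosqlite.connect(LYRICS_DB) as db:
        if pre_query:
            await db.executescript(pre_query)  # e.g. pragma tuning before the lookup
        async with db.execute(query, (f"{s}%",)) as cursor:
            return [row[0] for row in await cursor.fetchall()]
```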
@@ -41,18 +41,15 @@ class RandMsg(FastAPI):
         match db_rand_selected:
             case 0:
                 randmsg_db_path = os.path.join("/",
-                                               "var",
-                                               "lib",
-                                               "singerdbs",
-                                               "qajoke.db") # For qajoke db
+                                               "usr", "local", "share",
+                                               "sqlite_dbs", "qajoke.db") # For qajoke db
                 db_query = "SELECT id, ('<b>Q:</b> ' || question || '<br/><b>A:</b> ' \
                     || answer) FROM jokes ORDER BY RANDOM() LIMIT 1" # For qajoke db
                 title_attr = "QA Joke DB"
             case 1 | 9:
                 randmsg_db_path = os.path.join("/",
-                                               "var",
-                                               "lib",
-                                               "singerdbs",
+                                               "usr", "local", "share",
+                                               "sqlite_dbs",
                                                "randmsg.db") # For randmsg db
                 db_query = "SELECT id, msg FROM msgs WHERE \
                     LENGTH(msg) <= 180 ORDER BY RANDOM() LIMIT 1" # For randmsg db

@@ -61,45 +58,40 @@ class RandMsg(FastAPI):
                 title_attr = "Random Msg DB"
             case 2:
                 randmsg_db_path = os.path.join("/",
-                                               "var",
-                                               "lib",
-                                               "singerdbs",
+                                               "usr", "local", "share",
+                                               "sqlite_dbs",
                                                "trump.db") # For Trump Tweet DB
                 db_query = "SELECT id, content FROM tweets \
                     ORDER BY RANDOM() LIMIT 1" # For Trump Tweet DB
                 title_attr = "Trump Tweet DB"
             case 3:
                 randmsg_db_path = os.path.join("/",
-                                               "var",
-                                               "lib",
-                                               "singerdbs",
+                                               "usr", "local", "share",
+                                               "sqlite_dbs",
                                                "philo.db") # For Philo DB
                 db_query = "SELECT id, (content || '<br> - ' || speaker) FROM quotes \
                     ORDER BY RANDOM() LIMIT 1" # For Philo DB
                 title_attr = "Philosophical Quotes DB"
             case 4:
                 randmsg_db_path = os.path.join("/",
-                                               "var",
-                                               "lib",
-                                               "singerdbs",
+                                               "usr", "local", "share",
+                                               "sqlite_dbs",
                                                "hate.db") # For Hate DB
                 db_query = """SELECT id, ("<font color='#FF0000'>" || comment) FROM hate_speech \
                     WHERE length(comment) <= 180 ORDER BY RANDOM() LIMIT 1"""
                 title_attr = "Hate Speech DB"
             case 5:
                 randmsg_db_path = os.path.join("/",
-                                               "var",
-                                               "lib",
-                                               "singerdbs",
+                                               "usr", "local", "share",
+                                               "sqlite_dbs",
                                                "rjokes.db") # r/jokes DB
                 db_query = """SELECT id, (title || "<br>" || body) FROM jokes \
                     WHERE score >= 10000 ORDER BY RANDOM() LIMIT 1"""
                 title_attr = "r/jokes DB"
             case 6:
                 randmsg_db_path = os.path.join("/",
-                                               "var",
-                                               "lib",
-                                               "singerdbs",
+                                               "usr", "local", "share",
+                                               "sqlite_dbs",
                                                "donnies.db") # Donnies DB
                 random.seed()
                 twilight_or_mice = random.choice(["twilight", "mice"])

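Apart from the path change, each branch keeps the same shape: pick a database, then pull one random row with ORDER BY RANDOM() LIMIT 1. A self-contained illustration of that selection against an in-memory table standing in for randmsg.db:

```python
# Illustration of the "ORDER BY RANDOM() LIMIT 1" selection used in every case
# branch; the in-memory table is a stand-in for the real randmsg.db.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
    CREATE TABLE msgs (id INTEGER PRIMARY KEY, msg TEXT);
    INSERT INTO msgs (msg) VALUES ('first'), ('second'), ('third');
""")
db_query = "SELECT id, msg FROM msgs WHERE LENGTH(msg) <= 180 ORDER BY RANDOM() LIMIT 1"
msg_id, msg = conn.execute(db_query).fetchone()
print(msg_id, msg)
```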
@@ -64,15 +64,18 @@ class Transcriptions(FastAPI):
 
         match show_id:
             case 0:
-                db_path = os.path.join("/", "var", "lib", "singerdbs", "sp.db")
+                db_path = os.path.join("/", "usr", "local", "share",
+                                       "sqlite_dbs", "sp.db")
                 db_query = """SELECT DISTINCT(("S" || Season || "E" || Episode || " " || Title)), ID FROM SP_DAT ORDER BY Season, Episode"""
                 show_title = "South Park"
             case 1:
-                db_path = os.path.join("/", "var", "lib", "singerdbs", "futur.db")
+                db_path = os.path.join("/", "usr", "local", "share",
+                                       "sqlite_dbs", "futur.db")
                 db_query = """SELECT DISTINCT(("S" || EP_S || "E" || EP_EP || " " || EP_TITLE)), EP_ID FROM clean_dialog ORDER BY EP_S, EP_EP"""
                 show_title = "Futurama"
             case 2:
-                db_path = os.path.join("/", "var", "lib", "singerdbs", "parks.db")
+                db_path = os.path.join("/", "usr", "local", "share",
+                                       "sqlite_dbs", "parks.db")
                 db_query = """SELECT DISTINCT(("S" || EP_S || "E" || EP_EP || " " || EP_TITLE)), EP_ID FROM clean_dialog ORDER BY EP_S, EP_EP"""
                 show_title = "Parks And Rec"
             case _:

@@ -102,13 +105,16 @@ class Transcriptions(FastAPI):
         # pylint: disable=line-too-long
         match show_id:
             case 0:
-                db_path = os.path.join("/", "var", "lib", "singerdbs", "sp.db")
+                db_path = os.path.join("/", "usr", "local", "share",
+                                       "sqlite_dbs", "sp.db")
                 db_query = """SELECT ("S" || Season || "E" || Episode || " " || Title), Character, Line FROM SP_DAT WHERE ID = ?"""
             case 1:
-                db_path = os.path.join("/", "var", "lib", "singerdbs", "futur.db")
+                db_path = os.path.join("/", "usr", "local", "share",
+                                       "sqlite_dbs", "futur.db")
                 db_query = """SELECT ("S" || EP_S || "E" || EP_EP || " " || EP_TITLE || "<br><em>Opener: " || EP_OPENER || "</em>"), EP_LINE_SPEAKER, EP_LINE FROM clean_dialog WHERE EP_ID = ? ORDER BY LINE_ID ASC"""
             case 2:
-                db_path = os.path.join("/", "var", "lib", "singerdbs", "parks.db")
+                db_path = os.path.join("/", "usr", "local", "share",
+                                       "sqlite_dbs", "parks.db")
                 db_query = """SELECT ("S" || EP_S || "E" || EP_EP || " " || EP_TITLE), EP_LINE_SPEAKER, EP_LINE FROM clean_dialog WHERE EP_ID = ? ORDER BY id ASC"""
 
             case _:

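The second Transcriptions hunk binds the episode identifier through a placeholder (WHERE ID = ?) instead of string formatting. A minimal stand-alone version of that lookup; the tiny in-memory SP_DAT table mimics sp.db, and single-quoted string literals are used for portability where the original query relies on SQLite's legacy double-quote behaviour:

```python
# Stand-alone sketch of the parameterised episode lookup from the diff.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE SP_DAT (ID INTEGER, Season INTEGER, Episode INTEGER, "
             "Title TEXT, Character TEXT, Line TEXT)")
conn.execute("INSERT INTO SP_DAT VALUES (1, 1, 1, 'Pilot', 'Cartman', 'Hello there')")

db_query = ("SELECT ('S' || Season || 'E' || Episode || ' ' || Title), Character, Line "
            "FROM SP_DAT WHERE ID = ?")
for episode, character, line in conn.execute(db_query, (1,)):
    print(episode, character, line)
```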
@@ -23,15 +23,14 @@ log_level = logging.getLevelName(logger.level)
 class Cache:
     """Cache Search Module"""
     def __init__(self) -> None:
-        self.cache_db: str = os.path.join("/", "var",
-                                          "lib", "singerdbs",
-                                          "cached_lyrics.db")
+        self.cache_db: str = os.path.join("/", "usr", "local", "share",
+                                          "sqlite_dbs", "cached_lyrics.db")
         self.redis_cache = redis_cache.RedisCache()
         self.notifier = notifier.DiscordNotifier()
 
         self.cache_pre_query: str = "pragma journal_mode = WAL; pragma synchronous = normal;\
            pragma temp_store = memory; pragma mmap_size = 30000000000;"
-        self.sqlite_exts: list[str] = ['/usr/local/lib/python3.11/dist-packages/spellfix1.cpython-311-x86_64-linux-gnu.so']
+        self.sqlite_exts: list[str] = ['/home/singer/api/solibs/spellfix1.cpython-311-x86_64-linux-gnu.so']
         self.label: str = "Cache"
 
     def get_matched(self, matched_candidate: tuple, confidence: int,

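This hunk repoints sqlite_exts at a locally built spellfix1 shared object and keeps the WAL/synchronous pragmas in cache_pre_query. A sketch of how such an extension and pragma string are typically applied with the stdlib sqlite3 module, assuming the interpreter's SQLite was built with extension loading enabled:

```python
# Sketch only: the usual way a loadable extension plus pragma string are applied
# with sqlite3. The .so path is the one this commit switches to and exists only
# on the deployment host.
import sqlite3

cache_db = "/usr/local/share/sqlite_dbs/cached_lyrics.db"
cache_pre_query = ("pragma journal_mode = WAL; pragma synchronous = normal;"
                   "pragma temp_store = memory; pragma mmap_size = 30000000000;")
sqlite_exts = ["/home/singer/api/solibs/spellfix1.cpython-311-x86_64-linux-gnu.so"]

conn = sqlite3.connect(cache_db)
conn.enable_load_extension(True)   # requires a build with extension support
for ext in sqlite_exts:
    conn.load_extension(ext)       # registers spellfix1's fuzzy-match functions
conn.enable_load_extension(False)
conn.executescript(cache_pre_query)
```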
state.py
@@ -15,7 +15,8 @@ class State(FastAPI):
     """Global State for API"""
     def __init__(self, app: FastAPI, util, constants): # pylint: disable=unused-argument
         super().__init__()
-        self.counter_db_path = os.path.join("/", "var", "lib", "singerdbs", "stats.db")
+        self.counter_db_path = os.path.join("/", "usr", "local", "share",
+                                            "sqlite_dbs", "stats.db")
         self.counters = {
             str(counter): 0 for counter in constants.AVAILABLE_COUNTERS
         }
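State.__init__ seeds one zeroed counter per name in constants.AVAILABLE_COUNTERS; the comprehension in isolation, with a stand-in list:

```python
# The counter seeding from State.__init__, shown with a hypothetical stand-in
# for constants.AVAILABLE_COUNTERS (only 'lyrichistory_requests' appears in the diff).
AVAILABLE_COUNTERS = ["lyrichistory_requests", "transcript_requests"]  # illustrative names
counters = {str(counter): 0 for counter in AVAILABLE_COUNTERS}
assert counters == {"lyrichistory_requests": 0, "transcript_requests": 0}
```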