Compare commits

...

21 Commits

SHA1 Message Date
10ccf8c8eb performance: db/aiohttp connection pooling 2025-12-18 07:51:47 -05:00
bc8b407a91 rm test.conf 2025-12-18 07:30:39 -05:00
041de95698 misc 2025-12-18 07:27:37 -05:00
6240888ac5 cull transcriptions endpoints from README.md 2025-12-03 13:56:28 -05:00
be9ed777a5 retire transcriptions endpoints 2025-12-03 13:55:52 -05:00
7779049c93 misc / sr_wrapper-- only consider an album returned from tidal to be a duplicate if both the name AND release date match. 2025-11-26 15:09:43 -05:00
41d9065c79 rm 2025-11-25 13:08:20 -05:00
85298b861d minor 2025-11-25 13:06:07 -05:00
476c4e6e51 bugfix: LRCLIb's models.py needs load_dotenv 2025-11-23 07:12:38 -05:00
353f14c899 formatting / minor 2025-11-22 21:43:48 -05:00
3d0b867427 Misc 2025-11-22 21:41:12 -05:00
dcc6c7b24e More progress re: #34
- Change of direction, LRCLib searches from /lyric/search now use internal cache - which is a PGSQL import of the LRCLib SQLite database.  Change to PGSQL was made for performance.
2025-11-22 13:13:03 -05:00
c302b256d3 Begin #34 2025-11-21 12:29:12 -05:00
c6d2bad79d Enhance lyric search functionality by improving line splitting logic and adding multi-line matching for subsearch. Update cache handling to ensure confidence threshold is respected before returning results. 2025-10-24 13:40:55 -04:00
25d1ab226e Enhance Cync authentication flow with improved token management and 2FA handling. Add periodic token validation and logging for better debugging. Introduce FLAC stream check in bulk download process. 2025-10-22 06:55:37 -04:00
3f66223328 Fix import statement for index_definition in redis_cache.py and radio_util.py (dependency upgrade related-- redis module) 2025-10-15 12:13:03 -04:00
c493f2aabf Increase rate limit for lighting state requests and enhance error handling for Cync device operations. Improve lyric search processing by splitting lyrics based on line breaks and cleaning special characters (bugfix for subsearch/seek). 2025-10-15 10:10:56 -04:00
0029a9ec19 . 2025-10-07 12:07:45 -04:00
90c3efbb8b Remove .gitignore file, radio database restructuring 2025-10-07 12:07:13 -04:00
a61970d298 . 2025-10-03 11:34:48 -04:00
fb94750b46 - Removed unnecessary/unrelated files
- Added Scalar
2025-10-03 11:34:33 -04:00
29 changed files with 2261 additions and 2809 deletions

.gitignore (vendored) - deleted, 36 lines

@@ -1,36 +0,0 @@
**/__pycache__/*
**/.vscode/*
**/private.py
**/solibs/*
**/*.json
**/mgr/*
constants.py
tests.py
db_migrate.py
notifier.py
test_hifi.py
youtube*
playlist_creator.py
artist_genre_tag.py
pg_migrate_lyrics.py
uv.lock
pyproject.toml
mypy.ini
.python-version
get_next_track.py
endpoints/radio.py
utils/radio_util.py
redis_playlist.py
endpoints/auth.py
endpoints/radio2
endpoints/radio2/**
hash_password.py
up.py
job_review.py
check_missing.py
**/auth/*
**/radio_api/*
test/db_stats.py
test/report/*
.gitignore
.env

README.md

@@ -4,7 +4,10 @@ A modern FastAPI-based backend providing various endpoints for media, authentica
 ## Overview
 
-This server is built with [FastAPI](https://fastapi.tiangolo.com/) and provides a comprehensive API for multiple services. The interactive API documentation is available at the [Swagger UI](https://api.codey.lol/docs).
+This server is built with [FastAPI](https://fastapi.tiangolo.com/) and provides a comprehensive API for multiple services. API documentation is available in three formats:
+- **Swagger UI**: [https://api.codey.lol/docs](https://api.codey.lol/docs) - Classic interactive API explorer with "Try it out" functionality
+- **Scalar**: [https://api.codey.lol/scalar](https://api.codey.lol/scalar) - Modern, fast interactive API documentation (recommended)
+- **ReDoc**: [https://api.codey.lol/redoc](https://api.codey.lol/redoc) - Clean, read-only documentation with better visual design
 
 ## API Endpoints
 
@@ -48,10 +51,6 @@ This server is built with [FastAPI](https://fastapi.tiangolo.com/) and provides
 **YouTube (`/yt/`)** 🎥
 - `POST /yt/search` - Search for YouTube videos by title
 
-**Transcriptions (`/transcriptions/`)** 📝
-- `POST /transcriptions/get_episodes` - Get episode list by show ID
-- `POST /transcriptions/get_episode_lines` - Get transcript lines for specific episodes
-
 **Lyric Search (`/lyric/`)** 🎵
 - `POST /lyric/search` - Search for song lyrics across multiple sources
   - Supports artist/song search, text search within lyrics
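All three documentation UIs above render the same OpenAPI schema that FastAPI serves at /openapi.json. A minimal sketch of pulling that schema directly; the base URL comes from the README, while httpx is an assumed client library and not part of this changeset:

import httpx

# Fetch the OpenAPI schema that Swagger UI, Scalar, and ReDoc all render.
resp = httpx.get("https://api.codey.lol/openapi.json", timeout=10)
resp.raise_for_status()
schema = resp.json()

# Print the documented paths (e.g. /lyric/search, /yt/search).
for path in sorted(schema.get("paths", {})):
    print(path)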

base.py (99 changed lines)

@@ -4,26 +4,88 @@ import sys
 sys.path.insert(0, ".")
 import logging
 import asyncio
+
+# Install uvloop for better async performance (2-4x speedup on I/O)
+try:
+    import uvloop
+    uvloop.install()
+    logging.info("uvloop installed successfully")
+except ImportError:
+    logging.warning("uvloop not available, using default asyncio event loop")
+
+from contextlib import asynccontextmanager
 from typing import Any
 from fastapi import FastAPI, Request
 from fastapi.middleware.cors import CORSMiddleware
+from scalar_fastapi import get_scalar_api_reference
 from lyric_search.sources import redis_cache
+import shared  # Shared connection pools
 
 logging.basicConfig(level=logging.INFO)
 logging.getLogger("aiosqlite").setLevel(logging.WARNING)
 logging.getLogger("httpx").setLevel(logging.WARNING)
+logging.getLogger("python_multipart.multipart").setLevel(logging.WARNING)
+logging.getLogger("streamrip").setLevel(logging.WARNING)
+logging.getLogger("utils.sr_wrapper").setLevel(logging.WARNING)
 logger = logging.getLogger()
 loop = asyncio.get_event_loop()
 
+# Pre-import endpoint modules so we can wire up lifespan
+constants = importlib.import_module("constants").Constants()
+
+# Will be set after app creation
+_routes: dict = {}
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    """Lifespan context manager for startup/shutdown events."""
+    # Startup
+    uvicorn_access_logger = logging.getLogger("uvicorn.access")
+    uvicorn_access_logger.disabled = True
+
+    # Initialize shared infrastructure (Redis pool, aiohttp session, SQLite pool)
+    await shared.startup()
+
+    # Start Radio playlists
+    if "radio" in _routes and hasattr(_routes["radio"], "on_start"):
+        await _routes["radio"].on_start()
+
+    # Start endpoint background tasks
+    if "trip" in _routes and hasattr(_routes["trip"], "startup"):
+        await _routes["trip"].startup()
+    if "lighting" in _routes and hasattr(_routes["lighting"], "startup"):
+        await _routes["lighting"].startup()
+
+    logger.info("Application startup complete")
+    yield
+
+    # Shutdown
+    if "lighting" in _routes and hasattr(_routes["lighting"], "shutdown"):
+        await _routes["lighting"].shutdown()
+    if "trip" in _routes and hasattr(_routes["trip"], "shutdown"):
+        await _routes["trip"].shutdown()
+
+    # Clean up shared infrastructure
+    await shared.shutdown()
+    logger.info("Application shutdown complete")
+
+
 app = FastAPI(
     title="codey.lol API",
     version="1.0",
     contact={"name": "codey"},
     redirect_slashes=False,
     loop=loop,
+    docs_url=None,  # Disabled - using Scalar at /docs instead
+    redoc_url="/redoc",
+    lifespan=lifespan,
 )
 
-constants = importlib.import_module("constants").Constants()
 util = importlib.import_module("util").Utilities(app, constants)
 
 origins = [
@@ -33,6 +95,7 @@ origins = [
     "https://status.boatson.boats",
     "https://_new.codey.lol",
     "http://localhost:4321",
+    "https://local.codey.lol:4321",
 ]
 
 app.add_middleware(
@@ -44,6 +107,12 @@ app.add_middleware(
 )  # type: ignore
 
+
+# Scalar API documentation at /docs (replaces default Swagger UI)
+@app.get("/docs", include_in_schema=False)
+def scalar_docs():
+    return get_scalar_api_reference(openapi_url="/openapi.json", title="codey.lol API")
+
 """
 Blacklisted routes
 """
@@ -62,10 +131,15 @@ def base_head():
 @app.get("/{path}", include_in_schema=False)
 def disallow_get_any(request: Request, var: Any = None):
     path = request.path_params["path"]
+    allowed_paths = ["widget", "misc/no", "docs", "redoc", "openapi.json"]
+    logging.info(
+        f"Checking path: {path}, allowed: {path in allowed_paths or path.split('/', maxsplit=1)[0] in allowed_paths}"
+    )
     if not (
         isinstance(path, str)
-        and (path.split("/", maxsplit=1) == "widget" or path == "misc/no")
+        and (path.split("/", maxsplit=1)[0] in allowed_paths or path in allowed_paths)
     ):
+        logging.error(f"BLOCKED path: {path}")
         return util.get_blocked_response()
     else:
         logging.info("OK, %s", path)
@@ -84,13 +158,11 @@ End Blacklisted Routes
 Actionable Routes
 """
 
-routes: dict = {
+_routes.update(
+    {
     "randmsg": importlib.import_module("endpoints.rand_msg").RandMsg(
         app, util, constants
     ),
-    "transcriptions": importlib.import_module(
-        "endpoints.transcriptions"
-    ).Transcriptions(app, util, constants),
     "lyrics": importlib.import_module("endpoints.lyric_search").LyricSearch(
         app, util, constants
     ),
@@ -104,12 +176,13 @@ routes: dict = {
     "lighting": importlib.import_module("endpoints.lighting").Lighting(
         app, util, constants
     ),
     }
+)
 
 # Misc endpoint depends on radio endpoint instance
-radio_endpoint = routes.get("radio")
+radio_endpoint = _routes.get("radio")
 if radio_endpoint:
-    routes["misc"] = importlib.import_module("endpoints.misc").Misc(
+    _routes["misc"] = importlib.import_module("endpoints.misc").Misc(
         app, util, constants, radio_endpoint
     )
@@ -117,17 +190,9 @@ if radio_endpoint:
 End Actionable Routes
 """
 
 """
 Startup
 """
 
-async def on_start():
-    uvicorn_access_logger = logging.getLogger("uvicorn.access")
-    uvicorn_access_logger.disabled = True
-
-app.add_event_handler("startup", on_start)
-
 redis = redis_cache.RedisCache()
 loop.create_task(redis.create_index())
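The new `shared` module wired in above (shared.startup(), shared.shutdown(), shared.get_redis_sync_client(), shared.get_sqlite_pool()) is not included in this compare. A minimal sketch of what such a pooling module could look like, with its interface inferred from the calls made in base.py and the RandMsg endpoint; everything else here is an assumption, not the project's actual implementation:

# Hypothetical shared.py sketch; only the interface is taken from this compare.
from contextlib import asynccontextmanager
from typing import Optional

import aiohttp
import aiosqlite
import redis

_http_session: Optional[aiohttp.ClientSession] = None
_redis_sync: Optional[redis.Redis] = None


async def startup() -> None:
    # Create long-lived resources once at application startup.
    global _http_session
    _http_session = aiohttp.ClientSession()


async def shutdown() -> None:
    # Close pooled resources at application shutdown.
    if _http_session:
        await _http_session.close()
    if _redis_sync:
        _redis_sync.close()


def get_redis_sync_client(decode_responses: bool = False) -> redis.Redis:
    # redis-py clients carry their own connection pool; reuse one per process.
    # Auth/host settings are omitted here and would come from the real config.
    global _redis_sync
    if _redis_sync is None:
        _redis_sync = redis.Redis(decode_responses=decode_responses)
    return _redis_sync


class _SQLitePool:
    # Simplified stand-in: opens a connection per use rather than truly pooling.
    @asynccontextmanager
    async def connection(self, path: str, timeout: float = 1):
        db = await aiosqlite.connect(path, timeout=timeout)
        try:
            yield db
        finally:
            await db.close()


_sqlite_pool = _SQLitePool()


def get_sqlite_pool() -> _SQLitePool:
    return _sqlite_pool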

endpoints/lighting.py

@@ -1,205 +1,379 @@
"""
Cync Lighting Control API
This module provides a FastAPI endpoint for controlling Cync smart lights.
It maintains a persistent connection to the Cync cloud service and handles
authentication, token caching, and connection lifecycle management.
Key behaviors:
- pycync uses a TCP/TLS connection that requires login acknowledgment before commands work
- Commands are sent through a WiFi-connected "hub" device to the Bluetooth mesh
- The TCP manager auto-reconnects on disconnect with a 10-second delay
- We wait for the connection to be fully ready before sending commands
"""
import logging import logging
import json import json
import os import os
import time import time
import asyncio
from typing import Optional, Any
from dataclasses import dataclass
import aiohttp import aiohttp
from fastapi import FastAPI, Depends, HTTPException, Request from fastapi import FastAPI, Depends, HTTPException, Request
from fastapi_throttle import RateLimiter from fastapi_throttle import RateLimiter
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
import redis
from lyric_search.sources import private
from auth.deps import get_current_user from auth.deps import get_current_user
from dotenv import load_dotenv from dotenv import load_dotenv
from pycync.user import User # type: ignore from pycync.user import User # type: ignore
from pycync.cync import Cync as Cync # type: ignore from pycync.cync import Cync # type: ignore
from pycync import Auth # type: ignore from pycync import Auth # type: ignore
from pycync.exceptions import TwoFactorRequiredError, AuthFailedError # type: ignore from pycync.exceptions import TwoFactorRequiredError, AuthFailedError # type: ignore
# Configure logging
logger = logging.getLogger(__name__)
class Lighting(FastAPI):
async def ensure_cync_connection(self):
"""Ensure aiohttp session and Cync API are alive, re-create if needed."""
# Check required environment variables
missing_vars = []
if not self.cync_email:
missing_vars.append("CYNC_EMAIL")
if not self.cync_password:
missing_vars.append("CYNC_PASSWORD")
if not self.cync_device_name:
missing_vars.append("CYNC_DEVICE_NAME")
if missing_vars:
raise Exception(
f"Missing required environment variables: {', '.join(missing_vars)}"
)
# Cast to str after check to silence linter
cync_email: str = self.cync_email # type: ignore
cync_password: str = self.cync_password # type: ignore
# Check if session is closed or missing @dataclass
if not self.session or getattr(self.session, "closed", False): class CyncConnectionState:
self.session = aiohttp.ClientSession() """Track the state of our Cync connection."""
cached_user = self._load_cached_user()
if cached_user:
self.auth = Auth(
session=self.session,
user=cached_user,
username=cync_email,
password=cync_password,
)
else:
self.auth = Auth(
session=self.session, username=cync_email, password=cync_password
)
# Try to refresh token
self.cync_user = None
if (
self.auth.user
and hasattr(self.auth.user, "expires_at")
and self.auth.user.expires_at > time.time()
):
try:
await self.auth.async_refresh_user_token()
self.cync_user = self.auth.user
self._save_cached_user(self.cync_user)
except AuthFailedError:
pass
# If no valid token, login
if not self.cync_user:
try:
self.cync_user = await self.auth.login()
self._save_cached_user(self.cync_user)
except TwoFactorRequiredError:
logging.error(
"Cync 2FA required. Set CYNC_2FA_CODE in env if needed."
)
raise Exception("Cync 2FA required.")
except AuthFailedError as e:
logging.error("Failed to authenticate with Cync API: %s", e)
raise Exception("Cync authentication failed.")
self.cync_api = await Cync.create(self.auth)
# Also check if cync_api is None (shouldn't happen, but just in case)
if not self.cync_api:
if not self.auth:
logging.critical("self.auth: %s", self.auth)
return
self.cync_api = await Cync.create(self.auth)
session: Optional[aiohttp.ClientSession] = None
auth: Optional[Auth] = None
cync_api: Optional[Cync] = None
user: Optional[User] = None
connected_at: Optional[float] = None
last_command_at: Optional[float] = None
class Lighting:
""" """
Lighting Endpoints Cync Lighting Controller
Manages authentication and device control for Cync smart lights.
Uses pycync library which maintains a TCP connection for device commands.
""" """
def __init__(self, app: FastAPI, util, constants) -> None: # Configuration
"""Initialize Lighting endpoints and persistent Cync connection.""" TOKEN_EXPIRY_BUFFER = 300 # Consider token expired 5 min before actual expiry
CONNECTION_READY_TIMEOUT = 15 # Max seconds to wait for TCP connection to be ready
COMMAND_DELAY = 0.3 # Delay between sequential commands
MAX_RETRIES = 3
def __init__(self, app: FastAPI, util: Any, constants: Any) -> None:
load_dotenv() load_dotenv()
self.app: FastAPI = app
self.app = app
self.util = util self.util = util
self.constants = constants self.constants = constants
self.redis_client = redis.Redis(
password=private.REDIS_PW, decode_responses=True # Redis for state persistence - use shared sync client
) import shared
self.redis_client = shared.get_redis_sync_client(decode_responses=True)
self.lighting_key = "lighting:state" self.lighting_key = "lighting:state"
# Cync config # Cync configuration from environment
self.cync_email = os.getenv("CYNC_EMAIL") self.cync_email = os.getenv("CYNC_EMAIL")
self.cync_password = os.getenv("CYNC_PASSWORD") self.cync_password = os.getenv("CYNC_PASSWORD")
self.cync_device_name = os.getenv("CYNC_DEVICE_NAME") self.cync_device_name = os.getenv("CYNC_DEVICE_NAME")
self.token_cache_path = "cync_token.json" self.token_cache_path = "cync_token.json"
self.session = None
self.auth = None
self.cync_user = None
self.cync_api = None
# Set up Cync connection at startup using FastAPI event # Connection state
@app.on_event("startup") self._state = CyncConnectionState()
async def startup_event(): self._connection_lock = asyncio.Lock()
# Check required environment variables self._health_task: Optional[asyncio.Task] = None
missing_vars = []
if not self.cync_email:
missing_vars.append("CYNC_EMAIL")
if not self.cync_password:
missing_vars.append("CYNC_PASSWORD")
if not self.cync_device_name:
missing_vars.append("CYNC_DEVICE_NAME")
if missing_vars:
raise Exception(
f"Missing required environment variables: {', '.join(missing_vars)}"
)
self.session = aiohttp.ClientSession() # Register routes
cached_user = self._load_cached_user() self._register_routes()
if cached_user:
self.auth = Auth(
session=self.session,
user=cached_user,
username=self.cync_email or "",
password=self.cync_password or "",
)
else:
self.auth = Auth(
session=self.session,
username=self.cync_email or "",
password=self.cync_password or "",
)
# Try to refresh token
if (
self.auth.user
and hasattr(self.auth.user, "expires_at")
and self.auth.user.expires_at > time.time()
):
try:
await self.auth.async_refresh_user_token()
self.cync_user = self.auth.user
self._save_cached_user(self.cync_user)
except AuthFailedError:
pass
# If no valid token, login
if not self.cync_user:
try:
self.cync_user = await self.auth.login()
self._save_cached_user(self.cync_user)
except TwoFactorRequiredError:
logging.error(
"Cync 2FA required. Set CYNC_2FA_CODE in env if needed."
)
raise Exception("Cync 2FA required.")
except AuthFailedError as e:
logging.error("Failed to authenticate with Cync API: %s", e)
raise Exception("Cync authentication failed.")
# Create persistent Cync API object
self.cync_api = await Cync.create(self.auth)
self.endpoints: dict = { def _register_routes(self) -> None:
"lighting/state": self.get_lighting_state, """Register FastAPI routes."""
} common_deps = [
Depends(RateLimiter(times=25, seconds=2)),
for endpoint, handler in self.endpoints.items():
app.add_api_route(
f"/{endpoint}",
handler,
methods=["GET"],
include_in_schema=True,
dependencies=[
Depends(RateLimiter(times=10, seconds=2)),
Depends(get_current_user), Depends(get_current_user),
], ]
self.app.add_api_route(
"/lighting/state",
self.get_lighting_state,
methods=["GET"],
dependencies=common_deps,
include_in_schema=False,
) )
app.add_api_route( self.app.add_api_route(
"/lighting/state", "/lighting/state",
self.set_lighting_state, self.set_lighting_state,
methods=["POST"], methods=["POST"],
include_in_schema=True, dependencies=common_deps,
dependencies=[ include_in_schema=False,
Depends(RateLimiter(times=10, seconds=2)),
Depends(get_current_user),
],
) )
def _load_cached_user(self): # =========================================================================
# Lifecycle Management
# =========================================================================
async def startup(self) -> None:
"""Initialize on app startup. Call from lifespan context manager."""
self._validate_config()
try: try:
if os.path.exists(self.token_cache_path): await self._connect()
logger.info("Cync lighting initialized successfully")
except Exception as e:
logger.error(f"Failed to initialize Cync at startup: {e}")
# Don't raise - allow app to start, will retry on first request
# Start background health monitoring
self._health_task = asyncio.create_task(self._health_monitor())
async def shutdown(self) -> None:
"""Cleanup on app shutdown. Call from lifespan context manager."""
if self._health_task:
self._health_task.cancel()
try:
await self._health_task
except asyncio.CancelledError:
pass
await self._disconnect()
logger.info("Cync lighting shut down")
def _validate_config(self) -> None:
"""Validate required environment variables."""
missing = []
if not self.cync_email:
missing.append("CYNC_EMAIL")
if not self.cync_password:
missing.append("CYNC_PASSWORD")
if not self.cync_device_name:
missing.append("CYNC_DEVICE_NAME")
if missing:
raise RuntimeError(f"Missing required env vars: {', '.join(missing)}")
# =========================================================================
# Connection Management
# =========================================================================
async def _connect(self, force: bool = False) -> None:
"""
Establish connection to Cync cloud.
This creates the aiohttp session, authenticates, and initializes
the pycync API which starts its TCP connection.
"""
async with self._connection_lock:
# Check if we need to connect
if not force and self._is_connection_valid():
return
logger.info("Establishing Cync connection...")
# Clean up existing connection
await self._disconnect_unlocked()
# Create HTTP session
timeout = aiohttp.ClientTimeout(total=30, connect=10)
self._state.session = aiohttp.ClientSession(timeout=timeout)
# Authenticate
await self._authenticate()
# Create Cync API (starts TCP connection)
logger.info("Creating Cync API instance...")
assert self._state.auth is not None # Set by _authenticate
self._state.cync_api = await Cync.create(self._state.auth)
# Wait for TCP connection to be ready
await self._wait_for_connection_ready()
self._state.connected_at = time.time()
logger.info("Cync connection established")
async def _disconnect(self) -> None:
"""Disconnect and cleanup resources."""
async with self._connection_lock:
await self._disconnect_unlocked()
async def _disconnect_unlocked(self) -> None:
"""Disconnect without acquiring lock (internal use)."""
# Shutdown pycync TCP connection
if self._state.cync_api:
try:
# pycync's command client has a shut_down method
client = getattr(self._state.cync_api, "_command_client", None)
if client:
await client.shut_down()
except Exception as e:
logger.warning(f"Error shutting down Cync client: {e}")
# Close HTTP session
if self._state.session and not self._state.session.closed:
await self._state.session.close()
await asyncio.sleep(0.1) # Allow cleanup
# Reset state
self._state = CyncConnectionState()
def _is_connection_valid(self) -> bool:
"""Check if current connection is usable."""
if not self._state.cync_api or not self._state.session:
return False
if self._state.session.closed:
return False
# Check token expiry
if self._is_token_expired():
logger.info("Token expired or expiring soon")
return False
return True
def _is_token_expired(self) -> bool:
"""Check if token is expired or will expire soon."""
if not self._state.user:
return True
expires_at = getattr(self._state.user, "expires_at", 0)
return expires_at < (time.time() + self.TOKEN_EXPIRY_BUFFER)
async def _wait_for_connection_ready(self) -> None:
"""
Wait for pycync TCP connection to be fully ready.
pycync's TCP manager waits for login acknowledgment before sending
any commands. We need to wait for this to complete.
"""
if not self._state.cync_api:
raise RuntimeError("Cync API not initialized")
client = getattr(self._state.cync_api, "_command_client", None)
if not client:
logger.warning("Could not access command client")
return
tcp_manager = getattr(client, "_tcp_manager", None)
if not tcp_manager:
logger.warning("Could not access TCP manager")
return
# Wait for login to be acknowledged
start = time.time()
while not getattr(tcp_manager, "_login_acknowledged", False):
if time.time() - start > self.CONNECTION_READY_TIMEOUT:
raise TimeoutError("Timed out waiting for Cync login acknowledgment")
await asyncio.sleep(0.2)
logger.debug("Waiting for Cync TCP login acknowledgment...")
# Give a tiny bit more time for device probing to start
await asyncio.sleep(0.5)
logger.info(f"Cync TCP connection ready (took {time.time() - start:.1f}s)")
# =========================================================================
# Authentication
# =========================================================================
async def _authenticate(self) -> None:
"""Authenticate with Cync, using cached token if valid."""
# Try cached token first
cached_user = self._load_cached_token()
# These are validated by _validate_config at startup
assert self._state.session is not None
assert self.cync_email is not None
assert self.cync_password is not None
if cached_user and not self._is_user_token_expired(cached_user):
logger.info("Using cached Cync token")
self._state.auth = Auth(
session=self._state.session,
user=cached_user,
username=self.cync_email,
password=self.cync_password,
)
self._state.user = cached_user
return
# Need fresh login
logger.info("Performing fresh Cync login...")
self._state.auth = Auth(
session=self._state.session,
username=self.cync_email,
password=self.cync_password,
)
try:
self._state.user = await self._state.auth.login()
self._save_cached_token(self._state.user)
logger.info("Cync login successful")
except TwoFactorRequiredError:
await self._handle_2fa()
except AuthFailedError as e:
logger.error(f"Cync authentication failed: {e}")
raise
async def _handle_2fa(self) -> None:
"""Handle 2FA authentication."""
import sys
# Try environment variable first
twofa_code = os.getenv("CYNC_2FA_CODE")
# If not set, prompt interactively
if not twofa_code:
print("\n" + "=" * 50)
print("CYNC 2FA REQUIRED")
print("=" * 50)
print("Check your email for the Cync verification code.")
print("Enter the code below (you have 60 seconds):")
print("=" * 50)
sys.stdout.flush()
# Use asyncio to read with timeout
try:
loop = asyncio.get_event_loop()
twofa_code = await asyncio.wait_for(
loop.run_in_executor(None, input, "2FA Code: "), timeout=60.0
)
twofa_code = twofa_code.strip()
except asyncio.TimeoutError:
logger.error("2FA code entry timed out")
raise RuntimeError("2FA code entry timed out")
if not twofa_code:
logger.error("No 2FA code provided")
raise RuntimeError("Cync 2FA required but no code provided")
logger.info("Retrying Cync login with 2FA code")
try:
assert self._state.auth is not None
self._state.user = await self._state.auth.login(two_factor_code=twofa_code)
self._save_cached_token(self._state.user)
logger.info("Cync 2FA login successful")
except Exception as e:
logger.error(f"Cync 2FA login failed: {e}")
raise
def _is_user_token_expired(self, user: User) -> bool:
"""Check if a user's token is expired."""
expires_at = getattr(user, "expires_at", 0)
return expires_at < (time.time() + self.TOKEN_EXPIRY_BUFFER)
def _load_cached_token(self) -> Optional[User]:
"""Load cached authentication token from disk."""
try:
if not os.path.exists(self.token_cache_path):
return None
with open(self.token_cache_path, "r") as f: with open(self.token_cache_path, "r") as f:
data = json.load(f) data = json.load(f)
return User( return User(
access_token=data["access_token"], access_token=data["access_token"],
refresh_token=data["refresh_token"], refresh_token=data["refresh_token"],
@@ -208,10 +382,11 @@ class Lighting(FastAPI):
expires_at=data["expires_at"], expires_at=data["expires_at"],
) )
except Exception as e: except Exception as e:
logging.warning("Failed to load cached Cync user: %s", e) logger.warning(f"Failed to load cached token: {e}")
return None return None
def _save_cached_user(self, user): def _save_cached_token(self, user: User) -> None:
"""Save authentication token to disk."""
try: try:
data = { data = {
"access_token": user.access_token, "access_token": user.access_token,
@@ -222,129 +397,252 @@ class Lighting(FastAPI):
} }
with open(self.token_cache_path, "w") as f: with open(self.token_cache_path, "w") as f:
json.dump(data, f) json.dump(data, f)
logging.info("Saved Cync user tokens to disk.") logger.debug("Saved Cync token to disk")
except Exception as e: except Exception as e:
logging.warning("Failed to save Cync user tokens: %s", e) logger.warning(f"Failed to save token: {e}")
async def get_lighting_state(self) -> JSONResponse: def _clear_cached_token(self) -> None:
""" """Remove cached token file."""
Get the current lighting state. try:
if os.path.exists(self.token_cache_path):
os.remove(self.token_cache_path)
logger.info("Cleared cached token")
except OSError:
pass
Returns: # =========================================================================
- **JSONResponse**: Contains the current lighting state. # Health Monitoring
# =========================================================================
async def _health_monitor(self) -> None:
"""Background task to monitor connection health and refresh tokens."""
while True:
try:
await asyncio.sleep(300) # Check every 5 minutes
# Proactively refresh if token is expiring
if self._is_token_expired():
logger.info("Token expiring, proactively reconnecting...")
try:
await self._connect(force=True)
except Exception as e:
logger.error(f"Proactive reconnection failed: {e}")
except asyncio.CancelledError:
break
except Exception as e:
logger.error(f"Health monitor error: {e}")
# =========================================================================
# Device Control
# =========================================================================
async def _get_device(self):
"""Get the target light device."""
if not self._state.cync_api:
raise RuntimeError("Cync not connected")
devices = self._state.cync_api.get_devices()
if not devices:
raise RuntimeError("No devices found")
device = next(
(d for d in devices if getattr(d, "name", None) == self.cync_device_name),
None,
)
if not device:
available = [getattr(d, "name", "unnamed") for d in devices]
raise RuntimeError(
f"Device '{self.cync_device_name}' not found. Available: {available}"
)
return device
async def _send_commands(
self,
power: str,
brightness: Optional[int] = None,
rgb: Optional[tuple[int, int, int]] = None,
) -> None:
""" """
Send commands to the light device.
Commands are sent sequentially with small delays to ensure
the TCP connection processes each one.
"""
device = await self._get_device()
logger.info(f"Sending commands to device: {device.name}")
# Power
if power == "on":
await device.turn_on()
logger.debug("Sent turn_on")
else:
await device.turn_off()
logger.debug("Sent turn_off")
await asyncio.sleep(self.COMMAND_DELAY)
# Brightness
if brightness is not None:
await device.set_brightness(brightness)
logger.debug(f"Sent brightness: {brightness}")
await asyncio.sleep(self.COMMAND_DELAY)
# Color
if rgb:
await device.set_rgb(rgb)
logger.debug(f"Sent RGB: {rgb}")
await asyncio.sleep(self.COMMAND_DELAY)
self._state.last_command_at = time.time()
# =========================================================================
# API Endpoints
# =========================================================================
async def get_lighting_state(self, user=Depends(get_current_user)) -> JSONResponse:
"""Get the current lighting state from Redis."""
if "lighting" not in user.get("roles", []) and "admin" not in user.get(
"roles", []
):
raise HTTPException(status_code=403, detail="Insufficient permissions")
try: try:
state = self.redis_client.get(self.lighting_key) state = self.redis_client.get(self.lighting_key)
if state: if state:
return JSONResponse(content=json.loads(str(state))) return JSONResponse(content=json.loads(str(state)))
else:
# Default state # Default state
default_state = { return JSONResponse(
content={
"power": "off", "power": "off",
"brightness": 50, "brightness": 50,
"color": {"r": 255, "g": 255, "b": 255}, "color": {"r": 255, "g": 255, "b": 255},
} }
return JSONResponse(content=default_state) )
except Exception as e: except Exception as e:
logging.error("Error getting lighting state: %s", e) logger.error(f"Error getting lighting state: {e}")
raise HTTPException(status_code=500, detail="Internal server error") raise HTTPException(status_code=500, detail="Internal server error")
async def set_lighting_state(self, request: Request) -> JSONResponse: async def set_lighting_state(
""" self, request: Request, user=Depends(get_current_user)
Set the lighting state and apply it to the Cync device. ) -> JSONResponse:
""" """Set the lighting state and apply to Cync device."""
try: try:
if "lighting" not in user.get("roles", []) and "admin" not in user.get(
"roles", []
):
raise HTTPException(status_code=403, detail="Insufficient permissions")
state = await request.json() state = await request.json()
# Validate state (basic validation) logger.info(f"Lighting request: {state}")
# Validate
if not isinstance(state, dict): if not isinstance(state, dict):
raise HTTPException( raise HTTPException(
status_code=400, detail="State must be a JSON object" status_code=400, detail="State must be a JSON object"
) )
# Store in Redis power, brightness, rgb = self._parse_state(state)
# Save to Redis (even if device command fails)
self.redis_client.set(self.lighting_key, json.dumps(state)) self.redis_client.set(self.lighting_key, json.dumps(state))
await self.ensure_cync_connection() # Apply to device with retries
await self._apply_state_with_retry(power, brightness, rgb)
# Apply to Cync device logger.info(
power = state.get("power", "off") f"Successfully applied state: power={power}, brightness={brightness}, rgb={rgb}"
if power not in ["on", "off"]: )
raise HTTPException( return JSONResponse(
status_code=400, detail=f"Invalid power state: {power}" content={
"message": "Lighting state updated",
"state": state,
}
) )
brightness = state.get("brightness", 50) except HTTPException:
raise
except Exception as e:
logger.error(f"Error setting lighting state: {e}")
raise HTTPException(status_code=500, detail=str(e))
def _parse_state(self, state: dict) -> tuple[str, Optional[int], Optional[tuple]]:
"""Parse and validate lighting state from request."""
# Power
power = state.get("power", "off")
if power not in ("on", "off"):
raise HTTPException(status_code=400, detail=f"Invalid power: {power}")
# Brightness
brightness = None
if "brightness" in state:
brightness = state["brightness"]
if not isinstance(brightness, (int, float)) or not (0 <= brightness <= 100): if not isinstance(brightness, (int, float)) or not (0 <= brightness <= 100):
raise HTTPException( raise HTTPException(
status_code=400, detail=f"Invalid brightness: {brightness}" status_code=400, detail=f"Invalid brightness: {brightness}"
) )
brightness = int(brightness)
# Color
rgb = None
color = state.get("color") color = state.get("color")
if ( if (
color color
and isinstance(color, dict) and isinstance(color, dict)
and all(k in color for k in ["r", "g", "b"]) and all(k in color for k in ("r", "g", "b"))
): ):
rgb = (color["r"], color["g"], color["b"]) rgb = (color["r"], color["g"], color["b"])
elif all(k in state for k in ["red", "green", "blue"]): elif all(k in state for k in ("red", "green", "blue")):
rgb = (state["red"], state["green"], state["blue"]) rgb = (state["red"], state["green"], state["blue"])
for val, name in zip(rgb, ["red", "green", "blue"]):
if not isinstance(val, int) or not (0 <= val <= 255):
raise HTTPException(
status_code=400, detail=f"Invalid {name} color value: {val}"
)
else:
rgb = None
# Use persistent Cync API object
if not self.cync_api:
raise HTTPException(status_code=500, detail="Cync API not initialized.")
devices = self.cync_api.get_devices()
if not devices or not isinstance(devices, (list, tuple)):
raise HTTPException(
status_code=500, detail="No devices returned from Cync API."
)
light = next(
(
d
for d in devices
if hasattr(d, "name") and d.name == self.cync_device_name
),
None,
)
if not light:
raise HTTPException(
status_code=404,
detail=f"Device '{self.cync_device_name}' not found",
)
# Set power
if power == "on":
await light.turn_on()
else:
await light.turn_off()
# Set brightness
if "brightness" in state:
await light.set_brightness(brightness)
# Set color
if rgb: if rgb:
await light.set_rgb(rgb) for i, name in enumerate(("red", "green", "blue")):
if not isinstance(rgb[i], int) or not (0 <= rgb[i] <= 255):
raise HTTPException(
status_code=400, detail=f"Invalid {name}: {rgb[i]}"
)
return power, brightness, rgb
async def _apply_state_with_retry(
self,
power: str,
brightness: Optional[int],
rgb: Optional[tuple],
) -> None:
"""Apply state to device with connection retry logic."""
last_error: Optional[Exception] = None
for attempt in range(self.MAX_RETRIES):
try:
# Ensure connection (force reconnect on retries)
await self._connect(force=(attempt > 0))
# Send commands
await self._send_commands(power, brightness, rgb)
return # Success
except (AuthFailedError, TwoFactorRequiredError) as e:
last_error = e
logger.warning(f"Auth error on attempt {attempt + 1}: {e}")
self._clear_cached_token()
except TimeoutError as e:
last_error = e
logger.warning(f"Timeout on attempt {attempt + 1}: {e}")
logging.info(
"Successfully applied state to device '%s': %s",
self.cync_device_name,
state,
)
return JSONResponse(
content={
"message": "Lighting state updated and applied",
"state": state,
}
)
except HTTPException:
raise
except Exception as e: except Exception as e:
logging.error("Error setting lighting state: %s", e) last_error = e
raise HTTPException(status_code=500, detail="Internal server error") logger.warning(
f"Error on attempt {attempt + 1}: {type(e).__name__}: {e}"
)
# Wait before retry (exponential backoff)
if attempt < self.MAX_RETRIES - 1:
wait_time = 2**attempt
logger.info(f"Retrying in {wait_time}s...")
await asyncio.sleep(wait_time)
# All retries failed
logger.error(f"All {self.MAX_RETRIES} attempts failed")
raise last_error or RuntimeError("Failed to apply lighting state")
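Based on the validation in _parse_state above, a request body that POST /lighting/state accepts looks like the sketch below. The route is registered with include_in_schema=False and requires a user carrying the "lighting" or "admin" role; the bearer-token header and base URL are assumptions, since the auth scheme itself is not part of this excerpt:

import httpx

payload = {
    "power": "on",                           # "on" or "off" (defaults to "off" if omitted)
    "brightness": 75,                        # optional: 0-100
    "color": {"r": 255, "g": 140, "b": 0},   # optional: each channel 0-255
}

resp = httpx.post(
    "https://api.codey.lol/lighting/state",       # assumed base URL
    json=payload,
    headers={"Authorization": "Bearer <token>"},  # placeholder credential
    timeout=30,
)
print(resp.status_code, resp.json())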

endpoints/lyric_search.py

@@ -1,4 +1,3 @@
-import logging
 import os
 import urllib.parse
 import regex
@@ -81,9 +80,9 @@ class LyricSearch(FastAPI):
         )
 
         for endpoint, handler in self.endpoints.items():
-            times: int = 20
+            times: int = 5
             seconds: int = 2
-            rate_limit: tuple[int, int] = (2, 3)  # Default; (Times, Seconds)
+            rate_limit: tuple[int, int] = (times, seconds)  # Default; (Times, Seconds)
             _schema_include = endpoint in ["lyric/search"]
 
             if (
@@ -97,9 +96,11 @@ class LyricSearch(FastAPI):
                 handler,
                 methods=["POST"],
                 include_in_schema=_schema_include,
-                dependencies=[Depends(RateLimiter(times=times, seconds=seconds))]
-                if not endpoint == "typeahead/lyrics"
-                else None,
+                dependencies=(
+                    [Depends(RateLimiter(times=times, seconds=seconds))]
+                    if not endpoint == "typeahead/lyrics"
+                    else None
+                ),
             )
 
     async def typeahead_handler(self, data: ValidTypeAheadRequest) -> JSONResponse:
@@ -187,7 +188,7 @@ class LyricSearch(FastAPI):
             },
         )
 
-        excluded_sources: Optional[list] = data.excluded_sources
+        excluded_sources: list = data.excluded_sources or []
         aggregate_search = aggregate.Aggregate(exclude_methods=excluded_sources)
         plain_lyrics: bool = not data.lrc
         result: Optional[Union[LyricsResult, dict]] = await aggregate_search.search(
@@ -210,29 +211,93 @@ class LyricSearch(FastAPI):
         if data.sub and not data.lrc:
             seeked_found_line: Optional[int] = None
-            lyric_lines: list[str] = result["lyrics"].strip().split(" / ")
+            # Split lyrics into lines based on <br>, newline characters, or " / "
+            lyrics_text = result["lyrics"].strip()
+
+            # Determine the delimiter and split accordingly
+            if "<br>" in lyrics_text:
+                lyric_lines = lyrics_text.split("<br>")
+                separator = "<br>"
+            elif " / " in lyrics_text:
+                lyric_lines = lyrics_text.split(" / ")
+                separator = " / "
+            else:
+                lyric_lines = lyrics_text.split("\n")
+                separator = "\n"
+
+            search_term = data.sub.strip().lower()
+
+            # First try single-line matching (existing behavior)
             for i, line in enumerate(lyric_lines):
-                line = regex.sub(r"\u2064", "", line.strip())
-                if data.sub.strip().lower() in line.strip().lower():
+                # Remove any special characters and extra spaces
+                cleaned_line = regex.sub(r"\u2064", "", line.strip())
+                if search_term in cleaned_line.lower():
                     seeked_found_line = i
-                    logging.debug(
-                        "Found %s at %s, match for %s!",
-                        line,
-                        seeked_found_line,
-                        data.sub,
-                    )  # REMOVEME: DEBUG
                     break
-            if not seeked_found_line:
+
+            # If no single-line match found, try multi-line matching
+            if seeked_found_line is None:
+                # Try matching across consecutive lines (up to 5 lines for reasonable performance)
+                max_lines_to_check = min(5, len(lyric_lines))
+                for i in range(len(lyric_lines)):
+                    for line_count in range(2, max_lines_to_check + 1):
+                        if i + line_count <= len(lyric_lines):
+                            # Combine consecutive lines with space separator
+                            combined_lines = []
+                            line_positions: list[tuple[int, int]] = (
+                                []
+                            )  # Track where each line starts in combined text
+                            combined_text_parts: list[str] = []
+
+                            for j in range(line_count):
+                                if i + j < len(lyric_lines):
+                                    cleaned_line = regex.sub(
+                                        r"\u2064", "", lyric_lines[i + j].strip()
+                                    )
+                                    combined_lines.append(cleaned_line)
+                                    # Track position of this line in the combined text
+                                    line_start_pos = len(
+                                        " ".join(combined_text_parts).lower()
+                                    )
+                                    if line_start_pos > 0:
+                                        line_start_pos += (
+                                            1  # Account for space separator
+                                        )
+                                    line_positions.append((i + j, line_start_pos))
+                                    combined_text_parts.append(cleaned_line)
+
+                            combined_text = " ".join(combined_lines).lower()
+                            if search_term in combined_text:
+                                # Find which specific line the match starts in
+                                match_pos = combined_text.find(search_term)
+                                # Find the line that contains the start of the match
+                                actual_start_line = i  # Default fallback
+                                for line_idx, line_start_pos in line_positions:
+                                    if line_start_pos <= match_pos:
+                                        actual_start_line = line_idx
+                                    else:
+                                        break
+                                seeked_found_line = actual_start_line
+                                break
+                    if seeked_found_line is not None:
+                        break
+
+            if seeked_found_line is None:
                 return JSONResponse(
+                    status_code=500,
                     content={
                         "err": True,
                         "errorText": "Seek (a.k.a. subsearch) failed.",
                         "failed_seek": True,
                     },
                 )
-            result["lyrics"] = " / ".join(lyric_lines[seeked_found_line:])
+            # Only include lines strictly starting from the matched line
+            result["lyrics"] = separator.join(lyric_lines[seeked_found_line:])
 
         result["confidence"] = int(result["confidence"])
         result["time"] = f"{float(result['time']):.4f}"

endpoints/misc.py

@@ -62,6 +62,7 @@ class Misc(FastAPI):
             self.upload_activity_image,
             methods=["POST"],
             dependencies=[Depends(RateLimiter(times=10, seconds=2))],
+            include_in_schema=False,
         )
 
         logging.debug("Loading NaaS reasons")

endpoints/radio.py

@@ -47,12 +47,12 @@ class Radio(FastAPI):
         self.sr_util = SRUtil()
         self.lrclib = LRCLib()
         self.lrc_cache: Dict[str, Optional[str]] = {}
-        self.lrc_cache_locks = {}
+        self.lrc_cache_locks: Dict[str, asyncio.Lock] = defaultdict(asyncio.Lock)
         self.playlists_loaded: bool = False
         # WebSocket connection management
         self.active_connections: Dict[str, Set[WebSocket]] = {}
         # Initialize broadcast locks to prevent duplicate events
-        self.broadcast_locks = defaultdict(asyncio.Lock)
+        self.broadcast_locks: Dict[str, asyncio.Lock] = defaultdict(asyncio.Lock)
         self.endpoints: dict = {
             "radio/np": self.radio_now_playing,
             "radio/request": self.radio_request,
@@ -71,9 +71,9 @@ class Radio(FastAPI):
             if endpoint == "radio/album_art":
                 methods = ["GET"]
             app.add_api_route(
-                f"/{endpoint}", handler, methods=methods, include_in_schema=True,
+                f"/{endpoint}", handler, methods=methods, include_in_schema=False,
                 dependencies=[Depends(
-                    RateLimiter(times=25, seconds=2))] if not endpoint == "radio/np" else None,
+                    RateLimiter(times=25, seconds=2))],
             )
 
         # Add WebSocket route
@@ -83,12 +83,8 @@ class Radio(FastAPI):
         app.add_websocket_route("/radio/ws/{station}", websocket_route_handler)
 
-        app.add_event_handler("startup", self.on_start)
-
     async def on_start(self) -> None:
-        # Initialize locks in the event loop
-        self.lrc_cache_locks = defaultdict(asyncio.Lock)
-        self.broadcast_locks = defaultdict(asyncio.Lock)
+        # Load playlists for all stations
         stations = ", ".join(self.radio_util.db_queries.keys())
         logging.info("radio: Initializing stations:\n%s", stations)
         await self.radio_util.load_playlists()
@@ -307,7 +303,7 @@ class Radio(FastAPI):
             }
         )
 
-    async def album_art_handler(
+    def album_art_handler(
         self, request: Request, track_id: Optional[int] = None,
         station: Station = "main"
     ) -> Response:
@@ -364,13 +360,21 @@ class Radio(FastAPI):
         ret_obj: dict = {**self.radio_util.now_playing[station]}
         ret_obj["station"] = station
         try:
-            ret_obj["elapsed"] = int(time.time()) - ret_obj["start"]
+            ret_obj["elapsed"] = int(time.time()) - ret_obj["start"] if ret_obj["start"] else 0
         except KeyError:
             traceback.print_exc()
             ret_obj["elapsed"] = 0
         ret_obj.pop("file_path")
         return JSONResponse(content=ret_obj)
 
+    def _bg_cache_art(self, track_id: int, file_path: str):
+        try:
+            album_art = self.radio_util.get_album_art(track_id=track_id)
+            if not album_art:
+                self.radio_util.cache_album_art(track_id, file_path)
+        except Exception as e:
+            logging.error(f"Background art cache error: {e}")
+
     async def radio_get_next(
         self,
         data: ValidRadioNextRequest,
@@ -448,13 +452,7 @@ class Radio(FastAPI):
             logging.info("radio_get_next Exception: %s", str(e))
             traceback.print_exc()
 
-        try:
-            album_art = self.radio_util.get_album_art(track_id=next["id"])
-            if not album_art:
-                self.radio_util.cache_album_art(next["id"], next["file_path"])
-        except Exception as e:
-            logging.info("radio_get_next Exception: %s", str(e))
-            traceback.print_exc()
+        background_tasks.add_task(self._bg_cache_art, next["id"], next["file_path"])
 
         return JSONResponse(content=next)
@@ -496,9 +494,13 @@ class Radio(FastAPI):
             },
         )
 
-        search: bool = self.radio_util.search_db(
-            artistsong=artistsong, artist=artist, song=song, station=data.station
-        )
+        loop = asyncio.get_running_loop()
+        search: bool = await loop.run_in_executor(
+            None,
+            lambda: self.radio_util.search_db(
+                artistsong=artistsong, artist=artist, song=song, station=data.station
+            ),
+        )
         if data.alsoSkip:
             await self.radio_util._ls_skip(data.station)
         return JSONResponse(content={"result": search})
@@ -684,23 +686,23 @@ class Radio(FastAPI):
async def _send_lrc_to_client(self, websocket: WebSocket, station: str, track_data: dict): async def _send_lrc_to_client(self, websocket: WebSocket, station: str, track_data: dict):
"""Send cached LRC data to a specific client asynchronously. Only sends if LRC exists in cache.""" """Send cached LRC data to a specific client asynchronously. Only sends if LRC exists in cache."""
logging.info(f"[LRC Send] Checking cached LRC for station {station}") logging.debug(f"[LRC Send] Checking cached LRC for station {station}")
logging.info(f"[LRC Send] Current track: {track_data.get('artist', 'Unknown')} - {track_data.get('song', 'Unknown')}") logging.debug(f"[LRC Send] Current track: {track_data.get('artist', 'Unknown')} - {track_data.get('song', 'Unknown')}")
try: try:
# Only send if LRC is in cache # Only send if LRC is in cache
cached_lrc = self.lrc_cache.get(station) cached_lrc = self.lrc_cache.get(station)
logging.info(f"[LRC Send] Cache status for station {station}: {'Found' if cached_lrc else 'Not found'}") logging.debug(f"[LRC Send] Cache status for station {station}: {'Found' if cached_lrc else 'Not found'}")
if cached_lrc: if cached_lrc:
logging.info("[LRC Send] Sending cached LRC to client") logging.debug("[LRC Send] Sending cached LRC to client")
lrc_data: dict = { lrc_data: dict = {
"type": "lrc", "type": "lrc",
"data": cached_lrc, "data": cached_lrc,
"source": "Cache" "source": "Cache"
} }
await websocket.send_text(json.dumps(lrc_data)) await websocket.send_text(json.dumps(lrc_data))
logging.info("[LRC Send] Successfully sent cached LRC to client") logging.debug("[LRC Send] Successfully sent cached LRC to client")
else: else:
logging.info(f"[LRC Send] No cached LRC available for station {station}") logging.debug(f"[LRC Send] No cached LRC available for station {station}")
except Exception as e: except Exception as e:
logging.error(f"[LRC Send] Failed to send cached LRC to client: {e}") logging.error(f"[LRC Send] Failed to send cached LRC to client: {e}")
logging.error(f"[LRC Send] Error details: {traceback.format_exc()}") logging.error(f"[LRC Send] Error details: {traceback.format_exc()}")
@@ -709,34 +711,34 @@ class Radio(FastAPI):
"""Send cached LRC data to a specific client asynchronously. Only sends if valid LRC exists in cache.""" """Send cached LRC data to a specific client asynchronously. Only sends if valid LRC exists in cache."""
try: try:
track_info = f"{track_data.get('artist', 'Unknown')} - {track_data.get('song', 'Unknown')}" track_info = f"{track_data.get('artist', 'Unknown')} - {track_data.get('song', 'Unknown')}"
logging.info(f"[LRC Send {id(websocket)}] Starting LRC send for {track_info}") logging.debug(f"[LRC Send {id(websocket)}] Starting LRC send for {track_info}")
logging.info(f"[LRC Send {id(websocket)}] Cache keys before lock: {list(self.lrc_cache.keys())}") logging.debug(f"[LRC Send {id(websocket)}] Cache keys before lock: {list(self.lrc_cache.keys())}")
# Get cached LRC with lock to ensure consistency # Get cached LRC with lock to ensure consistency
async with self.lrc_cache_locks[station]: async with self.lrc_cache_locks[station]:
logging.info(f"[LRC Send {id(websocket)}] Got cache lock") logging.debug(f"[LRC Send {id(websocket)}] Got cache lock")
cached_lrc = self.lrc_cache.get(station) cached_lrc = self.lrc_cache.get(station)
logging.info(f"[LRC Send {id(websocket)}] Cache keys during lock: {list(self.lrc_cache.keys())}") logging.debug(f"[LRC Send {id(websocket)}] Cache keys during lock: {list(self.lrc_cache.keys())}")
logging.info(f"[LRC Send {id(websocket)}] Cache entry length: {len(cached_lrc) if cached_lrc else 0}") logging.debug(f"[LRC Send {id(websocket)}] Cache entry length: {len(cached_lrc) if cached_lrc else 0}")
# Only send if we have actual lyrics # Only send if we have actual lyrics
if cached_lrc: if cached_lrc:
logging.info(f"[LRC Send {id(websocket)}] Preparing to send {len(cached_lrc)} bytes of LRC") logging.debug(f"[LRC Send {id(websocket)}] Preparing to send {len(cached_lrc)} bytes of LRC")
lrc_data: dict = { lrc_data: dict = {
"type": "lrc", "type": "lrc",
"data": cached_lrc, "data": cached_lrc,
"source": "Cache" "source": "Cache"
} }
await websocket.send_text(json.dumps(lrc_data)) await websocket.send_text(json.dumps(lrc_data))
logging.info(f"[LRC Send {id(websocket)}] Successfully sent LRC") logging.debug(f"[LRC Send {id(websocket)}] Successfully sent LRC")
else: else:
logging.info(f"[LRC Send {id(websocket)}] No LRC in cache") logging.debug(f"[LRC Send {id(websocket)}] No LRC in cache")
# If we have no cache entry, let's check if a fetch is needed # If we have no cache entry, let's check if a fetch is needed
async with self.lrc_cache_locks[station]: async with self.lrc_cache_locks[station]:
logging.info(f"[LRC Send {id(websocket)}] Checking if fetch needed") logging.debug(f"[LRC Send {id(websocket)}] Checking if fetch needed")
# Only attempt fetch if we're the first to notice missing lyrics # Only attempt fetch if we're the first to notice missing lyrics
if station not in self.lrc_cache: if station not in self.lrc_cache:
logging.info(f"[LRC Send {id(websocket)}] Initiating LRC fetch") logging.debug(f"[LRC Send {id(websocket)}] Initiating LRC fetch")
lrc, source = await self._fetch_and_cache_lrc(station, track_data) lrc, source = await self._fetch_and_cache_lrc(station, track_data)
if lrc: if lrc:
self.lrc_cache[station] = lrc self.lrc_cache[station] = lrc
@@ -746,7 +748,7 @@ class Radio(FastAPI):
"source": source "source": source
} }
await websocket.send_text(json.dumps(lrc_data)) await websocket.send_text(json.dumps(lrc_data))
logging.info(f"[LRC Send {id(websocket)}] Sent newly fetched LRC") logging.debug(f"[LRC Send {id(websocket)}] Sent newly fetched LRC")
except Exception as e: except Exception as e:
logging.error(f"[LRC Send {id(websocket)}] Failed: {e}") logging.error(f"[LRC Send {id(websocket)}] Failed: {e}")
logging.error(f"[LRC Send {id(websocket)}] Error details: {traceback.format_exc()}") logging.error(f"[LRC Send {id(websocket)}] Error details: {traceback.format_exc()}")
@@ -759,41 +761,41 @@ class Radio(FastAPI):
             duration: Optional[int] = track_data.get("duration")
             if not (artist and title):
-                logging.info("[LRC] Missing artist or title, skipping fetch")
+                logging.debug("[LRC] Missing artist or title, skipping fetch")
                 return None, "None"
 
-            logging.info(f"[LRC] Starting fetch for {station}: {artist} - {title}")
+            logging.debug(f"[LRC] Starting fetch for {station}: {artist} - {title}")
 
-            # Try SR first with timeout
+            # Try LRCLib first with timeout
             try:
-                async with asyncio.timeout(5.0):  # 5 second timeout
-                    lrc = await self.sr_util.get_lrc_by_artist_song(
-                        artist, title, duration=duration
-                    )
-                    if lrc:
-                        logging.info("[LRC] Found from SR")
-                        return lrc, "SR"
-            except asyncio.TimeoutError:
-                logging.warning("[LRC] SR fetch timed out")
-            except Exception as e:
-                logging.error(f"[LRC] SR fetch error: {e}")
-            logging.info("[LRC] SR fetch completed without results")
-
-            # Try LRCLib as fallback with timeout
-            try:
-                async with asyncio.timeout(5.0):  # 5 second timeout
-                    logging.info("[LRC] Trying LRCLib fallback")
-                    lrclib_result = await self.lrclib.search(artist, title, plain=False)
+                async with asyncio.timeout(10.0):  # 10 second timeout
+                    logging.debug("[LRC] Trying LRCLib")
+                    lrclib_result = await self.lrclib.search(artist, title, plain=False, raw=True)
                     if lrclib_result and lrclib_result.lyrics and isinstance(lrclib_result.lyrics, str):
-                        logging.info("[LRC] Found from LRCLib")
+                        logging.debug("[LRC] Found from LRCLib")
                         return lrclib_result.lyrics, "LRCLib"
             except asyncio.TimeoutError:
                 logging.warning("[LRC] LRCLib fetch timed out")
             except Exception as e:
                 logging.error(f"[LRC] LRCLib fetch error: {e}")
-            logging.info("[LRC] No lyrics found from any source")
+            logging.debug("[LRC] LRCLib fetch completed without results")
+
+            # Try SR as fallback with timeout
+            try:
+                async with asyncio.timeout(10.0):  # 10 second timeout
+                    lrc = await self.sr_util.get_lrc_by_artist_song(
+                        artist, title, duration=duration
+                    )
+                    if lrc:
+                        logging.debug("[LRC] Found from SR")
+                        return lrc, "SR"
+            except asyncio.TimeoutError:
+                logging.warning("[LRC] SR fetch timed out")
+            except Exception as e:
+                logging.error(f"[LRC] SR fetch error: {e}")
+            logging.debug("[LRC] No lyrics found from any source")
             return None, "None"
         except Exception as e:
             logging.error(f"[LRC] Error fetching lyrics: {e}")
@@ -804,11 +806,21 @@ class Radio(FastAPI):
        try:
            async with self.lrc_cache_locks[station]:
                self.lrc_cache.pop(station, None)
            lrc, source = await self._fetch_and_cache_lrc(station, track_json)
+            async with self.lrc_cache_locks[station]:
+                # Verify we are still on the same song
+                current_track = self.radio_util.now_playing.get(station)
+                if current_track and current_track.get("uuid") == track_json.get("uuid"):
                    if lrc:
                        self.lrc_cache[station] = lrc
                    else:
                        self.lrc_cache[station] = None
+                else:
+                    logging.info(f"[LRC] Discarding fetch result for {station} as track changed.")
+                    return
            if lrc:
                await self.broadcast_lrc(station, lrc, source)
        except Exception as e:
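The hunk above fetches lyrics outside the lock and then re-checks the now-playing UUID under the lock before caching, so a slow fetch can never clobber the cache for a track that has already changed. A stripped-down sketch of that pattern (the helper names below are placeholders, not the real Radio internals):

import asyncio
from collections import defaultdict

lrc_cache: dict[str, str | None] = {}
lrc_cache_locks: defaultdict[str, asyncio.Lock] = defaultdict(asyncio.Lock)
now_playing: dict[str, dict] = {}  # station -> {"uuid": ..., "artist": ..., "title": ...}

async def fetch_lrc(track: dict) -> str | None:
    """Placeholder for the slow LRCLib/SR lookup."""
    await asyncio.sleep(2)
    return f"[00:00.00] lyrics for {track['title']}"

async def refresh_lrc(station: str, track: dict) -> None:
    # Invalidate the old entry quickly, holding the lock only briefly.
    async with lrc_cache_locks[station]:
        lrc_cache.pop(station, None)

    # Do the slow fetch without holding the lock.
    lrc = await fetch_lrc(track)

    # Re-acquire the lock and only store the result if the station is
    # still playing the same track; otherwise discard the stale fetch.
    async with lrc_cache_locks[station]:
        current = now_playing.get(station)
        if current and current.get("uuid") == track.get("uuid"):
            lrc_cache[station] = lrc
        else:
            return  # track changed mid-fetch; drop the result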


@@ -1,11 +1,11 @@
 import os
 import random
 from typing import LiteralString, Optional, Union
-import aiosqlite as sqlite3
 from fastapi import FastAPI, Depends
 from fastapi_throttle import RateLimiter
 from fastapi.responses import JSONResponse
 from .constructors import RandMsgRequest
+import shared  # Use shared SQLite pool

 class RandMsg(FastAPI):
@@ -103,11 +103,11 @@ class RandMsg(FastAPI):
            }
        )
-        async with sqlite3.connect(database=randmsg_db_path, timeout=1) as _db:
-            async with await _db.execute(db_query) as _cursor:
-                if not isinstance(_cursor, sqlite3.Cursor):
-                    return JSONResponse(content={"err": True})
-                result: Optional[sqlite3.Row] = await _cursor.fetchone()
+        # Use shared SQLite pool for connection reuse
+        sqlite_pool = shared.get_sqlite_pool()
+        async with sqlite_pool.connection(randmsg_db_path, timeout=1) as _db:
+            async with _db.execute(db_query) as _cursor:
+                result = await _cursor.fetchone()
                if not result:
                    return JSONResponse(content={"err": True})
                (result_id, result_msg) = result


@@ -1,5 +1,5 @@
 import logging
-from fastapi import FastAPI, Request, Response, Depends
+from fastapi import FastAPI, Request, Response, Depends, HTTPException
 from fastapi_throttle import RateLimiter
 from fastapi.responses import JSONResponse
 from utils.sr_wrapper import SRUtil
@@ -20,6 +20,9 @@ from lyric_search.sources import private
 from typing import Literal
 from pydantic import BaseModel

+logger = logging.getLogger()
+logger.setLevel(logging.DEBUG)
+
 class ValidBulkFetchRequest(BaseModel):
     track_ids: list[int]
@@ -60,22 +63,46 @@ class RIP(FastAPI):
            "trip/bulk_fetch": self.bulk_fetch_handler,
            "trip/job/{job_id:path}": self.job_status_handler,
            "trip/jobs/list": self.job_list_handler,
+            "trip/auth/start": self.tidal_auth_start_handler,
+            "trip/auth/check": self.tidal_auth_check_handler,
        }
+        # Store pending device codes for auth flow
+        self._pending_device_codes: dict[str, str] = {}
        for endpoint, handler in self.endpoints.items():
            dependencies = [Depends(RateLimiter(times=8, seconds=2))]
            app.add_api_route(
                f"/{endpoint}",
                handler,
-                methods=["GET"] if endpoint != "trip/bulk_fetch" else ["POST"],
+                methods=(
+                    ["GET"]
+                    if endpoint not in ("trip/bulk_fetch", "trip/auth/check")
+                    else ["POST"]
+                ),
                include_in_schema=False,
                dependencies=dependencies,
            )
async def startup(self) -> None:
"""Initialize Tidal keepalive. Call this from your app's lifespan context manager."""
try:
await self.trip_util.start_keepalive()
logger.info("Tidal keepalive task started successfully")
except Exception as e:
logger.error(f"Failed to start Tidal keepalive task: {e}")
async def shutdown(self) -> None:
"""Stop Tidal keepalive. Call this from your app's lifespan context manager."""
try:
await self.trip_util.stop_keepalive()
logger.info("Tidal keepalive task stopped successfully")
except Exception as e:
logger.error(f"Error stopping Tidal keepalive task: {e}")
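The startup()/shutdown() docstrings above expect to be driven from the application's lifespan. A minimal wiring sketch, assuming the real entry point constructs RIP after the app exists (the constructor arguments are illustrative, not the project's actual ones):

from contextlib import asynccontextmanager
from fastapi import FastAPI

import shared  # the new connection-pool module added in this change

rip = None  # set below once the FastAPI app exists

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Warm the shared Redis/aiohttp/SQLite pools before serving traffic.
    await shared.startup()
    if rip is not None:
        await rip.startup()      # starts the Tidal keepalive task
    try:
        yield
    finally:
        if rip is not None:
            await rip.shutdown() # stops the keepalive task
        await shared.shutdown()

app = FastAPI(lifespan=lifespan)
# rip = RIP(app, util, constants)  # hypothetical constructor args; see endpoints/rip.py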
    def _format_job(self, job: Job):
        """
        Helper to normalize job data into JSON.

        Parameters:
        - job (Job): The job object to format.
@@ -108,9 +135,11 @@ class RIP(FastAPI):
            "started_at": job.started_at,
            "ended_at": job.ended_at,
            "progress": progress,
-            "tracks": f"{succeeded_tracks} / {tracks_in}"
-            if isinstance(tracks_in, int)
-            else tracks_out,
+            "tracks": (
+                f"{succeeded_tracks} / {tracks_in}"
+                if isinstance(tracks_in, int)
+                else tracks_out
+            ),
            "target": job.meta.get("target"),
            "quality": job.meta.get("quality", "Unknown"),
        }
@@ -129,6 +158,8 @@
        Returns:
        - **Response**: JSON response with artists or 404.
        """
+        if "trip" not in user.get("roles", []) and "admin" not in user.get("roles", []):
+            raise HTTPException(status_code=403, detail="Insufficient permissions")
        # support optional grouping to return one primary per display name
        # with `alternatives` for disambiguation (use ?group=true)
        group = bool(request.query_params.get("group", False))
@@ -151,6 +182,8 @@
        Returns:
        - **Response**: JSON response with albums or 404.
        """
+        if "trip" not in user.get("roles", []) and "admin" not in user.get("roles", []):
+            raise HTTPException(status_code=403, detail="Insufficient permissions")
        albums = await self.trip_util.get_albums_by_artist_id(artist_id)
        if not albums:
            return Response(status_code=404, content="Not found")
@@ -175,6 +208,8 @@
        Returns:
        - **Response**: JSON response with tracks or 404.
        """
+        if "trip" not in user.get("roles", []) and "admin" not in user.get("roles", []):
+            raise HTTPException(status_code=403, detail="Insufficient permissions")
        tracks = await self.trip_util.get_tracks_by_album_id(album_id, quality)
        if not tracks:
            return Response(status_code=404, content="Not Found")
@@ -195,6 +230,8 @@
        Returns:
        - **Response**: JSON response with tracks or 404.
        """
+        if "trip" not in user.get("roles", []) and "admin" not in user.get("roles", []):
+            raise HTTPException(status_code=403, detail="Insufficient permissions")
        logging.critical("Searching for tracks by artist: %s, song: %s", artist, song)
        tracks = await self.trip_util.get_tracks_by_artist_song(artist, song)
        if not tracks:
@@ -220,6 +257,8 @@
        Returns:
        - **Response**: JSON response with stream URL or 404.
        """
+        if "trip" not in user.get("roles", []) and "admin" not in user.get("roles", []):
+            raise HTTPException(status_code=403, detail="Insufficient permissions")
        track = await self.trip_util.get_stream_url_by_track_id(track_id, quality)
        if not track:
            return Response(status_code=404, content="Not found")
@@ -242,6 +281,8 @@
        Returns:
        - **Response**: JSON response with job info or error.
        """
+        if "trip" not in user.get("roles", []) and "admin" not in user.get("roles", []):
+            raise HTTPException(status_code=403, detail="Insufficient permissions")
        if not data or not data.track_ids or not data.target:
            return JSONResponse(
                content={
@@ -293,7 +334,8 @@
        Returns:
        - **JSONResponse**: Job status and result or error.
        """
+        if "trip" not in user.get("roles", []) and "admin" not in user.get("roles", []):
+            raise HTTPException(status_code=403, detail="Insufficient permissions")
        job = None
        try:
            # Try direct fetch first
@@ -331,6 +373,8 @@
        Returns:
        - **JSONResponse**: List of jobs.
        """
+        if "trip" not in user.get("roles", []) and "admin" not in user.get("roles", []):
+            raise HTTPException(status_code=403, detail="Insufficient permissions")
        jobs_info = []
        seen = set()
@@ -382,3 +426,90 @@
        jobs_info.sort(key=job_sort_key, reverse=True)
        return {"jobs": jobs_info}
async def tidal_auth_start_handler(
self, request: Request, user=Depends(get_current_user)
):
"""
Start Tidal device authorization flow.
Returns a URL that the user must visit to authorize the application.
After visiting the URL and authorizing, call /trip/auth/check to complete.
Returns:
- **JSONResponse**: Contains device_code and verification_url.
"""
try:
if "trip" not in user.get("roles", []) and "admin" not in user.get(
"roles", []
):
raise HTTPException(status_code=403, detail="Insufficient permissions")
device_code, verification_url = await self.trip_util.start_device_auth()
# Store device code for this session
self._pending_device_codes[user.get("sub", "default")] = device_code
return JSONResponse(
content={
"device_code": device_code,
"verification_url": verification_url,
"message": "Visit the URL to authorize, then call /trip/auth/check",
}
)
except Exception as e:
logger.error("Tidal auth start failed: %s", e)
return JSONResponse(
content={"error": str(e)},
status_code=500,
)
async def tidal_auth_check_handler(
self, request: Request, user=Depends(get_current_user)
):
"""
Check if Tidal device authorization is complete.
Call this after the user has visited the verification URL.
Returns:
- **JSONResponse**: Contains success status and message.
"""
if "trip" not in user.get("roles", []) and "admin" not in user.get("roles", []):
raise HTTPException(status_code=403, detail="Insufficient permissions")
device_code = self._pending_device_codes.get(user.get("sub", "default"))
if not device_code:
return JSONResponse(
content={
"error": "No pending authorization. Call /trip/auth/start first."
},
status_code=400,
)
try:
success, error = await self.trip_util.check_device_auth(device_code)
if success:
# Clear the pending code
self._pending_device_codes.pop(user.get("sub", "default"), None)
return JSONResponse(
content={
"success": True,
"message": "Tidal authorization complete!",
}
)
elif error == "pending":
return JSONResponse(
content={
"success": False,
"pending": True,
"message": "Waiting for user to authorize...",
}
)
else:
return JSONResponse(
content={"success": False, "error": error},
status_code=400,
)
except Exception as e:
logger.error("Tidal auth check failed: %s", e)
return JSONResponse(
content={"error": str(e)},
status_code=500,
)
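From a client's perspective the device flow is two calls: GET /trip/auth/start to obtain the verification URL, then POST /trip/auth/check until it stops reporting pending. A hedged polling sketch (the base URL and bearer token are placeholders):

import asyncio
import aiohttp

API = "https://example.org"                  # your API base URL
HEADERS = {"Authorization": "Bearer <jwt>"}  # auth scheme assumed

async def authorize_tidal() -> None:
    async with aiohttp.ClientSession(headers=HEADERS) as session:
        # 1. Start the device flow and show the user where to go.
        async with session.get(f"{API}/trip/auth/start") as resp:
            start = await resp.json()
        print("Visit:", start["verification_url"])

        # 2. Poll until the user has authorized (or the flow fails).
        while True:
            async with session.post(f"{API}/trip/auth/check") as resp:
                status = await resp.json()
            if status.get("success"):
                print("Tidal authorization complete")
                return
            if not status.get("pending"):
                raise RuntimeError(status.get("error", "authorization failed"))
            await asyncio.sleep(5)

# asyncio.run(authorize_tidal())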


@@ -1,171 +0,0 @@
import os
import aiosqlite as sqlite3
from fastapi import FastAPI, Depends, Response
from fastapi_throttle import RateLimiter
from fastapi.responses import JSONResponse
from typing import Optional, LiteralString, Union, Iterable, cast
from .constructors import ValidShowEpisodeLineRequest, ValidShowEpisodeListRequest
class Transcriptions(FastAPI):
"""
Transcription Endpoints
"""
def __init__(self, app: FastAPI, util, constants) -> None:
"""Initialize Transcriptions endpoints."""
self.app: FastAPI = app
self.util = util
self.constants = constants
self.endpoints: dict = {
"transcriptions/get_episodes": self.get_episodes_handler,
"transcriptions/get_episode_lines": self.get_episode_lines_handler,
# tbd
}
for endpoint, handler in self.endpoints.items():
app.add_api_route(
f"/{endpoint}",
handler,
methods=["POST"],
include_in_schema=True,
dependencies=[Depends(RateLimiter(times=2, seconds=2))],
)
async def get_episodes_handler(
self, data: ValidShowEpisodeListRequest
) -> JSONResponse:
"""
Get list of episodes by show ID.
Parameters:
- **data** (ValidShowEpisodeListRequest): Request containing show ID.
Returns:
- **JSONResponse**: Contains a list of episodes.
"""
show_id: int = data.s
db_path: Optional[Union[str, LiteralString]] = None
db_query: Optional[str] = None
show_title: Optional[str] = None
if not isinstance(show_id, int):
return JSONResponse(
status_code=500,
content={
"err": True,
"errorText": "Invalid request",
},
)
show_id = int(show_id)
if not (str(show_id).isnumeric()) or show_id not in [0, 1, 2]:
return JSONResponse(
status_code=500,
content={
"err": True,
"errorText": "Show not found.",
},
)
match show_id:
case 0:
db_path = os.path.join("/usr/local/share", "sqlite_dbs", "sp.db")
db_query = """SELECT DISTINCT(("S" || Season || "E" || Episode || " " || Title)), ID FROM SP_DAT ORDER BY Season, Episode"""
show_title = "South Park"
case 1:
db_path = os.path.join("/usr/local/share", "sqlite_dbs", "futur.db")
db_query = """SELECT DISTINCT(("S" || EP_S || "E" || EP_EP || " " || EP_TITLE)), EP_ID FROM clean_dialog ORDER BY EP_S, EP_EP"""
show_title = "Futurama"
case 2:
db_path = os.path.join("/usr/local/share", "sqlite_dbs", "parks.db")
db_query = """SELECT DISTINCT(("S" || EP_S || "E" || EP_EP || " " || EP_TITLE)), EP_ID FROM clean_dialog ORDER BY EP_S, EP_EP"""
show_title = "Parks And Rec"
case _:
return JSONResponse(
status_code=500,
content={
"err": True,
"errorText": "Unknown error.",
},
)
async with sqlite3.connect(database=db_path, timeout=1) as _db:
async with await _db.execute(db_query) as _cursor:
result: Iterable[sqlite3.Row] = await _cursor.fetchall()
return JSONResponse(
content={
"show_title": show_title,
"episodes": [
{
"id": item[1],
"ep_friendly": item[0],
}
for item in result
],
}
)
async def get_episode_lines_handler(
self, data: ValidShowEpisodeLineRequest
) -> Response:
"""
Get lines for a particular episode.
Parameters:
- **data** (ValidShowEpisodeLineRequest): Request containing show and episode ID.
Returns:
- **Response**: Episode lines.
"""
show_id: int = int(data.s)
episode_id: int = int(data.e)
match show_id:
case 0:
db_path: Union[str, LiteralString] = os.path.join(
"/usr/local/share", "sqlite_dbs", "sp.db"
)
db_query: str = """SELECT ("S" || Season || "E" || Episode || " " || Title), Character, Line FROM SP_DAT WHERE ID = ?"""
case 1:
db_path = os.path.join("/usr/local/share", "sqlite_dbs", "futur.db")
db_query = """SELECT ("S" || EP_S || "E" || EP_EP || " " || EP_TITLE || "<br><em>Opener: " || EP_OPENER || "</em>"), EP_LINE_SPEAKER, EP_LINE FROM clean_dialog WHERE EP_ID = ? ORDER BY LINE_ID ASC"""
case 2:
db_path = os.path.join("/usr/local/share", "sqlite_dbs", "parks.db")
db_query = """SELECT ("S" || EP_S || "E" || EP_EP || " " || EP_TITLE), EP_LINE_SPEAKER, EP_LINE FROM clean_dialog WHERE EP_ID = ? ORDER BY id ASC"""
case _:
return JSONResponse(
status_code=500,
content={
"err": True,
"errorText": "Unknown error",
},
)
async with sqlite3.connect(database=db_path, timeout=1) as _db:
params: tuple = (episode_id,)
async with await _db.execute(db_query, params) as _cursor:
result: Iterable[sqlite3.Row] = await _cursor.fetchall()
result_list = cast(list[sqlite3.Row], result)
if not result_list:
return Response(
status_code=404,
content="Not found",
)
first_result: sqlite3.Row = result_list[0]
return JSONResponse(
content={
"episode_id": episode_id,
"ep_friendly": first_result[0].strip(),
"lines": [
{
"speaker": item[1].strip(),
"line": item[2].strip(),
}
for item in result
],
}
)


@@ -3,6 +3,7 @@ from fastapi import FastAPI, Depends
 from fastapi.responses import JSONResponse
 from fastapi_throttle import RateLimiter
 from typing import Optional, Union
+from utils.yt_utils import sign_video_id
 from .constructors import ValidYTSearchRequest
@@ -57,6 +58,7 @@ class YT(FastAPI):
        return JSONResponse(
            content={
                "video_id": yt_video_id,
+                "video_token": sign_video_id(yt_video_id) if yt_video_id else None,
                "extras": yts_res[0],
            }
        )
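utils/yt_utils.py is not part of this diff, so the signer below is only a plausible shape: an HMAC-SHA256 tag over the video ID with a server-side secret. The key source, algorithm, and encoding are assumptions, not the project's actual implementation:

# Hypothetical sketch of a video-ID signer; the real utils/yt_utils.py may differ.
import hashlib
import hmac
import os

_SECRET = os.environ.get("YT_TOKEN_SECRET", "change-me").encode()

def sign_video_id(video_id: str) -> str:
    """Return a hex HMAC-SHA256 tag the frontend can send back for verification."""
    return hmac.new(_SECRET, video_id.encode(), hashlib.sha256).hexdigest()

def verify_video_token(video_id: str, token: str) -> bool:
    """Constant-time comparison of a presented token against the expected tag."""
    return hmac.compare_digest(sign_video_id(video_id), token)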

lyric_search/models.py (new file, 114 lines)

@@ -0,0 +1,114 @@
"""
Database models for LRCLib lyrics cache.
"""
import os
import urllib.parse
from typing import Type, AsyncGenerator
from sqlalchemy import (
Column,
Integer,
String,
Float,
Boolean,
DateTime,
ForeignKey,
UniqueConstraint,
)
from sqlalchemy.orm import relationship, foreign
from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine, AsyncSession
from sqlalchemy.ext.asyncio import async_sessionmaker
from dotenv import load_dotenv
load_dotenv()
Base: Type[DeclarativeMeta] = declarative_base()
class Tracks(Base): # type: ignore
"""Tracks table - stores track metadata."""
__tablename__ = "tracks"
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(String, index=True)
name_lower = Column(String, index=True)
artist_name = Column(String, index=True)
artist_name_lower = Column(String, index=True)
album_name = Column(String)
album_name_lower = Column(String, index=True)
duration = Column(Float, index=True)
last_lyrics_id = Column(Integer, ForeignKey("lyrics.id"), index=True)
created_at = Column(DateTime)
updated_at = Column(DateTime)
# Relationships
lyrics = relationship(
"Lyrics",
back_populates="track",
foreign_keys=[last_lyrics_id],
primaryjoin="Tracks.id == foreign(Lyrics.track_id)",
)
# Constraints
__table_args__ = (
UniqueConstraint(
"name_lower",
"artist_name_lower",
"album_name_lower",
"duration",
name="uq_tracks",
),
)
class Lyrics(Base): # type: ignore
"""Lyrics table - stores lyrics content."""
__tablename__ = "lyrics"
id = Column(Integer, primary_key=True, autoincrement=True)
plain_lyrics = Column(String)
synced_lyrics = Column(String)
track_id = Column(Integer, ForeignKey("tracks.id"), index=True)
has_plain_lyrics = Column(Boolean, index=True)
has_synced_lyrics = Column(Boolean, index=True)
instrumental = Column(Boolean)
source = Column(String, index=True)
created_at = Column(DateTime, index=True)
updated_at = Column(DateTime)
# Relationships
track = relationship(
"Tracks",
back_populates="lyrics",
foreign_keys=[track_id],
primaryjoin=(Tracks.id == foreign(track_id)),
remote_side=Tracks.id,
)
# PostgreSQL connection - using environment variables
POSTGRES_HOST = os.getenv("POSTGRES_HOST", "localhost")
POSTGRES_PORT = os.getenv("POSTGRES_PORT", "5432")
POSTGRES_DB = os.getenv("POSTGRES_DB", "lrclib")
POSTGRES_USER = os.getenv("POSTGRES_USER", "api")
POSTGRES_PASSWORD = os.getenv("POSTGRES_PASSWORD", "")
# URL-encode the password to handle special characters
encoded_password = urllib.parse.quote_plus(POSTGRES_PASSWORD)
DATABASE_URL: str = (
f"postgresql+asyncpg://{POSTGRES_USER}:{encoded_password}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}"
)
async_engine: AsyncEngine = create_async_engine(
DATABASE_URL, pool_size=20, max_overflow=10, pool_pre_ping=True, echo=False
)
AsyncSessionLocal = async_sessionmaker(bind=async_engine, expire_on_commit=False)
async def get_async_db():
"""Get async database session."""
async with AsyncSessionLocal() as session:
yield session
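The session factory and models above are consumed the same way the reworked LRCLib source does: open an AsyncSessionLocal, join Tracks to Lyrics, and filter on the *_lower columns. A minimal query sketch (the artist/song values are placeholders):

from sqlalchemy import select
from lyric_search.models import Tracks, Lyrics, AsyncSessionLocal

async def lookup(artist: str, song: str):
    async with AsyncSessionLocal() as session:
        stmt = (
            select(Tracks.artist_name, Tracks.name, Lyrics.plain_lyrics)
            .join(Lyrics, Tracks.id == Lyrics.track_id)
            .where(
                Tracks.artist_name_lower == artist.strip().lower(),
                Tracks.name_lower == song.strip().lower(),
            )
            .limit(1)
        )
        return (await session.execute(stmt)).first()

# import asyncio; asyncio.run(lookup("artist", "song"))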


@@ -14,9 +14,7 @@ class Aggregate:
    Aggregate all source methods
    """

-    def __init__(self, exclude_methods=None) -> None:
-        if not exclude_methods:
-            exclude_methods: list = []
+    def __init__(self, exclude_methods: list = []) -> None:
        self.exclude_methods = exclude_methods
        self.redis_cache = redis_cache.RedisCache()
        self.notifier = notifier.DiscordNotifier()
@@ -70,14 +68,14 @@ class Aggregate:
            if plain:  # do not record LRC fails
                try:
                    await self.redis_cache.increment_found_count("failed")
-                    self.notifier.send(
-                        "WARNING",
-                        f"Could not find {artist} - {song} via queried sources.",
-                    )
+                    # await self.notifier.send(
+                    #     "WARNING",
+                    #     f"Could not find {artist} - {song} via queried sources.",
+                    # )
                except Exception as e:
                    traceback.print_exc()
                    logging.info("Could not increment redis failed counter: %s", str(e))
-                    self.notifier.send(
+                    await self.notifier.send(
                        f"ERROR @ {__file__.rsplit('/', maxsplit=1)[-1]}",
                        f"Could not increment redis failed counter: {str(e)}",
                    )


@@ -91,8 +91,10 @@ class Cache:
        logging.debug(
            "Checking whether %s is already stored", artistsong.replace("\n", " - ")
        )
-        check_query: str = 'SELECT id, artist, song FROM lyrics WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))\
-            <= 410 ORDER BY editdist3((lower(artist) || " " || lower(song)), ?) ASC LIMIT 1'
+        check_query: str = (
+            'SELECT id, artist, song FROM lyrics WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))\
+            <= 410 ORDER BY editdist3((lower(artist) || " " || lower(song)), ?) ASC LIMIT 1'
+        )
        artistsong_split = artistsong.split("\n", maxsplit=1)
        artist = artistsong_split[0].lower()
        song = artistsong_split[1].lower()
@@ -213,10 +215,8 @@ class Cache:
            lyrics = regex.sub(r"(<br>|\n|\r\n)", " / ", lyr_result.lyrics.strip())
            lyrics = regex.sub(r"\s{2,}", " ", lyrics)
-            insert_query = (
-                "INSERT INTO lyrics (src, date_retrieved, artist, song, artistsong, confidence, lyrics)\
-                VALUES(?, ?, ?, ?, ?, ?, ?)"
-            )
+            insert_query = "INSERT INTO lyrics (src, date_retrieved, artist, song, artistsong, confidence, lyrics)\
+                VALUES(?, ?, ?, ?, ?, ?, ?)"
            params = (
                lyr_result.src,
                time.time(),
@@ -260,8 +260,10 @@ class Cache:
        if artist == "!" and song == "!":
            random_search = True
-            search_query: str = "SELECT id, artist, song, lyrics, src, confidence\
-                FROM lyrics ORDER BY RANDOM() LIMIT 1"
+            search_query: str = (
+                "SELECT id, artist, song, lyrics, src, confidence\
+                FROM lyrics ORDER BY RANDOM() LIMIT 1"
+            )
        logging.info("Searching %s - %s on %s", artist, song, self.label)
@@ -320,9 +322,11 @@ class Cache:
                    self.cache_pre_query
                ) as _db_cursor:
                    if not random_search:
-                        search_query: str = 'SELECT id, artist, song, lyrics, src, confidence FROM lyrics\
-                            WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))\
-                            <= 410 ORDER BY editdist3((lower(artist) || " " || lower(song)), ?) ASC LIMIT 10'
+                        search_query: str = (
+                            'SELECT id, artist, song, lyrics, src, confidence FROM lyrics\
+                            WHERE editdist3((lower(artist) || " " || lower(song)), (? || " " || ?))\
+                            <= 410 ORDER BY editdist3((lower(artist) || " " || lower(song)), ?) ASC LIMIT 10'
+                        )
                        search_params: tuple = (
                            artist.strip(),
                            song.strip(),
@@ -343,15 +347,19 @@ class Cache:
                )
            else:
                best_match = (result_tracks[0], 100)
-            if not best_match or confidence < 90:
+            if not best_match:
                return None
            (candidate, confidence) = best_match
+            if confidence < 90:
+                return None
            logging.info("Result found on %s", self.label)
            matched = self.get_matched(
                sqlite_rows=results,
                matched_candidate=candidate,
                confidence=confidence,
            )
+            if matched is None:
+                return None
            time_end: float = time.time()
            time_diff: float = time_end - time_start
            matched.time = time_diff


@@ -45,11 +45,11 @@ class Genius:
            Optional[LyricsResult]: The result, if found - None otherwise.
        """
        try:
-            artist = artist.strip().lower()
-            song = song.strip().lower()
+            artist_name = artist.strip().lower()
+            song_name = song.strip().lower()
            time_start: float = time.time()
-            logging.info("Searching %s - %s on %s", artist, song, self.label)
-            search_term: str = f"{artist}%20{song}"
+            logging.info("Searching %s - %s on %s", artist_name, song_name, self.label)
+            search_term: str = f"{artist_name}%20{song_name}"
            returned_lyrics: str = ""
            async with ClientSession() as client:
                async with client.get(
@@ -100,10 +100,13 @@ class Genius:
                        )
                        for returned in possible_matches
                    ]
-                    searched: str = f"{artist} - {song}"
-                    best_match: tuple = self.matcher.find_best_match(
+                    searched: str = f"{artist_name} - {song_name}"
+                    best_match: Optional[tuple] = self.matcher.find_best_match(
                        input_track=searched, candidate_tracks=to_scrape
                    )
+                    if not best_match:
+                        raise InvalidGeniusResponseException("No matching result")
                    logging.info("To scrape: %s", to_scrape)
                    ((scrape_stub, track), confidence) = best_match
                    scrape_url: str = f"{self.genius_url}{scrape_stub[1:]}"
@@ -157,8 +160,8 @@ class Genius:
                        returned_lyrics: str = self.datautils.scrub_lyrics(
                            returned_lyrics
                        )
-                        artist: str = track.split(" - ", maxsplit=1)[0]
-                        song: str = track.split(" - ", maxsplit=1)[1]
+                        artist = track.split(" - ", maxsplit=1)[0]
+                        song = track.split(" - ", maxsplit=1)[1]
                        logging.info("Result found on %s", self.label)
                        time_end: float = time.time()
                        time_diff: float = time_end - time_start


@@ -1,45 +1,41 @@
 import time
-import traceback
 import logging
-from typing import Optional, Union
-from aiohttp import ClientTimeout, ClientSession
-from tenacity import retry, stop_after_attempt, wait_fixed
+from typing import Optional
+from sqlalchemy.future import select
 from lyric_search import utils
 from lyric_search.constructors import LyricsResult
-from . import common, cache, redis_cache
-from lyric_search.constructors import InvalidLRCLibResponseException
+from lyric_search.models import Tracks, Lyrics, AsyncSessionLocal
+from . import redis_cache

 logger = logging.getLogger()
 log_level = logging.getLevelName(logger.level)

 class LRCLib:
-    """LRCLib Search Module"""
+    """LRCLib Search Module - Local PostgreSQL Database"""

     def __init__(self) -> None:
-        self.label: str = "LRCLib"
-        self.lrclib_url: str = "https://lrclib.net/api/search"
-        self.headers: dict = common.SCRAPE_HEADERS
-        self.timeout = ClientTimeout(connect=3, sock_read=8)
+        self.label: str = "LRCLib-Cache"
         self.datautils = utils.DataUtils()
         self.matcher = utils.TrackMatcher()
-        self.cache = cache.Cache()
         self.redis_cache = redis_cache.RedisCache()

-    @retry(stop=stop_after_attempt(2), wait=wait_fixed(0.5))
     async def search(
         self,
         artist: str,
         song: str,
         plain: Optional[bool] = True,
         duration: Optional[int] = None,
+        raw: bool = False,
     ) -> Optional[LyricsResult]:
         """
-        LRCLib Search
+        LRCLib Local Database Search

         Args:
             artist (str): the artist to search
             song (str): the song to search
+            plain (bool): return plain lyrics (True) or synced lyrics (False)
+            duration (int): optional track duration for better matching
+            raw (bool): return raw LRC string instead of parsed object (only for synced)

         Returns:
             Optional[LyricsResult]: The result, if found - None otherwise.
         """
@@ -47,128 +43,105 @@ class LRCLib:
artist = artist.strip().lower() artist = artist.strip().lower()
song = song.strip().lower() song = song.strip().lower()
time_start: float = time.time() time_start: float = time.time()
lrc_obj: Optional[list[dict]] = None
logging.info("Searching %s - %s on %s", artist, song, self.label) logging.info("Searching %s - %s on %s", artist, song, self.label)
input_track: str = f"{artist} - {song}" async with AsyncSessionLocal() as db:
returned_lyrics: str = ""
async with ClientSession() as client:
async with await client.get(
self.lrclib_url,
params={
"artist_name": artist,
"track_name": song,
**({"duration": duration} if duration else {}),
},
timeout=self.timeout,
headers=self.headers,
) as request:
request.raise_for_status()
text: Optional[str] = await request.text()
if not text:
raise InvalidLRCLibResponseException("No search response.")
if len(text) < 100:
raise InvalidLRCLibResponseException(
"Search response text was invalid (len < 100 chars.)"
)
search_data: Optional[Union[list, dict]] = await request.json()
if not isinstance(search_data, list | dict):
raise InvalidLRCLibResponseException("No JSON search data.")
# logging.info("Search Data:\n%s", search_data)
if not isinstance(search_data, list):
raise InvalidLRCLibResponseException("Invalid JSON.")
# Filter by duration if provided
if duration:
search_data = [
r
for r in search_data
if abs(r.get("duration", 0) - duration) <= 10
]
if plain:
possible_matches = [
(
x,
f"{result.get('artistName')} - {result.get('trackName')}",
)
for x, result in enumerate(search_data)
]
else:
logging.info(
"Limiting possible matches to only those with non-null syncedLyrics"
)
possible_matches = [
(
x,
f"{result.get('artistName')} - {result.get('trackName')}",
)
for x, result in enumerate(search_data)
if isinstance(result["syncedLyrics"], str)
]
best_match = None best_match = None
try:
match_result = self.matcher.find_best_match( # Try exact match first (fastest)
input_track, result = await db.execute(
possible_matches, # type: ignore select(
Tracks.artist_name,
Tracks.name,
Lyrics.plain_lyrics,
Lyrics.synced_lyrics,
) )
if match_result: .join(Lyrics, Tracks.id == Lyrics.track_id)
best_match = match_result[0] .filter(
except: # noqa Tracks.artist_name_lower == artist,
pass Tracks.name_lower == song,
)
.limit(1)
)
best_match = result.first()
# If no exact match, try prefix match (faster than full ILIKE)
if not best_match:
result = await db.execute(
select(
Tracks.artist_name,
Tracks.name,
Lyrics.plain_lyrics,
Lyrics.synced_lyrics,
)
.join(Lyrics, Tracks.id == Lyrics.track_id)
.filter(
Tracks.artist_name_lower.like(f"{artist}%"),
Tracks.name_lower.like(f"{song}%"),
)
.limit(1)
)
best_match = result.first()
# If still no match, try full ILIKE (slowest)
if not best_match:
result = await db.execute(
select(
Tracks.artist_name,
Tracks.name,
Lyrics.plain_lyrics,
Lyrics.synced_lyrics,
)
.join(Lyrics, Tracks.id == Lyrics.track_id)
.filter(
Tracks.artist_name_lower.ilike(f"%{artist}%"),
Tracks.name_lower.ilike(f"%{song}%"),
)
.limit(1)
)
best_match = result.first()
if not best_match: if not best_match:
return logging.info("No result found on %s", self.label)
best_match_id = best_match[0] return None
if not isinstance(search_data[best_match_id]["artistName"], str): returned_artist = best_match.artist_name
raise InvalidLRCLibResponseException( returned_song = best_match.name
f"Invalid JSON: Cannot find artistName key.\n{search_data}"
)
if not isinstance(search_data[best_match_id]["trackName"], str):
raise InvalidLRCLibResponseException(
f"Invalid JSON: Cannot find trackName key.\n{search_data}"
)
returned_artist: str = search_data[best_match_id]["artistName"]
returned_song: str = search_data[best_match_id]["trackName"]
if plain: if plain:
if not isinstance( if not best_match.plain_lyrics:
search_data[best_match_id]["plainLyrics"], str logging.info("No plain lyrics available on %s", self.label)
): return None
raise InvalidLRCLibResponseException( returned_lyrics = best_match.plain_lyrics
f"Invalid JSON: Cannot find plainLyrics key.\n{search_data}"
)
returned_lyrics: str = search_data[best_match_id]["plainLyrics"]
returned_lyrics = self.datautils.scrub_lyrics(returned_lyrics) returned_lyrics = self.datautils.scrub_lyrics(returned_lyrics)
lrc_obj = None
else:
if not best_match.synced_lyrics:
logging.info("No synced lyrics available on %s", self.label)
return None
returned_lyrics = best_match.synced_lyrics
if raw:
lrc_obj = returned_lyrics
else: else:
if not isinstance(
search_data[best_match_id]["syncedLyrics"], str
):
raise InvalidLRCLibResponseException(
f"Invalid JSON: Cannot find syncedLyrics key.\n{search_data}"
)
returned_lyrics: str = search_data[best_match_id][
"syncedLyrics"
]
lrc_obj = self.datautils.create_lrc_object(returned_lyrics) lrc_obj = self.datautils.create_lrc_object(returned_lyrics)
returned_track: str = f"{returned_artist} - {returned_song}"
# Calculate match confidence
input_track = f"{artist} - {song}"
returned_track = f"{returned_artist} - {returned_song}"
match_result = self.matcher.find_best_match( match_result = self.matcher.find_best_match(
input_track=input_track, candidate_tracks=[(0, returned_track)] input_track=input_track, candidate_tracks=[(0, returned_track)]
) )
if not match_result: if not match_result:
return # No suitable match found return None
_matched, confidence = match_result _matched, confidence = match_result
logging.info("Result found on %s", self.label) logging.info("Result found on %s", self.label)
time_end: float = time.time() time_end = time.time()
time_diff: float = time_end - time_start time_diff = time_end - time_start
matched = LyricsResult( matched = LyricsResult(
artist=returned_artist, artist=returned_artist,
song=returned_song, song=returned_song,
@@ -177,10 +150,10 @@ class LRCLib:
confidence=confidence, confidence=confidence,
time=time_diff, time=time_diff,
) )
await self.redis_cache.increment_found_count(self.label) await self.redis_cache.increment_found_count(self.label)
if plain:
await self.cache.store(matched)
return matched return matched
except Exception as e: except Exception as e:
logging.debug("Exception: %s", str(e)) logging.error("Exception in %s: %s", self.label, str(e))
traceback.print_exc() return None
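Putting the reworked source together, a small usage sketch (the module path lyric_search.sources.lrclib and the search terms are assumptions; it requires the PostgreSQL cache configured via the environment variables read in lyric_search/models.py):

import asyncio
from lyric_search.sources.lrclib import LRCLib  # module path assumed

async def demo():
    lrclib = LRCLib()

    # Plain lyrics, scrubbed for display.
    plain = await lrclib.search("artist", "song", plain=True)

    # Synced lyrics parsed into an LRC object.
    synced = await lrclib.search("artist", "song", plain=False)

    # Raw LRC text, as used by the radio websocket broadcaster.
    raw = await lrclib.search("artist", "song", plain=False, raw=True)

    for result in (plain, synced, raw):
        if result:
            print(result.src, result.confidence)

asyncio.run(demo())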


@@ -13,10 +13,10 @@ from lyric_search import notifier
 from lyric_search.constructors import LyricsResult
 import redis.asyncio as redis
 from redis.commands.search.query import Query  # type: ignore
-from redis.commands.search.indexDefinition import IndexDefinition, IndexType  # type: ignore
+from redis.commands.search.index_definition import IndexDefinition, IndexType  # type: ignore
 from redis.commands.search.field import TextField, Field  # type: ignore
 from redis.commands.json.path import Path  # type: ignore
-from . import private
+import shared  # Use shared Redis pool

 logger = logging.getLogger()
 log_level = logging.getLevelName(logger.level)
@@ -34,7 +34,8 @@ class RedisCache:
    """

    def __init__(self) -> None:
-        self.redis_client: redis.Redis = redis.Redis(password=private.REDIS_PW)
+        # Use shared Redis client from connection pool
+        self.redis_client: redis.Redis = shared.get_redis_async_client()
        self.notifier = notifier.DiscordNotifier()
        self.notify_warnings = False
        self.regexes: list[Pattern] = [
@@ -51,9 +52,9 @@ class RedisCache:
        try:
            await self.redis_client.ping()
        except Exception:
-            logging.debug("Redis connection lost, attempting to reconnect.")
-            self.redis_client = redis.Redis(password=private.REDIS_PW)
-            await self.redis_client.ping()  # Test the new connection
+            logging.debug("Redis connection lost, refreshing client from pool.")
+            # Get fresh client from shared pool
+            self.redis_client = shared.get_redis_async_client()

    async def create_index(self) -> None:
        """Create Index"""


@@ -111,8 +111,7 @@ class DataUtils:
    """

    def __init__(self) -> None:
-        self.lrc_regex = (
-            regex.compile(  # capture mm:ss and optional .xxx, then the lyric text
+        self.lrc_regex = regex.compile(  # capture mm:ss and optional .xxx, then the lyric text
            r"""
            \[                 # literal “[”
            (                  # 1st (and only) capture group:
@@ -126,7 +125,6 @@ class DataUtils:
            """,
            regex.VERBOSE,
        )
-        )
        self.scrub_regex_1: Pattern = regex.compile(r"(\[.*?\])(\s){0,}(\:){0,1}")
        self.scrub_regex_2: Pattern = regex.compile(
            r"(\d?)(Embed\b)", flags=regex.IGNORECASE

shared.py (new file, 290 lines)

@@ -0,0 +1,290 @@
"""
Shared infrastructure: connection pools and sessions.
This module provides centralized, reusable connections for:
- Redis (async connection pool)
- aiohttp (shared ClientSession)
- SQLite (connection pool for frequently accessed databases)
Usage:
from shared import get_redis_client, get_aiohttp_session, get_sqlite_pool
"""
import asyncio
import logging
from typing import Optional, Dict
from contextlib import asynccontextmanager
import aiohttp
import redis.asyncio as redis_async
import redis as redis_sync
import aiosqlite
from lyric_search.sources import private
logger = logging.getLogger(__name__)
# =============================================================================
# Redis Connection Pool
# =============================================================================
_redis_async_pool: Optional[redis_async.ConnectionPool] = None
_redis_async_client: Optional[redis_async.Redis] = None
_redis_sync_client: Optional[redis_sync.Redis] = None
_redis_sync_client_decoded: Optional[redis_sync.Redis] = None
def _create_redis_pool() -> redis_async.ConnectionPool:
"""Create a shared Redis connection pool."""
return redis_async.ConnectionPool(
host="127.0.0.1",
port=6379,
password=private.REDIS_PW,
max_connections=50,
decode_responses=False, # Default; callers can decode as needed
)
def get_redis_async_pool() -> redis_async.ConnectionPool:
"""Get or create the shared async Redis connection pool."""
global _redis_async_pool
if _redis_async_pool is None:
_redis_async_pool = _create_redis_pool()
return _redis_async_pool
def get_redis_async_client() -> redis_async.Redis:
"""Get or create a shared async Redis client using the connection pool."""
global _redis_async_client
if _redis_async_client is None:
_redis_async_client = redis_async.Redis(connection_pool=get_redis_async_pool())
return _redis_async_client
def get_redis_sync_client(decode_responses: bool = True) -> redis_sync.Redis:
"""
Get or create a shared sync Redis client.
We maintain two separate clients: one with decode_responses=True,
one with decode_responses=False, since this setting affects all operations.
"""
global _redis_sync_client, _redis_sync_client_decoded
if decode_responses:
if _redis_sync_client_decoded is None:
_redis_sync_client_decoded = redis_sync.Redis(
host="127.0.0.1",
port=6379,
password=private.REDIS_PW,
decode_responses=True,
)
return _redis_sync_client_decoded
else:
if _redis_sync_client is None:
_redis_sync_client = redis_sync.Redis(
host="127.0.0.1",
port=6379,
password=private.REDIS_PW,
decode_responses=False,
)
return _redis_sync_client
async def close_redis_pools() -> None:
"""Close Redis connections. Call on app shutdown."""
global _redis_async_pool, _redis_async_client, _redis_sync_client, _redis_sync_client_decoded
if _redis_async_client:
await _redis_async_client.close()
_redis_async_client = None
if _redis_async_pool:
await _redis_async_pool.disconnect()
_redis_async_pool = None
if _redis_sync_client:
_redis_sync_client.close()
_redis_sync_client = None
if _redis_sync_client_decoded:
_redis_sync_client_decoded.close()
_redis_sync_client_decoded = None
logger.info("Redis connections closed")
# =============================================================================
# aiohttp Shared Session
# =============================================================================
_aiohttp_session: Optional[aiohttp.ClientSession] = None
async def get_aiohttp_session() -> aiohttp.ClientSession:
"""
Get or create a shared aiohttp ClientSession.
The session uses connection pooling internally (default: 100 connections).
"""
global _aiohttp_session
if _aiohttp_session is None or _aiohttp_session.closed:
timeout = aiohttp.ClientTimeout(total=30, connect=10)
connector = aiohttp.TCPConnector(
limit=100, # Total connection pool size
limit_per_host=30, # Max connections per host
ttl_dns_cache=300, # DNS cache TTL
keepalive_timeout=60,
)
_aiohttp_session = aiohttp.ClientSession(
timeout=timeout,
connector=connector,
)
logger.info("Created shared aiohttp session")
return _aiohttp_session
async def close_aiohttp_session() -> None:
"""Close the shared aiohttp session. Call on app shutdown."""
global _aiohttp_session
if _aiohttp_session and not _aiohttp_session.closed:
await _aiohttp_session.close()
_aiohttp_session = None
logger.info("aiohttp session closed")
# =============================================================================
# SQLite Connection Pool
# =============================================================================
class SQLitePool:
"""
Simple SQLite connection pool for async access.
Maintains a pool of connections per database file to avoid
opening/closing connections on every request.
"""
def __init__(self, max_connections: int = 5):
self._pools: Dict[str, asyncio.Queue] = {}
self._max_connections = max_connections
self._locks: Dict[str, asyncio.Lock] = {}
self._connection_counts: Dict[str, int] = {}
async def _get_pool(self, db_path: str) -> asyncio.Queue:
"""Get or create a connection pool for the given database."""
if db_path not in self._pools:
self._pools[db_path] = asyncio.Queue(maxsize=self._max_connections)
self._locks[db_path] = asyncio.Lock()
self._connection_counts[db_path] = 0
return self._pools[db_path]
@asynccontextmanager
async def connection(self, db_path: str, timeout: float = 5.0):
"""
Get a connection from the pool.
Usage:
async with sqlite_pool.connection("/path/to/db.db") as conn:
async with conn.execute("SELECT ...") as cursor:
...
"""
pool = await self._get_pool(db_path)
lock = self._locks[db_path]
conn: Optional[aiosqlite.Connection] = None
# Try to get an existing connection from the pool
try:
conn = pool.get_nowait()
except asyncio.QueueEmpty:
# No available connection, create one if under limit
async with lock:
if self._connection_counts[db_path] < self._max_connections:
conn = await aiosqlite.connect(db_path, timeout=timeout)
self._connection_counts[db_path] += 1
# If still no connection (at limit), wait for one
if conn is None:
conn = await asyncio.wait_for(pool.get(), timeout=timeout)
try:
# Verify connection is still valid
if conn is not None:
try:
await conn.execute("SELECT 1")
except Exception:
# Connection is broken, create a new one
try:
await conn.close()
except Exception:
pass
conn = await aiosqlite.connect(db_path, timeout=timeout)
yield conn
finally:
# Return connection to pool
if conn is not None:
try:
pool.put_nowait(conn)
except asyncio.QueueFull:
# Pool is full, close this connection
await conn.close()
async with lock:
self._connection_counts[db_path] -= 1
async def close_all(self) -> None:
"""Close all connections in all pools."""
for db_path, pool in self._pools.items():
while not pool.empty():
try:
conn = pool.get_nowait()
await conn.close()
except asyncio.QueueEmpty:
break
self._connection_counts[db_path] = 0
self._pools.clear()
self._locks.clear()
self._connection_counts.clear()
logger.info("SQLite pools closed")
# Global SQLite pool instance
_sqlite_pool: Optional[SQLitePool] = None
def get_sqlite_pool() -> SQLitePool:
"""Get the shared SQLite connection pool."""
global _sqlite_pool
if _sqlite_pool is None:
_sqlite_pool = SQLitePool(max_connections=5)
return _sqlite_pool
async def close_sqlite_pools() -> None:
"""Close all SQLite pools. Call on app shutdown."""
global _sqlite_pool
if _sqlite_pool:
await _sqlite_pool.close_all()
_sqlite_pool = None
# =============================================================================
# Lifecycle Management
# =============================================================================
async def startup() -> None:
"""Initialize all shared resources. Call on app startup."""
# Pre-warm Redis connection
client = get_redis_async_client()
await client.ping()
logger.info("Shared infrastructure initialized")
async def shutdown() -> None:
"""Clean up all shared resources. Call on app shutdown."""
await close_aiohttp_session()
await close_redis_pools()
await close_sqlite_pools()
logger.info("Shared infrastructure shutdown complete")
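A consumption sketch for the three pools defined above (the database path, Redis key, and URL are placeholders):

import asyncio
import shared

async def demo() -> None:
    # Redis: one pooled async client shared across the app.
    r = shared.get_redis_async_client()
    await r.set("demo:key", "value")
    print(await r.get("demo:key"))

    # aiohttp: a single ClientSession with keep-alive and DNS caching.
    session = await shared.get_aiohttp_session()
    async with session.get("https://example.org/") as resp:
        print(resp.status)

    # SQLite: per-database connection pool, reused across requests.
    pool = shared.get_sqlite_pool()
    async with pool.connection("/usr/local/share/sqlite_dbs/example.db") as db:
        async with db.execute("SELECT 1") as cur:
            print(await cur.fetchone())

    await shared.shutdown()

asyncio.run(demo())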

(File diff suppressed because it is too large.)

@@ -1,252 +0,0 @@
#!/usr/bin/env liquidsoap
set("log.file.path","/home/kyle/.lsl.txt")
set("log.stdout",true)
set("harbor.bind_addrs", ["127.0.0.1"])
# Buffer and timing settings
set("frame.duration",0.02)
set("root.max_latency",2.)
set("audio.converter.samplerate.libsamplerate.quality","best")
set("clock.allow_streaming_errors",false)
# Get next track dynamically [Each station]
def get_next_main() =
uri = list.hd(default="", process.read.lines("uv run get_next_track.py main"))
[request.create(uri)]
end
def get_next_rock() =
uri = list.hd(default="", process.read.lines("uv run get_next_track.py rock"))
[request.create(uri)]
end
def get_next_electronic() =
uri = list.hd(default="", process.read.lines("uv run get_next_track.py electronic"))
[request.create(uri)]
end
def get_next_rap() =
uri = list.hd(default="", process.read.lines("uv run get_next_track.py rap"))
[request.create(uri)]
end
#def get_next_classical() =
# uri = list.hd(default="", process.read.lines("uv run get_next_track.py classical"))
# [request.create(uri)]
#end
def get_next_pop() =
uri = list.hd(default="", process.read.lines("uv run get_next_track.py pop"))
[request.create(uri)]
end
# Set up queues [Each station]
main_list = request.dynamic(
id="requests",
get_next_main,
retry_delay=1.0,
timeout=20.0
)
rock_list = request.dynamic(
id="rock_requests",
get_next_rock,
retry_delay=1.0,
timeout=20.0
)
electronic_list = request.dynamic(
id="electronic_requests",
get_next_electronic,
retry_delay=1.0,
timeout=20.0
)
rap_list = request.dynamic(
id="rap_requests",
get_next_rap,
retry_delay=1.0,
timeout=20.0
)
#classical_list = request.dynamic.list(
# id="classical_requests",
# get_next_classical,
# prefetch=0
#)
pop_list = request.dynamic(
id="pop_requests",
get_next_pop,
retry_delay=1.0,
timeout=20.0
)
# Standard
silence = single("/home/kyle/ls/silence.ogg")
# Queue [Each station]
def main_queue(remaining, _) =
log("MAIN: Queueing with #{remaining} seconds remaining")
if not main_list.fetch() then
log("Fetching next query failed")
end
end
def rock_queue(remaining, _) =
log("ROCK: Queueing with #{remaining} seconds remaining")
if not rock_list.fetch() then
log("Fetching next query failed")
end
end
def electronic_queue(remaining, _) =
log("ELECTRONIC: Queueing with #{remaining} seconds remaining")
if not electronic_list.fetch() then
log("Fetching next query failed")
end
end
def rap_queue(remaining, _) =
log("RAP: Queueing with #{remaining} seconds remaining")
if not rap_list.fetch() then
log("Fetching next query failed")
end
end
#def classical_queue(remaining, _) =
# log("CLASSICAL: Queueing with #{remaining} seconds remaining")
# if not classical_list.fetch() then
# log("Fetching next query failed")
# end
#end
def pop_queue(remaining, _) =
log("POP: Queueing with #{remaining} seconds remaining")
if not pop_list.fetch() then
log("Fetching next query failed")
end
end
# Initial fetch [Each station]
main_list.fetch()
rock_list.fetch()
electronic_list.fetch()
rap_list.fetch()
#classical_list.fetch()
pop_list.fetch()
# Source setup [Each station]
def create_source(s,q) =
source.dynamic(s, track_sensitive=true, {q()})
end
main_source = create_source(main_list, main_queue)
rock_source = create_source(rock_list, rock_queue)
electronic_source = create_source(electronic_list, electronic_queue)
rap_source = create_source(rap_list, rap_queue)
#classical_source = create_source(classical_list, classical_queue)
pop_source = create_source(pop_list, pop_queue)
all_tracks_main = fallback(track_sensitive=false, [main_source, silence])
all_tracks_rock = fallback(track_sensitive=false, [rock_source, silence])
all_tracks_electronic = fallback(track_sensitive=false, [electronic_source, silence])
all_tracks_rap = fallback(track_sensitive=false, [rap_source, silence])
#all_tracks_classical = fallback(track_sensitive=false, [classical_source, silence])
all_tracks_pop = fallback(track_sensitive=false, [pop_source, silence])
# HLS Setup [Standard]
aac_lofi = %ffmpeg(format="mpegts",
%audio(codec="aac",
channels=2,
ar=48000,
b="128k"))
aac_midfi = %ffmpeg(format="mpegts",
%audio(codec="aac",
channels=2,
ar=48000,
b="256k"))
aac_hifi = %ffmpeg(format="mpegts",
%audio(codec="aac",
channels=2,
ar=48000,
b="512k"))
streams =
[("aac_lofi", aac_lofi), ("aac_midfi", aac_midfi), ("aac_hifi", aac_hifi)]
# HLS Outputs [Each station]
def create_hls_output(~name, source) =
output.file.hls(
playlist="#{name}.m3u8",
segment_duration=0.5,
segments=10,
segments_overhead=5,
persist_at="/nvme/pub/hls/#{name}/state.config",
"/nvme/pub/hls/#{name}",
streams,
source
)
end
create_hls_output(name="main", mksafe(main_source))
create_hls_output(name="rock", mksafe(rock_source))
create_hls_output(name="electronic", mksafe(electronic_source))
create_hls_output(name="rap", mksafe(rap_source))
#output.file.hls(
# playlist="classical.m3u8",
# segment_duration=0.45,
# segments=9,
# segments_overhead=3,
# persist_at="/nvme/pub/hls/classical_state.config",
# "/nvme/pub/hls/classical",
# streams,
# mksafe(classical_source)
#)
create_hls_output(name="pop", mksafe(pop_source))
# HTTP Server
def get_next_http(~protocol,~data,~headers,uri) =
source =
if data == "main" then main_source
elsif data == "rock" then rock_source
elsif data == "electronic" then electronic_source
elsif data == "rap" then rap_source
elsif data == "pop" then pop_source
else null() end
if source != null() then
source.skip(source)
http.response(
protocol=protocol,
code=200,
data="OK #{data}"
)
end
harbor.http.register(port=29000, method="POST", "/next", get_next_http)
# EOF


@@ -1,270 +0,0 @@
#!/usr/bin/liquidsoap
set("log.file.path", "/home/kyle/.lsl.txt")
set("log.stdout", true)
set("harbor.bind_addrs", ["127.0.0.1"])
# Get next track dynamically [Each station]
def get_next_main() =
uri = list.hd(default="", process.read.lines("uv run get_next_track.py main"))
[request.create(uri)]
end
def get_next_rock() =
uri = list.hd(default="", process.read.lines("uv run get_next_track.py rock"))
[request.create(uri)]
end
def get_next_electronic() =
uri = list.hd(default="", process.read.lines("uv run get_next_track.py electronic"))
[request.create(uri)]
end
def get_next_rap() =
uri = list.hd(default="", process.read.lines("uv run get_next_track.py rap"))
[request.create(uri)]
end
#def get_next_classical() =
# uri = list.hd(default="", process.read.lines("uv run get_next_track.py classical"))
# [request.create(uri)]
#end
def get_next_pop() =
uri = list.hd(default="", process.read.lines("uv run get_next_track.py pop"))
[request.create(uri)]
end
# Set up queues [Each station]
main_list = request.dynamic.list(
id="requests",
get_next_main,
prefetch=0
)
rock_list = request.dynamic.list(
id="rock_requests",
get_next_rock,
prefetch=0
)
electronic_list = request.dynamic.list(
id="electronic_requests",
get_next_electronic,
prefetch=0
)
rap_list = request.dynamic.list(
id="rap_requests",
get_next_rap,
prefetch=0
)
#classical_list = request.dynamic.list(
# id="classical_requests",
# get_next_classical,
# prefetch=0
#)
pop_list = request.dynamic.list(
id="pop_requests",
get_next_pop,
prefetch=0
)
# Standard
silence = single("/home/kyle/ls/silence.ogg")
# Queue [Each station]
def main_queue(remaining, _) =
log("MAIN: Queueing with #{remaining} seconds remaining")
if not main_list.fetch() then
log("Fetching next query failed")
end
end
def rock_queue(remaining, _) =
log("ROCK: Queueing with #{remaining} seconds remaining")
if not rock_list.fetch() then
log("Fetching next query failed")
end
end
def electronic_queue(remaining, _) =
log("ELECTRONIC: Queueing with #{remaining} seconds remaining")
if not electronic_list.fetch() then
log("Fetching next query failed")
end
end
def rap_queue(remaining, _) =
log("RAP: Queueing with #{remaining} seconds remaining")
if not rap_list.fetch() then
log("Fetching next query failed")
end
end
#def classical_queue(remaining, _) =
# log("CLASSICAL: Queueing with #{remaining} seconds remaining")
# if not classical_list.fetch() then
# log("Fetching next query failed")
# end
#end
def pop_queue(remaining, _) =
log("POP: Queueing with #{remaining} seconds remaining")
if not pop_list.fetch() then
log("Fetching next query failed")
end
end
# Initial fetch [Each station]
main_list.fetch()
rock_list.fetch()
electronic_list.fetch()
rap_list.fetch()
#classical_list.fetch()
pop_list.fetch()
# Source setup [Each station]
main_source = source.on_end(delay=1.0, main_list, main_queue)
rock_source = source.on_end(delay=1.0, rock_list, rock_queue)
electronic_source = source.on_end(delay=1.0, electronic_list, electronic_queue)
rap_source = source.on_end(delay=1.0, rap_list, rap_queue)
#classical_source = source.on_end(delay=1.0, classical_list, classical_queue)
pop_source = source.on_end(delay=1.0, pop_list, pop_queue)
all_tracks_main = fallback(track_sensitive=false, [main_source, silence])
all_tracks_rock = fallback(track_sensitive=false, [rock_source, silence])
all_tracks_electronic = fallback(track_sensitive=false, [electronic_source, silence])
all_tracks_rap = fallback(track_sensitive=false, [rap_source, silence])
#all_tracks_classical = fallback(track_sensitive=false, [classical_source, silence])
all_tracks_pop = fallback(track_sensitive=false, [pop_source, silence])
# HLS Setup [Standard]
aac_lofi =
%ffmpeg(format = "mpegts", %audio(codec = "aac", channels = 2, ar = 44100))
aac_midfi =
%ffmpeg(
format = "mpegts",
%audio(codec = "aac", channels = 2, ar = 44100, b = "96k")
)
aac_hifi =
%ffmpeg(
format = "mpegts",
%audio(codec = "aac", channels = 2, ar = 44100, b = "448k")
)
streams =
[("aac_lofi", aac_lofi), ("aac_midfi", aac_midfi), ("aac_hifi", aac_hifi)]
# HLS Outputs [Each station]
output.file.hls(
playlist="main.m3u8",
segment_duration=0.5,
segments=9,
segments_overhead=4,
persist_at="/nvme/pub/hls/state.config",
"/nvme/pub/hls/main",
streams,
mksafe(main_source)
)
output.file.hls(
playlist="rock.m3u8",
segment_duration=0.5,
segments=9,
segments_overhead=4,
persist_at="/nvme/pub/hls/rock/state.config",
"/nvme/pub/hls/rock",
streams,
mksafe(rock_source)
)
output.file.hls(
playlist="electronic.m3u8",
segment_duration=0.5,
segments=9,
segments_overhead=4,
persist_at="/nvme/pub/hls/electronic/state.config",
"/nvme/pub/hls/electronic",
streams,
mksafe(electronic_source)
)
output.file.hls(
playlist="rap.m3u8",
segment_duration=0.5,
segments=9,
segments_overhead=4,
persist_at="/nvme/pub/hls/rap_state.config",
"/nvme/pub/hls/rap",
streams,
mksafe(rap_source)
)
#output.file.hls(
# playlist="classical.m3u8",
# segment_duration=0.45,
# segments=9,
# segments_overhead=3,
# persist_at="/nvme/pub/hls/classical_state.config",
# "/nvme/pub/hls/classical",
# streams,
# mksafe(classical_source)
#)
output.file.hls(
playlist="pop.m3u8",
segment_duration=0.5,
segments=9,
segments_overhead=4,
persist_at="/nvme/pub/hls/pop_state.config",
"/nvme/pub/hls/pop",
streams,
mksafe(pop_source)
)
# HTTP Server
def get_next_http(~protocol,~data,~headers,uri) =
if data == "main" then
_req = source.skip(main_source)
elsif data == "rock" then
_req = source.skip(rock_source)
elsif data == "electronic" then
_req = source.skip(electronic_source)
elsif data == "rap" then
_req = source.skip(rap_source)
#elsif data == "classical" then
# _req = source.skip(classical_source)
elsif data == "pop" then
_req = source.skip(pop_source)
end
http.response(
protocol=protocol,
code=200,
data="OK #{data}"
)
end
harbor.http.register(port=29000, method="POST", "/next", get_next_http)
# EOF
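Each get_next_* helper above shells out to `uv run get_next_track.py <station>`, a script that is deliberately kept out of the repo (it is gitignored), so only its contract is visible here: print exactly one playable URI on stdout, because `list.hd(process.read.lines(...))` keeps the first line only. A hedged Python sketch of such a helper; the database path and queue schema are pure assumptions, not taken from this repo:

#!/usr/bin/env python3
"""Hypothetical get_next_track.py: emit one playable URI for the requested station."""
import sqlite3
import sys

DB_PATH = "/nvme/radio/queues.db"  # illustrative path only

def next_track(station: str) -> str | None:
    with sqlite3.connect(DB_PATH) as db:
        row = db.execute(
            "SELECT file_path FROM queue WHERE station = ? ORDER BY position LIMIT 1",
            (station,),
        ).fetchone()
    return row[0] if row else None

if __name__ == "__main__":
    station = sys.argv[1] if len(sys.argv) > 1 else "main"
    uri = next_track(station)
    if uri:
        # Liquidsoap's get_next_* functions wrap this single line in request.create().
        print(uri)

If the script prints nothing, `list.hd(default="", ...)` yields an empty URI and the station falls back to silence.ogg via the fallback() sources above.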

View File

@@ -127,7 +127,9 @@ class MemeUtil:
        db_conn.row_factory = sqlite3.Row
        rows_per_page: int = 10
        offset: int = (page - 1) * rows_per_page
-       query: str = "SELECT id, timestamp FROM memes ORDER BY timestamp DESC LIMIT 10 OFFSET ?"
+       query: str = (
+           "SELECT id, timestamp FROM memes ORDER BY timestamp DESC LIMIT 10 OFFSET ?"
+       )
        async with await db_conn.execute(query, (offset,)) as db_cursor:
            results = await db_cursor.fetchall()
            for result in results:

View File

@@ -17,7 +17,7 @@ from rapidfuzz import fuzz
from endpoints.constructors import RadioException
import redis.asyncio as redis
from redis.commands.search.query import Query  # noqa
-from redis.commands.search.indexDefinition import IndexDefinition, IndexType  # noqa
+from redis.commands.search.index_definition import IndexDefinition, IndexType  # noqa
from redis.commands.search.field import TextField  # noqa
from redis.commands.json.path import Path  # noqa
from lyric_search.sources import private
@@ -339,7 +339,10 @@ class RadioUtil:
        time_start: float = time.time()
        artist_genre: dict[str, str] = {}
        query: str = (
-           "SELECT genre FROM artist_genre WHERE artist LIKE ? COLLATE NOCASE"
+           "SELECT REPLACE(GROUP_CONCAT(DISTINCT g.name), ',', ', ') AS genre FROM artists a "
+           "JOIN artist_genres ag ON a.id = ag.artist_id "
+           "JOIN genres g ON ag.genre_id = g.id "
+           "WHERE a.name LIKE ? COLLATE NOCASE"
        )
        with sqlite3.connect(self.artist_genre_db_path) as _db:
            _db.row_factory = sqlite3.Row
@@ -347,7 +350,7 @@ class RadioUtil:
            params: tuple[str] = (f"%%{artist}%%",)
            _cursor = _db.execute(query, params)
            res = _cursor.fetchone()
-           if not res:
+           if not res or not res["genre"]:
                artist_genre[artist] = "N/A"
                continue
            artist_genre[artist] = res["genre"]
@@ -367,14 +370,17 @@ class RadioUtil:
        try:
            artist = artist.strip()
            query: str = (
-               "SELECT genre FROM artist_genre WHERE artist LIKE ? COLLATE NOCASE"
+               "SELECT REPLACE(GROUP_CONCAT(DISTINCT g.name), ',', ', ') AS genre FROM artists a "
+               "JOIN artist_genres ag ON a.id = ag.artist_id "
+               "JOIN genres g ON ag.genre_id = g.id "
+               "WHERE a.name LIKE ? COLLATE NOCASE"
            )
            params: tuple[str] = (artist,)
            with sqlite3.connect(self.playback_db_path, timeout=2) as _db:
                _db.row_factory = sqlite3.Row
                _cursor = _db.execute(query, params)
                res = _cursor.fetchone()
-               if not res:
+               if not res or not res["genre"]:
                    return "Not Found"  # Exception suppressed
                    # raise RadioException(
                    #     f"Could not locate {artist} in artist_genre_map db."
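Both rewritten queries assume a normalized genre layout (an `artists` table joined to `genres` through an `artist_genres` link table) instead of the old flat `artist_genre` table. A minimal sketch of that assumed schema, inferred from the SQL above rather than from any migration in this diff:

import sqlite3

ASSUMED_GENRE_SCHEMA = """
CREATE TABLE IF NOT EXISTS artists (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS genres (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL UNIQUE
);
CREATE TABLE IF NOT EXISTS artist_genres (
    artist_id INTEGER NOT NULL REFERENCES artists(id),
    genre_id INTEGER NOT NULL REFERENCES genres(id),
    PRIMARY KEY (artist_id, genre_id)
);
"""

# Illustrative only: the real database path and any indexes are not shown in this diff.
with sqlite3.connect("artist_genre.db") as db:
    db.executescript(ASSUMED_GENRE_SCHEMA)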
@@ -480,18 +486,18 @@ class RadioUtil:
            )
            """Loading Complete"""
+           self.playlists_loaded = True
            # Request skip from LS to bring streams current
            for playlist in self.playlists:
                logging.info("Skipping: %s", playlist)
                await self._ls_skip(playlist)
-           self.playlists_loaded = True
        except Exception as e:
            logging.info("Playlist load failed: %s", str(e))
            traceback.print_exc()

    def cache_album_art(self, track_id: int, file_path: str) -> None:
        """
-       Cache Album Art to SQLite DB
+       Cache Album Art to SQLite DB - IMPROVED VERSION
        Args:
            track_id (int): Track ID to update
            file_path (str): Path to file, for artwork extraction
@@ -499,30 +505,92 @@ class RadioUtil:
            None
        """
        try:
-           logging.info(
-               "cache_album_art: Attempting to store album art for track_id: %s",
-               track_id,
-           )
+           # Validate file exists first
+           if not os.path.exists(file_path):
+               logging.warning("cache_album_art: File not found: %s", file_path)
+               return
logging.info("cache_album_art: Attempting to store album art for track_id: %s", track_id)
# Check if artwork already exists to avoid duplicates
with sqlite3.connect(self.album_art_db_path, timeout=5) as db_conn:
db_conn.row_factory = sqlite3.Row
cursor = db_conn.execute("SELECT track_id FROM album_art WHERE track_id = ?", (track_id,))
if cursor.fetchone():
logging.debug("cache_album_art: Track %s already has album art", track_id)
return
# Load file with better error handling
try:
                tagger = music_tag.load_file(file_path)
-               album_art = tagger["artwork"].first.data if tagger else None
-               with sqlite3.connect(self.album_art_db_path, timeout=2) as db_conn:
-                   db_cursor = db_conn.execute(
-                       "INSERT OR IGNORE INTO album_art (track_id, album_art) VALUES(?, ?)",
-                       (
-                           track_id,
-                           album_art,
-                       ),
-                   )
-                   if isinstance(db_cursor.lastrowid, int):
-                       db_conn.commit()
-                   else:
-                       logging.debug(
-                           "No row inserted for track_id: %s w/ file_path: %s",
-                           track_id,
-                           file_path,
-                       )
            except Exception as e:
-               logging.debug("cache_album_art Exception: %s", str(e))
+               logging.warning("cache_album_art: Failed to load file %s: %s", file_path, e)
return
# Extract artwork with validation
album_art = None
try:
if not tagger:
logging.debug("cache_album_art: No tagger available for track %s", track_id)
return
artwork_field = tagger["artwork"]
if artwork_field and hasattr(artwork_field, 'first') and artwork_field.first:
first_artwork = artwork_field.first
if hasattr(first_artwork, 'data') and first_artwork.data:
potential_art = first_artwork.data
# Validate artwork data
if isinstance(potential_art, bytes) and len(potential_art) > 100:
# Check if it looks like valid image data
if (potential_art.startswith(b'\xff\xd8') or # JPEG
potential_art.startswith(b'\x89PNG') or # PNG
potential_art.startswith(b'GIF87a') or # GIF87a
potential_art.startswith(b'GIF89a') or # GIF89a
potential_art.startswith(b'RIFF')): # WEBP/other RIFF
album_art = potential_art
logging.debug("cache_album_art: Found valid artwork (%s bytes)", len(album_art))
else:
logging.warning("cache_album_art: Invalid artwork format for track %s - not caching", track_id)
return
else:
logging.debug("cache_album_art: No valid artwork data for track %s", track_id)
return
else:
logging.debug("cache_album_art: No artwork data available for track %s", track_id)
return
else:
logging.debug("cache_album_art: No artwork field for track %s", track_id)
return
except Exception as e:
logging.warning("cache_album_art: Error extracting artwork for track %s: %s", track_id, e)
return
# Only proceed if we have valid artwork
if not album_art:
logging.debug("cache_album_art: No valid artwork to cache for track %s", track_id)
return
# Insert into database
try:
with sqlite3.connect(self.album_art_db_path, timeout=5) as db_conn:
cursor = db_conn.execute(
"INSERT OR IGNORE INTO album_art (track_id, album_art) VALUES (?, ?)",
(track_id, album_art)
)
if cursor.rowcount == 1:
db_conn.commit()
logging.info("cache_album_art: Successfully cached %s bytes for track %s", len(album_art), track_id)
else:
logging.debug("cache_album_art: No row inserted for track_id: %s (may already exist)", track_id)
except Exception as e:
logging.error("cache_album_art: Database error for track %s: %s", track_id, e)
except Exception as e:
logging.error("cache_album_art: Unexpected error for track %s: %s", track_id, e)
            traceback.print_exc()
    def get_album_art(self, track_id: int) -> Optional[bytes]:
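get_album_art (its body is not shown in this hunk) returns the bytes cached by cache_album_art. A hedged sketch of how an endpoint could serve them; the route path, media type, and the shared RadioUtil instance are assumptions, not code from this diff:

from fastapi import FastAPI, Response
from utils.radio_util import RadioUtil

app = FastAPI()
radio_util = RadioUtil()  # assumed no-arg construction; adjust to the real wiring

@app.get("/radio/album_art/{track_id}")
async def album_art(track_id: int) -> Response:
    art = radio_util.get_album_art(track_id)
    if not art:
        return Response(status_code=404)
    # The cache accepts JPEG/PNG/GIF/RIFF data, so the content type here is a guess.
    return Response(content=art, media_type="image/jpeg")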

View File

@@ -45,6 +45,29 @@ load_dotenv()
sr = SRUtil() sr = SRUtil()
logger = logging.getLogger(__name__)
async def check_flac_stream(file_path):
"""Check if the given file contains a FLAC stream using ffprobe."""
cmd = [
"ffprobe",
"-v",
"error",
"-select_streams",
"a:0",
"-show_entries",
"stream=codec_name",
"-of",
"default=noprint_wrappers=1:nokey=1",
file_path,
]
process = await asyncio.create_subprocess_exec(
*cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, _ = await process.communicate()
return b"flac" in stdout
# ---------- Discord helper ---------- # ---------- Discord helper ----------
async def discord_notify( async def discord_notify(
@@ -259,13 +282,16 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
    )
    )

-   async def process_tracks():
+   async def process_tracks(track_list):
        per_track_meta = []
        all_final_files = []
        all_artists = set()
        (ROOT_DIR / "completed").mkdir(parents=True, exist_ok=True)

+       # Ensure aiohttp session is properly closed
        async with aiohttp.ClientSession(headers=HEADERS) as session:
print(f"DEBUG: Starting process_tracks with {len(track_list)} tracks")
# Set up a one-time rate-limit callback to notify on the first 429 seen by SRUtil # Set up a one-time rate-limit callback to notify on the first 429 seen by SRUtil
async def _rate_limit_notify(exc: Exception): async def _rate_limit_notify(exc: Exception):
try: try:
@@ -285,6 +311,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
pass pass
total = len(track_list or []) total = len(track_list or [])
for i, track_id in enumerate(track_list or []): for i, track_id in enumerate(track_list or []):
print(f"DEBUG: Processing track {i + 1}/{total}: {track_id}")
track_info = { track_info = {
"track_id": str(track_id), "track_id": str(track_id),
"status": "Pending", "status": "Pending",
@@ -300,31 +327,53 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
track_info["attempts"] = attempt track_info["attempts"] = attempt
try: try:
sr.get_cover_by_album_id print(f"DEBUG: Getting downloadable for track {track_id}")
url = await sr.get_stream_url_by_track_id(track_id, quality) # Fetch downloadable (handles DASH and others)
if not url: downloadable = await sr._safe_api_call(
raise RuntimeError("No stream URL") sr.streamrip_client.get_downloadable,
str(track_id),
2 if quality == "FLAC" else 1,
retries=3,
)
parsed = urlparse(url) print(f"DEBUG: Got downloadable: {type(downloadable)}")
clean_path = unquote(parsed.path) if not downloadable:
ext = Path(clean_path).suffix or ".mp3" raise RuntimeError("No downloadable created")
ext = f".{downloadable.extension}"
tmp_file = Path(f"/tmp/{uuid.uuid4().hex}{ext}") tmp_file = Path(f"/tmp/{uuid.uuid4().hex}{ext}")
async with session.get(url) as resp: print(f"DEBUG: Starting download to {tmp_file}")
resp.raise_for_status() # Download
with open(tmp_file, "wb") as f: print(f"TRACK {track_id}: Starting download")
async for chunk in resp.content.iter_chunked(64 * 1024): try:
f.write(chunk) await downloadable._download(
str(tmp_file), callback=lambda x=None: None
)
print(
f"TRACK {track_id}: Download method completed normally"
)
except Exception as download_e:
print(
f"TRACK {track_id}: Download threw exception: {download_e}"
)
raise
print(
f"DEBUG: Download completed, file exists: {tmp_file.exists()}"
)
if not tmp_file.exists():
raise RuntimeError(
f"Download completed but no file created: {tmp_file}"
)
print(f"DEBUG: Fetching metadata for track {track_id}")
# Metadata fetch
                        try:
                            md = await sr.get_metadata_by_track_id(track_id) or {}
+                           print(f"DEBUG: Metadata fetched: {bool(md)}")
                        except MetadataFetchError as me:
-                           # Permanent metadata failure — notify and continue (mark track failed)
-                           msg = f"Metadata permanently failed for track {track_id}: {me}"
-                           try:
-                               send_log_to_discord(msg, "ERROR", target)
-                           except Exception:
-                               pass
+                           # Permanent metadata failure — mark failed and break
                            track_info["status"] = "Failed"
                            track_info["error"] = str(me)
                            per_track_meta.append(track_info)
@@ -333,6 +382,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
job.meta["progress"] = int(((i + 1) / total) * 100) job.meta["progress"] = int(((i + 1) / total) * 100)
job.save_meta() job.save_meta()
break break
artist_raw = md.get("artist") or "Unknown Artist" artist_raw = md.get("artist") or "Unknown Artist"
album_raw = md.get("album") or "Unknown Album" album_raw = md.get("album") or "Unknown Album"
title_raw = md.get("title") or f"Track {track_id}" title_raw = md.get("title") or f"Track {track_id}"
@@ -341,15 +391,19 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
album = sanitize_filename(album_raw) album = sanitize_filename(album_raw)
title = sanitize_filename(title_raw) title = sanitize_filename(title_raw)
print(f"TRACK {track_id}: Processing '{title}' by {artist}")
all_artists.add(artist) all_artists.add(artist)
album_dir = staging_root / artist / album album_dir = staging_root / artist / album
album_dir.mkdir(parents=True, exist_ok=True) album_dir.mkdir(parents=True, exist_ok=True)
final_file = ensure_unique_path(album_dir / f"{title}{ext}") final_file = ensure_unique_path(album_dir / f"{title}{ext}")
-                       # Move file into final location first (tags will be updated on moved file)
+                       # Move to final location
+                       print(f"TRACK {track_id}: Moving to final location...")
                        tmp_file.rename(final_file)
+                       print(f"TRACK {track_id}: File moved successfully")

-                       # Try to fetch cover art via SRUtil (use album_id from metadata)
+                       # Fetch cover art
                        try:
                            album_field = md.get("album")
                            album_id = md.get("album_id") or (
@@ -370,9 +424,9 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
                        else:
                            cover_url = md.get("cover_url")

-                       # Embed tags + artwork using music_tag if available, falling back to mediafile tagging
+                       # Embed tags
                        embedded = False
-                       try:
+                       img_bytes = None
                        if cover_url:
                            try:
                                timeout = aiohttp.ClientTimeout(total=15)
@@ -383,7 +437,6 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
img_bytes = await img_resp.read() img_bytes = await img_resp.read()
else: else:
img_bytes = None img_bytes = None
# Notify Discord about failed cover download (HTTP error)
try: try:
send_log_to_discord( send_log_to_discord(
f"Cover download HTTP `{img_resp.status}` for track `{track_id} album_id={album_id} url={cover_url} artist={artist} album={album}`", f"Cover download HTTP `{img_resp.status}` for track `{track_id} album_id={album_id} url={cover_url} artist={artist} album={album}`",
@@ -394,7 +447,6 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
pass pass
except Exception as e: except Exception as e:
img_bytes = None img_bytes = None
# Notify Discord about exception during cover download
try: try:
send_log_to_discord( send_log_to_discord(
f"Cover download exception for track `{track_id} album_id={album_id} url={cover_url} artist={artist} album={album}`: `{e}`", f"Cover download exception for track `{track_id} album_id={album_id} url={cover_url} artist={artist} album={album}`: `{e}`",
@@ -403,16 +455,15 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
) )
except Exception: except Exception:
pass pass
else:
img_bytes = None
-                       # Prefer music_tag if available (keeps compatibility with add_cover_art.py)
+                       # Try music_tag first
                        try:
                            from music_tag import load_file as mt_load_file  # type: ignore

+                           # Add validation for `mf` object
                            try:
                                mf = mt_load_file(str(final_file))
-                               # set basic tags
+                               if mf is not None:
if md.get("title"): if md.get("title"):
mf["title"] = md.get("title") mf["title"] = md.get("title")
if md.get("artist"): if md.get("artist"):
@@ -429,14 +480,15 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
mf["artwork"] = img_bytes mf["artwork"] = img_bytes
mf.save() mf.save()
embedded = True embedded = True
else:
logger.error("Failed to load file with music_tag.")
embedded = False
except Exception: except Exception:
embedded = False embedded = False
except Exception: except Exception:
embedded = False embedded = False
# If music_tag not available or failed, fallback to mediafile tagging
if not embedded: if not embedded:
# If we had a cover_url but no bytes, log a warning to Discord
try: try:
if cover_url and not img_bytes: if cover_url and not img_bytes:
send_log_to_discord( send_log_to_discord(
@@ -446,20 +498,22 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
) )
except Exception: except Exception:
pass pass
tag_with_mediafile(str(final_file), md)
except Exception:
# Ensure at least the basic tags are written
try: try:
tag_with_mediafile(str(final_file), md) tag_with_mediafile(str(final_file), md)
except Exception: except Exception:
pass pass
tmp_file = None
# Success
tmp_file = None
track_info["status"] = "Success" track_info["status"] = "Success"
track_info["file_path"] = str(final_file) track_info["file_path"] = str(final_file)
track_info["error"] = None track_info["error"] = None
all_final_files.append(final_file) all_final_files.append(final_file)
print(
f"TRACK {track_id}: SUCCESS! Progress: {((i + 1) / total) * 100:.0f}%"
)
if job: if job:
job.meta["progress"] = int(((i + 1) / total) * 100) job.meta["progress"] = int(((i + 1) / total) * 100)
job.meta["tracks"] = per_track_meta + [track_info] job.meta["tracks"] = per_track_meta + [track_info]
@@ -469,7 +523,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
except aiohttp.ClientResponseError as e: except aiohttp.ClientResponseError as e:
msg = f"Track {track_id} attempt {attempt} ClientResponseError: {e}" msg = f"Track {track_id} attempt {attempt} ClientResponseError: {e}"
send_log_to_discord(msg, "WARNING", target) send_log_to_discord(msg, "WARNING", target)
-                       if e.status == 429:
+                       if getattr(e, "status", None) == 429:
wait_time = min(60, 2**attempt) wait_time = min(60, 2**attempt)
await asyncio.sleep(wait_time) await asyncio.sleep(wait_time)
else: else:
@@ -662,7 +716,7 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
loop = asyncio.new_event_loop() loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop) asyncio.set_event_loop(loop)
try: try:
-       return loop.run_until_complete(process_tracks())
+       return loop.run_until_complete(process_tracks(track_list))
except Exception as e: except Exception as e:
send_log_to_discord( send_log_to_discord(
f"bulk_download failed: {e}\n{traceback.format_exc()}", "CRITICAL", target f"bulk_download failed: {e}\n{traceback.format_exc()}", "CRITICAL", target
@@ -672,3 +726,13 @@ def bulk_download(track_list: list, quality: str = "FLAC"):
job.save_meta() job.save_meta()
finally: finally:
loop.close() loop.close()
# Standalone sketch of where the FLAC stream check would run; not yet wired into the bulk_download flow above
async def process_tracks(track_list):
for i, track_id in enumerate(track_list or []):
combined_path = f"/tmp/{uuid.uuid4().hex}_combined.m4s" # Example path
if not await check_flac_stream(combined_path):
logger.error(f"No FLAC stream found in {combined_path}. Skipping file.")
continue
# Proceed with decoding pipeline
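One way the check could actually be wired into the per-track loop above is sketched below; this is a hedged illustration, assuming it runs on `tmp_file` right after `downloadable._download(...)` completes and before tagging, and it reuses `check_flac_stream`, `logger`, and the loop's `track_info`/`per_track_meta` from this module:

async def flac_or_skip(tmp_file, track_id, quality, track_info, per_track_meta) -> bool:
    """Hypothetical helper: return False (and mark the track failed) when a FLAC
    download turns out not to contain a FLAC stream."""
    if quality != "FLAC" or await check_flac_stream(str(tmp_file)):
        return True
    logger.error("No FLAC stream found in %s; skipping track %s", tmp_file, track_id)
    track_info["status"] = "Failed"
    track_info["error"] = "Downloaded file contains no FLAC stream"
    per_track_meta.append(track_info)
    return False

Inside the loop this would be called right after the download, with a `continue` whenever it returns False.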

View File

@@ -1,33 +1,61 @@
# isort: skip_file
from typing import Optional, Any, Callable from typing import Optional, Any, Callable
from uuid import uuid4 from uuid import uuid4
from urllib.parse import urlparse from urllib.parse import urlparse
from pathlib import Path
import hashlib import hashlib
import traceback import traceback
import logging import logging
import random import random
import asyncio import asyncio
import json
import os import os
import aiohttp import aiohttp
import time import time
-from streamrip.client import TidalClient  # type: ignore
-from streamrip.config import Config as StreamripConfig  # type: ignore
-from dotenv import load_dotenv
-from rapidfuzz import fuzz
+# Monkey-patch streamrip's Tidal client credentials BEFORE importing TidalClient
+import streamrip.client.tidal as _tidal_module  # type: ignore  # noqa: E402
+
+_tidal_module.CLIENT_ID = "fX2JxdmntZWK0ixT"
+_tidal_module.CLIENT_SECRET = "1Nn9AfDAjxrgJFJbKNWLeAyKGVGmINuXPPLHVXAvxAg="
+_tidal_module.AUTH = aiohttp.BasicAuth(
+    login=_tidal_module.CLIENT_ID, password=_tidal_module.CLIENT_SECRET
+)
+
+from streamrip.client import TidalClient  # type: ignore  # noqa: E402
+from streamrip.config import Config as StreamripConfig  # type: ignore  # noqa: E402
+from dotenv import load_dotenv  # noqa: E402
+from rapidfuzz import fuzz  # noqa: E402
+
+# Path to persist Tidal tokens across restarts
+TIDAL_TOKEN_CACHE_PATH = Path(__file__).parent.parent / "tidal_token.json"
class MetadataFetchError(Exception):
    """Raised when metadata fetch permanently fails after retries."""

-# Suppress all logging output from this module and its children
-for name in [__name__, "utils.sr_wrapper"]:
+# How long before token expiry to proactively refresh (seconds)
+TIDAL_TOKEN_REFRESH_BUFFER = 600  # 10 minutes
+# Maximum age of a session before forcing a fresh login (seconds)
+TIDAL_SESSION_MAX_AGE = 1800  # 30 minutes
+
+# Suppress noisy logging from this module and from the `streamrip` library
+# We set propagate=False so messages don't bubble up to the root logger and
+# attach a NullHandler where appropriate to avoid "No handler found" warnings.
+for name in [__name__, "utils.sr_wrapper", "streamrip", "streamrip.client"]:
    logger = logging.getLogger(name)
-   logger.setLevel(logging.INFO)  # Temporarily set to INFO for debugging LRC
+   # Keep default level (or raise to WARNING) so non-important logs are dropped
+   try:
+       logger.setLevel(logging.WARNING)
+   except Exception:
+       pass
    logger.propagate = False
-   for handler in logger.handlers:
-       handler.setLevel(logging.INFO)
-# Also set the root logger to CRITICAL as a last resort (may affect global logging)
-# logging.getLogger().setLevel(logging.CRITICAL)
+   # Ensure a NullHandler is present so logs don't propagate and no missing-handler
+   # warnings are printed when the package emits records.
+   if not any(isinstance(h, logging.NullHandler) for h in logger.handlers):
+       logger.addHandler(logging.NullHandler())

load_dotenv()
@@ -41,27 +69,11 @@ class SRUtil:
    def __init__(self) -> None:
        """Initialize StreamRip utility."""
        self.streamrip_config = StreamripConfig.defaults()
-       self.streamrip_config.session.tidal.user_id = os.getenv("tidal_user_id", "")
-       self.streamrip_config.session.tidal.access_token = os.getenv(
-           "tidal_access_token", ""
-       )
-       self.streamrip_config.session.tidal.refresh_token = os.getenv(
-           "tidal_refresh_token", ""
-       )
-       self.streamrip_config.session.tidal.token_expiry = os.getenv(
-           "tidal_token_expiry", ""
-       )
-       self.streamrip_config.session.tidal.country_code = os.getenv(
-           "tidal_country_code", ""
-       )
-       self.streamrip_config.session.tidal.quality = int(
-           os.getenv("tidal_default_quality", 2)
-       )
+       self._load_tidal_config()
        self.streamrip_config.session.conversion.enabled = False
        self.streamrip_config.session.downloads.folder = os.getenv(
            "tidal_download_folder", ""
        )
-       self.streamrip_config
        self.streamrip_client = TidalClient(self.streamrip_config)
        self.MAX_CONCURRENT_METADATA_REQUESTS = 2
        self.METADATA_RATE_LIMIT = 1.25
@@ -76,19 +88,344 @@ class SRUtil:
self.on_rate_limit: Optional[Callable[[Exception], Any]] = None self.on_rate_limit: Optional[Callable[[Exception], Any]] = None
# Internal flag to avoid repeated notifications for the same runtime # Internal flag to avoid repeated notifications for the same runtime
self._rate_limit_notified = False self._rate_limit_notified = False
# Track when we last successfully logged in
self._last_login_time: Optional[float] = None
# Track last successful API call
self._last_successful_request: Optional[float] = None
# Keepalive task handle
self._keepalive_task: Optional[asyncio.Task] = None
# Keepalive interval in seconds
self.KEEPALIVE_INTERVAL = 180 # 3 minutes
async def start_keepalive(self) -> None:
"""Start the background keepalive task.
This should be called once at startup to ensure the Tidal session
stays alive even during idle periods.
"""
if self._keepalive_task and not self._keepalive_task.done():
logging.info("Tidal keepalive task already running")
return
# Ensure initial login
try:
await self._login_and_persist()
logging.info("Initial Tidal login successful")
except Exception as e:
logging.warning("Initial Tidal login failed: %s", e)
self._keepalive_task = asyncio.create_task(self._keepalive_runner())
logging.info("Tidal keepalive task started")
async def stop_keepalive(self) -> None:
"""Stop the background keepalive task."""
if self._keepalive_task and not self._keepalive_task.done():
self._keepalive_task.cancel()
try:
await self._keepalive_task
except asyncio.CancelledError:
pass
logging.info("Tidal keepalive task stopped")
async def _keepalive_runner(self) -> None:
"""Background task to keep the Tidal session alive."""
while True:
try:
await asyncio.sleep(self.KEEPALIVE_INTERVAL)
# Check if we've had recent activity
if self._last_successful_request:
time_since_last = time.time() - self._last_successful_request
if time_since_last < self.KEEPALIVE_INTERVAL:
# Recent activity, no need to ping
continue
# Check if token is expiring soon and proactively refresh
if self._is_token_expiring_soon():
logging.info("Tidal keepalive: Token expiring soon, refreshing...")
try:
await self._login_and_persist(force=True)
logging.info("Tidal keepalive: Token refresh successful")
except Exception as e:
logging.warning("Tidal keepalive: Token refresh failed: %s", e)
continue
# Check if session is stale
if self._is_session_stale():
logging.info("Tidal keepalive: Session stale, refreshing...")
try:
await self._login_and_persist(force=True)
logging.info("Tidal keepalive: Session refresh successful")
except Exception as e:
logging.warning(
"Tidal keepalive: Session refresh failed: %s", e
)
continue
# Make a lightweight API call to keep the session alive
if self.streamrip_client.logged_in:
try:
# Simple search to keep the connection alive
await self._safe_api_call(
self.streamrip_client.search,
media_type="artist",
query="test",
retries=1,
)
logging.debug("Tidal keepalive ping successful")
except Exception as e:
logging.warning("Tidal keepalive ping failed: %s", e)
# Try to refresh the session
try:
await self._login_and_persist(force=True)
except Exception:
pass
except asyncio.CancelledError:
logging.info("Tidal keepalive task cancelled")
break
except Exception as e:
logging.error("Error in Tidal keepalive task: %s", e)
def _load_tidal_config(self) -> None:
"""Load Tidal config from cache file if available, otherwise from env."""
tidal = self.streamrip_config.session.tidal
cached = self._load_cached_tokens()
if cached:
tidal.user_id = cached.get("user_id", "")
tidal.access_token = cached.get("access_token", "")
tidal.refresh_token = cached.get("refresh_token", "")
tidal.token_expiry = cached.get("token_expiry", "")
tidal.country_code = cached.get(
"country_code", os.getenv("tidal_country_code", "")
)
else:
tidal.user_id = os.getenv("tidal_user_id", "")
tidal.access_token = os.getenv("tidal_access_token", "")
tidal.refresh_token = os.getenv("tidal_refresh_token", "")
tidal.token_expiry = os.getenv("tidal_token_expiry", "")
tidal.country_code = os.getenv("tidal_country_code", "")
tidal.quality = int(os.getenv("tidal_default_quality", 2))
def _load_cached_tokens(self) -> Optional[dict]:
"""Load cached tokens from disk if valid."""
try:
if TIDAL_TOKEN_CACHE_PATH.exists():
with open(TIDAL_TOKEN_CACHE_PATH, "r") as f:
data = json.load(f)
# Validate required fields exist
if all(
k in data for k in ("access_token", "refresh_token", "token_expiry")
):
logging.info("Loaded Tidal tokens from cache")
return data
except Exception as e:
logging.warning("Failed to load cached Tidal tokens: %s", e)
return None
def _save_cached_tokens(self) -> None:
"""Persist current tokens to disk for use across restarts."""
try:
tidal = self.streamrip_config.session.tidal
data = {
"user_id": tidal.user_id,
"access_token": tidal.access_token,
"refresh_token": tidal.refresh_token,
"token_expiry": tidal.token_expiry,
"country_code": tidal.country_code,
}
with open(TIDAL_TOKEN_CACHE_PATH, "w") as f:
json.dump(data, f)
logging.info("Saved Tidal tokens to cache")
except Exception as e:
logging.warning("Failed to save Tidal tokens: %s", e)
def _apply_new_tokens(self, auth_info: dict) -> None:
"""Apply new tokens from device auth to config."""
tidal = self.streamrip_config.session.tidal
tidal.user_id = str(auth_info.get("user_id", ""))
tidal.access_token = auth_info.get("access_token", "")
tidal.refresh_token = auth_info.get("refresh_token", "")
tidal.token_expiry = auth_info.get("token_expiry", "")
tidal.country_code = auth_info.get("country_code", tidal.country_code)
self._save_cached_tokens()
async def start_device_auth(self) -> tuple[str, str]:
"""Start device authorization flow.
Returns:
tuple: (device_code, verification_url) - User should visit the URL to authorize.
"""
if (
not hasattr(self.streamrip_client, "session")
or not self.streamrip_client.session
):
self.streamrip_client.session = await self.streamrip_client.get_session()
device_code, verification_url = await self.streamrip_client._get_device_code()
return device_code, verification_url
async def check_device_auth(self, device_code: str) -> tuple[bool, Optional[str]]:
"""Check if user has completed device authorization.
Args:
device_code: The device code from start_device_auth()
Returns:
tuple: (success, error_message)
- (True, None) if auth completed successfully
- (False, "pending") if user hasn't authorized yet
- (False, error_message) if auth failed
"""
status, auth_info = await self.streamrip_client._get_auth_status(device_code)
if status == 0:
# Success - apply new tokens
self._apply_new_tokens(auth_info)
# Re-login with new tokens
self.streamrip_client.logged_in = False
try:
await self.streamrip_client.login()
self._save_cached_tokens()
return True, None
except Exception as e:
return False, f"Login after auth failed: {e}"
elif status == 2:
# Pending - user hasn't authorized yet
return False, "pending"
else:
# Failed
return False, "Authorization failed"
def _is_token_expiring_soon(self) -> bool:
"""Check if the token is about to expire within the buffer window."""
tidal = self.streamrip_config.session.tidal
token_expiry = getattr(tidal, "token_expiry", None)
if not token_expiry:
return True # No expiry info means we should refresh
try:
# token_expiry is typically an ISO timestamp string
if isinstance(token_expiry, str):
from datetime import datetime
expiry_dt = datetime.fromisoformat(token_expiry.replace("Z", "+00:00"))
expiry_ts = expiry_dt.timestamp()
else:
expiry_ts = float(token_expiry)
return expiry_ts < (time.time() + TIDAL_TOKEN_REFRESH_BUFFER)
except Exception as e:
logging.warning("Failed to parse token expiry '%s': %s", token_expiry, e)
return True # Err on the side of refreshing
def _is_session_stale(self) -> bool:
"""Check if the login session is too old and should be refreshed."""
if not self._last_login_time:
return True
session_age = time.time() - self._last_login_time
return session_age > TIDAL_SESSION_MAX_AGE
async def _force_fresh_login(self) -> bool:
"""Force a complete fresh login, ignoring logged_in state.
Returns True if login succeeded, False otherwise.
"""
# Reset the logged_in flag to force a fresh login
self.streamrip_client.logged_in = False
# Close existing session if present
if hasattr(self.streamrip_client, "session") and self.streamrip_client.session:
try:
if not self.streamrip_client.session.closed:
await self.streamrip_client.session.close()
except Exception as e:
logging.warning("Error closing old session: %s", e)
# Use object.__setattr__ to bypass type checking for session reset
try:
object.__setattr__(self.streamrip_client, "session", None)
except Exception:
pass # Session will be recreated on next login
try:
logging.info("Forcing fresh Tidal login...")
await self.streamrip_client.login()
self._last_login_time = time.time()
self._save_cached_tokens()
logging.info("Fresh Tidal login successful")
return True
except Exception as e:
logging.warning(
"Forced Tidal login failed: %s - device re-auth may be required", e
)
return False
async def _login_and_persist(self, force: bool = False) -> None:
"""Login to Tidal and persist any refreshed tokens.
Args:
force: If True, force a fresh login even if already logged in.
This method now checks for:
1. Token expiry - refreshes if token is about to expire
2. Session age - refreshes if session is too old
3. logged_in state - logs in if not logged in
If refresh fails, logs a warning but does not raise.
"""
needs_login = force or not self.streamrip_client.logged_in
# Check if token is expiring soon
if not needs_login and self._is_token_expiring_soon():
logging.info("Tidal token expiring soon, will refresh")
needs_login = True
# Check if session is too old
if not needs_login and self._is_session_stale():
logging.info("Tidal session is stale, will refresh")
needs_login = True
if not needs_login:
return
try:
# Reset logged_in to ensure fresh login attempt
if force or self._is_token_expiring_soon():
self.streamrip_client.logged_in = False
await self.streamrip_client.login()
self._last_login_time = time.time()
# After login, tokens may have been refreshed - persist them
self._save_cached_tokens()
logging.info("Tidal login/refresh successful")
except Exception as e:
logging.warning(
"Tidal login/refresh failed: %s - device re-auth may be required", e
)
# Don't mark as logged in on failure - let subsequent calls retry
async def rate_limited_request(self, func, *args, **kwargs): async def rate_limited_request(self, func, *args, **kwargs):
"""Rate-limited wrapper that also ensures login before making requests."""
async with self.METADATA_SEMAPHORE: async with self.METADATA_SEMAPHORE:
now = time.time() now = time.time()
elapsed = now - self.LAST_METADATA_REQUEST elapsed = now - self.LAST_METADATA_REQUEST
if elapsed < self.METADATA_RATE_LIMIT: if elapsed < self.METADATA_RATE_LIMIT:
await asyncio.sleep(self.METADATA_RATE_LIMIT - elapsed) await asyncio.sleep(self.METADATA_RATE_LIMIT - elapsed)
# Ensure we're logged in before making the request
try:
await self._login_and_persist()
except Exception as e:
logging.warning(
"Pre-request login failed in rate_limited_request: %s", e
)
result = await func(*args, **kwargs) result = await func(*args, **kwargs)
self.LAST_METADATA_REQUEST = time.time() self.LAST_METADATA_REQUEST = time.time()
return result return result
    async def _safe_api_call(
-       self, func, *args, retries: int = 2, backoff: float = 0.5, **kwargs
+       self, func, *args, retries: int = 3, backoff: float = 0.5, **kwargs
    ):
"""Call an async API function with resilient retry behavior. """Call an async API function with resilient retry behavior.
@@ -97,18 +434,40 @@ class SRUtil:
attempt a `login()` and retry up to `retries` times. attempt a `login()` and retry up to `retries` times.
- On 400/429 responses (message contains '400' or '429'): retry with backoff - On 400/429 responses (message contains '400' or '429'): retry with backoff
without triggering login (to avoid excessive logins). without triggering login (to avoid excessive logins).
- On 401 (Unauthorized): force a fresh login and retry.
Returns the result or raises the last exception. Returns the result or raises the last exception.
""" """
last_exc: Optional[Exception] = None last_exc: Optional[Exception] = None
for attempt in range(retries): for attempt in range(retries):
try: try:
-               return await func(*args, **kwargs)
+               # Before each attempt, ensure we have a valid session
if attempt == 0:
# On first attempt, try to ensure logged in (checks token expiry)
# Wrapped in try/except so login failures don't block the API call
try:
await self._login_and_persist()
except Exception as login_err:
logging.warning(
"Pre-request login failed: %s (continuing anyway)",
login_err,
)
result = await func(*args, **kwargs)
# Track successful request
self._last_successful_request = time.time()
return result
except AttributeError as e: except AttributeError as e:
# Probably missing/closed client internals: try re-login once # Probably missing/closed client internals: try re-login once
last_exc = e last_exc = e
logging.warning(
"AttributeError in API call (attempt %d/%d): %s",
attempt + 1,
retries,
e,
)
try: try:
-                   await self.streamrip_client.login()
+                   await self._force_fresh_login()
except Exception: except Exception:
pass pass
continue continue
@@ -138,6 +497,36 @@ class SRUtil:
await asyncio.sleep(backoff * (2**attempt)) await asyncio.sleep(backoff * (2**attempt))
continue continue
# Treat 401 (Unauthorized) as an auth failure: force a fresh re-login then retry
is_401_error = (
(
isinstance(e, aiohttp.ClientResponseError)
and getattr(e, "status", None) == 401
)
or "401" in msg
or "unauthorized" in msg.lower()
)
if is_401_error:
logging.warning(
"Received 401/Unauthorized from Tidal (attempt %d/%d). Forcing fresh re-login...",
attempt + 1,
retries,
)
try:
# Use force=True to ensure we actually re-authenticate
login_success = await self._force_fresh_login()
if login_success:
logging.info("Forced re-login after 401 successful")
else:
logging.warning(
"Forced re-login after 401 failed - may need device re-auth"
)
except Exception as login_exc:
logging.warning("Forced login after 401 failed: %s", login_exc)
if attempt < retries - 1:
await asyncio.sleep(backoff * (2**attempt))
continue
# Connection related errors — try to re-login then retry # Connection related errors — try to re-login then retry
if ( if (
isinstance( isinstance(
@@ -153,7 +542,7 @@ class SRUtil:
or "closed" in msg.lower() or "closed" in msg.lower()
): ):
try: try:
-                   await self.streamrip_client.login()
+                   await self._login_and_persist(force=True)
except Exception: except Exception:
pass pass
if attempt < retries - 1: if attempt < retries - 1:
@@ -190,12 +579,21 @@ class SRUtil:
        title_match = self.is_fuzzy_match(expected_title, found_title, threshold)
        return artist_match and album_match and title_match

-   def dedupe_by_key(self, key: str, entries: list[dict]) -> list[dict]:
-       deduped = {}
+   def dedupe_by_key(self, key: str | list[str], entries: list[dict]) -> list[dict]:
+       """Return entries de-duplicated by one or more keys."""
+       keys = [key] if isinstance(key, str) else list(key)
+       if not keys:
+           return entries
+
+       def normalize(value: Any) -> str:
+           return str(value or "").strip().lower()
+
+       deduped: dict[tuple[str, ...], dict] = {}
        for entry in entries:
-           norm = entry[key].strip().lower()
-           if norm not in deduped:
-               deduped[norm] = entry
+           composite_key = tuple(normalize(entry.get(k)) for k in keys)
+           if composite_key not in deduped:
+               deduped[composite_key] = entry
        return list(deduped.values())
def group_artists_by_name( def group_artists_by_name(
@@ -308,9 +706,11 @@ class SRUtil:
"upc": album_json.get("upc"), "upc": album_json.get("upc"),
"album_copyright": album_json.get("copyright"), "album_copyright": album_json.get("copyright"),
"album_cover_id": album_json.get("cover"), "album_cover_id": album_json.get("cover"),
"album_cover_url": f"https://resources.tidal.com/images/{album_json.get('cover')}/1280x1280.jpg" "album_cover_url": (
f"https://resources.tidal.com/images/{album_json.get('cover')}/1280x1280.jpg"
if album_json.get("cover") if album_json.get("cover")
else None, else None
),
} }
# Track-level (overrides or adds to album info) # Track-level (overrides or adds to album info)
@@ -417,8 +817,6 @@ class SRUtil:
async def get_albums_by_artist_id(self, artist_id: int) -> Optional[list | dict]: async def get_albums_by_artist_id(self, artist_id: int) -> Optional[list | dict]:
"""Get albums by artist ID. Retry login only on authentication failure. Rate limit and retry on 400/429.""" """Get albums by artist ID. Retry login only on authentication failure. Rate limit and retry on 400/429."""
import asyncio
artist_id_str: str = str(artist_id) artist_id_str: str = str(artist_id)
albums_out: list[dict] = [] albums_out: list[dict] = []
max_retries = 4 max_retries = 4
@@ -444,7 +842,9 @@ class SRUtil:
return None return None
if not metadata: if not metadata:
return None return None
-           albums = self.dedupe_by_key("title", metadata.get("albums", []))
+           albums = self.dedupe_by_key(
+               ["title", "releaseDate"], metadata.get("albums", [])
+           )
albums_out = [ albums_out = [
{ {
"artist": ", ".join(artist["name"] for artist in album["artists"]), "artist": ", ".join(artist["name"] for artist in album["artists"]),
@@ -568,26 +968,26 @@ class SRUtil:
TODO: Reimplement using StreamRip TODO: Reimplement using StreamRip
""" """
try: try:
# _safe_api_call already handles login, no need to call it here
search_res = await self._safe_api_call( search_res = await self._safe_api_call(
self.streamrip_client.search, self.streamrip_client.search,
media_type="track", media_type="track",
query=f"{artist} - {song}", query=f"{artist} - {song}",
retries=3, retries=3,
) )
logging.critical("Result: %s", search_res) logging.debug("Search result: %s", search_res)
return ( return (
search_res[0].get("items") search_res[0].get("items")
if search_res and isinstance(search_res, list) if search_res and isinstance(search_res, list)
else [] else []
) )
        except Exception as e:
-           traceback.print_exc()
-           logging.critical("Search Exception: %s", str(e))
-           if n < 3:
+           logging.warning("Search Exception: %s", str(e))
+           if n < 2:  # Reduce max retries from 3 to 2
                n += 1
+               await asyncio.sleep(0.5 * n)  # Add backoff
                return await self.get_tracks_by_artist_song(artist, song, n)
            return []
-           # return []
async def get_stream_url_by_track_id( async def get_stream_url_by_track_id(
self, track_id: int, quality: str = "FLAC" self, track_id: int, quality: str = "FLAC"
@@ -638,7 +1038,6 @@ class SRUtil:
""" """
for attempt in range(1, self.MAX_METADATA_RETRIES + 1): for attempt in range(1, self.MAX_METADATA_RETRIES + 1):
try: try:
await self._safe_api_call(self.streamrip_client.login, retries=1)
# Track metadata # Track metadata
metadata = await self.rate_limited_request( metadata = await self.rate_limited_request(
self.streamrip_client.get_metadata, str(track_id), "track" self.streamrip_client.get_metadata, str(track_id), "track"
@@ -684,21 +1083,22 @@ class SRUtil:
except Exception as e: except Exception as e:
# Exponential backoff with jitter for 429 or other errors # Exponential backoff with jitter for 429 or other errors
delay = self.RETRY_DELAY * (2 ** (attempt - 1)) + random.uniform(0, 0.5) delay = self.RETRY_DELAY * (2 ** (attempt - 1)) + random.uniform(0, 0.5)
if attempt < self.MAX_METADATA_RETRIES:
logging.warning( logging.warning(
"Metadata fetch failed for track %s (attempt %d/%d): %s. Retrying in %.2fs", "Retrying metadata fetch for track %s (attempt %d/%d): %s. Next retry in %.2fs",
track_id, track_id,
attempt, attempt,
self.MAX_METADATA_RETRIES, self.MAX_METADATA_RETRIES,
str(e), str(e),
delay, delay,
) )
if attempt < self.MAX_METADATA_RETRIES:
await asyncio.sleep(delay) await asyncio.sleep(delay)
else: else:
logging.error( logging.error(
"Metadata fetch failed permanently for track %s after %d attempts", "Metadata fetch failed permanently for track %s after %d attempts: %s",
track_id, track_id,
self.MAX_METADATA_RETRIES, self.MAX_METADATA_RETRIES,
str(e),
) )
# Raise a specific exception so callers can react (e.g. notify) # Raise a specific exception so callers can react (e.g. notify)
raise MetadataFetchError( raise MetadataFetchError(
@@ -716,7 +1116,6 @@ class SRUtil:
bool bool
""" """
try: try:
await self._safe_api_call(self.streamrip_client.login, retries=1)
track_url = await self.get_stream_url_by_track_id(track_id) track_url = await self.get_stream_url_by_track_id(track_id)
if not track_url: if not track_url:
return False return False
@@ -749,10 +1148,10 @@ class SRUtil:
async def get_lrc_by_track_id(self, track_id: int) -> Optional[str]: async def get_lrc_by_track_id(self, track_id: int) -> Optional[str]:
"""Get LRC lyrics by track ID.""" """Get LRC lyrics by track ID."""
logging.info(f"SR: Fetching metadata for track ID {track_id}") logging.debug(f"SR: Fetching metadata for track ID {track_id}")
metadata = await self.get_metadata_by_track_id(track_id) metadata = await self.get_metadata_by_track_id(track_id)
lrc = metadata.get("lyrics") if metadata else None lrc = metadata.get("lyrics") if metadata else None
logging.info(f"SR: LRC {'found' if lrc else 'not found'}") logging.debug(f"SR: LRC {'found' if lrc else 'not found'}")
return lrc return lrc
async def get_lrc_by_artist_song( async def get_lrc_by_artist_song(
@@ -763,7 +1162,7 @@ class SRUtil:
duration: Optional[int] = None, duration: Optional[int] = None,
) -> Optional[str]: ) -> Optional[str]:
"""Get LRC lyrics by artist and song, optionally filtering by album and duration.""" """Get LRC lyrics by artist and song, optionally filtering by album and duration."""
logging.info(f"SR: Searching tracks for {artist} - {song}") logging.debug(f"SR: Searching tracks for {artist} - {song}")
tracks = await self.get_tracks_by_artist_song(artist, song) tracks = await self.get_tracks_by_artist_song(artist, song)
logging.info(f"SR: Found {len(tracks) if tracks else 0} tracks") logging.info(f"SR: Found {len(tracks) if tracks else 0} tracks")
if not tracks: if not tracks:
@@ -788,7 +1187,7 @@ class SRUtil:
        tracks_with_diff.sort(key=lambda x: x[1])
        best_track, min_diff = tracks_with_diff[0]
        logging.info(f"SR: Best match duration diff: {min_diff}s")
-       # If the closest match is more than 5 seconds off, consider no match
+       # If the closest match is more than 10 seconds off, consider no match
        if min_diff > 10:
            logging.info("SR: Duration diff too large, no match")
            return None
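The new keepalive and device-auth surface (start_keepalive, stop_keepalive, start_device_auth, check_device_auth) is meant to be driven by the API process. A hedged sketch of how a FastAPI app might wire it up; the endpoint paths and lifespan wiring are assumptions, not part of this diff:

from contextlib import asynccontextmanager

from fastapi import FastAPI
from utils.sr_wrapper import SRUtil

sr = SRUtil()

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Initial login plus a background task that refreshes expiring tokens and stale sessions.
    await sr.start_keepalive()
    yield
    await sr.stop_keepalive()

app = FastAPI(lifespan=lifespan)

@app.post("/tidal/device_auth/start")
async def device_auth_start():
    device_code, verification_url = await sr.start_device_auth()
    return {"device_code": device_code, "verification_url": verification_url}

@app.post("/tidal/device_auth/check")
async def device_auth_check(device_code: str):
    ok, error = await sr.check_device_auth(device_code)
    return {"success": ok, "error": error}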

View File

@@ -1,35 +0,0 @@
# -----------------------
# /m/m2/ PHP handler
location ~ ^/m/m2/(.+\.php)$ {
alias /storage/music2/completed/;
include fastcgi_params;
fastcgi_pass unix:/run/php/php8.2-fpm.sock;
fastcgi_param SCRIPT_FILENAME /storage/music2/completed/$1;
fastcgi_param DOCUMENT_ROOT /storage/music2/completed;
fastcgi_param SCRIPT_NAME /m/m2/$1;
}
# /m/m2/ static files
location /m/m2/ {
alias /storage/music2/completed/;
index index.php;
try_files $uri $uri/ /index.php$is_args$args;
}
# -----------------------
# /m/ PHP handler
location ~ ^/m/(.+\.php)$ {
root /var/www/codey.lol/new/public;
include fastcgi_params;
fastcgi_pass unix:/run/php/php8.2-fpm.sock;
fastcgi_param SCRIPT_FILENAME $document_root/$1;
fastcgi_param DOCUMENT_ROOT $document_root;
fastcgi_param SCRIPT_NAME /m/$1;
}
# /m/ static files
location /m/ {
root /var/www/codey.lol/new/public;
index index.php;
try_files $uri $uri/ /m/index.php$is_args$args;
}

24
utils/yt_utils.py Normal file
View File

@@ -0,0 +1,24 @@
from typing import Optional
import hmac
import hashlib
import time
import base64
import os
VIDEO_PROXY_SECRET = os.environ.get("VIDEO_PROXY_SECRET", "").encode()
def sign_video_id(video_id: Optional[str | bool]) -> str:
"""Generate a signed token for a video ID."""
if not VIDEO_PROXY_SECRET or not video_id:
return "" # Return empty if no secret configured
timestamp = int(time.time() * 1000) # milliseconds to match JS Date.now()
payload = f"{video_id}:{timestamp}"
signature = hmac.new(
VIDEO_PROXY_SECRET, payload.encode(), hashlib.sha256
).hexdigest()
token_data = f"{payload}:{signature}"
# base64url encode (no padding, to match JS base64url)
return base64.urlsafe_b64encode(token_data.encode()).decode().rstrip("=")
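sign_video_id only produces tokens; the consumer that checks them is not part of this file. A sketch of the matching verification, reusing the imports and VIDEO_PROXY_SECRET defined above and assuming a maximum token age:

def verify_video_token(token: str, max_age_ms: int = 5 * 60 * 1000) -> Optional[str]:
    """Hypothetical counterpart to sign_video_id: return the video ID if the token checks out."""
    if not VIDEO_PROXY_SECRET or not token:
        return None
    # Restore the base64url padding stripped by the signer.
    padded = token + "=" * (-len(token) % 4)
    try:
        decoded = base64.urlsafe_b64decode(padded).decode()
        video_id, timestamp, signature = decoded.rsplit(":", 2)
    except Exception:
        return None
    expected = hmac.new(
        VIDEO_PROXY_SECRET, f"{video_id}:{timestamp}".encode(), hashlib.sha256
    ).hexdigest()
    if not hmac.compare_digest(expected, signature):
        return None
    if int(time.time() * 1000) - int(timestamp) > max_age_ms:
        return None  # token too old
    return video_id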