various/stale

This commit is contained in:
2026-01-25 13:14:00 -05:00
parent 10ccf8c8eb
commit 97fd7dd67d
14 changed files with 501 additions and 64 deletions

View File

@@ -228,6 +228,10 @@ class Lighting:
        if self._state.session.closed:
            return False
+        if not self._is_tcp_connected():
+            logger.info("Cync TCP manager not connected; will reconnect")
+            return False
        # Check token expiry
        if self._is_token_expired():
            logger.info("Token expired or expiring soon")
@@ -235,6 +239,35 @@ class Lighting:
        return True

+    def _is_tcp_connected(self) -> bool:
+        """Best-effort check that the pycync TCP connection is alive."""
+        client = getattr(self._state.cync_api, "_command_client", None)
+        if not client:
+            return False
+        tcp_manager = getattr(client, "_tcp_manager", None)
+        if not tcp_manager:
+            return False
+        # If login was never acknowledged or was cleared, treat as disconnected
+        if not getattr(tcp_manager, "_login_acknowledged", False):
+            return False
+        writer = getattr(tcp_manager, "_writer", None)
+        reader = getattr(tcp_manager, "_reader", None)
+        # If underlying streams are closed, reconnect
+        if writer and writer.is_closing():
+            return False
+        if reader and reader.at_eof():
+            return False
+        # Some versions expose a _closed flag
+        if getattr(tcp_manager, "_closed", False):
+            return False
+        return True

    def _is_token_expired(self) -> bool:
        """Check if token is expired or will expire soon."""
        if not self._state.user:
@@ -418,11 +451,21 @@ class Lighting:
"""Background task to monitor connection health and refresh tokens."""
while True:
try:
await asyncio.sleep(300) # Check every 5 minutes
await asyncio.sleep(60) # Check every minute
needs_reconnect = False
# Proactively refresh if token is expiring
if self._is_token_expired():
logger.info("Token expiring, proactively reconnecting...")
needs_reconnect = True
# Reconnect if TCP connection looks dead
if not self._is_tcp_connected():
logger.warning("Cync TCP connection lost; reconnecting...")
needs_reconnect = True
if needs_reconnect:
try:
await self._connect(force=True)
except Exception as e:

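The monitor-loop change above amounts to a small watchdog pattern: sleep for an interval, run a set of health probes (token expiry, TCP liveness), and force a reconnect as soon as any probe fails. Below is a minimal standalone sketch of that pattern, using hypothetical probes/reconnect callables rather than the actual Lighting internals.

import asyncio
import logging
from typing import Awaitable, Callable, Iterable

logger = logging.getLogger(__name__)

async def watchdog(
    probes: Iterable[Callable[[], bool]],      # e.g. lambda: not token_expired, lambda: tcp_alive
    reconnect: Callable[[], Awaitable[None]],  # coroutine that rebuilds the session
    interval: float = 60.0,
) -> None:
    """Periodically run health probes and reconnect when any of them fail."""
    while True:
        await asyncio.sleep(interval)
        try:
            if all(probe() for probe in probes):
                continue
            logger.warning("Health probe failed; reconnecting...")
            await reconnect()
        except Exception:
            # Never let the watchdog task die; log and retry on the next tick.
            logger.exception("Reconnect attempt failed")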
View File

@@ -245,9 +245,9 @@ class LyricSearch(FastAPI):
            if i + line_count <= len(lyric_lines):
                # Combine consecutive lines with space separator
                combined_lines = []
-                line_positions: list[tuple[int, int]] = (
-                    []
-                )  # Track where each line starts in combined text
+                line_positions: list[
+                    tuple[int, int]
+                ] = []  # Track where each line starts in combined text
                combined_text_parts: list[str] = []
                for j in range(line_count):

View File

@@ -4,6 +4,7 @@ import time
import random
import json
import asyncio
+import socket
from typing import Dict, Set
from .constructors import (
    ValidRadioNextRequest,
@@ -33,6 +34,21 @@ from fastapi.responses import RedirectResponse, JSONResponse, FileResponse
from auth.deps import get_current_user
from collections import defaultdict

+def _get_local_ips() -> set[str]:
+    """Get all local IP addresses for this host."""
+    ips = {"127.0.0.1", "::1"}
+    try:
+        for info in socket.getaddrinfo(socket.gethostname(), None):
+            ips.add(str(info[4][0]))
+    except Exception:
+        pass
+    return ips

+_LOCAL_IPS = _get_local_ips()

class Radio(FastAPI):
    """Radio Endpoints"""
@@ -380,7 +396,6 @@ class Radio(FastAPI):
        data: ValidRadioNextRequest,
        request: Request,
        background_tasks: BackgroundTasks,
-        user=Depends(get_current_user),
    ) -> JSONResponse:
        """
        Get the next track in the queue. The track will be removed from the queue in the process.
@@ -395,8 +410,11 @@ class Radio(FastAPI):
        - **JSONResponse**: Contains the next track information.
        """
-        if "dj" not in user.get("roles", []):
-            raise HTTPException(status_code=403, detail="Insufficient permissions")
+        try:
+            if request.client and request.client.host not in _LOCAL_IPS:
+                raise HTTPException(status_code=403, detail="Access denied")
+        except ValueError:
+            raise HTTPException(status_code=403, detail="Access denied")
        logging.info("Radio get next")
        if data.station not in self.radio_util.active_playlist.keys():

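The /next handler now gates on the caller's address instead of a role claim: any request whose request.client.host is not one of this host's own addresses is rejected with a 403. If the same rule is needed on other endpoints, it could be factored into a FastAPI dependency; a sketch under the assumption that _LOCAL_IPS is importable from this module (require_local_client is a hypothetical name, not part of the diff):

from fastapi import HTTPException, Request

async def require_local_client(request: Request) -> None:
    """Reject requests that do not originate from this host."""
    host = request.client.host if request.client else None
    if host not in _LOCAL_IPS:  # _LOCAL_IPS as built by _get_local_ips() above
        raise HTTPException(status_code=403, detail="Access denied")

An endpoint would then declare dependencies=[Depends(require_local_client)] instead of repeating the inline check.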
View File

@@ -5,6 +5,7 @@ from fastapi.responses import JSONResponse
from utils.sr_wrapper import SRUtil
from auth.deps import get_current_user
from redis import Redis
+from pathlib import Path
from rq import Queue
from rq.job import Job
from rq.job import JobStatus
@@ -20,8 +21,7 @@ from lyric_search.sources import private
from typing import Literal
from pydantic import BaseModel
-logger = logging.getLogger()
-logger.setLevel(logging.DEBUG)
+logger = logging.getLogger(__name__)
class ValidBulkFetchRequest(BaseModel):
@@ -126,6 +126,22 @@ class RIP(FastAPI):
            ]
        )
+        # Build detailed per-track list for the job detail response
+        raw_tracks = job.meta.get("tracks") or []
+        track_list = []
+        for t in raw_tracks:
+            # Normalize fields and pick the requested set
+            track_list.append(
+                {
+                    "title": t.get("title"),
+                    "artist": t.get("artist"),
+                    "status": t.get("status"),
+                    "error": t.get("error"),
+                    "filename": t.get("filename")
+                    or (Path(t.get("file_path")).name if t.get("file_path") else None),
+                }
+            )
        return {
            "id": job.id,
            "status": job_status.title(),
@@ -140,6 +156,7 @@ class RIP(FastAPI):
                if isinstance(tracks_in, int)
                else tracks_out
            ),
+            "track_list": track_list,
            "target": job.meta.get("target"),
            "quality": job.meta.get("quality", "Unknown"),
        }
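
The per-track entries built above fall back to deriving filename from file_path when the worker did not record an explicit filename. A small illustrative example of that normalization (the sample dicts are made up, not real job metadata):

from pathlib import Path

raw_tracks = [
    {"title": "Song A", "artist": "Artist A", "status": "done",
     "file_path": "/storage/rips/Artist A - Song A.flac"},
    {"title": "Song B", "artist": "Artist A", "status": "failed", "error": "not found"},
]

track_list = [
    {
        "title": t.get("title"),
        "artist": t.get("artist"),
        "status": t.get("status"),
        "error": t.get("error"),
        # Prefer an explicit filename, else derive one from file_path, else None.
        "filename": t.get("filename")
        or (Path(t.get("file_path")).name if t.get("file_path") else None),
    }
    for t in raw_tracks
]
# track_list[0]["filename"] -> "Artist A - Song A.flac"
# track_list[1]["filename"] -> None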