#!/usr/bin/env python3.12
"""AI proxy endpoints: forwards requests to a local LLM and to Anthropic's Claude API."""

import importlib
import logging

import regex
from aiohttp import ClientSession, ClientTimeout
from fastapi import FastAPI, Security, Request, HTTPException
from fastapi.security import APIKeyHeader, APIKeyQuery
from pydantic import BaseModel


class ValidAISongRequest(BaseModel):
    """
    - **a**: artist
    - **s**: track title
    """
    a: str
    s: str


class AI(FastAPI):
    """AI Endpoints"""
    def __init__(self, app: FastAPI, my_util, constants, glob_state):  # pylint: disable=super-init-not-called
        self.app = app
        self.util = my_util
        self.constants = constants
        self.glob_state = glob_state
        # Strips the "/ai/openai/" or "/ai/base/" prefix so the remainder of
        # the request path can be forwarded verbatim to the local LLM.
        self.url_clean_regex = regex.compile(r'^\/ai\/(openai|base)\/')
        self.endpoints = {
            "ai/openai": self.ai_openai_handler,
            "ai/base": self.ai_handler,
            "ai/song": self.ai_song_handler,  # tbd
        }

        for endpoint, handler in self.endpoints.items():
            app.add_api_route(f"/{endpoint}/{{any:path}}", handler, methods=["POST"])

    async def ai_handler(self, request: Request):
        """
        /ai/base/
        AI BASE Request

        Proxies the JSON request body to the local LLM, preserving the
        sub-path after the "/ai/base/" prefix.
        Raises HTTPException(403) when the auth header check fails.
        Returns the upstream JSON response, or an {err, errorText} dict
        on any failure.
        """
        if not self.util.check_key(request.url.path, request.headers.get('X-Authd-With')):
            raise HTTPException(status_code=403, detail="Unauthorized")

        local_llm_headers = {
            'Authorization': f'Bearer {self.constants.LOCAL_LLM_KEY}'
        }
        forward_path = self.url_clean_regex.sub('', request.url.path)
        try:
            async with ClientSession() as session:
                # session.post(...) is itself an async context manager;
                # the previous `await` before it was unnecessary.
                async with session.post(f'{self.constants.LOCAL_LLM_BASE}/{forward_path}',
                                        json=await request.json(),
                                        headers=local_llm_headers,
                                        timeout=ClientTimeout(connect=15, sock_read=30)) as out_response:
                    await self.glob_state.increment_counter('ai_requests')
                    return await out_response.json()
        except Exception as e:  # pylint: disable=broad-exception-caught
            logging.error("Error: %s", e)
            return {
                'err': True,
                'errorText': 'General Failure'
            }

    async def ai_openai_handler(self, request: Request):
        """
        /ai/openai/
        AI Request

        TODO: Implement Claude. Currently only routes to the local LLM.
        Raises HTTPException(403) when the auth header check fails.
        Returns the upstream JSON response, or an {err, errorText} dict
        on any failure.
        """
        if not self.util.check_key(request.url.path, request.headers.get('X-Authd-With')):
            raise HTTPException(status_code=403,
                                detail="Unauthorized")

        local_llm_headers = {
            'Authorization': f'Bearer {self.constants.LOCAL_LLM_KEY}'
        }
        forward_path = self.url_clean_regex.sub('', request.url.path)
        try:
            async with ClientSession() as session:
                # NOTE(review): uses LOCAL_LLM_HOST while ai_handler uses
                # LOCAL_LLM_BASE for the same proxying — confirm whether
                # these constants are meant to differ.
                async with session.post(f'{self.constants.LOCAL_LLM_HOST}/{forward_path}',
                                        json=await request.json(),
                                        headers=local_llm_headers,
                                        timeout=ClientTimeout(connect=15, sock_read=30)) as out_response:
                    await self.glob_state.increment_counter('ai_requests')
                    return await out_response.json()
        except Exception as e:  # pylint: disable=broad-exception-caught
            logging.error("Error: %s", e)
            return {
                'err': True,
                'errorText': 'General Failure'
            }

    async def ai_song_handler(self, data: ValidAISongRequest, request: Request):
        """
        /ai/song/
        AI (Song Info) Request [Public]

        Asks Claude for a short tidbit about the song named in `data`
        (artist `a`, title `s`). Returns {'resp': <text>} on success,
        or an {err, errorText} dict on any failure.
        """
        ai_prompt = "You are a helpful assistant who will provide tidbits of info on songs the user may listen to."
        ai_question = f"I am going to listen to the song \"{data.s}\" by \"{data.a}\"."

        local_llm_headers = {
            'x-api-key': self.constants.CLAUDE_API_KEY,
            'anthropic-version': '2023-06-01',
            'content-type': 'application/json',
        }
        request_data = {
            'model': 'claude-3-haiku-20240307',
            'max_tokens': 512,
            'temperature': 0.6,
            'system': ai_prompt,
            'messages': [
                {
                    "role": "user",
                    "content": ai_question.strip(),
                }
            ]
        }

        try:
            async with ClientSession() as session:
                # Bound as `out_response` — the original `as request` shadowed
                # the `request: Request` parameter.
                async with session.post('https://api.anthropic.com/v1/messages',
                                        json=request_data,
                                        headers=local_llm_headers,
                                        timeout=ClientTimeout(connect=15, sock_read=30)) as out_response:
                    await self.glob_state.increment_counter('claude_ai_requests')
                    response = await out_response.json()
                    logging.debug("Response: %s", response)
                    # Claude error payloads carry no 'content' key; guard so
                    # the failure is logged instead of raising inside the
                    # broad except below.
                    content = response.get('content')
                    if not content:
                        logging.error("Unexpected Claude response: %s", response)
                        return {
                            'err': True,
                            'errorText': 'General Failure'
                        }
                    return {
                        'resp': content[0].get('text', '').strip()
                    }
        except Exception as e:  # pylint: disable=broad-exception-caught
            logging.error("Error: %s", e)
            return {
                'err': True,
                'errorText': 'General Failure'
            }