feat(04-01): create AI client abstraction layer
- Add AIClient class wrapping AsyncOpenAI for model routing
- Support Requesty and OpenRouter as backend routers
- Add MODEL_MAP with claude, gpt, gemini short names
- Add init_ai_client/get_ai_client module functions
- Include HTTP-Referer header support for OpenRouter

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
3740691dac
commit
e04ce4eeeb
1 changed files with 125 additions and 0 deletions
125
src/moai/core/ai_client.py
Normal file
125
src/moai/core/ai_client.py
Normal file
|
|
@ -0,0 +1,125 @@
|
||||||
|
"""AI client abstraction for model routing.
|
||||||
|
|
||||||
|
Provides AIClient class that wraps the OpenAI SDK to communicate with
|
||||||
|
AI model routers (Requesty or OpenRouter). Both routers are OpenAI-compatible.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from openai import AsyncOpenAI
|
||||||
|
|
||||||
|
from moai.bot.config import BotConfig
|
||||||
|
|
||||||
|
# OpenAI-compatible base URLs, keyed by router service name.
ROUTER_URLS = {
    "requesty": "https://router.requesty.ai/v1",
    "openrouter": "https://openrouter.ai/api/v1",
}
|
||||||
|
|
||||||
|
# Convenience aliases mapping short model names to full router identifiers.
MODEL_MAP = {
    "claude": "anthropic/claude-sonnet-4-20250514",
    "gpt": "openai/gpt-4o",
    "gemini": "google/gemini-2.0-flash",
}
|
||||||
|
|
||||||
|
|
||||||
|
class AIClient:
    """Thin wrapper around the OpenAI SDK for talking to model routers.

    Both supported routers (Requesty and OpenRouter) expose OpenAI-compatible
    HTTP APIs; only the base URL and optional request headers differ.

    Attributes:
        router: Name of the backing router service ("requesty" or "openrouter").
        referer: Optional HTTP-Referer header value, used with OpenRouter.
    """

    def __init__(self, router: str, api_key: str, referer: str | None = None) -> None:
        """Create a client bound to one router service.

        Args:
            router: Router service name ("requesty" or "openrouter").
            api_key: API key for the chosen router.
            referer: Optional HTTP-Referer header value (OpenRouter only).

        Raises:
            ValueError: If *router* is not a known service.
        """
        if router not in ROUTER_URLS:
            raise ValueError(f"Unsupported router: {router}. Use: {list(ROUTER_URLS.keys())}")

        self.router = router
        self.referer = referer
        self._client = AsyncOpenAI(base_url=ROUTER_URLS[router], api_key=api_key)

    async def complete(
        self,
        model: str,
        messages: list[dict],
        system_prompt: str | None = None,
    ) -> str:
        """Request a chat completion and return its text content.

        Args:
            model: Short alias (e.g. "claude") or a full model identifier.
            messages: Chat messages, each a dict with "role" and "content".
            system_prompt: Optional system prompt inserted before *messages*.

        Returns:
            The first choice's message content, or "" when the response
            carries no content.
        """
        # Short aliases resolve through MODEL_MAP; unknown names pass through.
        target = MODEL_MAP.get(model, model)

        # Prepend the system prompt (when given) without mutating the caller's list.
        convo: list[dict] = (
            [{"role": "system", "content": system_prompt}] if system_prompt else []
        )
        convo += messages

        # OpenRouter attributes traffic via the HTTP-Referer header; other
        # routers get no extra headers.
        headers = (
            {"HTTP-Referer": self.referer}
            if self.router == "openrouter" and self.referer
            else None
        )

        response = await self._client.chat.completions.create(
            model=target,
            messages=convo,
            extra_headers=headers,
        )
        return response.choices[0].message.content or ""
|
||||||
|
|
||||||
|
|
||||||
|
# Process-wide singleton, populated by init_ai_client() and read by get_ai_client().
_client: AIClient | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def init_ai_client(config: BotConfig) -> AIClient:
    """Build the module-level AI client from bot configuration.

    Args:
        config: BotConfig carrying ai_router, ai_api_key, and ai_referer.

    Returns:
        The freshly created AIClient, also stored as the module singleton.
    """
    global _client
    client = AIClient(config.ai_router, config.ai_api_key, config.ai_referer)
    _client = client
    return client
|
||||||
|
|
||||||
|
|
||||||
|
def get_ai_client() -> AIClient:
    """Return the module-level AI client singleton.

    Returns:
        The AIClient previously created by init_ai_client().

    Raises:
        RuntimeError: When init_ai_client() has not been called yet.
    """
    client = _client
    if client is None:
        raise RuntimeError("AI client not initialized. Call init_ai_client() first.")
    return client
|
||||||
Loading…
Add table
Reference in a new issue