2c6fdc03a8
Co-authored-by: Copilot <copilot@github.com>
64 lines
2.0 KiB
Python
64 lines
2.0 KiB
Python
"""AI router: model listing and chat completions via OpenRouter."""
|
|
from fastapi import APIRouter, Depends, HTTPException, status
|
|
|
|
from ..dependencies import get_current_user
|
|
from ..models.ai import ChatRequest, ChatResponse, ModelInfo
|
|
from ..services import openrouter
|
|
|
|
router = APIRouter(prefix="/ai", tags=["ai"])
|
|
|
|
|
|
@router.get("/models", response_model=list[ModelInfo])
async def get_models(_: dict = Depends(get_current_user)) -> list[ModelInfo]:
    """List available AI models from OpenRouter.

    Requires an authenticated user; the dependency result itself is unused.

    Returns:
        One ``ModelInfo`` per raw model dict returned by OpenRouter. Missing
        fields fall back to empty-string/None defaults via ``dict.get``.

    Raises:
        HTTPException: 502 Bad Gateway when the upstream OpenRouter call fails.
    """
    try:
        raw = await openrouter.list_models()
    except Exception as exc:
        # Broad catch is deliberate: any upstream failure maps to 502.
        # Chain the cause (PEP 3134) so the original traceback survives in logs.
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail=f"OpenRouter error: {exc}",
        ) from exc
    return [
        ModelInfo(
            id=m.get("id", ""),
            # Fall back to the model id when no human-readable name is given.
            name=m.get("name", m.get("id", "")),
            context_length=m.get("context_length"),
            pricing=m.get("pricing"),
        )
        for m in raw
    ]
|
|
|
|
|
|
@router.post("/chat", response_model=ChatResponse)
async def chat(
    body: ChatRequest,
    _: dict = Depends(get_current_user),
) -> ChatResponse:
    """Send a chat completion request through OpenRouter.

    Requires an authenticated user; the dependency result itself is unused.

    Args:
        body: Validated chat request (model, messages, temperature, max_tokens).

    Returns:
        ChatResponse built from the first choice of the upstream completion.
        ``model`` falls back to the requested model if the response omits it.

    Raises:
        HTTPException: 502 Bad Gateway when the upstream call fails, or when
            the response payload does not have the expected structure.
    """
    try:
        result = await openrouter.chat_completion(
            model=body.model,
            messages=[m.model_dump() for m in body.messages],
            temperature=body.temperature,
            max_tokens=body.max_tokens,
        )
    except Exception as exc:
        # Broad catch is deliberate: any upstream failure maps to 502.
        # Chain the cause (PEP 3134) so the original traceback survives in logs.
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail=f"OpenRouter error: {exc}",
        ) from exc

    try:
        # OpenAI-style payload: choices[0].message.content holds the reply.
        choice = result["choices"][0]
        return ChatResponse(
            id=result["id"],
            model=result.get("model", body.model),
            content=choice["message"]["content"],
            usage=result.get("usage"),
        )
    except (KeyError, IndexError) as exc:
        # Malformed upstream payload is still an upstream fault: report 502.
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail=f"Unexpected response format from OpenRouter: {exc}",
        ) from exc
|