add AI and generation routers, models, and OpenRouter service integration with tests

Co-authored-by: Copilot <copilot@github.com>
This commit is contained in:
2026-04-27 18:12:53 +02:00
parent 3ee4ed7e7f
commit 05309f26b4
7 changed files with 826 additions and 0 deletions
+63
View File
@@ -0,0 +1,63 @@
"""AI router: model listing and chat completions via OpenRouter."""
from fastapi import APIRouter, Depends, HTTPException, status
from backend.app.dependencies import get_current_user
from backend.app.models.ai import ChatRequest, ChatResponse, ModelInfo
from backend.app.services import openrouter
router = APIRouter(prefix="/ai", tags=["ai"])
@router.get("/models", response_model=list[ModelInfo])
async def get_models(_: dict = Depends(get_current_user)) -> list[ModelInfo]:
    """List available AI models from OpenRouter.

    Requires an authenticated user (dependency result is unused).

    Returns:
        One ModelInfo per raw model entry returned by OpenRouter.

    Raises:
        HTTPException: 502 when the upstream OpenRouter call fails.
    """
    try:
        raw = await openrouter.list_models()
    except Exception as exc:
        # Chain the cause so the upstream traceback is preserved in logs.
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail=f"OpenRouter error: {exc}",
        ) from exc
    return [
        ModelInfo(
            id=m.get("id", ""),
            # Fall back to the id when the model has no display name.
            name=m.get("name", m.get("id", "")),
            context_length=m.get("context_length"),
            pricing=m.get("pricing"),
        )
        for m in raw
    ]
@router.post("/chat", response_model=ChatResponse)
async def chat(
    body: ChatRequest,
    _: dict = Depends(get_current_user),
) -> ChatResponse:
    """Send a chat completion request through OpenRouter.

    Args:
        body: Model name, message list, and sampling parameters.

    Returns:
        ChatResponse with the first choice's message content and usage data.

    Raises:
        HTTPException: 502 when the upstream call fails or the response
            payload does not have the expected shape.
    """
    try:
        result = await openrouter.chat_completion(
            model=body.model,
            messages=[m.model_dump() for m in body.messages],
            temperature=body.temperature,
            max_tokens=body.max_tokens,
        )
    except Exception as exc:
        # Chain the cause so the upstream traceback is preserved in logs.
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail=f"OpenRouter error: {exc}",
        ) from exc
    try:
        # Only the first choice is surfaced to the client.
        choice = result["choices"][0]
        return ChatResponse(
            id=result["id"],
            model=result.get("model", body.model),
            content=choice["message"]["content"],
            usage=result.get("usage"),
        )
    except (KeyError, IndexError) as exc:
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail=f"Unexpected response format from OpenRouter: {exc}",
        ) from exc
+141
View File
@@ -0,0 +1,141 @@
"""Generate router: text, image, video, and image-to-video generation."""
from fastapi import APIRouter, Depends, HTTPException, status
from backend.app.dependencies import get_current_user
from backend.app.models.ai import (
ImageRequest,
ImageResponse,
ImageResult,
TextRequest,
TextResponse,
VideoFromImageRequest,
VideoRequest,
VideoResponse,
)
from backend.app.services import openrouter
router = APIRouter(prefix="/generate", tags=["generate"])
@router.post("/text", response_model=TextResponse)
async def generate_text(
    body: TextRequest,
    _: dict = Depends(get_current_user),
) -> TextResponse:
    """Generate text from a prompt using a chat model.

    Builds a one-shot chat message list (optional system prompt plus the
    user prompt) and forwards it to OpenRouter's chat completion API.

    Raises:
        HTTPException: 502 when the upstream call fails or the response
            payload does not have the expected shape.
    """
    messages = []
    if body.system_prompt:
        messages.append({"role": "system", "content": body.system_prompt})
    messages.append({"role": "user", "content": body.prompt})
    try:
        result = await openrouter.chat_completion(
            model=body.model,
            messages=messages,
            temperature=body.temperature,
            max_tokens=body.max_tokens,
        )
    except Exception as exc:
        # Chain the cause so the upstream traceback is preserved in logs.
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail=f"OpenRouter error: {exc}",
        ) from exc
    try:
        # Only the first choice is surfaced to the client.
        choice = result["choices"][0]
        return TextResponse(
            id=result["id"],
            model=result.get("model", body.model),
            content=choice["message"]["content"],
            usage=result.get("usage"),
        )
    except (KeyError, IndexError) as exc:
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail=f"Unexpected response format: {exc}",
        ) from exc
@router.post("/image", response_model=ImageResponse)
async def generate_image(
    body: ImageRequest,
    _: dict = Depends(get_current_user),
) -> ImageResponse:
    """Generate images from a text prompt.

    Raises:
        HTTPException: 502 when the upstream call fails or the response
            payload does not have the expected shape.
    """
    try:
        result = await openrouter.generate_image(
            model=body.model,
            prompt=body.prompt,
            n=body.n,
            size=body.size,
        )
    except Exception as exc:
        # Chain the cause so the upstream traceback is preserved in logs.
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail=f"OpenRouter error: {exc}",
        ) from exc
    try:
        # Each entry may carry a URL, inline base64 data, or both.
        images = [
            ImageResult(
                url=item.get("url"),
                b64_json=item.get("b64_json"),
                revised_prompt=item.get("revised_prompt"),
            )
            for item in result.get("data", [])
        ]
        return ImageResponse(
            id=result.get("id", ""),
            model=result.get("model", body.model),
            images=images,
        )
    except (KeyError, TypeError) as exc:
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail=f"Unexpected response format: {exc}",
        ) from exc
@router.post("/video", response_model=VideoResponse)
async def generate_video(
    body: VideoRequest,
    _: dict = Depends(get_current_user),
) -> VideoResponse:
    """Generate a video from a text prompt.

    Video generation may be asynchronous upstream: the response carries a
    status (defaulting to "queued") and an optional video URL.

    Raises:
        HTTPException: 502 when the upstream OpenRouter call fails.
    """
    try:
        result = await openrouter.generate_video(
            model=body.model,
            prompt=body.prompt,
            duration_seconds=body.duration_seconds,
            aspect_ratio=body.aspect_ratio,
        )
    except Exception as exc:
        # Chain the cause so the upstream traceback is preserved in logs.
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail=f"OpenRouter error: {exc}",
        ) from exc
    return VideoResponse(
        id=result.get("id", ""),
        model=result.get("model", body.model),
        status=result.get("status", "queued"),
        video_url=result.get("video_url"),
        metadata=result.get("metadata"),
    )
@router.post("/video/from-image", response_model=VideoResponse)
async def generate_video_from_image(
    body: VideoFromImageRequest,
    _: dict = Depends(get_current_user),
) -> VideoResponse:
    """Generate a video from an image and a text prompt.

    Mirrors the text-to-video endpoint but seeds generation with a source
    image URL. Status defaults to "queued" when the upstream omits it.

    Raises:
        HTTPException: 502 when the upstream OpenRouter call fails.
    """
    try:
        result = await openrouter.generate_video_from_image(
            model=body.model,
            image_url=body.image_url,
            prompt=body.prompt,
            duration_seconds=body.duration_seconds,
            aspect_ratio=body.aspect_ratio,
        )
    except Exception as exc:
        # Chain the cause so the upstream traceback is preserved in logs.
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail=f"OpenRouter error: {exc}",
        ) from exc
    return VideoResponse(
        id=result.get("id", ""),
        model=result.get("model", body.model),
        status=result.get("status", "queued"),
        video_url=result.get("video_url"),
        metadata=result.get("metadata"),
    )