96d13fc440
Co-authored-by: Copilot <copilot@github.com>
125 lines
3.9 KiB
Python
125 lines
3.9 KiB
Python
"""Model cache service: fetch from OpenRouter, store in DuckDB."""
|
|
import json
|
|
from datetime import datetime, timedelta, timezone
|
|
from typing import Any
|
|
|
|
import duckdb
|
|
|
|
from . import openrouter
|
|
|
|
CACHE_TTL_HOURS = 24
|
|
|
|
|
|
def _parse_modality(raw_modality: str) -> str:
|
|
"""Extract output modality from OpenRouter architecture.modality string.
|
|
|
|
Examples: "text->text", "text+image->text", "text->image", "text->video"
|
|
"""
|
|
output = raw_modality.split(
|
|
"->", 1)[-1].lower() if "->" in raw_modality else raw_modality.lower()
|
|
if "text" in output:
|
|
return "text"
|
|
if "image" in output:
|
|
return "image"
|
|
if "video" in output:
|
|
return "video"
|
|
if "audio" in output:
|
|
return "audio"
|
|
return output
|
|
|
|
|
|
async def refresh_models_cache(conn: duckdb.DuckDBPyConnection) -> int:
    """Fetch all models from OpenRouter and replace the cache.

    Args:
        conn: Open DuckDB connection exposing a ``models_cache`` table.

    Returns:
        Number of distinct models stored (duplicate ids collapse via upsert).
    """
    raw = await openrouter.list_models()
    # Use naive UTC to avoid DuckDB TIMESTAMP tz-stripping inconsistencies
    now = datetime.now(timezone.utc).replace(tzinfo=None)

    conn.execute("DELETE FROM models_cache")
    stored: set[str] = set()
    for m in raw:
        model_id = m.get("id", "")
        if not model_id:
            # Skip malformed entries before doing any per-model work.
            continue
        # `or` (rather than dict.get's default) also covers an explicit JSON
        # null for architecture/modality, which would otherwise reach
        # _parse_modality as None and raise a TypeError.
        arch = m.get("architecture") or {}
        modality = _parse_modality(arch.get("modality") or "text->text")
        pricing = m.get("pricing")
        conn.execute(
            """
            INSERT INTO models_cache (model_id, name, modality, context_length, pricing, fetched_at)
            VALUES (?, ?, ?, ?, ?, ?)
            ON CONFLICT (model_id) DO UPDATE SET
                name = excluded.name,
                modality = excluded.modality,
                context_length = excluded.context_length,
                pricing = excluded.pricing,
                fetched_at = excluded.fetched_at
            """,
            [
                model_id,
                m.get("name", model_id),
                modality,
                m.get("context_length"),
                json.dumps(pricing) if pricing else None,
                now,
            ],
        )
        # Track distinct ids so the return value matches rows actually stored,
        # even if the feed repeats a model id (the upsert keeps one row).
        stored.add(model_id)
    return len(stored)
|
|
|
|
|
|
def is_cache_stale(
    conn: duckdb.DuckDBPyConnection,
    *,
    ttl_hours: float | None = None,
) -> bool:
    """Return True if cache is empty or last fetched more than the TTL ago.

    Args:
        conn: Open DuckDB connection exposing a ``models_cache`` table.
        ttl_hours: Staleness threshold in hours; defaults to CACHE_TTL_HOURS.

    Returns:
        True when the cache is empty or older than the TTL.
    """
    if ttl_hours is None:
        ttl_hours = CACHE_TTL_HOURS
    row = conn.execute("SELECT MAX(fetched_at) FROM models_cache").fetchone()
    if not row or row[0] is None:
        # Empty table: MAX() yields NULL.
        return True
    last_fetched = row[0]
    # DuckDB TIMESTAMP is always naive; compare against naive UTC
    if last_fetched.tzinfo is not None:
        last_fetched = last_fetched.replace(tzinfo=None)
    now_naive = datetime.now(timezone.utc).replace(tzinfo=None)
    return now_naive - last_fetched > timedelta(hours=ttl_hours)
|
|
|
|
|
|
def get_cached_models(
    conn: duckdb.DuckDBPyConnection,
    modality: str | None = None,
) -> list[dict[str, Any]]:
    """Return cached models, optionally filtered by modality, ordered by name.

    Args:
        conn: Open DuckDB connection exposing a ``models_cache`` table.
        modality: If given, only models with this output modality are returned.

    Returns:
        List of dicts with keys ``id``, ``name``, ``modality``,
        ``context_length`` and ``pricing`` (decoded JSON dict, or None when
        absent or unparseable).
    """
    # Build the query once instead of duplicating the SELECT per branch.
    sql = "SELECT model_id, name, modality, context_length, pricing FROM models_cache"
    params: list[Any] = []
    if modality:
        sql += " WHERE modality = ?"
        params.append(modality)
    sql += " ORDER BY name"
    rows = conn.execute(sql, params).fetchall()

    result: list[dict[str, Any]] = []
    for model_id, name, mod, context_length, pricing_json in rows:
        pricing = None
        if pricing_json:
            try:
                pricing = json.loads(pricing_json)
            except (json.JSONDecodeError, TypeError):
                # Tolerate corrupt cache entries rather than failing the listing.
                pricing = None
        result.append({
            "id": model_id,
            "name": name,
            "modality": mod,
            "context_length": context_length,
            "pricing": pricing,
        })
    return result
|