feat: implement video job management with retry and delete functionality, enhance video generation status tracking

Co-authored-by: Copilot <copilot@github.com>
This commit is contained in:
2026-04-29 18:27:59 +02:00
parent d5a94947de
commit 37edef716a
10 changed files with 479 additions and 95 deletions
+7
View File
@@ -114,6 +114,13 @@ def _run_migrations(conn: duckdb.DuckDBPyConnection) -> None:
conn.execute(""" conn.execute("""
ALTER TABLE models_cache ADD COLUMN IF NOT EXISTS output_modalities VARCHAR ALTER TABLE models_cache ADD COLUMN IF NOT EXISTS output_modalities VARCHAR
""") """)
# Migration: add video job request params + generation type
conn.execute("""
ALTER TABLE generated_videos ADD COLUMN IF NOT EXISTS request_params VARCHAR
""")
conn.execute("""
ALTER TABLE generated_videos ADD COLUMN IF NOT EXISTS generation_type VARCHAR DEFAULT 'text_to_video'
""")
_seed_admin(conn) _seed_admin(conn)
+9 -1
View File
@@ -5,7 +5,9 @@ from .routers import ai
from .routers import generate from .routers import generate
from .routers import images from .routers import images
from .routers import models from .routers import models
from .db import close_db, init_db from .db import close_db, get_conn, get_write_lock, init_db
from .services.video_worker import run_worker
import asyncio
import os import os
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
@@ -19,7 +21,13 @@ load_dotenv()
@asynccontextmanager @asynccontextmanager
async def lifespan(app: FastAPI): async def lifespan(app: FastAPI):
init_db() init_db()
worker_task = asyncio.create_task(run_worker(get_conn(), get_write_lock()))
yield yield
worker_task.cancel()
try:
await worker_task
except asyncio.CancelledError:
pass
close_db() close_db()
+40
View File
@@ -185,3 +185,43 @@ async def admin_mark_timed_out(_: dict = Depends(require_admin)) -> dict[str, in
conn = get_conn() conn = get_conn()
count = mark_timed_out_video_jobs(conn, timeout_minutes=120) count = mark_timed_out_video_jobs(conn, timeout_minutes=120)
return {"timed_out": count} return {"timed_out": count}
@router.post("/videos/{job_id}/retry", status_code=200)
async def admin_retry_video_job(job_id: str, _: dict = Depends(require_admin)) -> dict[str, str]:
"""Reset a failed or cancelled video job back to 'queued' for reprocessing."""
conn = get_conn()
lock = get_write_lock()
now = datetime.now(timezone.utc)
async with lock:
row = conn.execute(
"SELECT status FROM generated_videos WHERE id = ?", [job_id]
).fetchone()
if row is None:
from fastapi import HTTPException
raise HTTPException(status_code=404, detail="Job not found")
if row[0] not in ("failed", "cancelled"):
from fastapi import HTTPException
raise HTTPException(
status_code=400, detail=f"Cannot retry job with status '{row[0]}'")
conn.execute(
"UPDATE generated_videos SET status = 'queued', updated_at = ? WHERE id = ?",
[now, job_id],
)
return {"status": "ok", "job_id": job_id}
@router.delete("/videos/{job_id}", status_code=200)
async def admin_delete_video_job(job_id: str, _: dict = Depends(require_admin)) -> dict[str, str]:
"""Permanently delete a video job record."""
conn = get_conn()
lock = get_write_lock()
async with lock:
row = conn.execute(
"SELECT id FROM generated_videos WHERE id = ?", [job_id]
).fetchone()
if row is None:
from fastapi import HTTPException
raise HTTPException(status_code=404, detail="Job not found")
conn.execute("DELETE FROM generated_videos WHERE id = ?", [job_id])
return {"status": "ok", "job_id": job_id}
+30 -73
View File
@@ -1,4 +1,5 @@
"""Generate router: text, image, video, and image-to-video generation.""" """Generate router: text, image, video, and image-to-video generation."""
import json
from datetime import datetime, timezone from datetime import datetime, timezone
import httpx import httpx
@@ -209,54 +210,32 @@ async def generate_video(
body: VideoRequest, body: VideoRequest,
current_user: dict = Depends(get_current_user), current_user: dict = Depends(get_current_user),
) -> VideoResponse: ) -> VideoResponse:
"""Generate a video from a text prompt.""" """Queue a text-to-video generation job for background processing."""
try:
result = await openrouter.generate_video(
model=body.model,
prompt=body.prompt,
duration_seconds=body.duration_seconds,
aspect_ratio=body.aspect_ratio,
resolution=body.resolution,
)
except httpx.HTTPStatusError as exc:
detail = (
f"OpenRouter API error: {exc.response.status_code} - {exc.response.text}"
)
raise HTTPException(
status_code=status.HTTP_502_BAD_GATEWAY, detail=detail)
except Exception as exc:
raise HTTPException(
status_code=status.HTTP_502_BAD_GATEWAY, detail=f"OpenRouter error: {exc}"
)
user_id = current_user.get("id") or current_user.get("sub") user_id = current_user.get("id") or current_user.get("sub")
job_id = result.get("id", "")
polling_url = result.get("polling_url")
job_status = result.get("status", "pending")
now = datetime.now(timezone.utc).replace(tzinfo=None) now = datetime.now(timezone.utc).replace(tzinfo=None)
request_params = json.dumps({
"model": body.model,
"prompt": body.prompt,
"duration_seconds": body.duration_seconds,
"aspect_ratio": body.aspect_ratio,
"resolution": body.resolution,
})
db_id = None db_id = None
async with get_write_lock(): async with get_write_lock():
conn = get_conn() conn = get_conn()
row = conn.execute( row = conn.execute(
"""INSERT INTO generated_videos (user_id, job_id, model_id, prompt, polling_url, status, created_at, updated_at) """INSERT INTO generated_videos
VALUES (?, ?, ?, ?, ?, ?, ?, ?) RETURNING id""", (user_id, job_id, model_id, prompt, status, request_params, generation_type, created_at, updated_at)
[user_id, job_id, body.model, body.prompt, VALUES (?, ?, ?, ?, 'queued', ?, 'text_to_video', ?, ?) RETURNING id""",
polling_url, job_status, now, now], [user_id, "", body.model, body.prompt, request_params, now, now],
).fetchone() ).fetchone()
if row: if row:
db_id = str(row[0]) db_id = str(row[0])
urls = result.get("unsigned_urls") or result.get("video_urls")
return VideoResponse( return VideoResponse(
id=job_id, id="",
db_id=db_id, db_id=db_id,
model=body.model, model=body.model,
status=job_status, status="queued",
polling_url=polling_url,
video_urls=urls,
video_url=(urls or [None])[0],
error=result.get("error"),
metadata=result.get("metadata"),
) )
@@ -265,55 +244,33 @@ async def generate_video_from_image(
body: VideoFromImageRequest, body: VideoFromImageRequest,
current_user: dict = Depends(get_current_user), current_user: dict = Depends(get_current_user),
) -> VideoResponse: ) -> VideoResponse:
"""Generate a video from an image and a text prompt.""" """Queue an image-to-video generation job for background processing."""
try:
result = await openrouter.generate_video_from_image(
model=body.model,
image_url=body.image_url,
prompt=body.prompt,
duration_seconds=body.duration_seconds,
aspect_ratio=body.aspect_ratio,
resolution=body.resolution,
)
except httpx.HTTPStatusError as exc:
detail = (
f"OpenRouter API error: {exc.response.status_code} - {exc.response.text}"
)
raise HTTPException(
status_code=status.HTTP_502_BAD_GATEWAY, detail=detail)
except Exception as exc:
raise HTTPException(
status_code=status.HTTP_502_BAD_GATEWAY, detail=f"OpenRouter error: {exc}"
)
user_id = current_user.get("id") or current_user.get("sub") user_id = current_user.get("id") or current_user.get("sub")
job_id = result.get("id", "")
polling_url = result.get("polling_url")
job_status = result.get("status", "pending")
now = datetime.now(timezone.utc).replace(tzinfo=None) now = datetime.now(timezone.utc).replace(tzinfo=None)
request_params = json.dumps({
"model": body.model,
"image_url": body.image_url,
"prompt": body.prompt,
"duration_seconds": body.duration_seconds,
"aspect_ratio": body.aspect_ratio,
"resolution": body.resolution,
})
db_id = None db_id = None
async with get_write_lock(): async with get_write_lock():
conn = get_conn() conn = get_conn()
row = conn.execute( row = conn.execute(
"""INSERT INTO generated_videos (user_id, job_id, model_id, prompt, polling_url, status, created_at, updated_at) """INSERT INTO generated_videos
VALUES (?, ?, ?, ?, ?, ?, ?, ?) RETURNING id""", (user_id, job_id, model_id, prompt, status, request_params, generation_type, created_at, updated_at)
[user_id, job_id, body.model, body.prompt, VALUES (?, ?, ?, ?, 'queued', ?, 'image_to_video', ?, ?) RETURNING id""",
polling_url, job_status, now, now], [user_id, "", body.model, body.prompt, request_params, now, now],
).fetchone() ).fetchone()
if row: if row:
db_id = str(row[0]) db_id = str(row[0])
urls = result.get("unsigned_urls") or result.get("video_urls")
return VideoResponse( return VideoResponse(
id=job_id, id="",
db_id=db_id, db_id=db_id,
model=body.model, model=body.model,
status=job_status, status="queued",
polling_url=polling_url,
video_urls=urls,
video_url=(urls or [None])[0],
error=result.get("error"),
metadata=result.get("metadata"),
) )
+158
View File
@@ -0,0 +1,158 @@
"""Background worker: processes queued/processing video generation jobs."""
import asyncio
import json
import logging
from datetime import datetime, timezone
import duckdb
from . import openrouter
from .models import mark_timed_out_video_jobs
logger = logging.getLogger(__name__)
# Interval between worker ticks (seconds)
WORKER_INTERVAL = 15
# Jobs to process per tick (prevents unbounded bursts)
BATCH_SIZE = 5
async def process_queued_jobs(conn: duckdb.DuckDBPyConnection, lock: asyncio.Lock) -> int:
    """Submit queued jobs to OpenRouter and transition them to 'processing'.

    Picks up to BATCH_SIZE of the oldest rows with status 'queued', calls the
    OpenRouter endpoint matching each row's ``generation_type``, and stores
    the provider job id, polling URL, and initial status.  A failed
    submission marks the row 'failed'; malformed ``request_params`` are
    logged and skipped without a status change.

    Args:
        conn: Shared DuckDB connection (reads run without the lock).
        lock: asyncio lock serialising all writes on ``conn``.

    Returns:
        Number of jobs successfully submitted this tick.
    """
    rows = conn.execute(
        """SELECT id, generation_type, request_params
           FROM generated_videos
           WHERE status = 'queued' AND request_params IS NOT NULL
           ORDER BY created_at ASC
           LIMIT ?""",
        [BATCH_SIZE],
    ).fetchall()
    processed = 0
    for row in rows:
        db_id, generation_type, raw_params = str(row[0]), row[1], row[2]
        try:
            params = json.loads(raw_params)
        except (json.JSONDecodeError, TypeError):
            logger.error("Bad request_params for video job %s", db_id)
            continue
        try:
            if generation_type == "image_to_video":
                result = await openrouter.generate_video_from_image(
                    model=params["model"],
                    image_url=params.get("image_url", ""),
                    prompt=params.get("prompt", ""),
                    duration_seconds=params.get("duration_seconds"),
                    aspect_ratio=params.get("aspect_ratio", "16:9"),
                    resolution=params.get("resolution"),
                )
            else:
                result = await openrouter.generate_video(
                    model=params["model"],
                    prompt=params.get("prompt", ""),
                    duration_seconds=params.get("duration_seconds"),
                    aspect_ratio=params.get("aspect_ratio", "16:9"),
                    resolution=params.get("resolution"),
                )
        except Exception as exc:
            # Submission failed (network / provider error): mark the row
            # 'failed' so the admin UI can offer a retry.
            logger.warning("OpenRouter call failed for job %s: %s", db_id, exc)
            now = datetime.now(timezone.utc).replace(tzinfo=None)
            async with lock:
                conn.execute(
                    "UPDATE generated_videos SET status = 'failed', updated_at = ? WHERE id = ?",
                    [now, db_id],
                )
            continue
        job_id = result.get("id", "")
        polling_url = result.get("polling_url")
        new_status = result.get("status", "processing")
        # Normalise terminal statuses returned immediately (rare but possible)
        if new_status not in ("queued", "processing", "completed", "failed", "cancelled"):
            new_status = "processing"
        urls = result.get("unsigned_urls") or result.get("video_urls")
        video_url = (urls or [None])[0]
        now = datetime.now(timezone.utc).replace(tzinfo=None)
        async with lock:
            conn.execute(
                """UPDATE generated_videos
                   SET job_id = ?, polling_url = ?, status = ?, video_url = ?, updated_at = ?
                   WHERE id = ?""",
                [job_id, polling_url, new_status, video_url, now, db_id],
            )
        processed += 1
        # Fixed log format: was "Video job %s%s", which ran the row id and
        # status together with no separator.
        logger.info("Video job %s -> %s (provider id: %s)",
                    db_id, new_status, job_id)
    return processed
async def process_processing_jobs(conn: duckdb.DuckDBPyConnection, lock: asyncio.Lock) -> int:
    """Poll in-progress jobs and record terminal statuses in the database.

    Polls up to BATCH_SIZE rows with status 'processing' (least recently
    updated first).  Transient polling errors leave the row untouched for
    the next tick; the timeout sweep eventually expires stuck jobs.

    Args:
        conn: Shared DuckDB connection (reads run without the lock).
        lock: asyncio lock serialising all writes on ``conn``.

    Returns:
        Number of jobs that reached a terminal status this tick.
    """
    rows = conn.execute(
        """SELECT id, polling_url
           FROM generated_videos
           WHERE status = 'processing' AND polling_url IS NOT NULL
           ORDER BY updated_at ASC
           LIMIT ?""",
        [BATCH_SIZE],
    ).fetchall()
    updated = 0
    for row in rows:
        db_id, polling_url = str(row[0]), row[1]
        try:
            result = await openrouter.poll_video_status(polling_url)
        except Exception as exc:
            logger.warning("Polling failed for job %s: %s", db_id, exc)
            continue
        job_status = result.get("status", "processing")
        # 'cancelled' added as a terminal status: previously a provider-side
        # cancellation kept the job in the polling set until it timed out,
        # even though the rest of the app treats 'cancelled' as terminal.
        if job_status not in ("completed", "failed", "cancelled"):
            continue  # still in-progress — check again next tick
        urls = result.get("unsigned_urls") or result.get("video_urls")
        video_url = (urls or [None])[0]
        now = datetime.now(timezone.utc).replace(tzinfo=None)
        async with lock:
            conn.execute(
                """UPDATE generated_videos
                   SET status = ?, video_url = ?, updated_at = ?
                   WHERE id = ?""",
                [job_status, video_url, now, db_id],
            )
        updated += 1
        # Fixed log format: was "Video job %s%s" (no separator).
        logger.info("Video job %s -> %s", db_id, job_status)
    return updated
async def worker_tick(conn: duckdb.DuckDBPyConnection, lock: asyncio.Lock) -> None:
    """Single worker tick: submit queued, poll processing, expire timed-out."""
    # Three phases per tick; each phase returns how many rows it touched.
    submitted = await process_queued_jobs(conn, lock)
    polled = await process_processing_jobs(conn, lock)
    async with lock:
        expired = mark_timed_out_video_jobs(conn, timeout_minutes=120)
    # Only log ticks that actually did something, to keep logs quiet.
    if submitted or polled or expired:
        logger.info(
            "Worker tick: submitted=%d polled=%d timed_out=%d",
            submitted, polled, expired,
        )
async def run_worker(conn: duckdb.DuckDBPyConnection, lock: asyncio.Lock) -> None:
    """Infinite loop: run a worker tick every WORKER_INTERVAL seconds.

    Exits cleanly (logging "stopped") when the surrounding task is
    cancelled; any other exception is logged and the loop continues.

    Args:
        conn: Shared DuckDB connection passed through to the tick.
        lock: asyncio lock guarding writes on ``conn``.
    """
    logger.info("Video worker started (interval=%ds)", WORKER_INTERVAL)
    while True:
        try:
            await worker_tick(conn, lock)
            # Sleep inside the try: previously the sleep sat outside it, so a
            # cancellation arriving during the wait (the common case, since
            # the task spends most of its life sleeping) bypassed the
            # CancelledError handler and the shutdown log.
            await asyncio.sleep(WORKER_INTERVAL)
        except asyncio.CancelledError:
            logger.info("Video worker stopped.")
            return
        except Exception as exc:
            logger.exception("Unexpected error in video worker: %s", exc)
            await asyncio.sleep(WORKER_INTERVAL)
+9
View File
@@ -469,6 +469,15 @@ def generate_video_status():
return jsonify(resp.json()), resp.status_code return jsonify(resp.json()), resp.status_code
@app.get("/generate/video/<video_id>/status")
@login_required
def generate_video_db_status(video_id: str):
"""Return current DB status for a video job (polled by frontend JS)."""
resp = _api(
"GET", f"/generate/videos/{video_id}", token=session["access_token"])
return jsonify(resp.json()), resp.status_code
# ── Admin ───────────────────────────────────────────────────────────────── # ── Admin ─────────────────────────────────────────────────────────────────
@app.get("/admin") @app.get("/admin")
+18 -16
View File
@@ -63,15 +63,14 @@ document.addEventListener("DOMContentLoaded", () => {
// ── Video status polling ─────────────────────────────── // ── Video status polling ───────────────────────────────
const pollDiv = document.getElementById("video-poll-status"); const pollDiv = document.getElementById("video-poll-status");
if (pollDiv) { if (pollDiv) {
const pollingUrl = pollDiv.dataset.pollingUrl; const videoId = pollDiv.dataset.videoId;
const statusText = document.getElementById("poll-status-text"); const statusText = document.getElementById("poll-status-text");
const videoContainer = document.getElementById("poll-video-container"); const videoContainer = document.getElementById("poll-video-container");
const interval = setInterval(async () => { const interval = setInterval(async () => {
try { try {
const resp = await fetch( const resp = await fetch(
"/generate/video/status?polling_url=" + "/generate/video/" + encodeURIComponent(videoId) + "/status",
encodeURIComponent(pollingUrl),
); );
if (!resp.ok) return; if (!resp.ok) return;
const data = await resp.json(); const data = await resp.json();
@@ -82,25 +81,28 @@ document.addEventListener("DOMContentLoaded", () => {
if (data.status === "completed") { if (data.status === "completed") {
clearInterval(interval); clearInterval(interval);
if (data.video_url && videoContainer) { if (data.video_url) {
const vid = document.createElement("video"); if (videoContainer) {
vid.src = data.video_url; const vid = document.createElement("video");
vid.controls = true; vid.src = data.video_url;
vid.className = "generated-video"; vid.controls = true;
videoContainer.appendChild(vid); vid.className = "generated-video";
const msg = pollDiv.querySelector("p"); videoContainer.appendChild(vid);
if (msg) msg.textContent = "Video ready!"; const msg = pollDiv.querySelector("p");
if (msg) msg.textContent = "Video ready!";
} else {
// video_detail page: reload to show the video element
window.location.reload();
}
} }
} else if (data.status === "failed") { } else if (data.status === "failed" || data.status === "cancelled") {
clearInterval(interval); clearInterval(interval);
pollDiv.innerHTML = pollDiv.innerHTML =
'<div class="alert alert-error">Generation failed: ' + '<div class="alert alert-error">Generation failed or was cancelled.</div>';
(data.error || "Unknown error") +
"</div>";
} }
} catch (e) { } catch (e) {
console.error("Video polling error:", e); console.error("Video polling error:", e);
} }
}, 12016); }, 5000);
} }
}); });
+203
View File
@@ -76,5 +76,208 @@
</tbody> </tbody>
</table> </table>
</div> </div>
<!-- ── Video Jobs ──────────────────────────────────────────────── -->
<h2 class="section-title" style="margin-top: 2rem">Video Jobs</h2>
<div
style="
display: flex;
gap: 1rem;
align-items: center;
flex-wrap: wrap;
margin-bottom: 1rem;
"
>
<label for="vj-status-filter" style="font-weight: 600"
>Filter by status:</label
>
<select id="vj-status-filter" class="form-control" style="width: auto">
<option value="">All</option>
<option value="queued">Queued</option>
<option value="processing">Processing</option>
<option value="completed">Completed</option>
<option value="failed">Failed</option>
<option value="cancelled">Cancelled</option>
</select>
<label for="vj-sort" style="font-weight: 600">Sort:</label>
<select id="vj-sort" class="form-control" style="width: auto">
<option value="created_desc">Created (newest first)</option>
<option value="created_asc">Created (oldest first)</option>
<option value="updated_desc">Updated (newest first)</option>
<option value="status_asc">Status (AZ)</option>
<option value="model_asc">Model (AZ)</option>
</select>
<button id="vj-refresh" class="btn btn-sm">Refresh</button>
<span
id="vj-count"
style="color: var(--text-muted, #888); font-size: 0.9em"
></span>
</div>
<div class="table-wrap">
<table id="vj-table">
<thead>
<tr>
<th>User</th>
<th>Status</th>
<th>Model</th>
<th>Prompt</th>
<th>Created</th>
<th>Updated</th>
<th>Actions</th>
</tr>
</thead>
<tbody id="vj-tbody">
<tr>
<td colspan="7" class="text-muted">Loading…</td>
</tr>
</tbody>
</table>
</div>
</div> </div>
<script>
  // Admin "Video Jobs" panel: fetches all jobs once, then filters/sorts
  // client-side and wires per-row actions (retry / cancel / delete).
  (function () {
    // Backend base URL and bearer token are injected by the Jinja template.
    const BACKEND = "{{ config['BACKEND_URL'] }}";
    const TOKEN = "{{ session['access_token'] }}";
    const headers = { Authorization: "Bearer " + TOKEN };
    // Last fetched job list; renderJobs() works from this cache.
    let allJobs = [];
    // Fetch every video job from the backend and re-render the table.
    async function loadJobs() {
      document.getElementById("vj-tbody").innerHTML =
        '<tr><td colspan="7" class="text-muted">Loading…</td></tr>';
      try {
        const r = await fetch(BACKEND + "/admin/videos", { headers });
        if (!r.ok) throw new Error(await r.text());
        allJobs = await r.json();
        renderJobs();
      } catch (e) {
        document.getElementById("vj-tbody").innerHTML =
          `<tr><td colspan="7" style="color:red;">Error: ${e.message}</td></tr>`;
      }
    }
    // Apply the current status filter + sort order and rebuild the tbody.
    function renderJobs() {
      const statusFilter = document.getElementById("vj-status-filter").value;
      const sort = document.getElementById("vj-sort").value;
      let jobs = statusFilter
        ? allJobs.filter((j) => j.status === statusFilter)
        : [...allJobs];
      jobs.sort((a, b) => {
        if (sort === "created_asc")
          return new Date(a.created_at) - new Date(b.created_at);
        if (sort === "updated_desc")
          return new Date(b.updated_at) - new Date(a.updated_at);
        if (sort === "status_asc") return a.status.localeCompare(b.status);
        if (sort === "model_asc") return a.model_id.localeCompare(b.model_id);
        return new Date(b.created_at) - new Date(a.created_at); // created_desc default
      });
      document.getElementById("vj-count").textContent =
        `${jobs.length} job${jobs.length !== 1 ? "s" : ""}`;
      const tbody = document.getElementById("vj-tbody");
      if (jobs.length === 0) {
        tbody.innerHTML =
          '<tr><td colspan="7" class="text-muted">No jobs found.</td></tr>';
        return;
      }
      // Inline colour per status; unknown statuses fall back to default.
      const statusColor = {
        completed: "color:var(--success-color,#4caf50)",
        failed: "color:var(--danger-color,#e53935)",
        cancelled: "color:var(--danger-color,#e53935)",
        processing: "color:var(--warning-color,#fb8c00)",
        queued: "color:var(--warning-color,#fb8c00)",
      };
      tbody.innerHTML = jobs
        .map((job) => {
          const sc = statusColor[job.status] || "";
          // Action availability mirrors the backend rules: retry only for
          // terminal failures, cancel only while the job may still run.
          const canRetry =
            job.status === "failed" || job.status === "cancelled";
          const canCancel =
            job.status === "queued" || job.status === "processing";
          const actions = [
            canRetry
              ? `<button class="btn btn-sm vj-retry" data-id="${job.id}">Retry</button>`
              : "",
            canCancel
              ? `<button class="btn btn-sm vj-cancel" data-id="${job.id}">Cancel</button>`
              : "",
            `<button class="btn btn-sm btn-danger vj-delete" data-id="${job.id}">Delete</button>`,
          ].join(" ");
          // Truncate long prompts in the cell; full text goes in title="".
          const prompt =
            job.prompt.length > 60 ? job.prompt.slice(0, 57) + "…" : job.prompt;
          const created = job.created_at
            ? new Date(job.created_at).toLocaleString()
            : "—";
          const updated = job.updated_at
            ? new Date(job.updated_at).toLocaleString()
            : "—";
          return `<tr>
            <td>${job.user_email || "—"}</td>
            <td style="${sc};font-weight:600;">${job.status}</td>
            <td style="font-size:.85em;">${job.model_id}</td>
            <td title="${job.prompt.replace(/"/g, "&quot;")}">${prompt}</td>
            <td style="white-space:nowrap;">${created}</td>
            <td style="white-space:nowrap;">${updated}</td>
            <td style="white-space:nowrap;">${actions}</td>
          </tr>`;
        })
        .join("");
    }
    // POST helper: surfaces the backend's `detail` message on failure.
    async function apiPost(path) {
      const r = await fetch(BACKEND + path, { method: "POST", headers });
      if (!r.ok) {
        const d = await r.json().catch(() => ({}));
        throw new Error(d.detail || r.statusText);
      }
      return r.json();
    }
    // DELETE helper: same error handling as apiPost.
    async function apiDelete(path) {
      const r = await fetch(BACKEND + path, { method: "DELETE", headers });
      if (!r.ok) {
        const d = await r.json().catch(() => ({}));
        throw new Error(d.detail || r.statusText);
      }
      return r.json();
    }
    // Single delegated click handler covers all row action buttons,
    // including ones created by later re-renders.
    document
      .getElementById("vj-tbody")
      .addEventListener("click", async function (e) {
        const btn = e.target.closest("button");
        if (!btn) return;
        const id = btn.dataset.id;
        try {
          if (btn.classList.contains("vj-retry"))
            await apiPost(`/admin/videos/${id}/retry`);
          if (btn.classList.contains("vj-cancel"))
            await apiPost(`/admin/videos/${id}/cancel`);
          if (btn.classList.contains("vj-delete")) {
            if (!confirm("Permanently delete this video job?")) return;
            await apiDelete(`/admin/videos/${id}`);
          }
          // Refresh after any action so statuses stay current.
          await loadJobs();
        } catch (err) {
          alert("Error: " + err.message);
        }
      });
    document
      .getElementById("vj-status-filter")
      .addEventListener("change", renderJobs);
    document.getElementById("vj-sort").addEventListener("change", renderJobs);
    document.getElementById("vj-refresh").addEventListener("click", loadJobs);
    // Initial load on page ready.
    loadJobs();
  })();
</script>
{% endblock %} {% endblock %}
+3 -3
View File
@@ -155,9 +155,9 @@ AI{% endblock %} {% block content %}
{% endif %} {% if result %} {% endif %} {% if result %}
<div class="result"> <div class="result">
<h2>Video job</h2> <h2>Video job</h2>
<p>Job ID: <code>{{ result.id }}</code></p> <p>Job ID: <code>{{ result.db_id or result.id }}</code></p>
{% if result.status in ('queued', 'processing') and result.polling_url %} {% if result.status in ('queued', 'processing') and result.db_id %}
<div id="video-poll-status" data-polling-url="{{ result.polling_url }}"> <div id="video-poll-status" data-video-id="{{ result.db_id }}">
<p> <p>
<span id="poll-status-text" <span id="poll-status-text"
>Status: <strong>{{ result.status }}</strong></span >Status: <strong>{{ result.status }}</strong></span
+2 -2
View File
@@ -12,11 +12,11 @@ block content %}
<div class="bg-gray-800 rounded-lg shadow-lg overflow-hidden"> <div class="bg-gray-800 rounded-lg shadow-lg overflow-hidden">
{% if video.status == 'completed' and video.video_url %} {% if video.status == 'completed' and video.video_url %}
<video src="{{ video.video_url }}" controls class="w-full"></video> <video src="{{ video.video_url }}" controls class="w-full"></video>
{% elif video.status in ('queued', 'processing') and video.polling_url %} {% elif video.status in ('queued', 'processing') %}
<div <div
class="w-full bg-black aspect-video flex flex-col items-center justify-center p-6 text-center" class="w-full bg-black aspect-video flex flex-col items-center justify-center p-6 text-center"
id="video-poll-status" id="video-poll-status"
data-polling-url="{{ video.polling_url }}" data-video-id="{{ video.id }}"
> >
<p class="text-xl font-semibold"> <p class="text-xl font-semibold">
Status: <strong id="poll-status-text">{{ video.status }}</strong> Status: <strong id="poll-status-text">{{ video.status }}</strong>