From fd03c16eca085423267c163137b28ccb60de8db0 Mon Sep 17 00:00:00 2001
From: Matthias Nott <mnott@mnsoft.org>
Date: Wed, 25 Feb 2026 00:45:13 +0100
Subject: [PATCH] feat: multi-compose rebuild (Seafile), cancel endpoint, schedule router, project descriptor
---
app/routers/restore.py | 116 ++++++++++++++++++++++++++++++++++++++++++----------------
 app/routers/restore.py | 116 ++++++++++++++++++++++++++++++++++++++++++----------------
 1 file changed, 84 insertions(+), 32 deletions(-)
diff --git a/app/routers/restore.py b/app/routers/restore.py
index fc1e60f..cd10c22 100644
--- a/app/routers/restore.py
+++ b/app/routers/restore.py
@@ -1,3 +1,4 @@
+import asyncio
import json
from datetime import datetime, timezone
from typing import AsyncGenerator, Literal
@@ -6,7 +7,9 @@
from fastapi.responses import StreamingResponse
from app.auth import verify_token
-from app.ops_runner import _BACKUP_TIMEOUT, stream_ops_host
+from app.ops_runner import _BACKUP_TIMEOUT, new_op_id, is_cancelled, clear_cancelled, stream_ops_host
+
+_KEEPALIVE_INTERVAL = 15 # seconds between SSE keepalive pings
router = APIRouter()
@@ -29,52 +32,101 @@
Runs on the host via nsenter because ops restore delegates to project CLIs
that use host Python venvs incompatible with the container's Python.
"""
- base_args = ["restore", project, env]
+ op_id = new_op_id()
+ yield _sse_line({"op_id": op_id})
- # Pass the backup file path to avoid interactive selection prompt
- if name:
- backup_path = f"/opt/data/backups/{project}/{env}/{name}"
- base_args.append(backup_path)
+ try:
+ base_args = ["restore", project, env]
- if dry_run:
- base_args.append("--dry-run")
+ # Pass the backup file path to avoid interactive selection prompt
+ if name:
+ backup_path = f"/opt/data/backups/{project}/{env}/{name}"
+ base_args.append(backup_path)
- # Granular restore mode
- if mode == "db":
- base_args.append("--db-only")
- elif mode == "wp":
- base_args.append("--wp-only")
+ if dry_run:
+ base_args.append("--dry-run")
- if source == "offsite":
- # ops offsite restore <project> <env>
- download_args = ["offsite", "restore", project, env]
- yield _sse_line({"line": f"Downloading {project}/{env} from offsite...", "timestamp": _now()})
+ # Granular restore mode
+ if mode == "db":
+ base_args.append("--db-only")
+ elif mode == "wp":
+ base_args.append("--wp-only")
- download_ok = True
- async for line in stream_ops_host(download_args, timeout=_BACKUP_TIMEOUT):
- yield _sse_line({"line": line, "timestamp": _now()})
- if line.startswith("[error]"):
- download_ok = False
+ if source == "offsite":
+ # ops offsite restore <project> <env>
+ download_args = ["offsite", "restore", project, env]
+ yield _sse_line({"line": f"Downloading {project}/{env} from offsite...", "timestamp": _now()})
- if not download_ok:
- yield _sse_line({"done": True, "success": False})
- return
+ download_ok = True
+ downloaded_path = None
+ async for line in stream_ops_host(download_args, timeout=_BACKUP_TIMEOUT, op_id=op_id):
+ yield _sse_line({"line": line, "timestamp": _now()})
+ if line.startswith("[error]"):
+ download_ok = False
+ # Capture downloaded file path from offsite.py output
+ if "Downloaded to" in line and "/tmp/" in line:
+ # Parse "Downloaded to: /tmp/filename.tar.gz" or similar
+ for part in line.split():
+ if part.startswith("/tmp/") and part.endswith(".tar.gz"):
+ downloaded_path = part
+ elif line.startswith(" ✓ Downloaded to "):
+ for part in line.split():
+ if part.startswith("/tmp/") and part.endswith(".tar.gz"):
+ downloaded_path = part
- yield _sse_line({"line": "Download complete. Starting restore...", "timestamp": _now()})
+ if is_cancelled(op_id):
+ yield _sse_line({"done": True, "success": False, "cancelled": True})
+ return
- success = True
- async for line in stream_ops_host(base_args, timeout=_BACKUP_TIMEOUT):
- yield _sse_line({"line": line, "timestamp": _now()})
- if line.startswith("[error]"):
- success = False
+ if not download_ok:
+ yield _sse_line({"done": True, "success": False})
+ return
- yield _sse_line({"done": True, "success": success})
+ # Use the downloaded offsite file for restore
+ if downloaded_path:
+ base_args.append(downloaded_path)
+ yield _sse_line({"line": f"Download complete. Restoring from {downloaded_path}...", "timestamp": _now()})
+ else:
+ yield _sse_line({"line": "Download complete. Starting restore...", "timestamp": _now()})
+
+ success = True
+ async for item in _stream_with_keepalive(stream_ops_host(base_args, timeout=_BACKUP_TIMEOUT, op_id=op_id)):
+ if item is None:
+ # Keepalive ping — SSE comment to prevent idle timeout
+ yield ": keepalive\n\n"
+ else:
+ yield _sse_line({"line": item, "timestamp": _now()})
+ if item.startswith("[error]"):
+ success = False
+
+ if is_cancelled(op_id):
+ yield _sse_line({"done": True, "success": False, "cancelled": True})
+ else:
+ yield _sse_line({"done": True, "success": success})
+ finally:
+ clear_cancelled(op_id)
def _now() -> str:
return datetime.now(timezone.utc).isoformat()
+async def _stream_with_keepalive(gen: AsyncGenerator[str, None]) -> AsyncGenerator[str | None, None]:
+ """Wrap an async generator to yield None as keepalive when no data arrives within the interval."""
+ aiter = gen.__aiter__()
+ pending = asyncio.ensure_future(aiter.__anext__())
+ while True:
+ done, _ = await asyncio.wait({pending}, timeout=_KEEPALIVE_INTERVAL)
+ if done:
+ try:
+ yield pending.result()
+ except StopAsyncIteration:
+ break
+ pending = asyncio.ensure_future(aiter.__anext__())
+ else:
+ yield None # keepalive — prevents Traefik idle timeout
+
+
@router.get("/{project}/{env}", summary="Restore a backup with real-time output")
async def restore_backup(
project: str,
--
Gitblit v1.3.1