feat: multi-compose rebuild (Seafile), cancel endpoint, schedule router, project descriptor
3 files added
12 files modified
| .. | .. |
|---|
| 10 | 10 | from starlette.datastructures import MutableHeaders |
|---|
| 11 | 11 | from starlette.types import ASGIApp, Receive, Scope, Send |
|---|
| 12 | 12 | |
|---|
| 13 | | -from app.routers import backups, promote, rebuild, registry, restore, services, status, sync_data, system |
|---|
| 13 | +from app.routers import backups, cancel, promote, rebuild, registry, restore, schedule, services, status, sync_data, system |
|---|
| 14 | 14 | |
|---|
| 15 | 15 | logging.basicConfig( |
|---|
| 16 | 16 | level=logging.INFO, |
|---|
| .. | .. |
|---|
| 64 | 64 | app.include_router(sync_data.router, prefix="/api/sync", tags=["sync"]) |
|---|
| 65 | 65 | app.include_router(registry.router, prefix="/api/registry", tags=["registry"]) |
|---|
| 66 | 66 | app.include_router(rebuild.router, prefix="/api/rebuild", tags=["rebuild"]) |
|---|
| 67 | +app.include_router(schedule.router, prefix="/api/schedule", tags=["schedule"]) |
|---|
| 68 | +app.include_router(cancel.router, prefix="/api/operations", tags=["operations"]) |
|---|
| 67 | 69 | |
|---|
| 68 | 70 | # --------------------------------------------------------------------------- |
|---|
| 69 | 71 | # Index route — serves index.html with content-hashed asset URLs. |
|---|
| .. | .. |
|---|
| 1 | 1 | import asyncio |
|---|
| 2 | 2 | import json |
|---|
| 3 | 3 | import os |
|---|
| 4 | +import uuid |
|---|
| 4 | 5 | from typing import AsyncGenerator |
|---|
| 5 | 6 | |
|---|
| 6 | 7 | OPS_CLI = os.environ.get("OPS_CLI", "/opt/infrastructure/ops") |
|---|
| .. | .. |
|---|
| 9 | 10 | |
|---|
| 10 | 11 | _DEFAULT_TIMEOUT = 300 |
|---|
| 11 | 12 | _BACKUP_TIMEOUT = 3600 |
|---|
| 13 | + |
|---|
| 14 | +# --------------------------------------------------------------------------- |
|---|
| 15 | +# Operation registry — tracks running processes for cancel support |
|---|
| 16 | +# --------------------------------------------------------------------------- |
|---|
| 17 | +_active_ops: dict[str, asyncio.subprocess.Process] = {} |
|---|
| 18 | +_cancelled_ops: set[str] = set() |
|---|
| 19 | + |
|---|
| 20 | + |
|---|
| 21 | +def new_op_id() -> str: |
|---|
| 22 | + return uuid.uuid4().hex[:12] |
|---|
| 23 | + |
|---|
| 24 | + |
|---|
| 25 | +def register_op(op_id: str, proc: asyncio.subprocess.Process) -> None: |
|---|
| 26 | + _active_ops[op_id] = proc |
|---|
| 27 | + |
|---|
| 28 | + |
|---|
| 29 | +def deregister_op(op_id: str) -> None: |
|---|
| 30 | + _active_ops.pop(op_id, None) |
|---|
| 31 | + # NOTE: do NOT clear _cancelled_ops here — callers check is_cancelled() |
|---|
| 32 | + # after the stream ends. The flag is cleared by clear_cancelled() instead. |
|---|
| 33 | + |
|---|
| 34 | + |
|---|
| 35 | +def clear_cancelled(op_id: str) -> None: |
|---|
| 36 | + """Call after the generator has finished checking is_cancelled().""" |
|---|
| 37 | + _cancelled_ops.discard(op_id) |
|---|
| 38 | + |
|---|
| 39 | + |
|---|
| 40 | +def cancel_op(op_id: str) -> bool: |
|---|
| 41 | + """Terminate a running operation. Returns True if found and killed.""" |
|---|
| 42 | + proc = _active_ops.get(op_id) |
|---|
| 43 | + if proc is None: |
|---|
| 44 | + return False |
|---|
| 45 | + _cancelled_ops.add(op_id) |
|---|
| 46 | + try: |
|---|
| 47 | + proc.terminate() |
|---|
| 48 | + except ProcessLookupError: |
|---|
| 49 | + pass |
|---|
| 50 | + return True |
|---|
| 51 | + |
|---|
| 52 | + |
|---|
| 53 | +def is_cancelled(op_id: str) -> bool: |
|---|
| 54 | + return op_id in _cancelled_ops |
|---|
| 12 | 55 | |
|---|
| 13 | 56 | # nsenter via Docker: run commands on the host from inside the container. |
|---|
| 14 | 57 | # Required because ops backup/restore delegate to host Python venvs (3.12) |
|---|
| .. | .. |
|---|
| 90 | 133 | } |
|---|
| 91 | 134 | |
|---|
| 92 | 135 | |
|---|
| 93 | | -async def stream_ops_host(args: list[str], timeout: int = _DEFAULT_TIMEOUT) -> AsyncGenerator[str, None]: |
|---|
| 136 | +async def stream_ops_host(args: list[str], timeout: int = _DEFAULT_TIMEOUT, op_id: str | None = None) -> AsyncGenerator[str, None]: |
|---|
| 94 | 137 | """Stream ops CLI output from the host via nsenter.""" |
|---|
| 95 | | - async for line in _stream_exec(_NSENTER_PREFIX + [OPS_CLI] + args, timeout=timeout): |
|---|
| 138 | + async for line in _stream_exec(_NSENTER_PREFIX + [OPS_CLI] + args, timeout=timeout, op_id=op_id): |
|---|
| 96 | 139 | yield line |
|---|
| 97 | 140 | |
|---|
| 98 | 141 | |
|---|
| .. | .. |
|---|
| 101 | 144 | return await _run_exec(_NSENTER_PREFIX + args, timeout=timeout) |
|---|
| 102 | 145 | |
|---|
| 103 | 146 | |
|---|
| 104 | | -async def stream_command_host(args: list[str], timeout: int = _DEFAULT_TIMEOUT) -> AsyncGenerator[str, None]: |
|---|
| 147 | +async def stream_command_host(args: list[str], timeout: int = _DEFAULT_TIMEOUT, op_id: str | None = None) -> AsyncGenerator[str, None]: |
|---|
| 105 | 148 | """Stream arbitrary command output from the host via nsenter.""" |
|---|
| 106 | | - async for line in _stream_exec(_NSENTER_PREFIX + args, timeout=timeout): |
|---|
| 149 | + async for line in _stream_exec(_NSENTER_PREFIX + args, timeout=timeout, op_id=op_id): |
|---|
| 107 | 150 | yield line |
|---|
| 108 | 151 | |
|---|
| 109 | 152 | |
|---|
| .. | .. |
|---|
| 137 | 180 | return {"success": False, "output": "", "error": str(exc)} |
|---|
| 138 | 181 | |
|---|
| 139 | 182 | |
|---|
| 140 | | -async def _stream_exec(args: list[str], timeout: int = _DEFAULT_TIMEOUT) -> AsyncGenerator[str, None]: |
|---|
| 183 | +async def _stream_exec(args: list[str], timeout: int = _DEFAULT_TIMEOUT, op_id: str | None = None) -> AsyncGenerator[str, None]: |
|---|
| 141 | 184 | """Execute a command and yield interleaved stdout/stderr lines.""" |
|---|
| 142 | 185 | try: |
|---|
| 143 | 186 | proc = await asyncio.create_subprocess_exec( |
|---|
| .. | .. |
|---|
| 152 | 195 | yield f"[error] Failed to start process: {exc}" |
|---|
| 153 | 196 | return |
|---|
| 154 | 197 | |
|---|
| 155 | | - async def _readline(stream, prefix=""): |
|---|
| 156 | | - while True: |
|---|
| 198 | + if op_id: |
|---|
| 199 | + register_op(op_id, proc) |
|---|
| 200 | + |
|---|
| 201 | + try: |
|---|
| 202 | + async def _readline(stream, prefix=""): |
|---|
| 203 | + while True: |
|---|
| 204 | + try: |
|---|
| 205 | + line = await asyncio.wait_for(stream.readline(), timeout=timeout) |
|---|
| 206 | + except asyncio.TimeoutError: |
|---|
| 207 | +                proc.kill(); yield f"{prefix}[timeout] Command exceeded {timeout}s"  # kill so proc.wait() below cannot hang |
|---|
| 208 | + break |
|---|
| 209 | + if not line: |
|---|
| 210 | + break |
|---|
| 211 | + yield prefix + line.decode("utf-8", errors="replace").rstrip("\n") |
|---|
| 212 | + |
|---|
| 213 | + stdout_gen = _readline(proc.stdout).__aiter__() |
|---|
| 214 | + stderr_gen = _readline(proc.stderr, "[stderr] ").__aiter__() |
|---|
| 215 | + |
|---|
| 216 | + stdout_done = stderr_done = False |
|---|
| 217 | + pending_out = pending_err = None |
|---|
| 218 | + |
|---|
| 219 | + async def _next(it): |
|---|
| 157 | 220 | try: |
|---|
| 158 | | - line = await asyncio.wait_for(stream.readline(), timeout=timeout) |
|---|
| 159 | | - except asyncio.TimeoutError: |
|---|
| 160 | | - yield f"{prefix}[timeout] Command exceeded {timeout}s" |
|---|
| 221 | + return await it.__anext__() |
|---|
| 222 | + except StopAsyncIteration: |
|---|
| 223 | + return None |
|---|
| 224 | + |
|---|
| 225 | + pending_out = asyncio.create_task(_next(stdout_gen)) |
|---|
| 226 | + pending_err = asyncio.create_task(_next(stderr_gen)) |
|---|
| 227 | + |
|---|
| 228 | + while not (stdout_done and stderr_done): |
|---|
| 229 | + tasks = [t for t in (pending_out, pending_err) if t is not None] |
|---|
| 230 | + if not tasks: |
|---|
| 161 | 231 | break |
|---|
| 162 | | - if not line: |
|---|
| 163 | | - break |
|---|
| 164 | | - yield prefix + line.decode("utf-8", errors="replace").rstrip("\n") |
|---|
| 232 | + done, _ = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) |
|---|
| 165 | 233 | |
|---|
| 166 | | - stdout_gen = _readline(proc.stdout).__aiter__() |
|---|
| 167 | | - stderr_gen = _readline(proc.stderr, "[stderr] ").__aiter__() |
|---|
| 234 | + for task in done: |
|---|
| 235 | + val = task.result() |
|---|
| 236 | + if task is pending_out: |
|---|
| 237 | + if val is None: |
|---|
| 238 | + stdout_done = True |
|---|
| 239 | + pending_out = None |
|---|
| 240 | + else: |
|---|
| 241 | + yield val |
|---|
| 242 | + pending_out = asyncio.create_task(_next(stdout_gen)) |
|---|
| 243 | + elif task is pending_err: |
|---|
| 244 | + if val is None: |
|---|
| 245 | + stderr_done = True |
|---|
| 246 | + pending_err = None |
|---|
| 247 | + else: |
|---|
| 248 | + yield val |
|---|
| 249 | + pending_err = asyncio.create_task(_next(stderr_gen)) |
|---|
| 168 | 250 | |
|---|
| 169 | | - stdout_done = stderr_done = False |
|---|
| 170 | | - pending_out = pending_err = None |
|---|
| 171 | | - |
|---|
| 172 | | - async def _next(it): |
|---|
| 251 | + await proc.wait() |
|---|
| 252 | + except (asyncio.CancelledError, GeneratorExit): |
|---|
| 253 | + # Browser disconnected or generator closed — kill the process |
|---|
| 173 | 254 | try: |
|---|
| 174 | | - return await it.__anext__() |
|---|
| 175 | | - except StopAsyncIteration: |
|---|
| 176 | | - return None |
|---|
| 177 | | - |
|---|
| 178 | | - pending_out = asyncio.create_task(_next(stdout_gen)) |
|---|
| 179 | | - pending_err = asyncio.create_task(_next(stderr_gen)) |
|---|
| 180 | | - |
|---|
| 181 | | - while not (stdout_done and stderr_done): |
|---|
| 182 | | - tasks = [t for t in (pending_out, pending_err) if t is not None] |
|---|
| 183 | | - if not tasks: |
|---|
| 184 | | - break |
|---|
| 185 | | - done, _ = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) |
|---|
| 186 | | - |
|---|
| 187 | | - for task in done: |
|---|
| 188 | | - val = task.result() |
|---|
| 189 | | - if task is pending_out: |
|---|
| 190 | | - if val is None: |
|---|
| 191 | | - stdout_done = True |
|---|
| 192 | | - pending_out = None |
|---|
| 193 | | - else: |
|---|
| 194 | | - yield val |
|---|
| 195 | | - pending_out = asyncio.create_task(_next(stdout_gen)) |
|---|
| 196 | | - elif task is pending_err: |
|---|
| 197 | | - if val is None: |
|---|
| 198 | | - stderr_done = True |
|---|
| 199 | | - pending_err = None |
|---|
| 200 | | - else: |
|---|
| 201 | | - yield val |
|---|
| 202 | | - pending_err = asyncio.create_task(_next(stderr_gen)) |
|---|
| 203 | | - |
|---|
| 204 | | - await proc.wait() |
|---|
| 255 | + proc.terminate() |
|---|
| 256 | + except ProcessLookupError: |
|---|
| 257 | + pass |
|---|
| 258 | + await proc.wait() |
|---|
| 259 | + raise |
|---|
| 260 | + finally: |
|---|
| 261 | + if op_id: |
|---|
| 262 | + deregister_op(op_id) |
|---|
| .. | .. |
|---|
| 1 | | -from typing import Any |
|---|
| 1 | +import json |
|---|
| 2 | +from datetime import datetime, timezone |
|---|
| 3 | +from typing import Any, AsyncGenerator |
|---|
| 2 | 4 | |
|---|
| 3 | 5 | from fastapi import APIRouter, Depends, HTTPException, Query |
|---|
| 6 | +from fastapi.responses import StreamingResponse |
|---|
| 4 | 7 | |
|---|
| 5 | 8 | from app.auth import verify_token |
|---|
| 6 | | -from app.ops_runner import run_ops, run_ops_json, run_ops_host, run_ops_host_json, run_command_host, _BACKUP_TIMEOUT |
|---|
| 9 | +from app.ops_runner import ( |
|---|
| 10 | + run_ops, run_ops_json, run_ops_host, run_ops_host_json, run_command_host, |
|---|
| 11 | + stream_ops_host, stream_command_host, new_op_id, is_cancelled, clear_cancelled, |
|---|
| 12 | + _BACKUP_TIMEOUT, OFFSITE_PYTHON, |
|---|
| 13 | +) |
|---|
| 7 | 14 | |
|---|
| 8 | 15 | router = APIRouter() |
|---|
| 16 | + |
|---|
| 17 | + |
|---|
| 18 | +def _sse(payload: dict) -> str: |
|---|
| 19 | + return f"data: {json.dumps(payload)}\n\n" |
|---|
| 20 | + |
|---|
| 21 | + |
|---|
| 22 | +def _now() -> str: |
|---|
| 23 | + return datetime.now(timezone.utc).isoformat() |
|---|
| 9 | 24 | |
|---|
| 10 | 25 | |
|---|
| 11 | 26 | @router.get("/", summary="List local backups") |
|---|
| .. | .. |
|---|
| 82 | 97 | } |
|---|
| 83 | 98 | |
|---|
| 84 | 99 | |
|---|
| 100 | +async def _backup_stream(project: str, env: str) -> AsyncGenerator[str, None]: |
|---|
| 101 | + """Stream backup creation progress via SSE.""" |
|---|
| 102 | + op_id = new_op_id() |
|---|
| 103 | + yield _sse({"op_id": op_id}) |
|---|
| 104 | + yield _sse({"line": f"Creating backup for {project}/{env}...", "timestamp": _now()}) |
|---|
| 105 | + |
|---|
| 106 | + try: |
|---|
| 107 | + success = True |
|---|
| 108 | + async for line in stream_ops_host( |
|---|
| 109 | + ["backup", project, env], timeout=_BACKUP_TIMEOUT, op_id=op_id |
|---|
| 110 | + ): |
|---|
| 111 | + yield _sse({"line": line, "timestamp": _now()}) |
|---|
| 112 | +        if line.startswith(("[error]", "[timeout]", "ERROR")): |
|---|
| 113 | + success = False |
|---|
| 114 | + |
|---|
| 115 | + if is_cancelled(op_id): |
|---|
| 116 | + yield _sse({"done": True, "success": False, "cancelled": True}) |
|---|
| 117 | + else: |
|---|
| 118 | + yield _sse({"done": True, "success": success, "project": project, "env": env}) |
|---|
| 119 | + finally: |
|---|
| 120 | + clear_cancelled(op_id) |
|---|
| 121 | + |
|---|
| 122 | + |
|---|
| 123 | +@router.get("/stream/{project}/{env}", summary="Create backup with streaming output") |
|---|
| 124 | +async def create_backup_stream( |
|---|
| 125 | + project: str, |
|---|
| 126 | + env: str, |
|---|
| 127 | + _: str = Depends(verify_token), |
|---|
| 128 | +) -> StreamingResponse: |
|---|
| 129 | + """Create a backup with real-time SSE progress output.""" |
|---|
| 130 | + return StreamingResponse( |
|---|
| 131 | + _backup_stream(project, env), |
|---|
| 132 | + media_type="text/event-stream", |
|---|
| 133 | + headers={"Cache-Control": "no-cache", "X-Accel-Buffering": "no"}, |
|---|
| 134 | + ) |
|---|
| 135 | + |
|---|
| 136 | + |
|---|
| 85 | 137 | @router.post("/offsite/upload/{project}/{env}", summary="Upload backup to offsite") |
|---|
| 86 | 138 | async def upload_offsite( |
|---|
| 87 | 139 | project: str, |
|---|
| .. | .. |
|---|
| 100 | 152 | return {"success": True, "output": result["output"], "project": project, "env": env} |
|---|
| 101 | 153 | |
|---|
| 102 | 154 | |
|---|
| 155 | +async def _upload_stream(project: str, env: str, name: str | None = None) -> AsyncGenerator[str, None]: |
|---|
| 156 | + """Stream offsite upload progress via SSE.""" |
|---|
| 157 | + op_id = new_op_id() |
|---|
| 158 | + yield _sse({"op_id": op_id}) |
|---|
| 159 | + label = f"{project}/{env}/{name}" if name else f"{project}/{env} (latest)" |
|---|
| 160 | + yield _sse({"line": f"Uploading {label} to offsite storage...", "timestamp": _now()}) |
|---|
| 161 | + |
|---|
| 162 | + cmd = ["offsite", "upload", project, env] |
|---|
| 163 | + if name: |
|---|
| 164 | + cmd.append(name) |
|---|
| 165 | + |
|---|
| 166 | + try: |
|---|
| 167 | + success = True |
|---|
| 168 | + async for line in stream_ops_host( |
|---|
| 169 | + cmd, timeout=_BACKUP_TIMEOUT, op_id=op_id |
|---|
| 170 | + ): |
|---|
| 171 | + yield _sse({"line": line, "timestamp": _now()}) |
|---|
| 172 | +        if line.startswith(("[error]", "[timeout]", "ERROR")): |
|---|
| 173 | + success = False |
|---|
| 174 | + |
|---|
| 175 | + if is_cancelled(op_id): |
|---|
| 176 | + yield _sse({"done": True, "success": False, "cancelled": True}) |
|---|
| 177 | + else: |
|---|
| 178 | + yield _sse({"done": True, "success": success, "project": project, "env": env}) |
|---|
| 179 | + finally: |
|---|
| 180 | + clear_cancelled(op_id) |
|---|
| 181 | + |
|---|
| 182 | + |
|---|
| 183 | +@router.get("/offsite/stream/{project}/{env}", summary="Upload to offsite with streaming output") |
|---|
| 184 | +async def upload_offsite_stream( |
|---|
| 185 | + project: str, |
|---|
| 186 | + env: str, |
|---|
| 187 | + name: str | None = Query(None), |
|---|
| 188 | + _: str = Depends(verify_token), |
|---|
| 189 | +) -> StreamingResponse: |
|---|
| 190 | + """Upload backup to offsite with real-time SSE progress output.""" |
|---|
| 191 | + return StreamingResponse( |
|---|
| 192 | + _upload_stream(project, env, name), |
|---|
| 193 | + media_type="text/event-stream", |
|---|
| 194 | + headers={"Cache-Control": "no-cache", "X-Accel-Buffering": "no"}, |
|---|
| 195 | + ) |
|---|
| 196 | + |
|---|
| 197 | + |
|---|
| 198 | +async def _download_stream(project: str, env: str, name: str) -> AsyncGenerator[str, None]: |
|---|
| 199 | + """Stream offsite-to-local download progress via SSE.""" |
|---|
| 200 | + op_id = new_op_id() |
|---|
| 201 | + yield _sse({"op_id": op_id}) |
|---|
| 202 | + yield _sse({"line": f"Downloading {name} from offsite to local storage...", "timestamp": _now()}) |
|---|
| 203 | + |
|---|
| 204 | +    # Download to the local backup directory so it appears in the backup list. NOTE: project/env/name are interpolated into the -c script below, so the route must reject quote characters in all three. |
|---|
| 205 | + local_path = f"/opt/data/backups/{project}/{env}/{name}" |
|---|
| 206 | + cmd = [ |
|---|
| 207 | + OFFSITE_PYTHON, "-c", |
|---|
| 208 | + f"import sys; sys.stdout.reconfigure(line_buffering=True); " |
|---|
| 209 | + f"sys.path.insert(0, '/opt/data/scripts'); " |
|---|
| 210 | + f"from offsite import download; from pathlib import Path; " |
|---|
| 211 | + f"import os; os.makedirs('/opt/data/backups/{project}/{env}', exist_ok=True); " |
|---|
| 212 | + f"ok = download('{name}', Path('{local_path}'), '{project}', '{env}'); " |
|---|
| 213 | + f"sys.exit(0 if ok else 1)" |
|---|
| 214 | + ] |
|---|
| 215 | + |
|---|
| 216 | + try: |
|---|
| 217 | + success = True |
|---|
| 218 | + async for line in stream_command_host(cmd, timeout=_BACKUP_TIMEOUT, op_id=op_id): |
|---|
| 219 | + yield _sse({"line": line, "timestamp": _now()}) |
|---|
| 220 | +        if line.startswith(("[error]", "[timeout]", "ERROR")) or "failed" in line.lower(): |
|---|
| 221 | + success = False |
|---|
| 222 | + |
|---|
| 223 | + if is_cancelled(op_id): |
|---|
| 224 | + yield _sse({"done": True, "success": False, "cancelled": True}) |
|---|
| 225 | + else: |
|---|
| 226 | + yield _sse({"done": True, "success": success, "project": project, "env": env, "name": name}) |
|---|
| 227 | + finally: |
|---|
| 228 | + clear_cancelled(op_id) |
|---|
| 229 | + |
|---|
| 230 | + |
|---|
| 231 | +@router.get("/offsite/download/stream/{project}/{env}", summary="Download offsite backup to local storage with streaming output") |
|---|
| 232 | +async def download_offsite_stream( |
|---|
| 233 | + project: str, |
|---|
| 234 | + env: str, |
|---|
| 235 | + name: str = Query(...), |
|---|
| 236 | + _: str = Depends(verify_token), |
|---|
| 237 | +) -> StreamingResponse: |
|---|
| 238 | + """Download an offsite backup to local storage with real-time SSE progress output.""" |
|---|
| 239 | +    if "/" in name or "\\" in name or ".." in name or any(q in s for s in (project, env, name) for q in "'\""): |
|---|
| 240 | + raise HTTPException(status_code=400, detail="Invalid backup name") |
|---|
| 241 | + return StreamingResponse( |
|---|
| 242 | + _download_stream(project, env, name), |
|---|
| 243 | + media_type="text/event-stream", |
|---|
| 244 | + headers={"Cache-Control": "no-cache", "X-Accel-Buffering": "no"}, |
|---|
| 245 | + ) |
|---|
| 246 | + |
|---|
| 247 | + |
|---|
| 103 | 248 | @router.post("/offsite/retention", summary="Apply offsite retention policy") |
|---|
| 104 | 249 | async def apply_retention( |
|---|
| 105 | 250 | _: str = Depends(verify_token), |
|---|
| .. | .. |
|---|
| 1 | +from fastapi import APIRouter, Depends, HTTPException |
|---|
| 2 | + |
|---|
| 3 | +from app.auth import verify_token |
|---|
| 4 | +from app.ops_runner import cancel_op |
|---|
| 5 | + |
|---|
| 6 | +router = APIRouter() |
|---|
| 7 | + |
|---|
| 8 | + |
|---|
| 9 | +@router.delete("/{op_id}", summary="Cancel a running operation") |
|---|
| 10 | +async def cancel_operation( |
|---|
| 11 | + op_id: str, |
|---|
| 12 | + _: str = Depends(verify_token), |
|---|
| 13 | +) -> dict: |
|---|
| 14 | + """Terminate a running operation by its op_id.""" |
|---|
| 15 | + if cancel_op(op_id): |
|---|
| 16 | + return {"cancelled": True, "op_id": op_id} |
|---|
| 17 | + raise HTTPException(status_code=404, detail=f"No active operation with id '{op_id}'") |
|---|
| .. | .. |
|---|
| 1 | 1 | """ |
|---|
| 2 | | -Container lifecycle operations via Coolify API + SSH. |
|---|
| 2 | +Container lifecycle operations via docker compose. |
|---|
| 3 | 3 | |
|---|
| 4 | 4 | Three operations: |
|---|
| 5 | | - restart – docker restart {containers} via SSH (no Coolify, no image pruning) |
|---|
| 6 | | - rebuild – Coolify stop → docker build → Coolify start |
|---|
| 7 | | - recreate – Coolify stop → wipe data → docker build → Coolify start → show backups banner |
|---|
| 5 | + restart - docker restart <containers> (fast, no downtime window) |
|---|
| 6 | + rebuild - docker compose down && docker compose up -d --build |
|---|
| 7 | + recreate - docker compose down --volumes && docker compose up -d --build (destructive DR) |
|---|
| 8 | 8 | """ |
|---|
| 9 | 9 | import json |
|---|
| 10 | | -import os |
|---|
| 11 | | -import urllib.request |
|---|
| 12 | | -import urllib.error |
|---|
| 10 | +import sys |
|---|
| 13 | 11 | from datetime import datetime, timezone |
|---|
| 14 | 12 | from typing import AsyncGenerator |
|---|
| 15 | 13 | |
|---|
| 16 | | -import yaml |
|---|
| 17 | 14 | from fastapi import APIRouter, Depends, Query |
|---|
| 18 | 15 | from fastapi.responses import StreamingResponse |
|---|
| 19 | 16 | |
|---|
| .. | .. |
|---|
| 21 | 18 | from app.ops_runner import ( |
|---|
| 22 | 19 | OPS_CLI, |
|---|
| 23 | 20 | _BACKUP_TIMEOUT, |
|---|
| 21 | + new_op_id, |
|---|
| 22 | + is_cancelled, |
|---|
| 23 | + clear_cancelled, |
|---|
| 24 | 24 | run_command, |
|---|
| 25 | 25 | run_command_host, |
|---|
| 26 | 26 | stream_command_host, |
|---|
| 27 | 27 | ) |
|---|
| 28 | 28 | |
|---|
| 29 | +sys.path.insert(0, "/opt/infrastructure") |
|---|
| 30 | + |
|---|
| 29 | 31 | router = APIRouter() |
|---|
| 30 | 32 | |
|---|
| 33 | + |
|---|
| 31 | 34 | # --------------------------------------------------------------------------- |
|---|
| 32 | | -# Configuration |
|---|
| 35 | +# Descriptor helpers |
|---|
| 33 | 36 | # --------------------------------------------------------------------------- |
|---|
| 34 | 37 | |
|---|
| 35 | | -_REGISTRY_PATH = os.environ.get( |
|---|
| 36 | | - "REGISTRY_PATH", |
|---|
| 37 | | - "/opt/infrastructure/servers/hetzner-vps/registry.yaml", |
|---|
| 38 | | -) |
|---|
| 38 | +def _descriptor(project: str): |
|---|
| 39 | + """Load the project descriptor from /opt/data/{project}/project.yaml.""" |
|---|
| 40 | + from toolkit.descriptor import find as find_project |
|---|
| 41 | + desc = find_project(project) |
|---|
| 42 | + if desc is None: |
|---|
| 43 | + raise ValueError(f"Unknown project '{project}' — no project.yaml found") |
|---|
| 44 | + return desc |
|---|
| 39 | 45 | |
|---|
| 40 | | -_COOLIFY_BASE = os.environ.get( |
|---|
| 41 | | - "COOLIFY_BASE_URL", |
|---|
| 42 | | - "https://cockpit.tekmidian.com/api/v1", |
|---|
| 43 | | -) |
|---|
| 44 | 46 | |
|---|
| 45 | | -_COOLIFY_TOKEN = os.environ.get( |
|---|
| 46 | | - "COOLIFY_API_TOKEN", |
|---|
| 47 | | - "3|f1fa8ee5791440ddd37e6cecafd964c8cd734dd4a8891180c424efad6bfdb7f5", |
|---|
| 48 | | -) |
|---|
| 47 | +def _compose_dir(project: str, env: str) -> str: |
|---|
| 48 | + desc = _descriptor(project) |
|---|
| 49 | + return desc.compose_dir(env) |
|---|
| 49 | 50 | |
|---|
| 50 | | -_COOLIFY_TIMEOUT = 30 # seconds for API calls |
|---|
| 51 | | -_POLL_INTERVAL = 5 # seconds between container status polls |
|---|
| 52 | | -_POLL_MAX_WAIT = 180 # max seconds to wait for containers to stop/start |
|---|
| 51 | + |
|---|
| 52 | +def _container_prefix(project: str, env: str) -> str: |
|---|
| 53 | + """Return expanded container prefix, e.g. 'dev-mdf' or 'prod-seriousletter'.""" |
|---|
| 54 | + desc = _descriptor(project) |
|---|
| 55 | + return desc.container_prefix_for(env) |
|---|
| 56 | + |
|---|
| 57 | + |
|---|
| 58 | +def _all_compose_dirs(project: str, env: str) -> list[tuple[str, str]]: |
|---|
| 59 | + """Return list of (label, compose_dir) for all compose files to manage. |
|---|
| 60 | + |
|---|
| 61 | + Always includes the main compose_dir for the env. Additionally includes |
|---|
| 62 | + any subsystem compose dirs defined in the descriptor's raw config that |
|---|
| 63 | + are applicable to the given env. Currently supports: |
|---|
| 64 | + - seafile: prod-only extra compose at descriptor.raw['seafile']['compose_dir'] |
|---|
| 65 | + """ |
|---|
| 66 | + desc = _descriptor(project) |
|---|
| 67 | + dirs = [] |
|---|
| 68 | + main_dir = desc.compose_dir(env) |
|---|
| 69 | + if main_dir: |
|---|
| 70 | + dirs.append((env, main_dir)) |
|---|
| 71 | + # Check for seafile subsystem (prod-only, lives in its own compose dir) |
|---|
| 72 | + seafile = desc.raw.get("seafile") |
|---|
| 73 | + if seafile and env == "prod" and "compose_dir" in seafile: |
|---|
| 74 | + dirs.append(("seafile", seafile["compose_dir"])) |
|---|
| 75 | + return dirs |
|---|
| 76 | + |
|---|
| 77 | + |
|---|
| 78 | +def _compose_cmd(project: str, env: str) -> list[str]: |
|---|
| 79 | + """Build the base docker compose command with env-file and profile.""" |
|---|
| 80 | + import os |
|---|
| 81 | + compose_dir = _compose_dir(project, env) |
|---|
| 82 | + # Find compose file |
|---|
| 83 | + compose_file = "docker-compose.yaml" |
|---|
| 84 | + if not os.path.isfile(os.path.join(compose_dir, compose_file)): |
|---|
| 85 | + compose_file = "docker-compose.yml" |
|---|
| 86 | + cmd = ["docker", "compose", "-f", f"{compose_dir}/{compose_file}"] |
|---|
| 87 | + # Find env file |
|---|
| 88 | + for candidate in [f".env.{env}", ".env"]: |
|---|
| 89 | + path = os.path.join(compose_dir, candidate) |
|---|
| 90 | + if os.path.isfile(path): |
|---|
| 91 | + cmd += ["--env-file", path] |
|---|
| 92 | + break |
|---|
| 93 | + cmd += ["--profile", env] |
|---|
| 94 | + return cmd |
|---|
| 95 | + |
|---|
| 96 | + |
|---|
| 97 | +def _compose_cmd_for(compose_dir: str, env: str) -> list[str]: |
|---|
| 98 | + """Build the base docker compose command for a specific compose directory. |
|---|
| 99 | + |
|---|
| 100 | + Searches for .env.{env} first, then .env. Adds --profile {env}. |
|---|
| 101 | + """ |
|---|
| 102 | + import os |
|---|
| 103 | + compose_file = "docker-compose.yaml" |
|---|
| 104 | + if not os.path.isfile(os.path.join(compose_dir, compose_file)): |
|---|
| 105 | + compose_file = "docker-compose.yml" |
|---|
| 106 | + cmd = ["docker", "compose", "-f", f"{compose_dir}/{compose_file}"] |
|---|
| 107 | + for candidate in [f".env.{env}", ".env"]: |
|---|
| 108 | + path = os.path.join(compose_dir, candidate) |
|---|
| 109 | + if os.path.isfile(path): |
|---|
| 110 | + cmd += ["--env-file", path] |
|---|
| 111 | + break |
|---|
| 112 | + cmd += ["--profile", env] |
|---|
| 113 | + return cmd |
|---|
| 53 | 114 | |
|---|
| 54 | 115 | |
|---|
| 55 | 116 | # --------------------------------------------------------------------------- |
|---|
| 56 | | -# Registry helpers |
|---|
| 117 | +# Container discovery |
|---|
| 57 | 118 | # --------------------------------------------------------------------------- |
|---|
| 58 | 119 | |
|---|
| 59 | | -def _load_registry() -> dict: |
|---|
| 60 | | - with open(_REGISTRY_PATH) as f: |
|---|
| 61 | | - return yaml.safe_load(f) or {} |
|---|
| 120 | +async def _find_containers(project: str, env: str) -> list[str]: |
|---|
| 121 | + """Find all running containers matching the project/env prefix.""" |
|---|
| 122 | + prefix = _container_prefix(project, env) |
|---|
| 123 | + pattern = f"{prefix}-" |
|---|
| 62 | 124 | |
|---|
| 63 | | - |
|---|
| 64 | | -def _project_cfg(project: str) -> dict: |
|---|
| 65 | | - reg = _load_registry() |
|---|
| 66 | | - projects = reg.get("projects", {}) |
|---|
| 67 | | - if project not in projects: |
|---|
| 68 | | - raise ValueError(f"Unknown project '{project}'") |
|---|
| 69 | | - return projects[project] |
|---|
| 70 | | - |
|---|
| 71 | | - |
|---|
| 72 | | -def _coolify_uuid(project: str, env: str) -> str: |
|---|
| 73 | | - cfg = _project_cfg(project) |
|---|
| 74 | | - uuids = cfg.get("coolify_uuids", {}) |
|---|
| 75 | | - uuid = uuids.get(env) |
|---|
| 76 | | - if not uuid: |
|---|
| 77 | | - raise ValueError( |
|---|
| 78 | | - f"No coolify_uuid configured for {project}/{env} in registry.yaml" |
|---|
| 79 | | - ) |
|---|
| 80 | | - return uuid |
|---|
| 81 | | - |
|---|
| 82 | | - |
|---|
| 83 | | -def _data_dir(project: str, env: str) -> str: |
|---|
| 84 | | - cfg = _project_cfg(project) |
|---|
| 85 | | - template = cfg.get("data_dir", "") |
|---|
| 86 | | - if not template: |
|---|
| 87 | | - raise ValueError(f"No data_dir configured for {project} in registry.yaml") |
|---|
| 88 | | - return template.replace("{env}", env) |
|---|
| 89 | | - |
|---|
| 90 | | - |
|---|
| 91 | | -def _build_cfg(project: str, env: str) -> dict | None: |
|---|
| 92 | | - """Return build config or None if the project uses registry-only images.""" |
|---|
| 93 | | - cfg = _project_cfg(project) |
|---|
| 94 | | - build = cfg.get("build", {}) |
|---|
| 95 | | - if build.get("no_local_image"): |
|---|
| 96 | | - return None |
|---|
| 97 | | - ctx_template = build.get("build_context", "") |
|---|
| 98 | | - if not ctx_template: |
|---|
| 99 | | - return None |
|---|
| 100 | | - return { |
|---|
| 101 | | - "build_context": ctx_template.replace("{env}", env), |
|---|
| 102 | | - "image_name": build.get("image_name", project), |
|---|
| 103 | | - "env": env, |
|---|
| 104 | | - } |
|---|
| 125 | + result = await run_command( |
|---|
| 126 | + ["docker", "ps", "--filter", f"name={pattern}", "--format", "{{.Names}}"], |
|---|
| 127 | + timeout=15, |
|---|
| 128 | + ) |
|---|
| 129 | + containers = [] |
|---|
| 130 | + if result["success"]: |
|---|
| 131 | + for name in result["output"].strip().splitlines(): |
|---|
| 132 | + name = name.strip() |
|---|
| 133 | + if name and name.startswith(pattern): |
|---|
| 134 | + containers.append(name) |
|---|
| 135 | + return containers |
|---|
| 105 | 136 | |
|---|
| 106 | 137 | |
|---|
| 107 | 138 | # --------------------------------------------------------------------------- |
|---|
| .. | .. |
|---|
| 120 | 151 | return _sse({"line": text, "timestamp": _now()}) |
|---|
| 121 | 152 | |
|---|
| 122 | 153 | |
|---|
def _done(success: bool, project: str, env: str, action: str, cancelled: bool = False) -> str:
    """Build the terminal SSE event for an operation stream.

    The "cancelled" key is included only when the operation was cancelled,
    so consumers that just read "success" see an unchanged event shape.
    """
    event = dict(
        done=True,
        success=success,
        project=project,
        env=env,
        action=action,
    )
    if cancelled:
        event["cancelled"] = True
    return _sse(event)
|---|
| 251 | 165 | |
|---|
| 252 | 166 | |
|---|
| 253 | 167 | # --------------------------------------------------------------------------- |
|---|
| 254 | 168 | # Operation: Restart |
|---|
| 255 | 169 | # --------------------------------------------------------------------------- |
|---|
| 256 | 170 | |
|---|
| 257 | | -async def _op_restart(project: str, env: str) -> AsyncGenerator[str, None]: |
|---|
| 171 | +async def _op_restart(project: str, env: str, op_id: str | None = None) -> AsyncGenerator[str, None]: |
|---|
| 172 | + """Restart: docker restart <containers>. Fast, no compose cycle. |
|---|
| 173 | + |
|---|
| 174 | + Uses _find_containers which matches all containers with the project/env |
|---|
| 175 | + prefix (e.g. 'prod-mdf-'). This naturally includes any subsystem containers |
|---|
| 176 | + such as prod-mdf-seafile, prod-mdf-seafile-mysql, prod-mdf-seafile-redis. |
|---|
| 258 | 177 | """ |
|---|
| 259 | | - Restart: docker restart {containers} via SSH/nsenter. |
|---|
| 260 | | - No Coolify involvement — avoids the image-pruning stop/start cycle. |
|---|
| 261 | | - """ |
|---|
| 178 | + if op_id: |
|---|
| 179 | + yield _sse({"op_id": op_id}) |
|---|
| 262 | 180 | yield _line(f"[restart] Finding containers for {project}/{env}...") |
|---|
| 263 | 181 | |
|---|
| 264 | 182 | try: |
|---|
| 265 | | - containers = await _find_containers_for_service(project, env) |
|---|
| 183 | + containers = await _find_containers(project, env) |
|---|
| 266 | 184 | except Exception as exc: |
|---|
| 267 | | - yield _line(f"[error] Registry lookup failed: {exc}") |
|---|
| 185 | + yield _line(f"[error] Descriptor lookup failed: {exc}") |
|---|
| 268 | 186 | yield _done(False, project, env, "restart") |
|---|
| 269 | 187 | return |
|---|
| 270 | 188 | |
|---|
| .. | .. |
|---|
| 275 | 193 | |
|---|
| 276 | 194 | yield _line(f"[restart] Restarting {len(containers)} container(s): {', '.join(containers)}") |
|---|
| 277 | 195 | |
|---|
| 278 | | - cmd = ["docker", "restart"] + containers |
|---|
| 279 | | - result = await run_command(cmd, timeout=120) |
|---|
| 196 | + result = await run_command(["docker", "restart"] + containers, timeout=120) |
|---|
| 280 | 197 | |
|---|
| 281 | | - if result["output"].strip(): |
|---|
| 282 | | - for line in result["output"].strip().splitlines(): |
|---|
| 283 | | - yield _line(line) |
|---|
| 284 | | - if result["error"].strip(): |
|---|
| 285 | | - for line in result["error"].strip().splitlines(): |
|---|
| 286 | | - yield _line(f"[stderr] {line}") |
|---|
| 198 | + for output_line in result["output"].strip().splitlines(): |
|---|
| 199 | + if output_line.strip(): |
|---|
| 200 | + yield _line(output_line) |
|---|
| 201 | + for err_line in result["error"].strip().splitlines(): |
|---|
| 202 | + if err_line.strip(): |
|---|
| 203 | + yield _line(f"[stderr] {err_line}") |
|---|
| 287 | 204 | |
|---|
| 288 | 205 | if result["success"]: |
|---|
| 289 | 206 | yield _line(f"[restart] All containers restarted successfully.") |
|---|
| 290 | 207 | yield _done(True, project, env, "restart") |
|---|
| 291 | 208 | else: |
|---|
| 292 | | - yield _line(f"[error] docker restart failed (exit code non-zero)") |
|---|
| 209 | + yield _line(f"[error] docker restart failed") |
|---|
| 293 | 210 | yield _done(False, project, env, "restart") |
|---|
| 294 | 211 | |
|---|
| 295 | 212 | |
|---|
| .. | .. |
|---|
| 297 | 214 | # Operation: Rebuild |
|---|
| 298 | 215 | # --------------------------------------------------------------------------- |
|---|
| 299 | 216 | |
|---|
async def _op_rebuild(project: str, env: str, op_id: str | None = None) -> AsyncGenerator[str, None]:
    """Rebuild: docker compose down && docker compose up -d --build. No data loss.

    Iterates over all compose dirs (main + any subsystem dirs like seafile for
    prod). Each compose is brought down then rebuilt in sequence; the first
    failure or cancellation aborts the whole operation.

    Args:
        project: Project name as known to the descriptor registry.
        env: Environment name (e.g. "prod").
        op_id: Optional operation id; when set it is announced as the first
            SSE event and polled via is_cancelled() at safe checkpoints.

    Yields:
        SSE-formatted strings: an optional op_id event, log lines, and a
        terminal event from _done().
    """
    if op_id:
        yield _sse({"op_id": op_id})

    try:
        compose_dirs = _all_compose_dirs(project, env)
    except Exception as exc:
        yield _line(f"[error] Descriptor lookup failed: {exc}")
        yield _done(False, project, env, "rebuild")
        return

    if not compose_dirs:
        yield _line(f"[error] No compose directories found for {project}/{env}")
        yield _done(False, project, env, "rebuild")
        return

    for label, cdir in compose_dirs:
        yield _line(f"[rebuild] Compose dir ({label}): {cdir}")

        # Step 1: docker compose down
        yield _line(f"[rebuild] Stopping {label} via docker compose down...")
        result = await run_command_host(
            _compose_cmd_for(cdir, env) + ["down"],
            timeout=120,
        )
        # Relay stdout and stderr separately: concatenating the two raw
        # strings could fuse the last stdout line with the first stderr line
        # when stdout lacks a trailing newline.
        for stream_text in (result["output"], result["error"]):
            for output_line in stream_text.strip().splitlines():
                if output_line.strip():
                    yield _line(output_line)

        if not result["success"]:
            yield _line(f"[error] docker compose down failed for {label}")
            yield _done(False, project, env, "rebuild")
            return

        yield _line(f"[rebuild] {label} containers stopped.")

        # Checkpoint: stopping is recoverable, so honour cancellation here.
        if op_id and is_cancelled(op_id):
            yield _line("[rebuild] Cancelled after stop. Run docker compose up manually to recover.")
            yield _done(False, project, env, "rebuild", cancelled=True)
            return

        # Step 2: docker compose up -d --build (streamed for real-time build output)
        yield _line(f"[rebuild] Building and starting {label}...")
        async for build_line in stream_command_host(
            _compose_cmd_for(cdir, env) + ["up", "-d", "--build"],
            timeout=_BACKUP_TIMEOUT,
            op_id=op_id,
        ):
            yield _line(f"[rebuild] {build_line}")

        if op_id and is_cancelled(op_id):
            yield _line(f"[rebuild] Cancelled during build/start of {label}.")
            yield _done(False, project, env, "rebuild", cancelled=True)
            return

    # Verify containers came up across every compose dir.
    containers = await _find_containers(project, env)
    if containers:
        yield _line(f"[rebuild] {len(containers)} container(s) running: {', '.join(containers)}")
        yield _done(True, project, env, "rebuild")
    else:
        yield _line("[warn] No running containers detected after rebuild — check compose logs")
        yield _done(False, project, env, "rebuild")
|---|
| 338 | 285 | |
|---|
| 339 | 286 | |
|---|
| .. | .. |
|---|
| 341 | 288 | # Operation: Recreate (Disaster Recovery) |
|---|
| 342 | 289 | # --------------------------------------------------------------------------- |
|---|
| 343 | 290 | |
|---|
async def _op_recreate(project: str, env: str, op_id: str | None = None) -> AsyncGenerator[str, None]:
    """Recreate: docker compose down --volumes && up --build. DESTRUCTIVE — wipes volumes.

    Iterates over all compose dirs (main + any subsystem dirs like seafile for
    prod). A safety backup is taken first; if that backup reports any error the
    operation aborts BEFORE anything is wiped. Then each compose is wiped and
    rebuilt in sequence.

    Args:
        project: Project name as known to the descriptor registry.
        env: Environment name (e.g. "prod").
        op_id: Optional operation id; when set it is announced as the first
            SSE event and polled via is_cancelled() at safe checkpoints.

    Yields:
        SSE-formatted strings: an optional op_id event, log lines, and a
        terminal event from _done().
    """
    if op_id:
        yield _sse({"op_id": op_id})

    try:
        compose_dirs = _all_compose_dirs(project, env)
    except Exception as exc:
        yield _line(f"[error] Descriptor lookup failed: {exc}")
        yield _done(False, project, env, "recreate")
        return

    if not compose_dirs:
        yield _line(f"[error] No compose directories found for {project}/{env}")
        yield _done(False, project, env, "recreate")
        return

    # Log all compose dirs we will operate on.
    for label, cdir in compose_dirs:
        yield _line(f"[recreate] Compose dir ({label}): {cdir}")

    # Step 1: safety backup before destroying anything.
    yield _line("[recreate] Creating safety backup before wipe...")
    backup_failed = False
    async for backup_line in stream_command_host(
        [OPS_CLI, "backup", project, env],
        timeout=_BACKUP_TIMEOUT,
        op_id=op_id,
    ):
        # "[error]" is the established error prefix of the ops stream helpers.
        if backup_line.startswith("[error]"):
            backup_failed = True
        yield _line(f"[recreate] {backup_line}")

    if op_id and is_cancelled(op_id):
        yield _line("[recreate] Cancelled during safety backup. No data was lost.")
        yield _done(False, project, env, "recreate", cancelled=True)
        return

    # Abort BEFORE wiping anything if the safety backup reported an error —
    # recreate is destructive and must not proceed without a good backup.
    if backup_failed:
        yield _line("[error] Safety backup reported errors — aborting recreate. No data was wiped.")
        yield _done(False, project, env, "recreate")
        return

    yield _line("[recreate] Safety backup complete.")

    for label, cdir in compose_dirs:
        # Step 2: docker compose down --volumes (removes named volumes — destructive).
        yield _line(f"[recreate] WARNING: Running docker compose down --volumes for {label} (data will be wiped)...")
        result = await run_command_host(
            _compose_cmd_for(cdir, env) + ["down", "--volumes"],
            timeout=120,
        )
        # Relay stdout and stderr separately so the last stdout line cannot
        # fuse with the first stderr line.
        for stream_text in (result["output"], result["error"]):
            for output_line in stream_text.strip().splitlines():
                if output_line.strip():
                    yield _line(output_line)

        if not result["success"]:
            yield _line(f"[error] docker compose down --volumes failed for {label}")
            yield _done(False, project, env, "recreate")
            return

        yield _line(f"[recreate] {label} containers and volumes removed.")

        if op_id and is_cancelled(op_id):
            yield _line(f"[recreate] Cancelled after volume wipe of {label}. Restore a backup to recover.")
            yield _done(False, project, env, "recreate", cancelled=True)
            return

        # Step 3: docker compose up -d --build.
        yield _line(f"[recreate] Building and starting fresh {label}...")
        async for build_line in stream_command_host(
            _compose_cmd_for(cdir, env) + ["up", "-d", "--build"],
            timeout=_BACKUP_TIMEOUT,
            op_id=op_id,
        ):
            yield _line(f"[recreate] {build_line}")

        if op_id and is_cancelled(op_id):
            yield _line(f"[recreate] Cancelled during build/start of {label}.")
            yield _done(False, project, env, "recreate", cancelled=True)
            return

    # Verify containers came up.
    containers = await _find_containers(project, env)
    if containers:
        yield _line(f"[recreate] {len(containers)} container(s) running. Restore a backup to complete recovery.")
        yield _done(True, project, env, "recreate")
    else:
        yield _line("[warn] No running containers after recreate — check compose logs")
        yield _done(False, project, env, "recreate")
|---|
| 419 | 377 | |
|---|
| 420 | 378 | |
|---|
| 421 | 379 | # --------------------------------------------------------------------------- |
|---|
| 422 | | -# Dispatch wrapper |
|---|
| 380 | +# Dispatch |
|---|
| 423 | 381 | # --------------------------------------------------------------------------- |
|---|
| 424 | 382 | |
|---|
async def _op_generator(project: str, env: str, action: str) -> AsyncGenerator[str, None]:
    """Dispatch an operation stream by action name under a fresh op id.

    The cancellation flag for the op id is always cleared on exit so that
    stale ids cannot leak state into later operations.
    """
    handlers = {
        "restart": _op_restart,
        "rebuild": _op_rebuild,
        "recreate": _op_recreate,
    }
    op_id = new_op_id()
    try:
        handler = handlers.get(action)
        if handler is None:
            yield _line(f"[error] Unknown action '{action}'. Valid: restart, rebuild, recreate")
            yield _done(False, project, env, action)
            return
        async for chunk in handler(project, env, op_id=op_id):
            yield chunk
    finally:
        clear_cancelled(op_id)
|---|
| 443 | 400 | |
|---|
| 444 | 401 | |
|---|
| 445 | 402 | # --------------------------------------------------------------------------- |
|---|
| .. | .. |
|---|
| 463 | 420 | Stream a container lifecycle operation via SSE. |
|---|
| 464 | 421 | |
|---|
| 465 | 422 | - restart: docker restart containers (safe, fast) |
|---|
| 466 | | - - rebuild: stop via Coolify, rebuild image, start via Coolify |
|---|
| 467 | | - - recreate: stop, wipe data, rebuild image, start (destructive — DR only) |
|---|
| 423 | + - rebuild: docker compose down && up --build (no data loss) |
|---|
| 424 | + - recreate: docker compose down --volumes && up --build (destructive — DR only) |
|---|
| 468 | 425 | """ |
|---|
| 469 | 426 | return StreamingResponse( |
|---|
| 470 | 427 | _op_generator(project, env, action), |
|---|
| .. | .. |
|---|
| 1 | | -import yaml |
|---|
| 2 | | -from pathlib import Path |
|---|
| 1 | +import sys |
|---|
| 3 | 2 | from typing import Any |
|---|
| 4 | 3 | |
|---|
| 5 | 4 | from fastapi import APIRouter, Depends |
|---|
| 6 | 5 | |
|---|
| 7 | 6 | from app.auth import verify_token |
|---|
| 8 | 7 | |
|---|
| 8 | +sys.path.insert(0, "/opt/infrastructure") |
|---|
| 9 | +from toolkit.discovery import all_projects # noqa: E402 |
|---|
| 10 | + |
|---|
| 9 | 11 | router = APIRouter() |
|---|
| 10 | 12 | |
|---|
| 11 | | -_REGISTRY_PATH = Path("/opt/infrastructure/servers/hetzner-vps/registry.yaml") |
|---|
| 12 | 13 | |
|---|
| 14 | +def _serialize_project(desc: Any) -> dict: |
|---|
| 15 | + """Serialize a ProjectDescriptor to a response dict.""" |
|---|
| 16 | + environments = [ |
|---|
| 17 | + { |
|---|
| 18 | + "name": e.name, |
|---|
| 19 | + "domain": e.domain, |
|---|
| 20 | + "compose_dir": e.compose_dir, |
|---|
| 21 | + } |
|---|
| 22 | + for e in desc.environments |
|---|
| 23 | + ] |
|---|
| 13 | 24 | |
|---|
| 14 | | -def _load_registry() -> dict: |
|---|
| 15 | | - """Load and return the registry YAML.""" |
|---|
| 16 | | - with open(_REGISTRY_PATH) as f: |
|---|
| 17 | | - return yaml.safe_load(f) |
|---|
| 25 | + return { |
|---|
| 26 | + "environments": environments, |
|---|
| 27 | + "domains": desc.domains, |
|---|
| 28 | + "promote": desc.promote or None, |
|---|
| 29 | + "has_cli": bool(desc.sync.get("type")), |
|---|
| 30 | + "backup": desc.backup or None, |
|---|
| 31 | + "type": desc.type, |
|---|
| 32 | + } |
|---|
| 18 | 33 | |
|---|
| 19 | 34 | |
|---|
| 20 | 35 | @router.get("/", summary="Get project registry") |
|---|
| .. | .. |
|---|
| 22 | 37 | _: str = Depends(verify_token), |
|---|
| 23 | 38 | ) -> dict[str, Any]: |
|---|
| 24 | 39 | """Return project list with environments, promote config, and domains.""" |
|---|
| 25 | | - registry = _load_registry() |
|---|
| 26 | | - projects = {} |
|---|
| 27 | | - |
|---|
| 28 | | - for name, cfg in registry.get("projects", {}).items(): |
|---|
| 29 | | - projects[name] = { |
|---|
| 30 | | - "environments": cfg.get("environments", []), |
|---|
| 31 | | - "domains": cfg.get("domains", {}), |
|---|
| 32 | | - "promote": cfg.get("promote"), |
|---|
| 33 | | - "has_cli": bool(cfg.get("cli")), |
|---|
| 34 | | - "static": cfg.get("static", False), |
|---|
| 35 | | - "infrastructure": cfg.get("infrastructure", False), |
|---|
| 36 | | - "backup_dir": cfg.get("backup_dir"), |
|---|
| 37 | | - "has_coolify": bool(cfg.get("coolify_uuids")), |
|---|
| 38 | | - } |
|---|
| 39 | | - |
|---|
| 40 | + projects = { |
|---|
| 41 | + name: _serialize_project(desc) |
|---|
| 42 | + for name, desc in all_projects().items() |
|---|
| 43 | + } |
|---|
| 40 | 44 | return {"projects": projects} |
|---|
| .. | .. |
|---|
| 1 | +import asyncio |
|---|
| 1 | 2 | import json |
|---|
| 2 | 3 | from datetime import datetime, timezone |
|---|
| 3 | 4 | from typing import AsyncGenerator, Literal |
|---|
| .. | .. |
|---|
| 6 | 7 | from fastapi.responses import StreamingResponse |
|---|
| 7 | 8 | |
|---|
| 8 | 9 | from app.auth import verify_token |
|---|
| 9 | | -from app.ops_runner import _BACKUP_TIMEOUT, stream_ops_host |
|---|
| 10 | +from app.ops_runner import _BACKUP_TIMEOUT, new_op_id, is_cancelled, clear_cancelled, stream_ops_host |
|---|
| 11 | + |
|---|
| 12 | +_KEEPALIVE_INTERVAL = 15 # seconds between SSE keepalive pings |
|---|
| 10 | 13 | |
|---|
| 11 | 14 | router = APIRouter() |
|---|
| 12 | 15 | |
|---|
| .. | .. |
|---|
| 29 | 32 | Runs on the host via nsenter because ops restore delegates to project CLIs |
|---|
| 30 | 33 | that use host Python venvs incompatible with the container's Python. |
|---|
| 31 | 34 | """ |
|---|
| 32 | | - base_args = ["restore", project, env] |
|---|
| 35 | + op_id = new_op_id() |
|---|
| 36 | + yield _sse_line({"op_id": op_id}) |
|---|
| 33 | 37 | |
|---|
| 34 | | - # Pass the backup file path to avoid interactive selection prompt |
|---|
| 35 | | - if name: |
|---|
| 36 | | - backup_path = f"/opt/data/backups/{project}/{env}/{name}" |
|---|
| 37 | | - base_args.append(backup_path) |
|---|
| 38 | + try: |
|---|
| 39 | + base_args = ["restore", project, env] |
|---|
| 38 | 40 | |
|---|
| 39 | | - if dry_run: |
|---|
| 40 | | - base_args.append("--dry-run") |
|---|
| 41 | + # Pass the backup file path to avoid interactive selection prompt |
|---|
| 42 | + if name: |
|---|
| 43 | + backup_path = f"/opt/data/backups/{project}/{env}/{name}" |
|---|
| 44 | + base_args.append(backup_path) |
|---|
| 41 | 45 | |
|---|
| 42 | | - # Granular restore mode |
|---|
| 43 | | - if mode == "db": |
|---|
| 44 | | - base_args.append("--db-only") |
|---|
| 45 | | - elif mode == "wp": |
|---|
| 46 | | - base_args.append("--wp-only") |
|---|
| 46 | + if dry_run: |
|---|
| 47 | + base_args.append("--dry-run") |
|---|
| 47 | 48 | |
|---|
| 48 | | - if source == "offsite": |
|---|
| 49 | | - # ops offsite restore <project> <env> |
|---|
| 50 | | - download_args = ["offsite", "restore", project, env] |
|---|
| 51 | | - yield _sse_line({"line": f"Downloading {project}/{env} from offsite...", "timestamp": _now()}) |
|---|
| 49 | + # Granular restore mode |
|---|
| 50 | + if mode == "db": |
|---|
| 51 | + base_args.append("--db-only") |
|---|
| 52 | + elif mode == "wp": |
|---|
| 53 | + base_args.append("--wp-only") |
|---|
| 52 | 54 | |
|---|
| 53 | | - download_ok = True |
|---|
| 54 | | - async for line in stream_ops_host(download_args, timeout=_BACKUP_TIMEOUT): |
|---|
| 55 | | - yield _sse_line({"line": line, "timestamp": _now()}) |
|---|
| 56 | | - if line.startswith("[error]"): |
|---|
| 57 | | - download_ok = False |
|---|
| 55 | + if source == "offsite": |
|---|
| 56 | + # ops offsite restore <project> <env> |
|---|
| 57 | + download_args = ["offsite", "restore", project, env] |
|---|
| 58 | + yield _sse_line({"line": f"Downloading {project}/{env} from offsite...", "timestamp": _now()}) |
|---|
| 58 | 59 | |
|---|
| 59 | | - if not download_ok: |
|---|
| 60 | | - yield _sse_line({"done": True, "success": False}) |
|---|
| 61 | | - return |
|---|
| 60 | + download_ok = True |
|---|
| 61 | + downloaded_path = None |
|---|
| 62 | + async for line in stream_ops_host(download_args, timeout=_BACKUP_TIMEOUT, op_id=op_id): |
|---|
| 63 | + yield _sse_line({"line": line, "timestamp": _now()}) |
|---|
| 64 | + if line.startswith("[error]"): |
|---|
| 65 | + download_ok = False |
|---|
| 66 | + # Capture downloaded file path from offsite.py output |
|---|
| 67 | + if "Downloaded to" in line and "/tmp/" in line: |
|---|
| 68 | + # Parse "Downloaded to: /tmp/filename.tar.gz" or similar |
|---|
| 69 | + for part in line.split(): |
|---|
| 70 | + if part.startswith("/tmp/") and part.endswith(".tar.gz"): |
|---|
| 71 | + downloaded_path = part |
|---|
| 72 | + elif line.startswith(" ✓ Downloaded to "): |
|---|
| 73 | + for part in line.split(): |
|---|
| 74 | + if part.startswith("/tmp/") and part.endswith(".tar.gz"): |
|---|
| 75 | + downloaded_path = part |
|---|
| 62 | 76 | |
|---|
| 63 | | - yield _sse_line({"line": "Download complete. Starting restore...", "timestamp": _now()}) |
|---|
| 77 | + if is_cancelled(op_id): |
|---|
| 78 | + yield _sse_line({"done": True, "success": False, "cancelled": True}) |
|---|
| 79 | + return |
|---|
| 64 | 80 | |
|---|
| 65 | | - success = True |
|---|
| 66 | | - async for line in stream_ops_host(base_args, timeout=_BACKUP_TIMEOUT): |
|---|
| 67 | | - yield _sse_line({"line": line, "timestamp": _now()}) |
|---|
| 68 | | - if line.startswith("[error]"): |
|---|
| 69 | | - success = False |
|---|
| 81 | + if not download_ok: |
|---|
| 82 | + yield _sse_line({"done": True, "success": False}) |
|---|
| 83 | + return |
|---|
| 70 | 84 | |
|---|
| 71 | | - yield _sse_line({"done": True, "success": success}) |
|---|
| 85 | + # Use the downloaded offsite file for restore |
|---|
| 86 | + if downloaded_path: |
|---|
| 87 | + base_args.append(downloaded_path) |
|---|
| 88 | + yield _sse_line({"line": f"Download complete. Restoring from {downloaded_path}...", "timestamp": _now()}) |
|---|
| 89 | + else: |
|---|
| 90 | + yield _sse_line({"line": "Download complete. Starting restore...", "timestamp": _now()}) |
|---|
| 91 | + |
|---|
| 92 | + success = True |
|---|
| 93 | + async for item in _stream_with_keepalive(stream_ops_host(base_args, timeout=_BACKUP_TIMEOUT, op_id=op_id)): |
|---|
| 94 | + if item is None: |
|---|
| 95 | + # Keepalive ping — SSE comment to prevent idle timeout |
|---|
| 96 | + yield ": keepalive\n\n" |
|---|
| 97 | + else: |
|---|
| 98 | + yield _sse_line({"line": item, "timestamp": _now()}) |
|---|
| 99 | + if item.startswith("[error]"): |
|---|
| 100 | + success = False |
|---|
| 101 | + |
|---|
| 102 | + if is_cancelled(op_id): |
|---|
| 103 | + yield _sse_line({"done": True, "success": False, "cancelled": True}) |
|---|
| 104 | + else: |
|---|
| 105 | + yield _sse_line({"done": True, "success": success}) |
|---|
| 106 | + finally: |
|---|
| 107 | + clear_cancelled(op_id) |
|---|
| 72 | 108 | |
|---|
| 73 | 109 | |
|---|
| 74 | 110 | def _now() -> str: |
|---|
| 75 | 111 | return datetime.now(timezone.utc).isoformat() |
|---|
| 76 | 112 | |
|---|
| 77 | 113 | |
|---|
| 114 | +async def _stream_with_keepalive(gen: AsyncGenerator[str, None]) -> AsyncGenerator[str | None, None]: |
|---|
| 115 | + """Wrap an async generator to yield None as keepalive when no data arrives within the interval.""" |
|---|
| 116 | + aiter = gen.__aiter__() |
|---|
| 117 | + pending = asyncio.ensure_future(aiter.__anext__()) |
|---|
| 118 | + while True: |
|---|
| 119 | + done, _ = await asyncio.wait({pending}, timeout=_KEEPALIVE_INTERVAL) |
|---|
| 120 | + if done: |
|---|
| 121 | + try: |
|---|
| 122 | + yield pending.result() |
|---|
| 123 | + except StopAsyncIteration: |
|---|
| 124 | + break |
|---|
| 125 | + pending = asyncio.ensure_future(aiter.__anext__()) |
|---|
| 126 | + else: |
|---|
| 127 | + yield None # keepalive — prevents Traefik idle timeout |
|---|
| 128 | + |
|---|
| 129 | + |
|---|
| 78 | 130 | @router.get("/{project}/{env}", summary="Restore a backup with real-time output") |
|---|
| 79 | 131 | async def restore_backup( |
|---|
| 80 | 132 | project: str, |
|---|
| .. | .. |
|---|
| 1 | +import json |
|---|
| 2 | +import sys |
|---|
| 3 | +import yaml |
|---|
| 4 | +from pathlib import Path |
|---|
| 5 | +from typing import Any, AsyncGenerator |
|---|
| 6 | +from datetime import datetime, timezone |
|---|
| 7 | + |
|---|
| 8 | +from fastapi import APIRouter, Depends, HTTPException |
|---|
| 9 | +from fastapi.responses import StreamingResponse |
|---|
| 10 | +from pydantic import BaseModel |
|---|
| 11 | + |
|---|
| 12 | +from app.auth import verify_token |
|---|
| 13 | +from app.ops_runner import ( |
|---|
| 14 | + run_command_host, stream_ops_host, new_op_id, is_cancelled, clear_cancelled, |
|---|
| 15 | + _BACKUP_TIMEOUT, |
|---|
| 16 | +) |
|---|
| 17 | + |
|---|
| 18 | +sys.path.insert(0, "/opt/infrastructure") |
|---|
| 19 | +from toolkit.discovery import all_projects # noqa: E402 |
|---|
| 20 | +from toolkit.descriptor import find as find_project # noqa: E402 |
|---|
| 21 | + |
|---|
| 22 | +router = APIRouter() |
|---|
| 23 | + |
|---|
| 24 | + |
|---|
| 25 | +class ScheduleUpdate(BaseModel): |
|---|
| 26 | + enabled: bool = True |
|---|
| 27 | + schedule: str = "03:00" |
|---|
| 28 | + environments: list[str] | None = None |
|---|
| 29 | + command: str | None = None |
|---|
| 30 | + offsite: bool = False |
|---|
| 31 | + offsite_envs: list[str] | None = None |
|---|
| 32 | + retention_local_days: int | None = 7 |
|---|
| 33 | + retention_offsite_days: int | None = 30 |
|---|
| 34 | + |
|---|
| 35 | + |
|---|
| 36 | +@router.get("/", summary="Get backup schedules for all projects") |
|---|
| 37 | +async def get_schedules( |
|---|
| 38 | + _: str = Depends(verify_token), |
|---|
| 39 | +) -> list[dict[str, Any]]: |
|---|
| 40 | + """Return backup schedule config for each project from descriptors.""" |
|---|
| 41 | + projects = all_projects() |
|---|
| 42 | + result = [] |
|---|
| 43 | + |
|---|
| 44 | + for name, desc in sorted(projects.items()): |
|---|
| 45 | + backup = desc.backup or {} |
|---|
| 46 | + result.append({ |
|---|
| 47 | + "project": name, |
|---|
| 48 | + "has_backup_dir": bool(backup.get("backup_dir") or backup.get("volumes")), |
|---|
| 49 | + "has_cli": desc.sync.get("type") == "cli", |
|---|
| 50 | + "static": desc.type == "static", |
|---|
| 51 | + "infrastructure": desc.type == "infrastructure", |
|---|
| 52 | + "environments": [e.name for e in desc.environments], |
|---|
| 53 | + # Backup schedule fields |
|---|
| 54 | + "enabled": backup.get("enabled", False), |
|---|
| 55 | + "schedule": backup.get("schedule", ""), |
|---|
| 56 | + "backup_environments": backup.get("environments"), |
|---|
| 57 | + "command": backup.get("command"), |
|---|
| 58 | + "offsite": backup.get("offsite", False), |
|---|
| 59 | + "offsite_envs": backup.get("offsite_envs"), |
|---|
| 60 | + "retention_local_days": backup.get("retention", {}).get("local_days"), |
|---|
| 61 | + "retention_offsite_days": backup.get("retention", {}).get("offsite_days"), |
|---|
| 62 | + }) |
|---|
| 63 | + |
|---|
| 64 | + return result |
|---|
| 65 | + |
|---|
| 66 | + |
|---|
| 67 | +@router.put("/{project}", summary="Update backup schedule for a project") |
|---|
| 68 | +async def update_schedule( |
|---|
| 69 | + project: str, |
|---|
| 70 | + body: ScheduleUpdate, |
|---|
| 71 | + _: str = Depends(verify_token), |
|---|
| 72 | +) -> dict[str, Any]: |
|---|
| 73 | + """Update the backup schedule in project.yaml and regenerate timers.""" |
|---|
| 74 | + desc = find_project(project) |
|---|
| 75 | + if not desc: |
|---|
| 76 | + raise HTTPException(status_code=404, detail=f"Project '{project}' not found") |
|---|
| 77 | + |
|---|
| 78 | + # Read the full project.yaml |
|---|
| 79 | + yaml_path = Path(desc.path) / "project.yaml" |
|---|
| 80 | + try: |
|---|
| 81 | + with open(yaml_path) as f: |
|---|
| 82 | + project_yaml = yaml.safe_load(f) or {} |
|---|
| 83 | + except FileNotFoundError: |
|---|
| 84 | + raise HTTPException(status_code=404, detail=f"project.yaml not found at {yaml_path}") |
|---|
| 85 | + |
|---|
| 86 | + # Update only the backup block fields that were sent |
|---|
| 87 | + backup = project_yaml.get("backup", {}) |
|---|
| 88 | + backup["enabled"] = body.enabled |
|---|
| 89 | + backup["schedule"] = body.schedule |
|---|
| 90 | + if body.command: |
|---|
| 91 | + backup["command"] = body.command |
|---|
| 92 | + if body.environments: |
|---|
| 93 | + backup["environments"] = body.environments |
|---|
| 94 | + if body.offsite: |
|---|
| 95 | + backup["offsite"] = True |
|---|
| 96 | + if body.offsite_envs: |
|---|
| 97 | + backup["offsite_envs"] = body.offsite_envs |
|---|
| 98 | + else: |
|---|
| 99 | + backup["offsite"] = False |
|---|
| 100 | + retention = backup.get("retention", {}) |
|---|
| 101 | + if body.retention_local_days is not None: |
|---|
| 102 | + retention["local_days"] = body.retention_local_days |
|---|
| 103 | + if body.offsite and body.retention_offsite_days is not None: |
|---|
| 104 | + retention["offsite_days"] = body.retention_offsite_days |
|---|
| 105 | + if retention: |
|---|
| 106 | + backup["retention"] = retention |
|---|
| 107 | + |
|---|
| 108 | + project_yaml["backup"] = backup |
|---|
| 109 | + new_yaml = yaml.dump(project_yaml, default_flow_style=False, sort_keys=False) |
|---|
| 110 | + |
|---|
| 111 | + write_result = await run_command_host([ |
|---|
| 112 | + "bash", "-c", |
|---|
| 113 | + f"cat > '{yaml_path}' << 'YAMLEOF'\n{new_yaml}YAMLEOF" |
|---|
| 114 | + ]) |
|---|
| 115 | + if not write_result["success"]: |
|---|
| 116 | + raise HTTPException( |
|---|
| 117 | + status_code=500, |
|---|
| 118 | + detail=f"Failed to write project.yaml: {write_result['error']}" |
|---|
| 119 | + ) |
|---|
| 120 | + |
|---|
| 121 | + gen_result = await run_command_host([ |
|---|
| 122 | + "/usr/local/bin/ops", "gen-timers" |
|---|
| 123 | + ]) |
|---|
| 124 | + if not gen_result["success"]: |
|---|
| 125 | + raise HTTPException( |
|---|
| 126 | + status_code=500, |
|---|
| 127 | + detail=f"Failed to regenerate timers: {gen_result['error'] or gen_result['output']}" |
|---|
| 128 | + ) |
|---|
| 129 | + |
|---|
| 130 | + return { |
|---|
| 131 | + "success": True, |
|---|
| 132 | + "project": project, |
|---|
| 133 | + "backup": backup, |
|---|
| 134 | + "gen_timers_output": gen_result["output"], |
|---|
| 135 | + } |
|---|
| 136 | + |
|---|
| 137 | + |
|---|
| 138 | +def _sse(payload: dict) -> str: |
|---|
| 139 | + return f"data: {json.dumps(payload)}\n\n" |
|---|
| 140 | + |
|---|
| 141 | + |
|---|
| 142 | +def _now() -> str: |
|---|
| 143 | + return datetime.now(timezone.utc).isoformat() |
|---|
| 144 | + |
|---|
| 145 | + |
|---|
| 146 | +async def _run_now_stream(project: str) -> AsyncGenerator[str, None]: |
|---|
| 147 | + """Run backup for a project (all configured envs).""" |
|---|
| 148 | + op_id = new_op_id() |
|---|
| 149 | + yield _sse({"op_id": op_id}) |
|---|
| 150 | + |
|---|
| 151 | + desc = find_project(project) |
|---|
| 152 | + if not desc: |
|---|
| 153 | + yield _sse({"line": f"[error] Project '{project}' not found", "timestamp": _now()}) |
|---|
| 154 | + yield _sse({"done": True, "success": False}) |
|---|
| 155 | + return |
|---|
| 156 | + |
|---|
| 157 | + envs = [e.name for e in desc.environments] or [None] |
|---|
| 158 | + |
|---|
| 159 | + success = True |
|---|
| 160 | + for env in envs: |
|---|
| 161 | + if is_cancelled(op_id): |
|---|
| 162 | + yield _sse({"line": "Cancelled.", "timestamp": _now()}) |
|---|
| 163 | + yield _sse({"done": True, "success": False, "cancelled": True}) |
|---|
| 164 | + clear_cancelled(op_id) |
|---|
| 165 | + return |
|---|
| 166 | + |
|---|
| 167 | + cmd = ["backup", project] |
|---|
| 168 | + if env: |
|---|
| 169 | + cmd.append(env) |
|---|
| 170 | + label = f"{project}/{env}" if env else project |
|---|
| 171 | + yield _sse({"line": f"=== Backing up {label} ===", "timestamp": _now()}) |
|---|
| 172 | + |
|---|
| 173 | + async for line in stream_ops_host(cmd, timeout=_BACKUP_TIMEOUT, op_id=op_id): |
|---|
| 174 | + yield _sse({"line": line, "timestamp": _now()}) |
|---|
| 175 | + if line.startswith("[error]") or line.startswith("ERROR"): |
|---|
| 176 | + success = False |
|---|
| 177 | + |
|---|
| 178 | + if is_cancelled(op_id): |
|---|
| 179 | + yield _sse({"done": True, "success": False, "cancelled": True}) |
|---|
| 180 | + else: |
|---|
| 181 | + yield _sse({"done": True, "success": success}) |
|---|
| 182 | + clear_cancelled(op_id) |
|---|
| 183 | + |
|---|
| 184 | + |
|---|
| 185 | +@router.get("/{project}/run", summary="Run backup now (streaming)") |
|---|
| 186 | +async def run_backup_now( |
|---|
| 187 | + project: str, |
|---|
| 188 | + _: str = Depends(verify_token), |
|---|
| 189 | +) -> StreamingResponse: |
|---|
| 190 | + """Trigger an immediate backup for a project, streaming output via SSE.""" |
|---|
| 191 | + desc = find_project(project) |
|---|
| 192 | + if not desc: |
|---|
| 193 | + raise HTTPException(status_code=404, detail=f"Project '{project}' not found") |
|---|
| 194 | + |
|---|
| 195 | + return StreamingResponse( |
|---|
| 196 | + _run_now_stream(project), |
|---|
| 197 | + media_type="text/event-stream", |
|---|
| 198 | + headers={"Cache-Control": "no-cache", "X-Accel-Buffering": "no"}, |
|---|
| 199 | + ) |
|---|
| .. | .. |
|---|
| 1 | | -import os |
|---|
| 1 | +import sys |
|---|
| 2 | 2 | from typing import Any |
|---|
| 3 | 3 | |
|---|
| 4 | | -import yaml |
|---|
| 5 | 4 | from fastapi import APIRouter, Depends, HTTPException, Query |
|---|
| 6 | 5 | |
|---|
| 7 | 6 | from app.auth import verify_token |
|---|
| 8 | 7 | from app.ops_runner import run_command |
|---|
| 9 | 8 | |
|---|
| 9 | +sys.path.insert(0, "/opt/infrastructure") |
|---|
| 10 | +from toolkit.descriptor import find as find_project # noqa: E402 |
|---|
| 11 | + |
|---|
| 10 | 12 | router = APIRouter() |
|---|
| 11 | 13 | |
|---|
| 12 | 14 | _DOCKER = "docker" |
|---|
| 13 | | -_REGISTRY_PATH = os.environ.get( |
|---|
| 14 | | - "REGISTRY_PATH", |
|---|
| 15 | | - "/opt/infrastructure/servers/hetzner-vps/registry.yaml", |
|---|
| 16 | | -) |
|---|
| 17 | | - |
|---|
| 18 | | -# --------------------------------------------------------------------------- |
|---|
| 19 | | -# Registry-based name prefix lookup (cached) |
|---|
| 20 | | -# --------------------------------------------------------------------------- |
|---|
| 21 | | -_prefix_cache: dict[str, str] | None = None |
|---|
| 22 | | - |
|---|
| 23 | | - |
|---|
| 24 | | -def _load_prefixes() -> dict[str, str]: |
|---|
| 25 | | - """Load project -> name_prefix mapping from the ops registry.""" |
|---|
| 26 | | - global _prefix_cache |
|---|
| 27 | | - if _prefix_cache is not None: |
|---|
| 28 | | - return _prefix_cache |
|---|
| 29 | | - |
|---|
| 30 | | - try: |
|---|
| 31 | | - with open(_REGISTRY_PATH) as f: |
|---|
| 32 | | - data = yaml.safe_load(f) |
|---|
| 33 | | - _prefix_cache = {} |
|---|
| 34 | | - for proj_name, cfg in data.get("projects", {}).items(): |
|---|
| 35 | | - _prefix_cache[proj_name] = cfg.get("name_prefix", proj_name) |
|---|
| 36 | | - return _prefix_cache |
|---|
| 37 | | - except Exception: |
|---|
| 38 | | - return {} |
|---|
| 39 | 15 | |
|---|
| 40 | 16 | |
|---|
| 41 | 17 | # --------------------------------------------------------------------------- |
|---|
| .. | .. |
|---|
| 76 | 52 | """ |
|---|
| 77 | 53 | Resolve the actual Docker container name from project/env/service. |
|---|
| 78 | 54 | |
|---|
| 79 | | - Uses the ops registry name_prefix mapping and tries patterns in order: |
|---|
| 80 | | - 1. {env}-{prefix}-{service} (mdf, seriousletter: dev-mdf-mysql-UUID) |
|---|
| 81 | | - 2. {prefix}-{service} (ringsaday: ringsaday-website-UUID, coolify: coolify-db) |
|---|
| 82 | | - 3. {prefix}-{env} (ringsaday: ringsaday-dev-UUID) |
|---|
| 83 | | - 4. exact {prefix} (coolify infra: coolify) |
|---|
| 55 | + Loads the project descriptor and expands container_prefix for the given |
|---|
| 56 | + env (e.g. "{env}-mdf" -> "dev-mdf"), then tries: |
|---|
| 57 | + 1. {expanded_prefix}-{service} e.g. dev-mdf-wordpress |
|---|
| 58 | + 2. exact match on expanded_prefix (infra containers with no service suffix) |
|---|
| 84 | 59 | """ |
|---|
| 85 | | - prefixes = _load_prefixes() |
|---|
| 86 | | - prefix = prefixes.get(project, project) |
|---|
| 60 | + desc = find_project(project) |
|---|
| 61 | + if desc is None: |
|---|
| 62 | + raise HTTPException( |
|---|
| 63 | + status_code=404, |
|---|
| 64 | + detail=f"Project '{project}' not found", |
|---|
| 65 | + ) |
|---|
| 87 | 66 | |
|---|
| 88 | | - # Pattern 1: {env}-{prefix}-{service} |
|---|
| 89 | | - hit = await _find_by_prefix(f"{env}-{prefix}-{service}") |
|---|
| 67 | + expanded_prefix = desc.container_prefix_for(env) |
|---|
| 68 | + |
|---|
| 69 | + # Pattern 1: {expanded_prefix}-{service} |
|---|
| 70 | + hit = await _find_by_prefix(f"{expanded_prefix}-{service}") |
|---|
| 90 | 71 | if hit: |
|---|
| 91 | 72 | return hit |
|---|
| 92 | 73 | |
|---|
| 93 | | - # Pattern 2: {prefix}-{service} |
|---|
| 94 | | - hit = await _find_by_prefix(f"{prefix}-{service}") |
|---|
| 95 | | - if hit: |
|---|
| 96 | | - return hit |
|---|
| 97 | | - |
|---|
| 98 | | - # Pattern 3: {prefix}-{env} |
|---|
| 99 | | - hit = await _find_by_prefix(f"{prefix}-{env}") |
|---|
| 100 | | - if hit: |
|---|
| 101 | | - return hit |
|---|
| 102 | | - |
|---|
| 103 | | - # Pattern 4: exact match when service == prefix (e.g., coolify) |
|---|
| 104 | | - if service == prefix: |
|---|
| 105 | | - hit = await _find_exact(prefix) |
|---|
| 74 | + # Pattern 2: exact match on prefix (infrastructure containers, e.g. "coolify") |
|---|
| 75 | + if service == expanded_prefix or service == desc.name: |
|---|
| 76 | + hit = await _find_exact(expanded_prefix) |
|---|
| 106 | 77 | if hit: |
|---|
| 107 | 78 | return hit |
|---|
| 108 | 79 | |
|---|
| .. | .. |
|---|
| 28 | 28 | to_env: str, |
|---|
| 29 | 29 | db_only: bool, |
|---|
| 30 | 30 | uploads_only: bool, |
|---|
| 31 | + dry_run: bool = False, |
|---|
| 31 | 32 | ) -> AsyncGenerator[str, None]: |
|---|
| 32 | 33 | """Stream sync output via SSE.""" |
|---|
| 33 | | - args = ["sync", project, "--from", from_env, "--to", to_env, "--yes"] |
|---|
| 34 | + args = ["sync", project, "--from", from_env, "--to", to_env] |
|---|
| 34 | 35 | if db_only: |
|---|
| 35 | 36 | args.append("--db-only") |
|---|
| 36 | 37 | if uploads_only: |
|---|
| 37 | 38 | args.append("--uploads-only") |
|---|
| 39 | + if dry_run: |
|---|
| 40 | + args.append("--dry-run") |
|---|
| 38 | 41 | |
|---|
| 39 | 42 | mode = "db-only" if db_only else ("uploads-only" if uploads_only else "full") |
|---|
| 40 | 43 | yield _sse_line({ |
|---|
| .. | .. |
|---|
| 58 | 61 | to_env: str = Query(default="int", alias="to"), |
|---|
| 59 | 62 | db_only: bool = Query(default=False), |
|---|
| 60 | 63 | uploads_only: bool = Query(default=False), |
|---|
| 64 | + dry_run: bool = Query(default=False), |
|---|
| 61 | 65 | _: str = Depends(verify_token), |
|---|
| 62 | 66 | ) -> StreamingResponse: |
|---|
| 63 | 67 | """Sync data backward (prod->int, int->dev) with SSE streaming.""" |
|---|
| .. | .. |
|---|
| 67 | 71 | detail=f"Invalid sync path '{from_env} -> {to_env}'. Only adjacent pairs are allowed: prod->int, int->dev.", |
|---|
| 68 | 72 | ) |
|---|
| 69 | 73 | return StreamingResponse( |
|---|
| 70 | | - _sync_generator(project, from_env, to_env, db_only, uploads_only), |
|---|
| 74 | + _sync_generator(project, from_env, to_env, db_only, uploads_only, dry_run), |
|---|
| 71 | 75 | media_type="text/event-stream", |
|---|
| 72 | 76 | headers={ |
|---|
| 73 | 77 | "Cache-Control": "no-cache", |
|---|
| .. | .. |
|---|
| 4 | 4 | container_name: ops-dashboard |
|---|
| 5 | 5 | restart: unless-stopped |
|---|
| 6 | 6 | env_file: .env |
|---|
| 7 | + healthcheck: |
|---|
| 8 | + test: ["CMD-SHELL", "curl -sf http://localhost:8080/api/registry/?token=$$AUTH_TOKEN"] |
|---|
| 9 | + interval: 30s |
|---|
| 10 | + timeout: 5s |
|---|
| 11 | + retries: 3 |
|---|
| 12 | + start_period: 10s |
|---|
| 7 | 13 | volumes: |
|---|
| 8 | 14 | - /opt/infrastructure:/opt/infrastructure |
|---|
| 9 | 15 | - /opt/data:/opt/data |
|---|
| 10 | 16 | - /var/run/docker.sock:/var/run/docker.sock |
|---|
| 11 | 17 | - ./static:/app/static |
|---|
| 18 | + - ./app:/app/app |
|---|
| 12 | 19 | labels: |
|---|
| 13 | 20 | - "traefik.enable=true" |
|---|
| 14 | 21 | - "traefik.http.routers.ops-dashboard.rule=Host(`cockpit.tekmidian.com`)" |
|---|
| 15 | | - - "traefik.http.routers.ops-dashboard.entrypoints=websecure" |
|---|
| 22 | + - "traefik.http.routers.ops-dashboard.entrypoints=https" |
|---|
| 16 | 23 | - "traefik.http.routers.ops-dashboard.tls=true" |
|---|
| 17 | 24 | - "traefik.http.routers.ops-dashboard.tls.certresolver=letsencrypt" |
|---|
| 18 | 25 | - "traefik.http.services.ops-dashboard.loadbalancer.server.port=8080" |
|---|
| 19 | 26 | networks: |
|---|
| 20 | | - - coolify |
|---|
| 27 | + - proxy |
|---|
| 21 | 28 | |
|---|
| 22 | 29 | networks: |
|---|
| 23 | | - coolify: |
|---|
| 30 | + proxy: |
|---|
| 24 | 31 | external: true |
|---|
| .. | .. |
|---|
| 1 | +name: ops-dashboard |
|---|
| 2 | +type: infrastructure |
|---|
| 3 | +description: "Ops Dashboard \u2014 FastAPI-based web dashboard for monitoring containers,\ |
|---|
| 4 | + \ triggering backups/restores, and managing server health across all projects" |
|---|
| 5 | +path: /opt/data/ops-dashboard |
|---|
| 6 | +container_prefix: ops-dashboard |
|---|
| 7 | +environments: |
|---|
| 8 | +- name: prod |
|---|
| 9 | + domain: ops.tekmidian.com |
|---|
| 10 | + compose_dir: /opt/data/ops-dashboard |
|---|
| 11 | +networks: |
|---|
| 12 | + proxy: coolify |
|---|
| 13 | +build: |
|---|
| 14 | + context: /opt/data/ops-dashboard |
|---|
| 15 | + image: ops-dashboard |
|---|
| 16 | + tag: latest |
|---|
| 17 | + description: "Built locally from Dockerfile \u2014 no registry push" |
|---|
| 18 | +services: |
|---|
| 19 | + ops-dashboard: |
|---|
| 20 | + container: ops-dashboard |
|---|
| 21 | + image: ops-dashboard:latest |
|---|
| 22 | + volumes: |
|---|
| 23 | + - /opt/infrastructure:/opt/infrastructure |
|---|
| 24 | + - /opt/data:/opt/data |
|---|
| 25 | + - /var/run/docker.sock:/var/run/docker.sock |
|---|
| 26 | + - /opt/data/ops-dashboard/static:/app/static |
|---|
| 27 | + - /opt/data/ops-dashboard/app:/app/app |
|---|
| 28 | +backup: |
|---|
| 29 | + enabled: true |
|---|
| 30 | + schedule: "04:15" |
|---|
| 31 | + retention: |
|---|
| 32 | + local_days: 30 |
|---|
| 33 | + offsite_days: 30 |
|---|
| 34 | + offsite: true |
|---|
| 35 | + backup_dir: /opt/data/backups/ops-dashboard |
|---|
| 36 | + volumes: |
|---|
| 37 | + - /opt/data/ops-dashboard |
|---|
| 38 | + environments: |
|---|
| 39 | + - prod |
|---|
| 40 | + offsite_envs: |
|---|
| 41 | + - prod |
|---|
| 42 | +restore: |
|---|
| 43 | + volumes: |
|---|
| 44 | + - /opt/data/ops-dashboard |
|---|
| 45 | + post_restore: |
|---|
| 46 | + - docker restart ops-dashboard |
|---|
| 47 | +promote: |
|---|
| 48 | + type: git |
|---|
| 49 | + description: "Infrastructure tool \u2014 deploy by rebuilding image from source" |
|---|
| 50 | + post_pull: rebuild |
|---|
| 51 | +health: |
|---|
| 52 | +- env: prod |
|---|
| 53 | + url: https://ops.tekmidian.com/ |
|---|
| 54 | + status: 200 |
|---|
| 55 | +domains: |
|---|
| 56 | + prod: ops.tekmidian.com |
|---|
| .. | .. |
|---|
| 525 | 525 | color: #f3f4f6; |
|---|
| 526 | 526 | font-weight: 600; |
|---|
| 527 | 527 | } |
|---|
| 528 | + |
|---|
| 529 | +/* ---------- Operation Progress Bar ---------- */ |
|---|
| 530 | +.op-progress { |
|---|
| 531 | + height: 3px; |
|---|
| 532 | + border-radius: 2px; |
|---|
| 533 | + margin-bottom: 0.75rem; |
|---|
| 534 | + overflow: hidden; |
|---|
| 535 | + background: #1f2937; |
|---|
| 536 | + transition: opacity 0.3s; |
|---|
| 537 | +} |
|---|
| 538 | +.op-progress.hidden { |
|---|
| 539 | + opacity: 0; |
|---|
| 540 | + height: 0; |
|---|
| 541 | + margin: 0; |
|---|
| 542 | +} |
|---|
| 543 | +.op-progress.running { |
|---|
| 544 | + opacity: 1; |
|---|
| 545 | +} |
|---|
| 546 | +.op-progress.running .op-progress-fill { |
|---|
| 547 | + width: 100%; |
|---|
| 548 | + height: 100%; |
|---|
| 549 | + background: linear-gradient(90deg, #3b82f6 0%, #60a5fa 50%, #3b82f6 100%); |
|---|
| 550 | + background-size: 200% 100%; |
|---|
| 551 | + animation: progress-slide 1.5s ease-in-out infinite; |
|---|
| 552 | +} |
|---|
| 553 | +.op-progress.done-ok .op-progress-fill { |
|---|
| 554 | + width: 100%; |
|---|
| 555 | + height: 100%; |
|---|
| 556 | + background: #10b981; |
|---|
| 557 | + animation: none; |
|---|
| 558 | +} |
|---|
| 559 | +.op-progress.done-fail .op-progress-fill { |
|---|
| 560 | + width: 100%; |
|---|
| 561 | + height: 100%; |
|---|
| 562 | + background: #ef4444; |
|---|
| 563 | + animation: none; |
|---|
| 564 | +} |
|---|
| 565 | + |
|---|
| 566 | +@keyframes progress-slide { |
|---|
| 567 | + 0% { background-position: 200% 0; } |
|---|
| 568 | + 100% { background-position: -200% 0; } |
|---|
| 569 | +} |
|---|
| .. | .. |
|---|
| 83 | 83 | <svg width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><path d="M12 2L2 7l10 5 10-5-10-5z"/><path d="M2 17l10 5 10-5"/><path d="M2 12l10 5 10-5"/></svg> |
|---|
| 84 | 84 | Operations |
|---|
| 85 | 85 | </a> |
|---|
| 86 | + <a class="sidebar-link" data-page="schedules" onclick="showPage('schedules')"> |
|---|
| 87 | + <svg width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><circle cx="12" cy="12" r="10"/><polyline points="12 6 12 12 16 14"/></svg> |
|---|
| 88 | + Schedules |
|---|
| 89 | + </a> |
|---|
| 86 | 90 | <a class="sidebar-link" data-page="system" onclick="showPage('system')"> |
|---|
| 87 | 91 | <svg width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><circle cx="12" cy="12" r="3"/><path d="M19.4 15a1.65 1.65 0 00.33 1.82l.06.06a2 2 0 010 2.83 2 2 0 01-2.83 0l-.06-.06a1.65 1.65 0 00-1.82-.33 1.65 1.65 0 00-1 1.51V21a2 2 0 01-4 0v-.09A1.65 1.65 0 009 19.4a1.65 1.65 0 00-1.82.33l-.06.06a2 2 0 01-2.83-2.83l.06-.06A1.65 1.65 0 004.68 15a1.65 1.65 0 00-1.51-1H3a2 2 0 010-4h.09A1.65 1.65 0 004.6 9a1.65 1.65 0 00-.33-1.82l-.06-.06a2 2 0 012.83-2.83l.06.06A1.65 1.65 0 009 4.68a1.65 1.65 0 001-1.51V3a2 2 0 014 0v.09a1.65 1.65 0 001 1.51 1.65 1.65 0 001.82-.33l.06-.06a2 2 0 012.83 2.83l-.06.06A1.65 1.65 0 0019.4 9a1.65 1.65 0 001.51 1H21a2 2 0 010 4h-.09a1.65 1.65 0 00-1.51 1z"/></svg> |
|---|
| 88 | 92 | System |
|---|
| .. | .. |
|---|
| 180 | 184 | |
|---|
| 181 | 185 | <!-- SSE output (shown after start) --> |
|---|
| 182 | 186 | <div id="restore-modal-output" style="display:none;"> |
|---|
| 187 | + <div id="restore-progress-bar" class="op-progress hidden"><div class="op-progress-fill"></div></div> |
|---|
| 183 | 188 | <div style="font-size:0.8125rem;font-weight:500;color:#9ca3af;margin-bottom:0.375rem;">Output</div> |
|---|
| 184 | 189 | <div id="restore-modal-terminal" class="terminal" style="max-height:300px;"></div> |
|---|
| 185 | 190 | </div> |
|---|
| .. | .. |
|---|
| 205 | 210 | Dry run (preview only) |
|---|
| 206 | 211 | </label> |
|---|
| 207 | 212 | <div id="ops-modal-output" style="display:none;"> |
|---|
| 213 | + <div id="ops-progress-bar" class="op-progress hidden"><div class="op-progress-fill"></div></div> |
|---|
| 208 | 214 | <div style="font-size:0.8125rem;font-weight:500;color:#9ca3af;margin-bottom:0.375rem;">Output</div> |
|---|
| 209 | 215 | <div id="ops-modal-terminal" class="terminal" style="max-height:350px;"></div> |
|---|
| 210 | 216 | </div> |
|---|
| .. | .. |
|---|
| 216 | 222 | </div> |
|---|
| 217 | 223 | </div> |
|---|
| 218 | 224 | |
|---|
| 219 | | -<script src="/static/js/app.js?v=12"></script> |
|---|
| 225 | +<!-- Schedule Edit Modal --> |
|---|
| 226 | +<div id="schedule-modal" class="modal-overlay" style="display:none;" onclick="if(event.target===this)closeScheduleModal()"> |
|---|
| 227 | + <div class="modal-box" style="max-width:520px;"> |
|---|
| 228 | + <div class="modal-header"> |
|---|
| 229 | + <span id="schedule-modal-title" style="font-weight:600;color:#f3f4f6;">Edit Schedule</span> |
|---|
| 230 | + <button onclick="closeScheduleModal()" style="background:none;border:none;color:#9ca3af;font-size:1.25rem;cursor:pointer;">×</button> |
|---|
| 231 | + </div> |
|---|
| 232 | + <div class="modal-body"> |
|---|
| 233 | + <input type="hidden" id="sched-project"> |
|---|
| 234 | + |
|---|
| 235 | + <div style="margin-bottom:1rem;"> |
|---|
| 236 | + <label style="display:flex;align-items:center;gap:0.5rem;font-size:0.875rem;color:#d1d5db;cursor:pointer;"> |
|---|
| 237 | + <input type="checkbox" id="sched-enabled" style="width:1rem;height:1rem;accent-color:#3b82f6;"> |
|---|
| 238 | + Enabled |
|---|
| 239 | + </label> |
|---|
| 240 | + </div> |
|---|
| 241 | + |
|---|
| 242 | + <div style="margin-bottom:1rem;"> |
|---|
| 243 | + <div style="font-size:0.8125rem;font-weight:500;color:#9ca3af;margin-bottom:0.375rem;">Schedule (HH:MM UTC)</div> |
|---|
| 244 | + <div style="display:flex;align-items:center;gap:0.75rem;"> |
|---|
| 245 | + <input type="time" id="sched-time" class="form-input" style="width:120px;"> |
|---|
| 246 | + <span id="sched-server-clock" style="font-size:0.75rem;color:#6b7280;font-variant-numeric:tabular-nums;"></span> |
|---|
| 247 | + </div> |
|---|
| 248 | + </div> |
|---|
| 249 | + |
|---|
| 250 | + <div style="margin-bottom:1rem;"> |
|---|
| 251 | + <div style="font-size:0.8125rem;font-weight:500;color:#9ca3af;margin-bottom:0.375rem;">Environments</div> |
|---|
| 252 | + <div id="sched-envs" style="display:flex;gap:1rem;flex-wrap:wrap;"></div> |
|---|
| 253 | + </div> |
|---|
| 254 | + |
|---|
| 255 | + <div style="margin-bottom:1rem;"> |
|---|
| 256 | + <div style="font-size:0.8125rem;font-weight:500;color:#9ca3af;margin-bottom:0.375rem;">Custom Command (optional)</div> |
|---|
| 257 | + <input type="text" id="sched-command" class="form-input" placeholder="Leave empty for default ops backup" style="width:100%;font-size:0.8125rem;"> |
|---|
| 258 | + </div> |
|---|
| 259 | + |
|---|
| 260 | + <div style="margin-bottom:1rem;"> |
|---|
| 261 | + <label style="display:flex;align-items:center;gap:0.5rem;font-size:0.875rem;color:#d1d5db;cursor:pointer;"> |
|---|
| 262 | + <input type="checkbox" id="sched-offsite" onchange="toggleOffsiteSection()" style="width:1rem;height:1rem;accent-color:#3b82f6;"> |
|---|
| 263 | + Offsite Upload |
|---|
| 264 | + </label> |
|---|
| 265 | + </div> |
|---|
| 266 | + |
|---|
| 267 | + <div id="sched-offsite-section" style="display:none;margin-bottom:1rem;padding-left:1.5rem;"> |
|---|
| 268 | + <div style="font-size:0.8125rem;font-weight:500;color:#9ca3af;margin-bottom:0.375rem;">Offsite Environments</div> |
|---|
| 269 | + <div id="sched-offsite-envs" style="display:flex;gap:1rem;flex-wrap:wrap;"></div> |
|---|
| 270 | + </div> |
|---|
| 271 | + |
|---|
| 272 | + <div style="display:flex;gap:1rem;margin-bottom:1rem;"> |
|---|
| 273 | + <div> |
|---|
| 274 | + <div style="font-size:0.8125rem;font-weight:500;color:#9ca3af;margin-bottom:0.375rem;">Local Retention (days)</div> |
|---|
| 275 | + <input type="number" id="sched-retention-local" class="form-input" style="width:80px;" min="1" max="365"> |
|---|
| 276 | + </div> |
|---|
| 277 | + <div> |
|---|
| 278 | + <div style="font-size:0.8125rem;font-weight:500;color:#9ca3af;margin-bottom:0.375rem;">Offsite Retention (days)</div> |
|---|
| 279 | + <input type="number" id="sched-retention-offsite" class="form-input" style="width:80px;" min="1" max="365"> |
|---|
| 280 | + </div> |
|---|
| 281 | + </div> |
|---|
| 282 | + </div> |
|---|
| 283 | + <div class="modal-footer"> |
|---|
| 284 | + <button class="btn btn-ghost btn-sm" onclick="closeScheduleModal()">Cancel</button> |
|---|
| 285 | + <button id="sched-save-btn" class="btn btn-primary btn-sm" onclick="saveSchedule()">Save</button> |
|---|
| 286 | + </div> |
|---|
| 287 | + </div> |
|---|
| 288 | +</div> |
|---|
| 289 | + |
|---|
| 290 | +<script src="/static/js/app.js?v=13"></script> |
|---|
| 220 | 291 | </body> |
|---|
| 221 | 292 | </html> |
|---|
| .. | .. |
|---|
| 1 | 1 | 'use strict'; |
|---|
| 2 | | -const APP_VERSION = 'v13-20260222'; |
|---|
| 2 | +const APP_VERSION = 'v14-20260222'; |
|---|
| 3 | 3 | |
|---|
| 4 | 4 | // ============================================================ |
|---|
| 5 | 5 | // OPS Dashboard — Vanilla JS Application (v6) |
|---|
| .. | .. |
|---|
| 38 | 38 | let opsEventSource = null; |
|---|
| 39 | 39 | let opsCtx = { type: null, project: null, fromEnv: null, toEnv: null }; |
|---|
| 40 | 40 | let cachedRegistry = null; |
|---|
| 41 | +let currentOpId = null; |
|---|
| 41 | 42 | |
|---|
| 42 | 43 | // --------------------------------------------------------------------------- |
|---|
| 43 | 44 | // Helpers |
|---|
| .. | .. |
|---|
| 104 | 105 | } |
|---|
| 105 | 106 | |
|---|
| 106 | 107 | // --------------------------------------------------------------------------- |
|---|
| 108 | +// Progress Bar |
|---|
| 109 | +// --------------------------------------------------------------------------- |
|---|
// Set the visual state of a progress bar element.
// state: 'running' | 'ok' | 'fail' | anything else -> hidden.
function _setProgressState(barId, state) {
  const el = document.getElementById(barId);
  if (!el) return;
  // Dispatch table instead of chained ternaries; unknown states hide the bar.
  const stateClass = { running: 'running', ok: 'done-ok', fail: 'done-fail' };
  el.className = 'op-progress ' + (stateClass[state] || 'hidden');
}
|---|
| 115 | + |
|---|
| 116 | +// --------------------------------------------------------------------------- |
|---|
| 107 | 117 | // Auth |
|---|
| 108 | 118 | // --------------------------------------------------------------------------- |
|---|
| 109 | 119 | function getToken() { return localStorage.getItem('ops_token'); } |
|---|
| .. | .. |
|---|
| 142 | 152 | async function api(path, opts = {}) { |
|---|
| 143 | 153 | const token = getToken(); |
|---|
| 144 | 154 | const headers = { ...(opts.headers || {}), 'Authorization': 'Bearer ' + token }; |
|---|
| 145 | | - const resp = await fetch(path, { ...opts, headers }); |
|---|
| 155 | + const resp = await fetch(path, { ...opts, headers, cache: 'no-store' }); |
|---|
| 146 | 156 | if (resp.status === 401) { doLogout(); throw new Error('Session expired'); } |
|---|
| 147 | 157 | if (!resp.ok) { const b = await resp.text(); throw new Error(b || 'HTTP ' + resp.status); } |
|---|
| 148 | 158 | const ct = resp.headers.get('content-type') || ''; |
|---|
| .. | .. |
|---|
| 197 | 207 | case 'backups': renderBackups(); break; |
|---|
| 198 | 208 | case 'system': renderSystem(); break; |
|---|
| 199 | 209 | case 'operations': renderOperations(); break; |
|---|
| 210 | + case 'schedules': renderSchedules(); break; |
|---|
| 200 | 211 | default: renderDashboard(); |
|---|
| 201 | 212 | } |
|---|
| 202 | 213 | } |
|---|
| .. | .. |
|---|
| 288 | 299 | } else if (backupDrillLevel === 2) { |
|---|
| 289 | 300 | h = '<a onclick="backupDrillBack(0)">Backups</a><span class="sep">/</span><a onclick="backupDrillBack(1)">' + esc(backupDrillProject) + '</a><span class="sep">/</span><span class="current">' + esc(backupDrillEnv) + '</span>'; |
|---|
| 290 | 301 | } |
|---|
| 302 | + } else if (currentPage === 'schedules') { |
|---|
| 303 | + h = '<span class="current">Schedules</span>'; |
|---|
| 291 | 304 | } else if (currentPage === 'system') { |
|---|
| 292 | 305 | h = '<span class="current">System</span>'; |
|---|
| 293 | 306 | } else if (currentPage === 'operations') { |
|---|
| .. | .. |
|---|
| 499 | 512 | // YYYYMMDD_HHMMSS -> YYYY-MM-DD HH:MM |
|---|
| 500 | 513 | const m = String(raw).match(/^(\d{4})(\d{2})(\d{2})[_T](\d{2})(\d{2})/); |
|---|
| 501 | 514 | if (m) return `${m[1]}-${m[2]}-${m[3]} ${m[4]}:${m[5]}`; |
|---|
| 502 | | - // YYYY-MM-DD passthrough |
|---|
| 515 | + // ISO 8601: YYYY-MM-DDTHH:MM:SS |
|---|
| 516 | + const iso = String(raw).match(/^(\d{4})-(\d{2})-(\d{2})[T ](\d{2}):(\d{2})/); |
|---|
| 517 | + if (iso) return `${iso[1]}-${iso[2]}-${iso[3]} ${iso[4]}:${iso[5]}`; |
|---|
| 503 | 518 | return raw; |
|---|
| 504 | 519 | } |
|---|
| 505 | 520 | |
|---|
| 506 | | -// Parse YYYYMMDD_HHMMSS -> { dateKey: 'YYYY-MM-DD', timeStr: 'HH:MM' } |
|---|
| 521 | +// Parse backup date -> { dateKey: 'YYYY-MM-DD', timeStr: 'HH:MM' } |
|---|
function parseBackupDate(raw) {
  // Split a backup timestamp into { dateKey: 'YYYY-MM-DD', timeStr: 'HH:MM' }.
  // Accepts the compact archive format (YYYYMMDD_HHMMSS) and ISO 8601
  // (YYYY-MM-DDTHH:MM:SS); anything else falls through unparsed.
  if (!raw) return { dateKey: '', timeStr: '' };
  const text = String(raw);
  const formats = [
    /^(\d{4})(\d{2})(\d{2})[_T](\d{2})(\d{2})/,        // compact: YYYYMMDD_HHMMSS
    /^(\d{4})-(\d{2})-(\d{2})[T ](\d{2}):(\d{2})/,     // ISO 8601
  ];
  for (const re of formats) {
    const g = text.match(re);
    if (g) return { dateKey: `${g[1]}-${g[2]}-${g[3]}`, timeStr: `${g[4]}:${g[5]}` };
  }
  // Unrecognized format: surface the raw value as the date key.
  return { dateKey: raw, timeStr: '' };
}
|---|
| 513 | 532 | |
|---|
| .. | .. |
|---|
| 536 | 555 | if (chevron) chevron.classList.toggle('open', !isOpen); |
|---|
| 537 | 556 | } |
|---|
| 538 | 557 | |
|---|
// Normalize any backup date to ISO-sortable format (YYYY-MM-DDTHH:MM:SS).
function normalizeBackupDate(raw) {
  if (!raw) return '';
  const text = String(raw);
  // Compact form YYYYMMDD_HHMM[SS] (seconds optional, defaulted to 00).
  const parts = text.match(/^(\d{4})(\d{2})(\d{2})[_T](\d{2})(\d{2})(\d{2})?/);
  if (!parts) return text; // already ISO-ish: pass through unchanged
  const [, y, mo, d, hh, mm, ss] = parts;
  return `${y}-${mo}-${d}T${hh}:${mm}:${ss || '00'}`;
}
|---|
| 567 | + |
|---|
| 539 | 568 | // --------------------------------------------------------------------------- |
|---|
| 540 | 569 | // Backups — merge helper (dedup local+offsite by filename) |
|---|
| 541 | 570 | // --------------------------------------------------------------------------- |
|---|
| .. | .. |
|---|
| 544 | 573 | |
|---|
| 545 | 574 | for (const b of local) { |
|---|
| 546 | 575 | const name = b.name || b.file || ''; |
|---|
| 547 | | - const key = name || (b.project + '/' + b.env + '/' + (b.date || b.timestamp)); |
|---|
| 576 | + const key = name || (b.project + '/' + b.env + '/' + (b.date || b.mtime || b.timestamp)); |
|---|
| 548 | 577 | byName.set(key, { |
|---|
| 549 | 578 | project: b.project || '', |
|---|
| 550 | 579 | env: b.env || b.environment || '', |
|---|
| 551 | 580 | name: name, |
|---|
| 552 | | - date: b.date || b.timestamp || '', |
|---|
| 581 | + date: normalizeBackupDate(b.date || b.mtime || b.timestamp || ''), |
|---|
| 553 | 582 | size_human: b.size_human || b.size || '', |
|---|
| 554 | 583 | size_bytes: Number(b.size || 0), |
|---|
| 555 | 584 | hasLocal: true, |
|---|
| .. | .. |
|---|
| 561 | 590 | const name = b.name || ''; |
|---|
| 562 | 591 | const key = name || (b.project + '/' + b.env + '/' + (b.date || '')); |
|---|
| 563 | 592 | if (byName.has(key)) { |
|---|
| 564 | | - byName.get(key).hasOffsite = true; |
|---|
| 593 | + const existing = byName.get(key); |
|---|
| 594 | + existing.hasOffsite = true; |
|---|
| 595 | + if (!existing.date && b.date) existing.date = normalizeBackupDate(b.date); |
|---|
| 565 | 596 | } else { |
|---|
| 566 | 597 | byName.set(key, { |
|---|
| 567 | 598 | project: b.project || '', |
|---|
| 568 | 599 | env: b.env || b.environment || '', |
|---|
| 569 | 600 | name: name, |
|---|
| 570 | | - date: b.date || '', |
|---|
| 601 | + date: normalizeBackupDate(b.date || ''), |
|---|
| 571 | 602 | size_human: b.size || '', |
|---|
| 572 | 603 | size_bytes: Number(b.size_bytes || 0), |
|---|
| 573 | 604 | hasLocal: false, |
|---|
| .. | .. |
|---|
| 628 | 659 | h += '</div></div>'; |
|---|
| 629 | 660 | |
|---|
| 630 | 661 | // Global stat tiles |
|---|
| 631 | | - h += '<div class="grid-stats" style="margin-bottom:1.5rem;">'; |
|---|
| 662 | + h += '<div class="grid-stats" style="margin-bottom:0.5rem;">'; |
|---|
| 632 | 663 | h += statTile('Local', localCount, '#3b82f6'); |
|---|
| 633 | 664 | h += statTile('Offsite', offsiteCount, '#8b5cf6'); |
|---|
| 634 | 665 | h += statTile('Synced', syncedCount, '#10b981'); |
|---|
| 635 | | - h += statTile('Latest', latestDisplay, '#f59e0b'); |
|---|
| 636 | 666 | h += '</div>'; |
|---|
| 667 | + h += `<div style="margin-bottom:1.5rem;font-size:0.8125rem;color:#9ca3af;">Latest backup: <span style="color:#f59e0b;">${esc(latestDisplay)}</span></div>`; |
|---|
| 637 | 668 | |
|---|
| 638 | 669 | // Project cards |
|---|
| 639 | 670 | const projects = groupBy(all, 'project'); |
|---|
| .. | .. |
|---|
| 722 | 753 | |
|---|
| 723 | 754 | let h = '<div class="page-enter">'; |
|---|
| 724 | 755 | |
|---|
| 756 | + // Action bar: Create Backup + Upload |
|---|
| 757 | + h += `<div style="display:flex;gap:0.5rem;margin-bottom:0.75rem;">`; |
|---|
| 758 | + h += `<button class="btn btn-primary btn-sm" onclick="createBackup('${esc(backupDrillProject)}','${esc(backupDrillEnv)}')">Create Backup</button>`; |
|---|
| 759 | + h += `<button class="btn btn-ghost btn-sm" style="color:#a78bfa;border-color:rgba(167,139,250,0.25);" onclick="uploadOffsiteBackup('${esc(backupDrillProject)}','${esc(backupDrillEnv)}')">Upload to Offsite</button>`; |
|---|
| 760 | + if (filtered.some(b => b.hasOffsite && !b.hasLocal)) { |
|---|
| 761 | + h += `<button class="btn btn-ghost btn-sm" style="color:#34d399;border-color:rgba(52,211,153,0.25);" onclick="downloadOffsiteBackup('${esc(backupDrillProject)}','${esc(backupDrillEnv)}')">Download from Offsite</button>`; |
|---|
| 762 | + } |
|---|
| 763 | + h += `</div>`; |
|---|
| 764 | + |
|---|
| 725 | 765 | // Selection action bar |
|---|
| 726 | 766 | h += `<div id="backup-selection-bar" class="selection-bar" style="display:${selectedBackups.size > 0 ? 'flex' : 'none'};">`; |
|---|
| 727 | 767 | h += `<span id="selection-count">${selectedBackups.size} selected</span>`; |
|---|
| .. | .. |
|---|
| 782 | 822 | const checked = selectedBackups.has(b.name) ? ' checked' : ''; |
|---|
| 783 | 823 | const deleteBtn = `<button class="btn btn-ghost btn-xs" style="color:#f87171;border-color:#7f1d1d;" onclick="deleteBackup('${esc(b.project)}','${esc(b.env)}','${esc(b.name)}',${b.hasLocal},${b.hasOffsite})">Delete</button>`; |
|---|
| 784 | 824 | const uploadBtn = (b.hasLocal && !b.hasOffsite) |
|---|
| 785 | | - ? `<button class="btn btn-ghost btn-xs" style="color:#a78bfa;border-color:rgba(167,139,250,0.25);" onclick="uploadOffsiteBackup('${esc(b.project)}','${esc(b.env)}')">Upload</button>` |
|---|
| 825 | + ? `<button class="btn btn-ghost btn-xs" style="color:#a78bfa;border-color:rgba(167,139,250,0.25);" onclick="uploadOffsiteBackup('${esc(b.project)}','${esc(b.env)}','${esc(b.name)}')">Upload</button>` |
|---|
| 826 | + : ''; |
|---|
| 827 | + const downloadBtn = (!b.hasLocal && b.hasOffsite) |
|---|
| 828 | + ? `<button class="btn btn-ghost btn-xs" style="color:#34d399;border-color:rgba(52,211,153,0.25);" onclick="downloadOffsiteBackup('${esc(b.project)}','${esc(b.env)}','${esc(b.name)}')">Download</button>` |
|---|
| 786 | 829 | : ''; |
|---|
| 787 | 830 | h += `<tr> |
|---|
| 788 | 831 | <td style="padding-left:0.75rem;"><input type="checkbox" class="backup-cb" value="${esc(b.name)}"${checked} onclick="toggleBackupSelect('${esc(b.name)}')" style="accent-color:#3b82f6;cursor:pointer;"></td> |
|---|
| .. | .. |
|---|
| 792 | 835 | <td style="white-space:nowrap;"> |
|---|
| 793 | 836 | <button class="btn btn-danger btn-xs" onclick="openRestoreModal('${esc(b.project)}','${esc(b.env)}','${restoreSource}','${esc(b.name)}',${b.hasLocal},${b.hasOffsite})">Restore</button> |
|---|
| 794 | 837 | ${uploadBtn} |
|---|
| 838 | + ${downloadBtn} |
|---|
| 795 | 839 | ${deleteBtn} |
|---|
| 796 | 840 | </td> |
|---|
| 797 | 841 | </tr>`; |
|---|
| .. | .. |
|---|
| 870 | 914 | if (allOffsite) target = 'offsite'; |
|---|
| 871 | 915 | } |
|---|
| 872 | 916 | const label = target === 'both' ? 'local + offsite' : target; |
|---|
| 873 | | - if (!confirm(`Delete ${names.length} backup${names.length > 1 ? 's' : ''} (${label})?\n\nThis cannot be undone.`)) return; |
|---|
| 917 | + if (!await showConfirmDialog(`Delete ${names.length} backup${names.length > 1 ? 's' : ''} (${label})?\n\nThis cannot be undone.`, 'Delete', true)) return; |
|---|
| 874 | 918 | toast(`Deleting ${names.length} backups (${label})...`, 'info'); |
|---|
| 875 | 919 | let ok = 0, fail = 0; |
|---|
| 876 | 920 | for (const name of names) { |
|---|
| .. | .. |
|---|
| 885 | 929 | if (currentPage === 'backups') renderBackups(); |
|---|
| 886 | 930 | } |
|---|
| 887 | 931 | |
|---|
| 888 | | -async function uploadOffsiteBackup(project, env) { |
|---|
| 889 | | - if (!confirm(`Upload latest ${project}/${env} backup to offsite storage?`)) return; |
|---|
| 890 | | - toast('Uploading to offsite...', 'info'); |
|---|
| 891 | | - try { |
|---|
| 892 | | - await api(`/api/backups/offsite/upload/${encodeURIComponent(project)}/${encodeURIComponent(env)}`, { method: 'POST' }); |
|---|
| 893 | | - toast('Offsite upload complete for ' + project + '/' + env, 'success'); |
|---|
| 894 | | - cachedBackups = null; |
|---|
| 895 | | - if (currentPage === 'backups') renderBackups(); |
|---|
| 896 | | - } catch (e) { toast('Upload failed: ' + e.message, 'error'); } |
|---|
// Upload a backup archive to offsite storage, streaming CLI output into the
// ops modal over SSE. `name` is optional: when omitted the server-side route
// is called without a name (presumably uploading the latest backup for
// project/env — confirm against the /api/backups/offsite/stream handler).
async function uploadOffsiteBackup(project, env, name) {
  const label = name ? name : `latest ${project}/${env}`;
  if (!await showConfirmDialog(`Upload ${label} to offsite storage?`, 'Upload')) return;

  // Open the ops modal with streaming output
  opsCtx = { type: 'upload', project, fromEnv: env, toEnv: null };
  // Drop any previous stream so only one SSE connection is live at a time.
  if (opsEventSource) { opsEventSource.close(); opsEventSource = null; }

  const title = document.getElementById('ops-modal-title');
  const info = document.getElementById('ops-modal-info');
  const startBtn = document.getElementById('ops-start-btn');
  const dryRunRow = document.getElementById('ops-dry-run-row');
  const outputDiv = document.getElementById('ops-modal-output');
  const term = document.getElementById('ops-modal-terminal');

  title.textContent = 'Upload to Offsite';
  let infoHtml = '<div class="restore-info-row"><span class="restore-info-label">Project</span><span class="restore-info-value">' + esc(project) + '</span></div>'
    + '<div class="restore-info-row"><span class="restore-info-label">Environment</span><span class="restore-info-value">' + esc(env) + '</span></div>';
  if (name) infoHtml += '<div class="restore-info-row"><span class="restore-info-label">File</span><span class="restore-info-value mono">' + esc(name) + '</span></div>';
  info.innerHTML = infoHtml;
  // Dry-run and Start controls do not apply: the upload begins immediately.
  if (dryRunRow) dryRunRow.style.display = 'none';
  startBtn.style.display = 'none';

  outputDiv.style.display = 'block';
  term.textContent = 'Starting upload...\n';
  currentOpId = null;
  _setProgressState('ops-progress-bar', 'running');

  document.getElementById('ops-modal').style.display = 'flex';

  // EventSource cannot set headers, so the auth token travels in the query string.
  let url = '/api/backups/offsite/stream/' + encodeURIComponent(project) + '/' + encodeURIComponent(env) + '?token=' + encodeURIComponent(getToken());
  if (name) url += '&name=' + encodeURIComponent(name);
  const es = new EventSource(url);
  opsEventSource = es;

  es.onmessage = function(e) {
    try {
      const d = JSON.parse(e.data);
      // First event carries the operation id used by the cancel endpoint.
      if (d.op_id && !currentOpId) { currentOpId = d.op_id; return; }
      if (d.done) {
        // Terminal event: close the stream and report outcome.
        es.close();
        opsEventSource = null;
        currentOpId = null;
        const msg = d.cancelled ? '\n--- Cancelled ---\n' : d.success ? '\n--- Upload complete ---\n' : '\n--- Upload FAILED ---\n';
        term.textContent += msg;
        term.scrollTop = term.scrollHeight;
        toast(d.cancelled ? 'Upload cancelled' : d.success ? 'Offsite upload complete for ' + project + '/' + env : 'Upload failed', d.success ? 'success' : d.cancelled ? 'warning' : 'error');
        _setProgressState('ops-progress-bar', d.success ? 'ok' : 'fail');
        // Invalidate the backups cache so offsite badges refresh on next render.
        cachedBackups = null;
        if (d.success && currentPage === 'backups') renderBackups();
        return;
      }
      if (d.line) {
        // Ordinary output line: append and keep the terminal pinned to the bottom.
        term.textContent += d.line + '\n';
        term.scrollTop = term.scrollHeight;
      }
    } catch (_) {} // ignore malformed or keep-alive SSE frames
  };

  es.onerror = function() {
    // Transport failure (server gone or network drop): tear down and flag failure.
    es.close();
    opsEventSource = null;
    currentOpId = null;
    term.textContent += '\n--- Connection lost ---\n';
    toast('Connection lost', 'error');
    _setProgressState('ops-progress-bar', 'fail');
  };
}
|---|
| 1000 | + |
|---|
| 1001 | +// --------------------------------------------------------------------------- |
|---|
| 1002 | +// Offsite Download (download to local storage, no restore) |
|---|
| 1003 | +// --------------------------------------------------------------------------- |
|---|
// Download a backup archive from offsite to local storage (no restore),
// streaming CLI output into the ops modal over SSE. `name` is optional:
// when omitted the newest offsite-only backup for project/env is picked
// from the cached backup list.
async function downloadOffsiteBackup(project, env, name) {
  if (!name) {
    // No specific name: find the latest offsite-only backup for this env
    const latest = cachedBackups && cachedBackups.find(b => b.project === project && b.env === env && b.hasOffsite && !b.hasLocal);
    if (!latest) {
      toast('No offsite-only backup found for ' + project + '/' + env, 'warning');
      return;
    }
    name = latest.name;
  }
  // (Fix: removed dead `label` local — the confirm prompt always uses `name`,
  // which is guaranteed non-empty at this point.)
  if (!await showConfirmDialog(`Download "${name}" from offsite to local storage?`, 'Download')) return;

  // Open the ops modal with streaming output
  opsCtx = { type: 'download', project, fromEnv: env, toEnv: null };
  // Drop any previous stream so only one SSE connection is live at a time.
  if (opsEventSource) { opsEventSource.close(); opsEventSource = null; }

  const title = document.getElementById('ops-modal-title');
  const info = document.getElementById('ops-modal-info');
  const startBtn = document.getElementById('ops-start-btn');
  const dryRunRow = document.getElementById('ops-dry-run-row');
  const outputDiv = document.getElementById('ops-modal-output');
  const term = document.getElementById('ops-modal-terminal');

  title.textContent = 'Download from Offsite';
  let infoHtml = '<div class="restore-info-row"><span class="restore-info-label">Project</span><span class="restore-info-value">' + esc(project) + '</span></div>'
    + '<div class="restore-info-row"><span class="restore-info-label">Environment</span><span class="restore-info-value">' + esc(env) + '</span></div>'
    + '<div class="restore-info-row"><span class="restore-info-label">File</span><span class="restore-info-value mono">' + esc(name) + '</span></div>';
  info.innerHTML = infoHtml;
  // Dry-run and Start controls do not apply: the download begins immediately.
  if (dryRunRow) dryRunRow.style.display = 'none';
  startBtn.style.display = 'none';

  outputDiv.style.display = 'block';
  term.textContent = 'Starting download...\n';
  currentOpId = null;
  _setProgressState('ops-progress-bar', 'running');

  document.getElementById('ops-modal').style.display = 'flex';

  // EventSource cannot set headers, so the auth token travels in the query string.
  const url = '/api/backups/offsite/download/stream/' + encodeURIComponent(project) + '/' + encodeURIComponent(env) + '?name=' + encodeURIComponent(name) + '&token=' + encodeURIComponent(getToken());
  const es = new EventSource(url);
  opsEventSource = es;

  es.onmessage = function(e) {
    try {
      const d = JSON.parse(e.data);
      // First event carries the operation id used by the cancel endpoint.
      if (d.op_id && !currentOpId) { currentOpId = d.op_id; return; }
      if (d.done) {
        // Terminal event: close the stream and report outcome.
        es.close();
        opsEventSource = null;
        currentOpId = null;
        const msg = d.cancelled ? '\n--- Cancelled ---\n' : d.success ? '\n--- Download complete ---\n' : '\n--- Download FAILED ---\n';
        term.textContent += msg;
        term.scrollTop = term.scrollHeight;
        toast(d.cancelled ? 'Download cancelled' : d.success ? 'Downloaded ' + (d.name || name) + ' to local storage' : 'Download failed', d.success ? 'success' : d.cancelled ? 'warning' : 'error');
        _setProgressState('ops-progress-bar', d.success ? 'ok' : 'fail');
        // Invalidate the backups cache so the new local copy shows up on next render.
        cachedBackups = null;
        if (d.success && currentPage === 'backups') renderBackups();
        return;
      }
      if (d.line) {
        // Ordinary output line: append and keep the terminal pinned to the bottom.
        term.textContent += d.line + '\n';
        term.scrollTop = term.scrollHeight;
      }
    } catch (_) {} // ignore malformed or keep-alive SSE frames
  };

  es.onerror = function() {
    // Transport failure (server gone or network drop): tear down and flag failure.
    es.close();
    opsEventSource = null;
    currentOpId = null;
    term.textContent += '\n--- Connection lost ---\n';
    toast('Connection lost', 'error');
    _setProgressState('ops-progress-bar', 'fail');
  };
}
|---|
| 898 | 1080 | |
|---|
| 899 | 1081 | // --------------------------------------------------------------------------- |
|---|
| .. | .. |
|---|
| 943 | 1125 | } |
|---|
| 944 | 1126 | |
|---|
function closeRestoreModal() {
  // If a restore stream is still open, best-effort cancel the server-side
  // operation before tearing down the EventSource and hiding the modal.
  if (restoreEventSource) {
    if (currentOpId) {
      fetch('/api/operations/' + currentOpId, { method: 'DELETE', headers: { 'Authorization': 'Bearer ' + getToken() } }).catch(() => {});
    }
    restoreEventSource.close();
    restoreEventSource = null;
  }
  currentOpId = null;
  _setProgressState('restore-progress-bar', 'hidden');
  document.getElementById('restore-modal').style.display = 'none';
  restoreCtx = { project: null, env: null, source: null, name: null };
}
|---|
| .. | .. |
|---|
| 975 | 1162 | const modeEl = document.querySelector('input[name="restore-mode"]:checked'); |
|---|
| 976 | 1163 | const mode = modeEl ? modeEl.value : 'full'; |
|---|
| 977 | 1164 | const url = `/api/restore/${encodeURIComponent(project)}/${encodeURIComponent(env)}?source=${encodeURIComponent(source)}${dryRun ? '&dry_run=true' : ''}&token=${encodeURIComponent(getToken())}${name ? '&name=' + encodeURIComponent(name) : ''}&mode=${encodeURIComponent(mode)}`; |
|---|
| 1165 | + currentOpId = null; |
|---|
| 1166 | + _setProgressState('restore-progress-bar', 'running'); |
|---|
| 978 | 1167 | const es = new EventSource(url); |
|---|
| 979 | 1168 | restoreEventSource = es; |
|---|
| 980 | 1169 | |
|---|
| 981 | 1170 | es.onmessage = function(e) { |
|---|
| 982 | 1171 | try { |
|---|
| 983 | 1172 | const d = JSON.parse(e.data); |
|---|
| 1173 | + if (d.op_id && !currentOpId) { currentOpId = d.op_id; return; } |
|---|
| 984 | 1174 | if (d.done) { |
|---|
| 985 | 1175 | es.close(); |
|---|
| 986 | 1176 | restoreEventSource = null; |
|---|
| 987 | | - const msg = d.success ? '\n--- Restore complete ---\n' : '\n--- Restore FAILED ---\n'; |
|---|
| 1177 | + currentOpId = null; |
|---|
| 1178 | + const msg = d.cancelled ? '\n--- Cancelled ---\n' : d.success ? '\n--- Restore complete ---\n' : '\n--- Restore FAILED ---\n'; |
|---|
| 988 | 1179 | term.textContent += msg; |
|---|
| 989 | 1180 | term.scrollTop = term.scrollHeight; |
|---|
| 990 | | - toast(d.success ? 'Restore completed' : 'Restore failed', d.success ? 'success' : 'error'); |
|---|
| 1181 | + const toastMsg = d.cancelled ? 'Restore cancelled' : d.success ? 'Restore completed' : 'Restore failed'; |
|---|
| 1182 | + toast(toastMsg, d.success ? 'success' : d.cancelled ? 'warning' : 'error'); |
|---|
| 1183 | + _setProgressState('restore-progress-bar', d.success ? 'ok' : 'fail'); |
|---|
| 991 | 1184 | startBtn.disabled = false; |
|---|
| 992 | 1185 | startBtn.textContent = 'Start Restore'; |
|---|
| 993 | 1186 | return; |
|---|
| .. | .. |
|---|
| 1002 | 1195 | es.onerror = function() { |
|---|
| 1003 | 1196 | es.close(); |
|---|
| 1004 | 1197 | restoreEventSource = null; |
|---|
| 1198 | + currentOpId = null; |
|---|
| 1005 | 1199 | term.textContent += '\n--- Connection lost ---\n'; |
|---|
| 1006 | 1200 | toast('Connection lost', 'error'); |
|---|
| 1201 | + _setProgressState('restore-progress-bar', 'fail'); |
|---|
| 1007 | 1202 | startBtn.disabled = false; |
|---|
| 1008 | 1203 | startBtn.textContent = 'Start Restore'; |
|---|
| 1009 | 1204 | }; |
|---|
| .. | .. |
|---|
| 1120 | 1315 | } |
|---|
| 1121 | 1316 | |
|---|
| 1122 | 1317 | // --------------------------------------------------------------------------- |
|---|
| 1318 | +// Schedules Page |
|---|
| 1319 | +// --------------------------------------------------------------------------- |
|---|
| 1320 | +let cachedSchedules = null; |
|---|
| 1321 | + |
|---|
// Render the Backup Schedules page: fetch schedule descriptors from
// /api/schedule/ and draw one table row per non-static project.
async function renderSchedules() {
  updateBreadcrumbs();
  const c = document.getElementById('page-content');
  try {
    const schedules = await api('/api/schedule/');
    // Cache for openScheduleEdit(), which looks schedules up by project name.
    cachedSchedules = schedules;

    let h = '<div class="page-enter">';
    h += '<h2 style="font-size:1.125rem;font-weight:600;color:#f3f4f6;margin-bottom:0.75rem;">Backup Schedules</h2>';
    h += '<p style="font-size:0.8125rem;color:#6b7280;margin-bottom:1rem;">Managed via registry.yaml. Changes regenerate systemd timers on the server.</p>';

    h += '<div class="table-wrapper"><table class="ops-table"><thead><tr>'
      + '<th>Project</th><th>Enabled</th><th>Schedule</th><th>Environments</th>'
      + '<th>Offsite</th><th>Retention</th><th></th>'
      + '</tr></thead><tbody>';

    for (const s of schedules) {
      if (s.static) continue; // skip static sites

      const enabled = s.enabled;
      const enabledBadge = enabled
        ? '<span class="badge badge-green">On</span>'
        : '<span class="badge badge-gray">Off</span>';
      // \u2014 is an em dash — placeholder for missing values.
      const schedule = s.schedule || '\u2014';
      const envs = (s.backup_environments || s.environments || []).join(', ') || '\u2014';
      const offsiteBadge = s.offsite
        ? '<span class="badge badge-blue" style="background:rgba(59,130,246,0.15);color:#60a5fa;border-color:rgba(59,130,246,0.3);">Yes</span>'
        : '<span class="badge badge-gray">No</span>';
      const retLocal = s.retention_local_days != null ? s.retention_local_days + 'd local' : '';
      const retOffsite = s.retention_offsite_days != null ? s.retention_offsite_days + 'd offsite' : '';
      const retention = [retLocal, retOffsite].filter(Boolean).join(', ') || '\u2014';

      // Edit/Run are only offered when the project has something schedulable
      // (a backup dir or a CLI, per the descriptor flags).
      const canEdit = s.has_backup_dir || s.has_cli;
      const editBtn = canEdit
        ? `<button class="btn btn-ghost btn-xs" onclick="openScheduleEdit('${esc(s.project)}')">Edit</button>`
        : '<span style="color:#4b5563;font-size:0.75rem;">n/a</span>';
      const runBtn = canEdit
        ? `<button class="btn btn-ghost btn-xs" onclick="runBackupNow('${esc(s.project)}')">Run Now</button>`
        : '';

      h += `<tr>
        <td style="font-weight:500;">${esc(s.project)}</td>
        <td>${enabledBadge}</td>
        <td class="mono">${esc(schedule)}</td>
        <td>${esc(envs)}</td>
        <td>${offsiteBadge}</td>
        <td style="font-size:0.8125rem;color:#9ca3af;">${esc(retention)}</td>
        <td style="display:flex;gap:0.25rem;">${editBtn} ${runBtn}</td>
      </tr>`;
    }
    h += '</tbody></table></div>';
    h += '</div>';
    c.innerHTML = h;
  } catch (e) {
    c.innerHTML = '<div class="card" style="color:#f87171;">Failed to load schedules: ' + esc(e.message) + '</div>';
  }
}
|---|
| 1379 | + |
|---|
let _schedClockInterval = null;
// Start a one-second UTC wall-clock ticker in the schedule modal so users can
// relate schedule times to the server clock. Restarts cleanly on every call.
function _startScheduleClock() {
  _stopScheduleClock(); // never let two tickers run at once
  const el = document.getElementById('sched-server-clock');
  // Fix: guard against a missing clock element — the original dereferenced
  // `el` unconditionally and threw a TypeError if the markup lacked it.
  if (!el) return;
  const tick = () => {
    el.textContent = 'Server now: ' + new Date().toISOString().slice(11, 19) + ' UTC';
  };
  tick(); // paint immediately; the interval handles subsequent seconds
  _schedClockInterval = setInterval(tick, 1000);
}
|---|
function _stopScheduleClock() {
  // Idempotent: safe to call when no clock is running.
  if (!_schedClockInterval) return;
  clearInterval(_schedClockInterval);
  _schedClockInterval = null;
}
|---|
| 1394 | + |
|---|
// Open the schedule-edit modal pre-populated from the cached schedule
// descriptor for `project`. Silently no-ops if the project is unknown
// (e.g. the schedules list was never loaded).
function openScheduleEdit(project) {
  const s = (cachedSchedules || []).find(x => x.project === project);
  if (!s) return;

  // Checkbox per environment; pre-check the ones currently backed up.
  const envOptions = (s.environments || []).map(e => {
    const checked = (s.backup_environments || s.environments || []).includes(e) ? 'checked' : '';
    return `<label style="display:flex;align-items:center;gap:0.375rem;font-size:0.875rem;color:#d1d5db;cursor:pointer;">
      <input type="checkbox" name="sched-env" value="${esc(e)}" ${checked} style="accent-color:#3b82f6;"> ${esc(e)}
    </label>`;
  }).join('');

  // Checkbox per environment for offsite upload; defaults to ['prod'] when
  // the descriptor has no offsite_envs list.
  const offsiteEnvOptions = (s.environments || []).map(e => {
    const checked = (s.offsite_envs || ['prod']).includes(e) ? 'checked' : '';
    return `<label style="display:flex;align-items:center;gap:0.375rem;font-size:0.875rem;color:#d1d5db;cursor:pointer;">
      <input type="checkbox" name="sched-offsite-env" value="${esc(e)}" ${checked} style="accent-color:#3b82f6;"> ${esc(e)}
    </label>`;
  }).join('');

  const modal = document.getElementById('schedule-modal');
  document.getElementById('schedule-modal-title').textContent = 'Edit Schedule: ' + project;
  // Hidden field read back by saveSchedule() to know which project to save.
  document.getElementById('sched-project').value = project;
  document.getElementById('sched-enabled').checked = s.enabled;
  document.getElementById('sched-time').value = s.schedule || '03:00';
  document.getElementById('sched-envs').innerHTML = envOptions;
  document.getElementById('sched-command').value = s.command || '';
  document.getElementById('sched-offsite').checked = s.offsite;
  document.getElementById('sched-offsite-envs').innerHTML = offsiteEnvOptions;
  // Offsite env picker is only visible while offsite upload is enabled.
  document.getElementById('sched-offsite-section').style.display = s.offsite ? '' : 'none';
  document.getElementById('sched-retention-local').value = s.retention_local_days != null ? s.retention_local_days : 7;
  document.getElementById('sched-retention-offsite').value = s.retention_offsite_days != null ? s.retention_offsite_days : 30;
  // Reset the Save button in case a previous save left it disabled.
  document.getElementById('sched-save-btn').disabled = false;
  document.getElementById('sched-save-btn').textContent = 'Save';
  _startScheduleClock();
  modal.style.display = 'flex';
}
|---|
| 1430 | + |
|---|
/** Hide the schedule modal and stop the next-run clock ticker. */
function closeScheduleModal() {
  _stopScheduleClock();
  const modal = document.getElementById('schedule-modal');
  modal.style.display = 'none';
}
|---|
| 1435 | + |
|---|
/** Sync the offsite options section's visibility with the offsite checkbox. */
function toggleOffsiteSection() {
  const enabled = document.getElementById('sched-offsite').checked;
  const section = document.getElementById('sched-offsite-section');
  section.style.display = enabled ? '' : 'none';
}
|---|
| 1440 | + |
|---|
/**
 * Persist the schedule form for the project currently loaded in the modal.
 *
 * PUTs the form state to /api/schedule/{project}. On success: toast, close
 * the modal, invalidate `cachedSchedules`, re-render. On failure: toast the
 * error and re-enable the Save button so the user can retry.
 */
async function saveSchedule() {
  const project = document.getElementById('sched-project').value;
  const btn = document.getElementById('sched-save-btn');
  btn.disabled = true;
  btn.textContent = 'Saving...';

  const checkedValues = selector =>
    Array.from(document.querySelectorAll(selector)).map(cb => cb.value);
  const environments = checkedValues('input[name="sched-env"]:checked');
  const offsite_envs = checkedValues('input[name="sched-offsite-env"]:checked');

  // Parse a retention field: null for blank/invalid input, but an explicit 0
  // is kept. (The previous `parseInt(...) || null` silently turned 0 into
  // null and omitted the radix.)
  const retentionDays = id => {
    const n = parseInt(document.getElementById(id).value, 10);
    return Number.isNaN(n) ? null : n;
  };

  const body = {
    enabled: document.getElementById('sched-enabled').checked,
    schedule: document.getElementById('sched-time').value,
    environments: environments.length ? environments : null,
    command: document.getElementById('sched-command').value || null,
    offsite: document.getElementById('sched-offsite').checked,
    offsite_envs: offsite_envs.length ? offsite_envs : null,
    retention_local_days: retentionDays('sched-retention-local'),
    retention_offsite_days: retentionDays('sched-retention-offsite'),
  };

  try {
    await api('/api/schedule/' + encodeURIComponent(project), {
      method: 'PUT',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(body),
    });
    toast('Schedule updated for ' + project, 'success');
    closeScheduleModal();
    cachedSchedules = null;
    renderSchedules();
  } catch (e) {
    toast('Failed to save schedule: ' + e.message, 'error');
    btn.disabled = false;
    btn.textContent = 'Save';
  }
}
|---|
| 1479 | + |
|---|
/**
 * Run an immediate backup for `project` (the same job the scheduler would
 * run) and stream its output into the ops modal via SSE.
 *
 * The first SSE event carries `op_id`, which enables the cancel endpoint
 * used by closeOpsModal().
 */
async function runBackupNow(project) {
  if (!await showConfirmDialog(`Run backup now for ${project}?`, 'Run Backup')) return;

  opsCtx = { type: 'backup', project, fromEnv: null, toEnv: null };
  if (opsEventSource) { opsEventSource.close(); opsEventSource = null; }

  const title = document.getElementById('ops-modal-title');
  const info = document.getElementById('ops-modal-info');
  const startBtn = document.getElementById('ops-start-btn');
  const dryRunRow = document.getElementById('ops-dry-run-row');
  const outputDiv = document.getElementById('ops-modal-output');
  const term = document.getElementById('ops-modal-terminal');

  title.textContent = 'Backup: ' + project;
  info.innerHTML = '<div class="restore-info-row"><span class="restore-info-label">Project</span><span class="restore-info-value">' + esc(project) + '</span></div>';
  if (dryRunRow) dryRunRow.style.display = 'none';
  startBtn.style.display = 'none';  // stream starts immediately; no Start button

  outputDiv.style.display = 'block';
  term.textContent = 'Starting backup...\n';
  currentOpId = null;
  _setProgressState('ops-progress-bar', 'running');

  document.getElementById('ops-modal').style.display = 'flex';

  const url = '/api/schedule/' + encodeURIComponent(project) + '/run?token=' + encodeURIComponent(getToken());
  const es = new EventSource(url);
  opsEventSource = es;

  es.onmessage = function(e) {
    try {
      const d = JSON.parse(e.data);
      // First event carries the operation id used by the cancel endpoint.
      if (d.op_id && !currentOpId) { currentOpId = d.op_id; return; }
      if (d.done) {
        es.close();
        opsEventSource = null;
        // BUGFIX: use the same progress-state tokens ('ok'/'fail') as every
        // other SSE handler in this file — this one passed 'done'/'error',
        // which no other call site uses.
        _setProgressState('ops-progress-bar', d.success ? 'ok' : 'fail');
        if (d.cancelled) term.textContent += '\n--- Cancelled ---\n';
        else if (d.success) term.textContent += '\n--- Done ---\n';
        else term.textContent += '\n--- Failed ---\n';
        return;
      }
      if (d.line != null) {
        term.textContent += d.line + '\n';
        term.scrollTop = term.scrollHeight;
      }
    } catch {}
  };
  es.onerror = function() { es.close(); opsEventSource = null; _setProgressState('ops-progress-bar', 'fail'); };
}
|---|
| 1530 | + |
|---|
| 1531 | +// --------------------------------------------------------------------------- |
|---|
| 1123 | 1532 | // Operations Page |
|---|
| 1124 | 1533 | // --------------------------------------------------------------------------- |
|---|
| 1125 | 1534 | async function renderOperations() { |
|---|
| .. | .. |
|---|
| 1148 | 1557 | for (const [name, cfg] of Object.entries(projects)) { |
|---|
| 1149 | 1558 | if (!cfg.promote || cfg.static || cfg.infrastructure) continue; |
|---|
| 1150 | 1559 | const pType = cfg.promote.type || 'unknown'; |
|---|
| 1151 | | - const envs = cfg.environments || []; |
|---|
| 1560 | + const envs = (cfg.environments || []).map(e => typeof e === 'string' ? e : e.name); |
|---|
| 1152 | 1561 | const typeBadge = pType === 'git' |
|---|
| 1153 | 1562 | ? '<span class="badge badge-blue" style="font-size:0.6875rem;">git</span>' |
|---|
| 1154 | 1563 | : '<span class="badge badge-purple" style="font-size:0.6875rem;">rsync</span>'; |
|---|
| .. | .. |
|---|
| 1187 | 1596 | |
|---|
| 1188 | 1597 | for (const [name, cfg] of Object.entries(projects)) { |
|---|
| 1189 | 1598 | if (!cfg.has_cli || cfg.static || cfg.infrastructure) continue; |
|---|
| 1190 | | - const envs = cfg.environments || []; |
|---|
| 1599 | + const envs = (cfg.environments || []).map(e => typeof e === 'string' ? e : e.name); |
|---|
| 1191 | 1600 | |
|---|
| 1192 | 1601 | h += '<div class="card">'; |
|---|
| 1193 | 1602 | h += '<div style="margin-bottom:0.75rem;font-weight:600;color:#f3f4f6;">' + esc(name) + '</div>'; |
|---|
| .. | .. |
|---|
| 1215 | 1624 | |
|---|
| 1216 | 1625 | // Section: Container Lifecycle |
|---|
| 1217 | 1626 | h += '<h2 style="font-size:1.125rem;font-weight:600;color:#f3f4f6;margin-bottom:0.375rem;">Container Lifecycle</h2>'; |
|---|
| 1218 | | - h += '<p style="font-size:0.8125rem;color:#9ca3af;margin-bottom:1rem;">Manage container state via Coolify API. ' |
|---|
| 1627 | + h += '<p style="font-size:0.8125rem;color:#9ca3af;margin-bottom:1rem;">Manage container state via docker compose. ' |
|---|
| 1219 | 1628 | + '<span style="color:#6ee7b7;">Restart</span> is safe. ' |
|---|
| 1220 | | - + '<span style="color:#fbbf24;">Rebuild</span> refreshes the image. ' |
|---|
| 1221 | | - + '<span style="color:#f87171;">Recreate</span> wipes data (disaster recovery only).</p>'; |
|---|
| 1222 | | - h += '<div class="grid-auto" style="margin-bottom:2rem;">'; |
|---|
| 1629 | + + '<span style="color:#fbbf24;">Rebuild</span> refreshes the image.</p>'; |
|---|
| 1630 | + h += '<div style="display:grid;grid-template-columns:repeat(auto-fill,minmax(320px,1fr));gap:1rem;margin-bottom:2rem;">'; |
|---|
| 1223 | 1631 | |
|---|
| 1224 | 1632 | for (const [name, cfg] of Object.entries(projects)) { |
|---|
| 1225 | | - if (cfg.static || cfg.infrastructure || !cfg.has_coolify) continue; |
|---|
| 1226 | | - const envs = (cfg.environments || []).filter(e => e !== 'infra'); |
|---|
| 1633 | + if (cfg.type === 'static' || cfg.type === 'infrastructure') continue; |
|---|
| 1634 | + const envs = (cfg.environments || []).map(e => typeof e === 'string' ? e : e.name).filter(e => e !== 'infra'); |
|---|
| 1227 | 1635 | if (!envs.length) continue; |
|---|
| 1228 | 1636 | |
|---|
| 1229 | 1637 | h += '<div class="card">'; |
|---|
| .. | .. |
|---|
| 1231 | 1639 | h += '<div style="display:flex;flex-direction:column;gap:0.625rem;">'; |
|---|
| 1232 | 1640 | |
|---|
| 1233 | 1641 | for (const env of envs) { |
|---|
| 1234 | | - h += '<div style="display:flex;align-items:center;gap:0.5rem;">'; |
|---|
| 1642 | + h += '<div style="display:flex;align-items:center;gap:0.375rem;">'; |
|---|
| 1235 | 1643 | // Environment label |
|---|
| 1236 | | - h += '<span style="min-width:2.5rem;font-size:0.75rem;color:#9ca3af;font-weight:500;">' + esc(env) + '</span>'; |
|---|
| 1644 | + h += '<span style="min-width:2.25rem;font-size:0.75rem;color:#9ca3af;font-weight:500;">' + esc(env) + '</span>'; |
|---|
| 1237 | 1645 | // Restart (green) |
|---|
| 1238 | | - h += '<button class="btn btn-ghost btn-xs" style="color:#6ee7b7;border-color:rgba(110,231,179,0.3);" ' |
|---|
| 1646 | + h += '<button class="btn btn-ghost btn-xs" style="color:#6ee7b7;border-color:rgba(110,231,179,0.3);padding:0.125rem 0.375rem;font-size:0.6875rem;" ' |
|---|
| 1239 | 1647 | + 'onclick="openLifecycleModal('restart','' + esc(name) + '','' + esc(env) + '')">' |
|---|
| 1240 | 1648 | + 'Restart</button>'; |
|---|
| 1241 | 1649 | // Rebuild (yellow) |
|---|
| 1242 | | - h += '<button class="btn btn-ghost btn-xs" style="color:#fbbf24;border-color:rgba(251,191,36,0.3);" ' |
|---|
| 1650 | + h += '<button class="btn btn-ghost btn-xs" style="color:#fbbf24;border-color:rgba(251,191,36,0.3);padding:0.125rem 0.375rem;font-size:0.6875rem;" ' |
|---|
| 1243 | 1651 | + 'onclick="openLifecycleModal('rebuild','' + esc(name) + '','' + esc(env) + '')">' |
|---|
| 1244 | 1652 | + 'Rebuild</button>'; |
|---|
| 1245 | | - // Recreate (red) |
|---|
| 1246 | | - h += '<button class="btn btn-ghost btn-xs" style="color:#f87171;border-color:rgba(248,113,113,0.3);" ' |
|---|
| 1247 | | - + 'onclick="openLifecycleModal('recreate','' + esc(name) + '','' + esc(env) + '')">' |
|---|
| 1248 | | - + 'Recreate</button>'; |
|---|
| 1653 | + // Backup (blue) |
|---|
| 1654 | + h += '<button class="btn btn-ghost btn-xs" style="color:#60a5fa;border-color:rgba(96,165,250,0.3);padding:0.125rem 0.375rem;font-size:0.6875rem;" ' |
|---|
| 1655 | + + 'onclick="openLifecycleModal('backup','' + esc(name) + '','' + esc(env) + '')">' |
|---|
| 1656 | + + 'Backup</button>'; |
|---|
| 1657 | + // Restore (navigate to backups page) |
|---|
| 1658 | + h += '<button class="btn btn-ghost btn-xs" style="color:#a78bfa;border-color:rgba(167,139,250,0.3);padding:0.125rem 0.375rem;font-size:0.6875rem;" ' |
|---|
| 1659 | + + 'onclick="currentPage='backups';backupDrillLevel=2;backupDrillProject='' + esc(name) + '';backupDrillEnv='' + esc(env) + '';cachedBackups=null;selectedBackups.clear();document.querySelectorAll('#sidebar-nav .sidebar-link').forEach(el=>el.classList.toggle('active',el.dataset.page==='backups'));renderPage();pushHash();">' |
|---|
| 1660 | + + 'Restore</button>'; |
|---|
| 1249 | 1661 | h += '</div>'; |
|---|
| 1250 | 1662 | } |
|---|
| 1251 | 1663 | |
|---|
| .. | .. |
|---|
| 1353 | 1765 | } |
|---|
| 1354 | 1766 | |
|---|
| 1355 | 1767 | // --------------------------------------------------------------------------- |
|---|
| 1356 | | -// Lifecycle Modal (Restart / Rebuild / Recreate) |
|---|
| 1768 | +// Lifecycle Modal (Restart / Rebuild / Backup) |
|---|
| 1357 | 1769 | // --------------------------------------------------------------------------- |
|---|
| 1358 | 1770 | function openLifecycleModal(action, project, env) { |
|---|
| 1359 | 1771 | opsCtx = { type: action, project, fromEnv: env, toEnv: null }; |
|---|
| .. | .. |
|---|
| 1385 | 1797 | + '<div class="restore-info-row"><span class="restore-info-label">Project</span><span class="restore-info-value">' + esc(project) + '</span></div>' |
|---|
| 1386 | 1798 | + '<div class="restore-info-row"><span class="restore-info-label">Environment</span><span class="restore-info-value">' + esc(env) + '</span></div>' |
|---|
| 1387 | 1799 | + '<div style="background:rgba(251,191,36,0.08);border:1px solid rgba(251,191,36,0.25);border-radius:0.5rem;padding:0.625rem 0.875rem;font-size:0.8125rem;color:#fde68a;margin-top:0.75rem;">' |
|---|
| 1388 | | - + 'Stops containers via Coolify, rebuilds the Docker image, then starts again. No data loss.</div>'; |
|---|
| 1800 | + + 'Runs <code>docker compose down</code>, rebuilds the image, then starts again. No data loss.</div>'; |
|---|
| 1389 | 1801 | startBtn.className = 'btn btn-sm'; |
|---|
| 1390 | 1802 | startBtn.style.cssText = 'background:#78350f;color:#fde68a;border:1px solid rgba(251,191,36,0.3);'; |
|---|
| 1391 | 1803 | startBtn.textContent = 'Rebuild'; |
|---|
| 1392 | 1804 | |
|---|
| 1393 | | - } else if (action === 'recreate') { |
|---|
| 1394 | | - title.textContent = 'Recreate Environment'; |
|---|
| 1805 | + } else if (action === 'backup') { |
|---|
| 1806 | + title.textContent = 'Create Backup'; |
|---|
| 1395 | 1807 | info.innerHTML = '' |
|---|
| 1396 | 1808 | + '<div class="restore-info-row"><span class="restore-info-label">Project</span><span class="restore-info-value">' + esc(project) + '</span></div>' |
|---|
| 1397 | 1809 | + '<div class="restore-info-row"><span class="restore-info-label">Environment</span><span class="restore-info-value">' + esc(env) + '</span></div>' |
|---|
| 1398 | | - + '<div style="background:rgba(220,38,38,0.1);border:1px solid rgba(220,38,38,0.3);border-radius:0.5rem;padding:0.75rem 1rem;font-size:0.8125rem;color:#fca5a5;margin-top:0.75rem;">' |
|---|
| 1399 | | - + '<strong style="display:block;margin-bottom:0.375rem;">DESTRUCTIVE — Disaster Recovery Only</strong>' |
|---|
| 1400 | | - + 'Stops containers, wipes all data volumes, rebuilds image, starts fresh. ' |
|---|
| 1401 | | - + 'You must restore a backup afterwards.</div>' |
|---|
| 1402 | | - + '<div style="margin-top:0.875rem;">' |
|---|
| 1403 | | - + '<label style="font-size:0.8125rem;color:#9ca3af;display:block;margin-bottom:0.375rem;">Type the environment name to confirm:</label>' |
|---|
| 1404 | | - + '<input id="recreate-confirm-input" type="text" placeholder="' + esc(env) + '" ' |
|---|
| 1405 | | - + 'style="width:100%;box-sizing:border-box;padding:0.5rem 0.75rem;background:#1f2937;border:1px solid rgba(220,38,38,0.4);border-radius:0.375rem;color:#f3f4f6;font-size:0.875rem;" ' |
|---|
| 1406 | | - + 'oninput="checkRecreateConfirm(\'' + esc(env) + '\')">' |
|---|
| 1407 | | - + '</div>'; |
|---|
| 1408 | | - startBtn.className = 'btn btn-danger btn-sm'; |
|---|
| 1810 | + + '<div style="background:rgba(59,130,246,0.08);border:1px solid rgba(59,130,246,0.25);border-radius:0.5rem;padding:0.625rem 0.875rem;font-size:0.8125rem;color:#93c5fd;margin-top:0.75rem;">' |
|---|
| 1811 | + + 'Creates a backup of the database and uploads for this environment.</div>'; |
|---|
| 1812 | + startBtn.className = 'btn btn-primary btn-sm'; |
|---|
| 1409 | 1813 | startBtn.style.cssText = ''; |
|---|
| 1410 | | - startBtn.textContent = 'Recreate'; |
|---|
| 1411 | | - startBtn.disabled = true; // enabled after typing env name |
|---|
| 1814 | + startBtn.textContent = 'Create Backup'; |
|---|
| 1412 | 1815 | } |
|---|
| 1413 | 1816 | |
|---|
| 1414 | 1817 | document.getElementById('ops-modal-output').style.display = 'none'; |
|---|
| 1415 | 1818 | document.getElementById('ops-modal-terminal').textContent = ''; |
|---|
| 1416 | 1819 | |
|---|
| 1417 | 1820 | document.getElementById('ops-modal').style.display = 'flex'; |
|---|
| 1418 | | - if (action === 'recreate') { |
|---|
| 1419 | | - setTimeout(() => { |
|---|
| 1420 | | - const inp = document.getElementById('recreate-confirm-input'); |
|---|
| 1421 | | - if (inp) inp.focus(); |
|---|
| 1422 | | - }, 100); |
|---|
| 1423 | | - } |
|---|
| 1424 | | -} |
|---|
| 1425 | | - |
|---|
| 1426 | | -function checkRecreateConfirm(expectedEnv) { |
|---|
| 1427 | | - const inp = document.getElementById('recreate-confirm-input'); |
|---|
| 1428 | | - const startBtn = document.getElementById('ops-start-btn'); |
|---|
| 1429 | | - if (!inp || !startBtn) return; |
|---|
| 1430 | | - startBtn.disabled = inp.value.trim() !== expectedEnv; |
|---|
| 1431 | 1821 | } |
|---|
| 1432 | 1822 | |
|---|
/**
 * Close the ops modal: best-effort cancel any in-flight operation, tear
 * down the SSE stream, reset per-operation UI tweaks, and refresh the
 * backups page when a backup/upload just ran.
 */
function closeOpsModal() {
  // Ask the server to cancel the running operation (fire-and-forget).
  if (currentOpId && opsEventSource) {
    fetch('/api/operations/' + currentOpId, { method: 'DELETE', headers: { 'Authorization': 'Bearer ' + getToken() } }).catch(() => {});
  }
  if (opsEventSource) {
    opsEventSource.close();
    opsEventSource = null;
  }
  currentOpId = null;
  _setProgressState('ops-progress-bar', 'hidden');
  document.getElementById('ops-modal').style.display = 'none';

  // A finished backup/upload invalidates the visible backup list.
  const ranBackup = opsCtx.type === 'backup' || opsCtx.type === 'upload';
  if (ranBackup && currentPage === 'backups') {
    cachedBackups = null;
    renderBackups();
  }
  opsCtx = { type: null, project: null, fromEnv: null, toEnv: null };

  // Undo per-operation tweaks: dry-run row visibility and Start button state.
  const dryRunRow = document.getElementById('ops-dry-run-row');
  if (dryRunRow) dryRunRow.style.display = '';
  const startBtn = document.getElementById('ops-start-btn');
  if (startBtn) {
    startBtn.style.cssText = '';
    startBtn.style.display = '';
    startBtn.disabled = false;
  }
}
|---|
| 1444 | 1844 | |
|---|
| 1445 | 1845 | function _btnLabelForType(type) { |
|---|
| .. | .. |
|---|
| 1447 | 1847 | if (type === 'sync') return 'Sync'; |
|---|
| 1448 | 1848 | if (type === 'restart') return 'Restart'; |
|---|
| 1449 | 1849 | if (type === 'rebuild') return 'Rebuild'; |
|---|
| 1450 | | - if (type === 'recreate') return 'Recreate'; |
|---|
| 1850 | + if (type === 'backup') return 'Create Backup'; |
|---|
| 1451 | 1851 | return 'Run'; |
|---|
| 1452 | 1852 | } |
|---|
| 1453 | 1853 | |
|---|
| .. | .. |
|---|
| 1461 | 1861 | const term = document.getElementById('ops-modal-terminal'); |
|---|
| 1462 | 1862 | |
|---|
| 1463 | 1863 | outputDiv.style.display = 'block'; |
|---|
| 1864 | + // Remove leftover banners from previous operations |
|---|
| 1865 | + outputDiv.querySelectorAll('div').forEach(el => { if (el !== term) el.remove(); }); |
|---|
| 1464 | 1866 | term.textContent = 'Starting...\n'; |
|---|
| 1465 | 1867 | startBtn.disabled = true; |
|---|
| 1466 | 1868 | startBtn.textContent = 'Running...'; |
|---|
| .. | .. |
|---|
| 1470 | 1872 | url = '/api/promote/' + encodeURIComponent(project) + '/' + encodeURIComponent(fromEnv) + '/' + encodeURIComponent(toEnv) + '?dry_run=' + dryRun + '&token=' + encodeURIComponent(getToken()); |
|---|
| 1471 | 1873 | } else if (type === 'sync') { |
|---|
| 1472 | 1874 | url = '/api/sync/' + encodeURIComponent(project) + '?from=' + encodeURIComponent(fromEnv) + '&to=' + encodeURIComponent(toEnv) + '&dry_run=' + dryRun + '&token=' + encodeURIComponent(getToken()); |
|---|
| 1473 | | - } else if (type === 'restart' || type === 'rebuild' || type === 'recreate') { |
|---|
| 1474 | | - // All three lifecycle ops go through /api/rebuild/{project}/{env}?action=... |
|---|
| 1875 | + } else if (type === 'restart' || type === 'rebuild') { |
|---|
| 1475 | 1876 | url = '/api/rebuild/' + encodeURIComponent(project) + '/' + encodeURIComponent(fromEnv) |
|---|
| 1476 | 1877 | + '?action=' + encodeURIComponent(type) + '&token=' + encodeURIComponent(getToken()); |
|---|
| 1878 | + } else if (type === 'backup') { |
|---|
| 1879 | + url = '/api/backups/stream/' + encodeURIComponent(project) + '/' + encodeURIComponent(fromEnv) |
|---|
| 1880 | + + '?token=' + encodeURIComponent(getToken()); |
|---|
| 1477 | 1881 | } |
|---|
| 1478 | 1882 | |
|---|
| 1883 | + currentOpId = null; |
|---|
| 1884 | + _setProgressState('ops-progress-bar', 'running'); |
|---|
| 1479 | 1885 | const es = new EventSource(url); |
|---|
| 1480 | 1886 | opsEventSource = es; |
|---|
| 1481 | 1887 | let opDone = false; |
|---|
| .. | .. |
|---|
| 1483 | 1889 | es.onmessage = function(e) { |
|---|
| 1484 | 1890 | try { |
|---|
| 1485 | 1891 | const d = JSON.parse(e.data); |
|---|
| 1892 | + if (d.op_id && !currentOpId) { currentOpId = d.op_id; return; } |
|---|
| 1486 | 1893 | if (d.done) { |
|---|
| 1487 | 1894 | opDone = true; |
|---|
| 1488 | 1895 | es.close(); |
|---|
| 1489 | 1896 | opsEventSource = null; |
|---|
| 1490 | | - const msg = d.success ? '\n--- Operation complete ---\n' : '\n--- Operation FAILED ---\n'; |
|---|
| 1897 | + currentOpId = null; |
|---|
| 1898 | + const msg = d.cancelled ? '\n--- Cancelled ---\n' : d.success ? '\n--- Operation complete ---\n' : '\n--- Operation FAILED ---\n'; |
|---|
| 1491 | 1899 | term.textContent += msg; |
|---|
| 1492 | 1900 | term.scrollTop = term.scrollHeight; |
|---|
| 1493 | | - toast(d.success ? 'Operation completed' : 'Operation failed', d.success ? 'success' : 'error'); |
|---|
| 1901 | + const toastMsg = d.cancelled ? 'Operation cancelled' : d.success ? 'Operation completed' : 'Operation failed'; |
|---|
| 1902 | + toast(toastMsg, d.success ? 'success' : d.cancelled ? 'warning' : 'error'); |
|---|
| 1903 | + _setProgressState('ops-progress-bar', d.success ? 'ok' : 'fail'); |
|---|
| 1494 | 1904 | startBtn.disabled = false; |
|---|
| 1495 | 1905 | startBtn.textContent = _btnLabelForType(type); |
|---|
| 1496 | 1906 | |
|---|
| 1497 | | - // Show "Go to Backups" banner after recreate (or legacy rebuild) |
|---|
| 1498 | | - const showBackupBanner = (type === 'recreate') && d.success && d.project && d.env; |
|---|
| 1499 | | - if (showBackupBanner) { |
|---|
| 1500 | | - const restoreProject = d.project; |
|---|
| 1501 | | - const restoreEnv = d.env; |
|---|
| 1502 | | - const banner = document.createElement('div'); |
|---|
| 1503 | | - banner.style.cssText = 'margin-top:1rem;padding:0.75rem 1rem;background:rgba(16,185,129,0.1);border:1px solid rgba(16,185,129,0.3);border-radius:0.5rem;display:flex;align-items:center;gap:0.75rem;'; |
|---|
| 1504 | | - banner.innerHTML = '<span style="color:#6ee7b7;font-size:0.8125rem;flex:1;">Environment recreated. Next step: restore a backup.</span>' |
|---|
| 1505 | | - + '<button class="btn btn-ghost btn-sm" style="color:#6ee7b7;border-color:rgba(110,231,179,0.3);white-space:nowrap;" ' |
|---|
| 1506 | | - + 'onclick="closeOpsModal();currentPage=\'backups\';backupDrillLevel=2;backupDrillProject=\'' + restoreProject + '\';backupDrillEnv=\'' + restoreEnv + '\';cachedBackups=null;selectedBackups.clear();document.querySelectorAll(\'#sidebar-nav .sidebar-link\').forEach(el=>el.classList.toggle(\'active\',el.dataset.page===\'backups\'));renderPage();pushHash();">' |
|---|
| 1507 | | - + 'Go to Backups →</button>'; |
|---|
| 1508 | | - outputDiv.appendChild(banner); |
|---|
| 1907 | + // After a successful backup, invalidate cache so backups page refreshes |
|---|
| 1908 | + if (type === 'backup' && d.success) { |
|---|
| 1909 | + cachedBackups = null; |
|---|
| 1509 | 1910 | } |
|---|
| 1510 | 1911 | |
|---|
| 1511 | 1912 | return; |
|---|
| .. | .. |
|---|
| 1520 | 1921 | es.onerror = function() { |
|---|
| 1521 | 1922 | es.close(); |
|---|
| 1522 | 1923 | opsEventSource = null; |
|---|
| 1924 | + currentOpId = null; |
|---|
| 1523 | 1925 | if (opDone) return; |
|---|
| 1524 | 1926 | term.textContent += '\n--- Connection lost ---\n'; |
|---|
| 1525 | 1927 | toast('Connection lost', 'error'); |
|---|
| 1928 | + _setProgressState('ops-progress-bar', 'fail'); |
|---|
| 1526 | 1929 | startBtn.disabled = false; |
|---|
| 1527 | 1930 | startBtn.textContent = _btnLabelForType(type); |
|---|
| 1528 | 1931 | }; |
|---|
| .. | .. |
|---|
| 1532 | 1935 | // Service Actions |
|---|
| 1533 | 1936 | // --------------------------------------------------------------------------- |
|---|
| 1534 | 1937 | async function restartService(project, env, service) { |
|---|
| 1535 | | - if (!confirm(`Restart ${service} in ${project}/${env}?`)) return; |
|---|
| 1938 | + if (!await showConfirmDialog(`Restart ${service} in ${project}/${env}?`, 'Restart')) return; |
|---|
| 1536 | 1939 | toast('Restarting ' + service + '...', 'info'); |
|---|
| 1537 | 1940 | try { |
|---|
| 1538 | 1941 | const r = await api(`/api/services/restart/${project}/${env}/${service}`, { method: 'POST' }); |
|---|
| .. | .. |
|---|
| 1565 | 1968 | } |
|---|
| 1566 | 1969 | |
|---|
/**
 * Create a backup for `project`/`env` with live streamed output in the
 * ops modal. Asks for confirmation first; the SSE stream starts
 * immediately (no Start button). The first event's `op_id` enables the
 * cancel endpoint used by closeOpsModal().
 */
async function createBackup(project, env) {
  if (!await showConfirmDialog(`Create backup for ${project}/${env}?`, 'Create Backup')) return;

  // Open the ops modal with streaming output
  opsCtx = { type: 'backup', project, fromEnv: env, toEnv: null };
  if (opsEventSource) { opsEventSource.close(); opsEventSource = null; }

  const title = document.getElementById('ops-modal-title');
  const info = document.getElementById('ops-modal-info');
  const startBtn = document.getElementById('ops-start-btn');
  const dryRunRow = document.getElementById('ops-dry-run-row');
  const outputDiv = document.getElementById('ops-modal-output');
  const term = document.getElementById('ops-modal-terminal');

  title.textContent = 'Create Backup';
  info.innerHTML = '<div class="restore-info-row"><span class="restore-info-label">Project</span><span class="restore-info-value">' + esc(project) + '</span></div>'
    + '<div class="restore-info-row"><span class="restore-info-label">Environment</span><span class="restore-info-value">' + esc(env) + '</span></div>';
  if (dryRunRow) dryRunRow.style.display = 'none';
  startBtn.style.display = 'none';

  outputDiv.style.display = 'block';
  term.textContent = 'Starting backup...\n';
  currentOpId = null;
  _setProgressState('ops-progress-bar', 'running');

  document.getElementById('ops-modal').style.display = 'flex';

  const url = '/api/backups/stream/' + encodeURIComponent(project) + '/' + encodeURIComponent(env) + '?token=' + encodeURIComponent(getToken());
  const es = new EventSource(url);
  opsEventSource = es;

  es.onmessage = function(e) {
    try {
      const d = JSON.parse(e.data);
      if (d.op_id && !currentOpId) { currentOpId = d.op_id; return; }
      if (d.done) {
        es.close();
        opsEventSource = null;
        currentOpId = null;
        const msg = d.cancelled ? '\n--- Cancelled ---\n' : d.success ? '\n--- Backup complete ---\n' : '\n--- Backup FAILED ---\n';
        term.textContent += msg;
        term.scrollTop = term.scrollHeight;
        toast(d.cancelled ? 'Backup cancelled' : d.success ? 'Backup created for ' + project + '/' + env : 'Backup failed', d.success ? 'success' : d.cancelled ? 'warning' : 'error');
        _setProgressState('ops-progress-bar', d.success ? 'ok' : 'fail');
        cachedBackups = null;  // force the backups page to reload
        return;
      }
      // BUGFIX: `!= null` keeps blank output lines; the previous bare
      // truthiness test (`if (d.line)`) silently dropped empty strings,
      // unlike the other SSE handlers in this file.
      if (d.line != null) {
        term.textContent += d.line + '\n';
        term.scrollTop = term.scrollHeight;
      }
    } catch (_) {}
  };

  es.onerror = function() {
    es.close();
    opsEventSource = null;
    currentOpId = null;
    term.textContent += '\n--- Connection lost ---\n';
    toast('Connection lost', 'error');
    _setProgressState('ops-progress-bar', 'fail');
  };
}
|---|
| 1577 | 2033 | |
|---|
| 1578 | 2034 | async function deleteBackup(project, env, name, hasLocal, hasOffsite) { |
|---|
| .. | .. |
|---|
| 1586 | 2042 | target = 'offsite'; |
|---|
| 1587 | 2043 | } |
|---|
| 1588 | 2044 | const label = target === 'both' ? 'local + offsite' : target; |
|---|
| 1589 | | - if (!confirm(`Delete ${label} copy of ${name}?\n\nThis cannot be undone.`)) return; |
|---|
| 2045 | + if (!await showConfirmDialog(`Delete ${label} copy of ${name}?\n\nThis cannot be undone.`, 'Delete', true)) return; |
|---|
| 1590 | 2046 | toast('Deleting backup (' + label + ')...', 'info'); |
|---|
| 1591 | 2047 | try { |
|---|
| 1592 | 2048 | await api(`/api/backups/${encodeURIComponent(project)}/${encodeURIComponent(env)}/${encodeURIComponent(name)}?target=${target}`, { method: 'DELETE' }); |
|---|
| .. | .. |
|---|
| 1594 | 2050 | cachedBackups = null; |
|---|
| 1595 | 2051 | if (currentPage === 'backups') renderBackups(); |
|---|
| 1596 | 2052 | } catch (e) { toast('Delete failed: ' + e.message, 'error'); } |
|---|
| 2053 | +} |
|---|
| 2054 | + |
|---|
/**
 * Promise-based replacement for window.confirm. Resolves true when the
 * confirm button is clicked; false on Cancel, overlay click, or Escape.
 *
 * BUGFIX: the Escape keydown listener is now removed on EVERY close path.
 * Previously it was only removed when Escape itself closed the dialog, so
 * confirming via a button leaked the listener, and a later Escape press
 * called removeChild on an overlay no longer in the DOM (throws).
 */
function showConfirmDialog(message, confirmLabel = 'Confirm', isDanger = false) {
  return new Promise(resolve => {
    const overlay = document.createElement('div');
    overlay.style.cssText = 'position:fixed;inset:0;background:rgba(0,0,0,0.6);backdrop-filter:blur(2px);display:flex;align-items:center;justify-content:center;z-index:9999;animation:fadeIn 0.15s ease-out;';
    const box = document.createElement('div');
    box.style.cssText = 'background:#1e293b;border:1px solid #334155;border-radius:0.75rem;padding:1.5rem;min-width:320px;max-width:420px;color:#e2e8f0;animation:modalIn 0.2s ease-out;';
    const btnClass = isDanger ? 'btn btn-danger' : 'btn btn-primary';
    box.innerHTML = `
      <p style="margin:0 0 1.25rem;font-size:0.9rem;color:#d1d5db;white-space:pre-line;">${esc(message)}</p>
      <div style="display:flex;gap:0.75rem;justify-content:flex-end;">
        <button class="btn btn-ghost" data-action="cancel">Cancel</button>
        <button class="${btnClass}" data-action="confirm">${esc(confirmLabel)}</button>
      </div>`;
    overlay.appendChild(box);
    document.body.appendChild(overlay);

    // Single teardown path: always unhook the key listener and the overlay.
    const close = result => {
      document.removeEventListener('keydown', onKey);
      document.body.removeChild(overlay);
      resolve(result);
    };
    const onKey = e => { if (e.key === 'Escape') close(false); };

    box.addEventListener('click', e => {
      const btn = e.target.closest('[data-action]');
      if (!btn) return;
      close(btn.dataset.action === 'confirm');
    });
    overlay.addEventListener('click', e => {
      if (e.target === overlay) close(false);
    });
    document.addEventListener('keydown', onKey);
  });
}
|---|
| 1598 | 2083 | |
|---|
| 1599 | 2084 | function showDeleteTargetDialog(name) { |
|---|
| .. | .. |
|---|
| 1658 | 2143 | } else { |
|---|
| 1659 | 2144 | hash = '/backups'; |
|---|
| 1660 | 2145 | } |
|---|
| 2146 | + } else if (currentPage === 'schedules') { |
|---|
| 2147 | + hash = '/schedules'; |
|---|
| 1661 | 2148 | } else if (currentPage === 'system') { |
|---|
| 1662 | 2149 | hash = '/system'; |
|---|
| 1663 | 2150 | } else if (currentPage === 'operations') { |
|---|
| .. | .. |
|---|
| 1708 | 2195 | document.querySelectorAll('#sidebar-nav .sidebar-link').forEach(el => |
|---|
| 1709 | 2196 | el.classList.toggle('active', el.dataset.page === 'backups')); |
|---|
| 1710 | 2197 | renderPage(); |
|---|
| 2198 | + } else if (page === 'schedules') { |
|---|
| 2199 | + showPage('schedules'); |
|---|
| 1711 | 2200 | } else if (page === 'system') { |
|---|
| 1712 | 2201 | showPage('system'); |
|---|
| 1713 | 2202 | } else if (page === 'operations') { |
|---|