diff --git a/algo-web b/algo-web new file mode 100755 index 000000000..6b1ed8f0e --- /dev/null +++ b/algo-web @@ -0,0 +1,74 @@ +#!/usr/bin/env bash +set -euo pipefail +IFS=$'\n\t' + +# Algo VPN Web UI launcher +# Starts a local web server and opens the browser for GUI-based VPN deployment + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR" + +# Configuration via environment variables +PORT="${ALGO_WEB_PORT:-8080}" +HOST="${ALGO_WEB_HOST:-127.0.0.1}" + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +CYAN='\033[0;36m' +NC='\033[0m' # No Color + +echo -e "${CYAN}" +echo " _ _ __ ______ _ _ " +echo " / \ | | __ _ ___ \ \ / / _ \| \ | |" +echo " / _ \ | |/ _\` |/ _ \ \ \ / /| |_) | \| |" +echo " / ___ \| | (_| | (_) | \ V / | __/| |\ |" +echo "/_/ \_\_|\__, |\___/ \_/ |_| |_| \_|" +echo " |___/ " +echo -e "${NC}" +echo "Web UI for deploying personal VPN servers" +echo "" + +# Check if uv is installed +if ! command -v uv &> /dev/null; then + echo -e "${RED}Error: uv is not installed.${NC}" + echo "Please run ./algo first to install uv, or install it manually:" + echo " curl -LsSf https://astral.sh/uv/install.sh | sh" + exit 1 +fi + +# Install web dependencies if needed +echo "Checking dependencies..." +if ! uv pip show starlette &> /dev/null 2>&1; then + echo "Installing web dependencies..." 
+ uv pip install '.[web]' --quiet +fi + +echo -e "${GREEN}Starting Algo VPN Web UI${NC}" +echo -e "Server: ${CYAN}http://$HOST:$PORT${NC}" +echo "" +echo "Press Ctrl+C to stop the server" +echo "" + +# Open browser after short delay (background) +# Use platform-specific command +open_browser() { + local url="http://$HOST:$PORT" + sleep 1.5 + + if [[ "$OSTYPE" == "darwin"* ]]; then + open "$url" 2>/dev/null || true + elif [[ "$OSTYPE" == "linux-gnu"* ]]; then + xdg-open "$url" 2>/dev/null || sensible-browser "$url" 2>/dev/null || true + elif [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" ]]; then + start "$url" 2>/dev/null || true + else + # Fallback: try python webbrowser module + python3 -c "import webbrowser; webbrowser.open('$url')" 2>/dev/null || true + fi +} + +open_browser & + +# Run web server (foreground) +exec uv run uvicorn app.main:app --host "$HOST" --port "$PORT" --no-access-log diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 000000000..6f4bcef6a --- /dev/null +++ b/app/__init__.py @@ -0,0 +1,3 @@ +"""Algo VPN Web UI - Browser-based GUI for deploying VPN servers.""" + +__version__ = "1.0.0" diff --git a/app/main.py b/app/main.py new file mode 100644 index 000000000..d506dd50b --- /dev/null +++ b/app/main.py @@ -0,0 +1,349 @@ +"""Algo VPN Web UI - Starlette application.""" + +import asyncio +import os +import zipfile +from io import BytesIO +from pathlib import Path +from typing import Any + +from sse_starlette.sse import EventSourceResponse +from starlette.applications import Starlette +from starlette.requests import Request +from starlette.responses import FileResponse, HTMLResponse, JSONResponse, Response, StreamingResponse +from starlette.routing import Mount, Route +from starlette.staticfiles import StaticFiles +from starlette.templating import Jinja2Templates + +from .providers import PROVIDERS, validate_credentials +from .runner import cancel_deployment, run_playbook +from .sessions import sessions + +# Paths +APP_DIR = 
Path(__file__).parent +TEMPLATES_DIR = APP_DIR / "templates" +STATIC_DIR = APP_DIR / "static" +ALGO_DIR = APP_DIR.parent + +# Templates +templates = Jinja2Templates(directory=str(TEMPLATES_DIR)) + + +# === Page Routes === + + +async def index(request: Request) -> Response: + """Main page with provider selection and configuration form.""" + return templates.TemplateResponse( + request, + "index.html", + { + "providers": PROVIDERS, + }, + ) + + +async def deploy_page(request: Request) -> Response: + """Deployment progress page.""" + session_id = request.path_params["session_id"] + session = await sessions.get(session_id) + + if not session: + return HTMLResponse( + "
This deployment session has expired or does not exist.
", + status_code=404, + ) + + provider = PROVIDERS.get(session.provider) + return templates.TemplateResponse( + request, + "deploy.html", + { + "session": session, + "provider": provider, + }, + ) + + +async def success_page(request: Request) -> Response: + """Success page with config downloads.""" + session_id = request.path_params["session_id"] + session = await sessions.get(session_id) + + if not session: + return HTMLResponse( + "This deployment session has expired or does not exist.
", + status_code=404, + ) + + # List config files + config_files: list[dict[str, Any]] = [] + if session.config_path and Path(session.config_path).exists(): + config_dir = Path(session.config_path) + for file_path in sorted(config_dir.rglob("*")): + if file_path.is_file(): + rel_path = file_path.relative_to(config_dir) + config_files.append( + { + "name": str(rel_path), + "path": str(file_path), + "size": file_path.stat().st_size, + "is_qr": file_path.suffix == ".png", + } + ) + + provider = PROVIDERS.get(session.provider) + return templates.TemplateResponse( + request, + "success.html", + { + "session": session, + "provider": provider, + "config_files": config_files, + }, + ) + + +# === API Routes === + + +async def provider_form(request: Request) -> Response: + """Return the credential form partial for a provider.""" + provider_id = request.path_params["provider"] + provider = PROVIDERS.get(provider_id) + + if not provider: + return HTMLResponse("Unknown provider
", status_code=404) + + return templates.TemplateResponse( + request, + "partials/provider_form.html", + {"provider": provider}, + ) + + +async def validate_provider(request: Request) -> Response: + """Validate provider credentials and return regions.""" + provider_id = request.path_params["provider"] + provider = PROVIDERS.get(provider_id) + + if not provider: + return JSONResponse({"valid": False, "error": "Unknown provider"}, status_code=404) + + # Get credentials from form + form_data = await request.form() + credentials = {field.name: form_data.get(field.name, "") for field in provider.fields} + + # Validate credentials + result = await validate_credentials(provider_id, credentials) + + if not result.valid: + return templates.TemplateResponse( + request, + "partials/validation_error.html", + {"error": result.error}, + status_code=400, + ) + + # Return regions partial + return templates.TemplateResponse( + request, + "partials/regions.html", + { + "provider": provider, + "regions": result.regions or [], + "credentials": credentials, + }, + ) + + +async def start_deploy(request: Request) -> Response: + """Start a deployment.""" + form_data = await request.form() + + provider_id = form_data.get("provider", "") + if not provider_id or provider_id not in PROVIDERS: + return JSONResponse({"error": "Invalid provider"}, status_code=400) + + provider = PROVIDERS[provider_id] + + # Extract credentials + credentials = {field.name: form_data.get(field.name, "") for field in provider.fields} + + # Extract config + users_raw = form_data.get("users", "phone,laptop,desktop") + users = [u.strip() for u in str(users_raw).split(",") if u.strip()] + + config = { + "server_name": form_data.get("server_name", "algo"), + "region": form_data.get("region", provider.default_region), + "users": users, + "wireguard_enabled": form_data.get("wireguard_enabled") == "on", + "ipsec_enabled": form_data.get("ipsec_enabled") == "on", + "dns_adblocking": form_data.get("dns_adblocking") == "on", + 
"ssh_tunneling": form_data.get("ssh_tunneling") == "on", + "ondemand_cellular": form_data.get("ondemand_cellular") == "on", + "ondemand_wifi": form_data.get("ondemand_wifi") == "on", + "store_pki": form_data.get("store_pki") == "on", + } + + # Create session + session = await sessions.create( + provider=provider_id, + credentials=credentials, + config=config, + ) + + # Return redirect to deploy page via HX-Redirect header + response = Response(status_code=200) + response.headers["HX-Redirect"] = f"/deploy/{session.id}" + return response + + +async def deploy_stream(request: Request) -> Response: + """SSE endpoint for deployment progress.""" + session_id = request.path_params["session_id"] + session = await sessions.get(session_id) + + if not session: + return JSONResponse({"error": "Session not found"}, status_code=404) + + async def event_generator(): + """Generate SSE events from playbook output.""" + try: + async for line in run_playbook(session): + # Send each line as an SSE event + yield { + "event": "output", + "data": line, + } + + # Send final status + yield { + "event": "complete", + "data": session.status.value, + } + except asyncio.CancelledError: + yield { + "event": "complete", + "data": "cancelled", + } + + return EventSourceResponse(event_generator()) + + +async def cancel_deploy(request: Request) -> Response: + """Cancel a running deployment.""" + session_id = request.path_params["session_id"] + session = await sessions.get(session_id) + + if not session: + return JSONResponse({"error": "Session not found"}, status_code=404) + + success = await cancel_deployment(session) + + if success: + return JSONResponse({"status": "cancelled"}) + return JSONResponse({"error": "Cannot cancel - deployment not running"}, status_code=400) + + +async def deploy_status(request: Request) -> Response: + """Get deployment status.""" + session_id = request.path_params["session_id"] + session = await sessions.get(session_id) + + if not session: + return 
JSONResponse({"error": "Session not found"}, status_code=404) + + return JSONResponse( + { + "status": session.status.value, + "exit_code": session.exit_code, + "error": session.error, + "config_path": session.config_path, + } + ) + + +async def download_file(request: Request) -> Response: + """Download a single config file.""" + session_id = request.path_params["session_id"] + file_path = request.path_params["path"] + + session = await sessions.get(session_id) + if not session or not session.config_path: + return HTMLResponse("Not found", status_code=404) + + full_path = Path(session.config_path) / file_path + # Security check: ensure path is within config directory + try: + full_path.resolve().relative_to(Path(session.config_path).resolve()) + except ValueError: + return HTMLResponse("Access denied", status_code=403) + + if not full_path.exists() or not full_path.is_file(): + return HTMLResponse("File not found", status_code=404) + + return FileResponse( + full_path, + filename=full_path.name, + media_type="application/octet-stream", + ) + + +async def download_zip(request: Request) -> Response: + """Download all config files as a ZIP.""" + session_id = request.path_params["session_id"] + session = await sessions.get(session_id) + + if not session or not session.config_path: + return HTMLResponse("Not found", status_code=404) + + config_dir = Path(session.config_path) + if not config_dir.exists(): + return HTMLResponse("Config directory not found", status_code=404) + + # Create ZIP in memory + buffer = BytesIO() + with zipfile.ZipFile(buffer, "w", zipfile.ZIP_DEFLATED) as zf: + for file_path in config_dir.rglob("*"): + if file_path.is_file(): + arcname = file_path.relative_to(config_dir) + zf.write(file_path, arcname) + + buffer.seek(0) + server_name = session.config.get("server_name", "algo") + + return StreamingResponse( + buffer, + media_type="application/zip", + headers={ + "Content-Disposition": f'attachment; filename="{server_name}-configs.zip"', + }, + ) + 
+ +# === Application === + + +routes = [ + # Pages + Route("/", endpoint=index, methods=["GET"]), + Route("/deploy/{session_id}", endpoint=deploy_page, methods=["GET"]), + Route("/configs/{session_id}", endpoint=success_page, methods=["GET"]), + # API - Provider + Route("/providers/{provider}/form", endpoint=provider_form, methods=["GET"]), + Route("/providers/{provider}/validate", endpoint=validate_provider, methods=["POST"]), + # API - Deploy + Route("/deploy", endpoint=start_deploy, methods=["POST"]), + Route("/deploy/{session_id}/stream", endpoint=deploy_stream, methods=["GET"]), + Route("/deploy/{session_id}/cancel", endpoint=cancel_deploy, methods=["POST"]), + Route("/deploy/{session_id}/status", endpoint=deploy_status, methods=["GET"]), + # API - Downloads + Route("/configs/{session_id}/files/{path:path}", endpoint=download_file, methods=["GET"]), + Route("/configs/{session_id}/zip", endpoint=download_zip, methods=["GET"]), + # Static files + Mount("/static", app=StaticFiles(directory=str(STATIC_DIR)), name="static"), +] + +app = Starlette(routes=routes, debug=os.environ.get("DEBUG", "").lower() == "true") diff --git a/app/providers.py b/app/providers.py new file mode 100644 index 000000000..92e281896 --- /dev/null +++ b/app/providers.py @@ -0,0 +1,328 @@ +"""Cloud provider credential schemas and validation.""" + +from dataclasses import dataclass +from typing import Any + +import httpx + + +@dataclass +class ProviderField: + """A credential field for a cloud provider.""" + + name: str + label: str + field_type: str = "password" # password, text, file + placeholder: str = "" + help_url: str = "" + + +@dataclass +class Provider: + """Cloud provider configuration.""" + + id: str + name: str + fields: list[ProviderField] + regions_api: str = "" + default_region: str = "" + + +# Provider definitions for MVP (simple token auth providers) +PROVIDERS: dict[str, Provider] = { + "digitalocean": Provider( + id="digitalocean", + name="DigitalOcean", + fields=[ + 
ProviderField( + name="do_token", + label="API Token", + field_type="password", + placeholder="Enter your DigitalOcean API token", + help_url="https://cloud.digitalocean.com/settings/api/tokens", + ), + ], + regions_api="https://api.digitalocean.com/v2/regions", + default_region="nyc3", + ), + "hetzner": Provider( + id="hetzner", + name="Hetzner Cloud", + fields=[ + ProviderField( + name="hcloud_token", + label="API Token", + field_type="password", + placeholder="Enter your Hetzner Cloud API token", + help_url="https://console.hetzner.cloud/projects", + ), + ], + regions_api="https://api.hetzner.cloud/v1/datacenters", + default_region="nbg1", + ), + "linode": Provider( + id="linode", + name="Linode", + fields=[ + ProviderField( + name="linode_token", + label="API Token", + field_type="password", + placeholder="Enter your Linode API token", + help_url="https://cloud.linode.com/profile/tokens", + ), + ], + regions_api="https://api.linode.com/v4/regions", + default_region="us-east", + ), + "vultr": Provider( + id="vultr", + name="Vultr", + fields=[ + ProviderField( + name="vultr_api_key", + label="API Key", + field_type="password", + placeholder="Enter your Vultr API key", + help_url="https://my.vultr.com/settings/#settingsapi", + ), + ], + regions_api="https://api.vultr.com/v2/regions", + default_region="ewr", + ), +} + + +@dataclass +class Region: + """A cloud provider region.""" + + id: str + name: str + available: bool = True + + +@dataclass +class ValidationResult: + """Result of credential validation.""" + + valid: bool + error: str = "" + regions: list[Region] | None = None + + +async def validate_digitalocean(token: str) -> ValidationResult: + """Validate DigitalOcean API token and fetch regions.""" + async with httpx.AsyncClient(timeout=15.0) as client: + try: + response = await client.get( + "https://api.digitalocean.com/v2/regions", + headers={ + "Authorization": f"Bearer {token}", + "Content-Type": "application/json", + }, + ) + + if response.status_code 
== 401: + return ValidationResult( + valid=False, + error="Invalid or expired API token. Please check your token and try again.", + ) + if response.status_code == 403: + return ValidationResult( + valid=False, + error="Token lacks required permissions. Ensure it has Read and Write scopes.", + ) + if response.status_code == 429: + return ValidationResult( + valid=False, + error="Rate limit exceeded. Please wait a few minutes and try again.", + ) + if response.status_code != 200: + return ValidationResult( + valid=False, + error=f"API error (HTTP {response.status_code}). Please try again later.", + ) + + data = response.json() + regions = [ + Region(id=r["slug"], name=r["name"], available=r.get("available", True)) + for r in data.get("regions", []) + if r.get("available", True) + ] + regions.sort(key=lambda r: r.id) + + return ValidationResult(valid=True, regions=regions) + + except httpx.TimeoutException: + return ValidationResult( + valid=False, + error="Connection timed out. Please check your internet connection.", + ) + except httpx.RequestError as e: + return ValidationResult(valid=False, error=f"Connection error: {e!s}") + + +async def validate_hetzner(token: str) -> ValidationResult: + """Validate Hetzner Cloud API token and fetch datacenters.""" + async with httpx.AsyncClient(timeout=15.0) as client: + try: + response = await client.get( + "https://api.hetzner.cloud/v1/datacenters", + headers={ + "Authorization": f"Bearer {token}", + "Content-Type": "application/json", + }, + ) + + if response.status_code == 401: + return ValidationResult( + valid=False, + error="Invalid or expired API token. Please check your token.", + ) + if response.status_code != 200: + return ValidationResult( + valid=False, + error=f"API error (HTTP {response.status_code}). 
Please try again.", + ) + + data = response.json() + regions = [ + Region( + id=dc["name"], + name=f"{dc['location']['city']} ({dc['description']})", + available=True, + ) + for dc in data.get("datacenters", []) + ] + regions.sort(key=lambda r: r.id) + + return ValidationResult(valid=True, regions=regions) + + except httpx.TimeoutException: + return ValidationResult( + valid=False, + error="Connection timed out. Please check your internet connection.", + ) + except httpx.RequestError as e: + return ValidationResult(valid=False, error=f"Connection error: {e!s}") + + +async def validate_linode(token: str) -> ValidationResult: + """Validate Linode API token and fetch regions.""" + async with httpx.AsyncClient(timeout=15.0) as client: + try: + response = await client.get( + "https://api.linode.com/v4/regions", + headers={ + "Authorization": f"Bearer {token}", + "Content-Type": "application/json", + }, + ) + + if response.status_code == 401: + return ValidationResult( + valid=False, + error="Invalid or expired API token. Please check your token.", + ) + if response.status_code != 200: + return ValidationResult( + valid=False, + error=f"API error (HTTP {response.status_code}). Please try again.", + ) + + data = response.json() + regions = [ + Region( + id=r["id"], + name=f"{r['label']} ({r['country'].upper()})", + available=r.get("status") == "ok", + ) + for r in data.get("data", []) + if r.get("status") == "ok" + ] + regions.sort(key=lambda r: r.id) + + return ValidationResult(valid=True, regions=regions) + + except httpx.TimeoutException: + return ValidationResult( + valid=False, + error="Connection timed out. 
Please check your internet connection.", + ) + except httpx.RequestError as e: + return ValidationResult(valid=False, error=f"Connection error: {e!s}") + + +async def validate_vultr(api_key: str) -> ValidationResult: + """Validate Vultr API key and fetch regions.""" + async with httpx.AsyncClient(timeout=15.0) as client: + try: + response = await client.get( + "https://api.vultr.com/v2/regions", + headers={ + "Authorization": f"Bearer {api_key}", + "Content-Type": "application/json", + }, + ) + + if response.status_code == 401: + return ValidationResult( + valid=False, + error="Invalid API key. Please check your key and try again.", + ) + if response.status_code != 200: + return ValidationResult( + valid=False, + error=f"API error (HTTP {response.status_code}). Please try again.", + ) + + data = response.json() + regions = [ + Region( + id=r["id"], + name=f"{r['city']} ({r['country']})", + available=True, + ) + for r in data.get("regions", []) + ] + regions.sort(key=lambda r: r.id) + + return ValidationResult(valid=True, regions=regions) + + except httpx.TimeoutException: + return ValidationResult( + valid=False, + error="Connection timed out. 
Please check your internet connection.", + ) + except httpx.RequestError as e: + return ValidationResult(valid=False, error=f"Connection error: {e!s}") + + +async def validate_credentials(provider_id: str, credentials: dict[str, Any]) -> ValidationResult: + """Validate credentials for a given provider.""" + if provider_id == "digitalocean": + token = credentials.get("do_token", "") + if not token: + return ValidationResult(valid=False, error="API token is required") + return await validate_digitalocean(token) + + if provider_id == "hetzner": + token = credentials.get("hcloud_token", "") + if not token: + return ValidationResult(valid=False, error="API token is required") + return await validate_hetzner(token) + + if provider_id == "linode": + token = credentials.get("linode_token", "") + if not token: + return ValidationResult(valid=False, error="API token is required") + return await validate_linode(token) + + if provider_id == "vultr": + api_key = credentials.get("vultr_api_key", "") + if not api_key: + return ValidationResult(valid=False, error="API key is required") + return await validate_vultr(api_key) + + return ValidationResult(valid=False, error=f"Unknown provider: {provider_id}") diff --git a/app/runner.py b/app/runner.py new file mode 100644 index 000000000..a6fd2fcf0 --- /dev/null +++ b/app/runner.py @@ -0,0 +1,193 @@ +"""Async subprocess wrapper for Ansible playbook execution.""" + +import asyncio +import json +import os +import re +from collections.abc import AsyncGenerator +from pathlib import Path +from typing import Any + +from .sessions import DeploymentStatus, Session, sessions + +# Get the algo project root directory +ALGO_DIR = Path(__file__).parent.parent + + +def build_extra_vars(session: Session) -> dict[str, Any]: + """Build the extra-vars dict for Ansible from session config.""" + config = session.config + credentials = session.credentials + + # Map provider IDs to Ansible provider aliases + provider_map = { + "digitalocean": 
"digitalocean", + "hetzner": "hetzner", + "linode": "linode", + "vultr": "vultr", + } + + extra_vars: dict[str, Any] = { + "provider": provider_map.get(session.provider, session.provider), + } + + # Add credentials based on provider + if session.provider == "digitalocean": + extra_vars["do_token"] = credentials.get("do_token", "") + elif session.provider == "hetzner": + extra_vars["hcloud_token"] = credentials.get("hcloud_token", "") + elif session.provider == "linode": + extra_vars["linode_token"] = credentials.get("linode_token", "") + elif session.provider == "vultr": + # Vultr uses a different approach - write temp config file + extra_vars["vultr_config"] = credentials.get("vultr_api_key", "") + + # Add server configuration + if config.get("server_name"): + extra_vars["server_name"] = config["server_name"] + if config.get("region"): + extra_vars["region"] = config["region"] + + # Add users list + if config.get("users"): + extra_vars["users"] = config["users"] + + # Add boolean options + bool_options = [ + ("wireguard_enabled", "wireguard_enabled"), + ("ipsec_enabled", "ipsec_enabled"), + ("dns_adblocking", "dns_adblocking"), + ("ssh_tunneling", "ssh_tunneling"), + ("ondemand_cellular", "ondemand_cellular"), + ("ondemand_wifi", "ondemand_wifi"), + ("store_pki", "store_pki"), + ] + + for config_key, ansible_var in bool_options: + if config_key in config: + extra_vars[ansible_var] = config[config_key] + + return extra_vars + + +def strip_ansi(text: str) -> str: + """Remove ANSI escape codes from text.""" + ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])") + return ansi_escape.sub("", text) + + +async def run_playbook(session: Session) -> AsyncGenerator[str, None]: + """Run ansible-playbook with streaming output. + + This function uses asyncio.create_subprocess_exec which is safe from + shell injection - arguments are passed as a list, not interpolated. 
+ + Args: + session: The deployment session + + Yields: + Lines of output from the playbook run + """ + extra_vars = build_extra_vars(session) + + # Build command as list (safe - no shell interpolation) + cmd = [ + "uv", + "run", + "ansible-playbook", + "main.yml", + "--extra-vars", + json.dumps(extra_vars), + ] + + # Set up environment + env = os.environ.copy() + env["ANSIBLE_FORCE_COLOR"] = "0" # Disable colors for cleaner output + env["ANSIBLE_NOCOLOR"] = "1" + env["PYTHONUNBUFFERED"] = "1" + + try: + # Start the process using create_subprocess_exec (safe, no shell) + process = await asyncio.create_subprocess_exec( + *cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.STDOUT, + cwd=str(ALGO_DIR), + env=env, + ) + + session.process = process + session.status = DeploymentStatus.RUNNING + await sessions.update(session) + + # Clear credentials from memory now that we've started + session.clear_credentials() + + # Stream output line by line + config_path_pattern = re.compile(r"configs/[\d.]+|configs/localhost") + + if process.stdout: + async for line_bytes in process.stdout: + line = strip_ansi(line_bytes.decode("utf-8", errors="replace")).rstrip() + if line: + session.add_output(line) + yield line + + # Try to capture config path from output + if match := config_path_pattern.search(line): + session.config_path = str(ALGO_DIR / match.group(0)) + + # Wait for process to complete + await process.wait() + + session.exit_code = process.returncode + if process.returncode == 0: + session.status = DeploymentStatus.SUCCESS + yield "[SUCCESS] Deployment completed successfully!" 
+ else: + session.status = DeploymentStatus.FAILED + session.error = f"Playbook exited with code {process.returncode}" + yield f"[ERROR] Deployment failed with exit code {process.returncode}" + + except asyncio.CancelledError: + session.status = DeploymentStatus.CANCELLED + session.error = "Deployment was cancelled" + if session.process: + session.process.terminate() + await session.process.wait() + yield "[CANCELLED] Deployment was cancelled" + raise + + except Exception as e: + session.status = DeploymentStatus.FAILED + session.error = str(e) + yield f"[ERROR] {e!s}" + + finally: + session.process = None + await sessions.update(session) + + +async def cancel_deployment(session: Session) -> bool: + """Cancel a running deployment. + + Args: + session: The session to cancel + + Returns: + True if cancellation was successful + """ + if session.process and session.status == DeploymentStatus.RUNNING: + session.process.terminate() + try: + await asyncio.wait_for(session.process.wait(), timeout=10.0) + except asyncio.TimeoutError: + session.process.kill() + await session.process.wait() + + session.status = DeploymentStatus.CANCELLED + session.error = "Deployment was cancelled by user" + await sessions.update(session) + return True + + return False diff --git a/app/sessions.py b/app/sessions.py new file mode 100644 index 000000000..e08e2fbdd --- /dev/null +++ b/app/sessions.py @@ -0,0 +1,109 @@ +"""In-memory session management for deployments.""" + +import asyncio +import uuid +from dataclasses import dataclass, field +from datetime import datetime, timedelta +from enum import Enum +from typing import Any + + +class DeploymentStatus(Enum): + """Status of a deployment session.""" + + PENDING = "pending" + VALIDATING = "validating" + RUNNING = "running" + SUCCESS = "success" + FAILED = "failed" + CANCELLED = "cancelled" + + +@dataclass +class Session: + """A deployment session.""" + + id: str + provider: str + credentials: dict[str, Any] + config: dict[str, Any] + status: 
DeploymentStatus = DeploymentStatus.PENDING + output_lines: list[str] = field(default_factory=list) + config_path: str | None = None + error: str | None = None + exit_code: int | None = None + created_at: datetime = field(default_factory=datetime.now) + process: asyncio.subprocess.Process | None = None + + def is_expired(self) -> bool: + """Check if this session has expired (1 hour TTL).""" + return datetime.now() - self.created_at > timedelta(hours=1) + + def add_output(self, line: str) -> None: + """Add a line to the output buffer.""" + self.output_lines.append(line) + + def clear_credentials(self) -> None: + """Clear credentials from memory after deployment starts.""" + self.credentials = {} + + +class SessionStore: + """Thread-safe in-memory session store.""" + + def __init__(self) -> None: + self._sessions: dict[str, Session] = {} + self._lock = asyncio.Lock() + + async def create( + self, + provider: str, + credentials: dict[str, Any], + config: dict[str, Any], + ) -> Session: + """Create a new session.""" + session_id = str(uuid.uuid4()) + session = Session( + id=session_id, + provider=provider, + credentials=credentials, + config=config, + ) + + async with self._lock: + self._sessions[session_id] = session + # Cleanup expired sessions + await self._cleanup_expired() + + return session + + async def get(self, session_id: str) -> Session | None: + """Get a session by ID.""" + async with self._lock: + session = self._sessions.get(session_id) + if session and session.is_expired(): + del self._sessions[session_id] + return None + return session + + async def update(self, session: Session) -> None: + """Update a session.""" + async with self._lock: + if session.id in self._sessions: + self._sessions[session.id] = session + + async def delete(self, session_id: str) -> None: + """Delete a session.""" + async with self._lock: + if session_id in self._sessions: + del self._sessions[session_id] + + async def _cleanup_expired(self) -> None: + """Remove expired sessions 
(called during create).""" + expired = [sid for sid, session in self._sessions.items() if session.is_expired()] + for sid in expired: + del self._sessions[sid] + + +# Global session store instance +sessions = SessionStore() diff --git a/app/static/algo-logo.png b/app/static/algo-logo.png new file mode 100644 index 000000000..eb56459e8 Binary files /dev/null and b/app/static/algo-logo.png differ diff --git a/app/static/htmx-sse.js b/app/static/htmx-sse.js new file mode 100644 index 000000000..1ffe67469 --- /dev/null +++ b/app/static/htmx-sse.js @@ -0,0 +1,369 @@ +/* +Server Sent Events Extension +============================ +This extension adds support for Server Sent Events to htmx. See /www/extensions/sse.md for usage instructions. + +*/ + +(function() { + + /** @type {import("../htmx").HtmxInternalApi} */ + var api; + + htmx.defineExtension("sse", { + + /** + * Init saves the provided reference to the internal HTMX API. + * + * @param {import("../htmx").HtmxInternalApi} api + * @returns void + */ + init: function(apiRef) { + // store a reference to the internal API. + api = apiRef; + + // set a function in the public API for creating new EventSource objects + if (htmx.createEventSource == undefined) { + htmx.createEventSource = createEventSource; + } + }, + + /** + * onEvent handles all events passed to this extension. 
+ * + * @param {string} name + * @param {Event} evt + * @returns void + */ + onEvent: function(name, evt) { + + var parent = evt.target || evt.detail.elt; + switch (name) { + + case "htmx:beforeCleanupElement": + var internalData = api.getInternalData(parent) + // Try to remove remove an EventSource when elements are removed + if (internalData.sseEventSource) { + internalData.sseEventSource.close(); + } + + return; + + // Try to create EventSources when elements are processed + case "htmx:afterProcessNode": + ensureEventSourceOnElement(parent); + } + } + }); + + /////////////////////////////////////////////// + // HELPER FUNCTIONS + /////////////////////////////////////////////// + + + /** + * createEventSource is the default method for creating new EventSource objects. + * it is hoisted into htmx.config.createEventSource to be overridden by the user, if needed. + * + * @param {string} url + * @returns EventSource + */ + function createEventSource(url) { + return new EventSource(url, { withCredentials: true }); + } + + function splitOnWhitespace(trigger) { + return trigger.trim().split(/\s+/); + } + + function getLegacySSEURL(elt) { + var legacySSEValue = api.getAttributeValue(elt, "hx-sse"); + if (legacySSEValue) { + var values = splitOnWhitespace(legacySSEValue); + for (var i = 0; i < values.length; i++) { + var value = values[i].split(/:(.+)/); + if (value[0] === "connect") { + return value[1]; + } + } + } + } + + function getLegacySSESwaps(elt) { + var legacySSEValue = api.getAttributeValue(elt, "hx-sse"); + var returnArr = []; + if (legacySSEValue != null) { + var values = splitOnWhitespace(legacySSEValue); + for (var i = 0; i < values.length; i++) { + var value = values[i].split(/:(.+)/); + if (value[0] === "swap") { + returnArr.push(value[1]); + } + } + } + return returnArr; + } + + /** + * registerSSE looks for attributes that can contain sse events, right + * now hx-trigger and sse-swap and adds listeners based on these attributes too + * the closest event 
source + * + * @param {HTMLElement} elt + */ + function registerSSE(elt) { + // Add message handlers for every `sse-swap` attribute + queryAttributeOnThisOrChildren(elt, "sse-swap").forEach(function (child) { + // Find closest existing event source + var sourceElement = api.getClosestMatch(child, hasEventSource); + if (sourceElement == null) { + // api.triggerErrorEvent(elt, "htmx:noSSESourceError") + return null; // no eventsource in parentage, orphaned element + } + + // Set internalData and source + var internalData = api.getInternalData(sourceElement); + var source = internalData.sseEventSource; + + var sseSwapAttr = api.getAttributeValue(child, "sse-swap"); + if (sseSwapAttr) { + var sseEventNames = sseSwapAttr.split(","); + } else { + var sseEventNames = getLegacySSESwaps(child); + } + + for (var i = 0; i < sseEventNames.length; i++) { + var sseEventName = sseEventNames[i].trim(); + var listener = function(event) { + + // If the source is missing then close SSE + if (maybeCloseSSESource(sourceElement)) { + return; + } + + // If the body no longer contains the element, remove the listener + if (!api.bodyContains(child)) { + source.removeEventListener(sseEventName, listener); + return; + } + + // swap the response into the DOM and trigger a notification + if(!api.triggerEvent(elt, "htmx:sseBeforeMessage", event)) { + return; + } + swap(child, event.data); + api.triggerEvent(elt, "htmx:sseMessage", event); + }; + + // Register the new listener + api.getInternalData(child).sseEventListener = listener; + source.addEventListener(sseEventName, listener); + } + }); + + // Add message handlers for every `hx-trigger="sse:*"` attribute + queryAttributeOnThisOrChildren(elt, "hx-trigger").forEach(function(child) { + // Find closest existing event source + var sourceElement = api.getClosestMatch(child, hasEventSource); + if (sourceElement == null) { + // api.triggerErrorEvent(elt, "htmx:noSSESourceError") + return null; // no eventsource in parentage, orphaned element + } 
+ + // Set internalData and source + var internalData = api.getInternalData(sourceElement); + var source = internalData.sseEventSource; + + var sseEventName = api.getAttributeValue(child, "hx-trigger"); + if (sseEventName == null) { + return; + } + + // Only process hx-triggers for events with the "sse:" prefix + if (sseEventName.slice(0, 4) != "sse:") { + return; + } + + // remove the sse: prefix from here on out + sseEventName = sseEventName.substr(4); + + var listener = function() { + if (maybeCloseSSESource(sourceElement)) { + return + } + + if (!api.bodyContains(child)) { + source.removeEventListener(sseEventName, listener); + } + } + }); + } + + /** + * ensureEventSourceOnElement creates a new EventSource connection on the provided element. + * If a usable EventSource already exists, then it is returned. If not, then a new EventSource + * is created and stored in the element's internalData. + * @param {HTMLElement} elt + * @param {number} retryCount + * @returns {EventSource | null} + */ + function ensureEventSourceOnElement(elt, retryCount) { + + if (elt == null) { + return null; + } + + // handle extension source creation attribute + queryAttributeOnThisOrChildren(elt, "sse-connect").forEach(function(child) { + var sseURL = api.getAttributeValue(child, "sse-connect"); + if (sseURL == null) { + return; + } + + ensureEventSource(child, sseURL, retryCount); + }); + + // handle legacy sse, remove for HTMX2 + queryAttributeOnThisOrChildren(elt, "hx-sse").forEach(function(child) { + var sseURL = getLegacySSEURL(child); + if (sseURL == null) { + return; + } + + ensureEventSource(child, sseURL, retryCount); + }); + + registerSSE(elt); + } + + function ensureEventSource(elt, url, retryCount) { + var source = htmx.createEventSource(url); + + source.onerror = function(err) { + + // Log an error event + api.triggerErrorEvent(elt, "htmx:sseError", { error: err, source: source }); + + // If parent no longer exists in the document, then clean up this EventSource + if 
(maybeCloseSSESource(elt)) { + return; + } + + // Otherwise, try to reconnect the EventSource + if (source.readyState === EventSource.CLOSED) { + retryCount = retryCount || 0; + var timeout = Math.random() * (2 ^ retryCount) * 500; + window.setTimeout(function() { + ensureEventSourceOnElement(elt, Math.min(7, retryCount + 1)); + }, timeout); + } + }; + + source.onopen = function(evt) { + api.triggerEvent(elt, "htmx:sseOpen", { source: source }); + } + + api.getInternalData(elt).sseEventSource = source; + } + + /** + * maybeCloseSSESource confirms that the parent element still exists. + * If not, then any associated SSE source is closed and the function returns true. + * + * @param {HTMLElement} elt + * @returns boolean + */ + function maybeCloseSSESource(elt) { + if (!api.bodyContains(elt)) { + var source = api.getInternalData(elt).sseEventSource; + if (source != undefined) { + source.close(); + // source = null + return true; + } + } + return false; + } + + /** + * queryAttributeOnThisOrChildren returns all nodes that contain the requested attributeName, INCLUDING THE PROVIDED ROOT ELEMENT. + * + * @param {HTMLElement} elt + * @param {string} attributeName + */ + function queryAttributeOnThisOrChildren(elt, attributeName) { + + var result = []; + + // If the parent element also contains the requested attribute, then add it to the results too. 
+ if (api.hasAttribute(elt, attributeName)) { + result.push(elt); + } + + // Search all child nodes that match the requested attribute + elt.querySelectorAll("[" + attributeName + "], [data-" + attributeName + "]").forEach(function(node) { + result.push(node); + }); + + return result; + } + + /** + * @param {HTMLElement} elt + * @param {string} content + */ + function swap(elt, content) { + + api.withExtensions(elt, function(extension) { + content = extension.transformResponse(content, null, elt); + }); + + var swapSpec = api.getSwapSpecification(elt); + var target = api.getTarget(elt); + var settleInfo = api.makeSettleInfo(elt); + + api.selectAndSwap(swapSpec.swapStyle, target, elt, content, settleInfo); + + settleInfo.elts.forEach(function(elt) { + if (elt.classList) { + elt.classList.add(htmx.config.settlingClass); + } + api.triggerEvent(elt, 'htmx:beforeSettle'); + }); + + // Handle settle tasks (with delay if requested) + if (swapSpec.settleDelay > 0) { + setTimeout(doSettle(settleInfo), swapSpec.settleDelay); + } else { + doSettle(settleInfo)(); + } + } + + /** + * doSettle mirrors much of the functionality in htmx that + * settles elements after their content has been swapped. 
+ * TODO: this should be published by htmx, and not duplicated here + * @param {import("../htmx").HtmxSettleInfo} settleInfo + * @returns () => void + */ + function doSettle(settleInfo) { + + return function() { + settleInfo.tasks.forEach(function(task) { + task.call(); + }); + + settleInfo.elts.forEach(function(elt) { + if (elt.classList) { + elt.classList.remove(htmx.config.settlingClass); + } + api.triggerEvent(elt, 'htmx:afterSettle'); + }); + } + } + + function hasEventSource(node) { + return api.getInternalData(node).sseEventSource != null; + } + +})(); diff --git a/app/static/htmx.min.js b/app/static/htmx.min.js new file mode 100644 index 000000000..2172f7ab7 --- /dev/null +++ b/app/static/htmx.min.js @@ -0,0 +1 @@ +(function(e,t){if(typeof define==="function"&&define.amd){define([],t)}else if(typeof module==="object"&&module.exports){module.exports=t()}else{e.htmx=e.htmx||t()}})(typeof self!=="undefined"?self:this,function(){return function(){"use strict";var Q={onLoad:F,process:zt,on:de,off:ge,trigger:ce,ajax:Nr,find:C,findAll:f,closest:v,values:function(e,t){var r=dr(e,t||"post");return r.values},remove:_,addClass:z,removeClass:n,toggleClass:$,takeClass:W,defineExtension:Ur,removeExtension:Br,logAll:V,logNone:j,logger:null,config:{historyEnabled:true,historyCacheSize:10,refreshOnHistoryMiss:false,defaultSwapStyle:"innerHTML",defaultSwapDelay:0,defaultSettleDelay:20,includeIndicatorStyles:true,indicatorClass:"htmx-indicator",requestClass:"htmx-request",addedClass:"htmx-added",settlingClass:"htmx-settling",swappingClass:"htmx-swapping",allowEval:true,allowScriptTags:true,inlineScriptNonce:"",attributesToSettle:["class","style","width","height"],withCredentials:false,timeout:0,wsReconnectDelay:"full-jitter",wsBinaryType:"blob",disableSelector:"[hx-disable], 
[data-hx-disable]",useTemplateFragments:false,scrollBehavior:"smooth",defaultFocusScroll:false,getCacheBusterParam:false,globalViewTransitions:false,methodsThatUseUrlParams:["get"],selfRequestsOnly:false,ignoreTitle:false,scrollIntoViewOnBoost:true,triggerSpecsCache:null},parseInterval:d,_:t,createEventSource:function(e){return new EventSource(e,{withCredentials:true})},createWebSocket:function(e){var t=new WebSocket(e,[]);t.binaryType=Q.config.wsBinaryType;return t},version:"1.9.12"};var r={addTriggerHandler:Lt,bodyContains:se,canAccessLocalStorage:U,findThisElement:xe,filterValues:yr,hasAttribute:o,getAttributeValue:te,getClosestAttributeValue:ne,getClosestMatch:c,getExpressionVars:Hr,getHeaders:xr,getInputValues:dr,getInternalData:ae,getSwapSpecification:wr,getTriggerSpecs:it,getTarget:ye,makeFragment:l,mergeObjects:le,makeSettleInfo:T,oobSwap:Ee,querySelectorExt:ue,selectAndSwap:je,settleImmediately:nr,shouldCancel:ut,triggerEvent:ce,triggerErrorEvent:fe,withExtensions:R};var w=["get","post","put","delete","patch"];var i=w.map(function(e){return"[hx-"+e+"], [data-hx-"+e+"]"}).join(", ");var S=e("head"),q=e("title"),H=e("svg",true);function e(e,t){return new RegExp("<"+e+"(\\s[^>]*>|>)([\\s\\S]*?)<\\/"+e+">",!!t?"gim":"im")}function d(e){if(e==undefined){return undefined}let t=NaN;if(e.slice(-2)=="ms"){t=parseFloat(e.slice(0,-2))}else if(e.slice(-1)=="s"){t=parseFloat(e.slice(0,-1))*1e3}else if(e.slice(-1)=="m"){t=parseFloat(e.slice(0,-1))*1e3*60}else{t=parseFloat(e)}return isNaN(t)?undefined:t}function ee(e,t){return e.getAttribute&&e.getAttribute(t)}function o(e,t){return e.hasAttribute&&(e.hasAttribute(t)||e.hasAttribute("data-"+t))}function te(e,t){return ee(e,t)||ee(e,"data-"+t)}function u(e){return e.parentElement}function re(){return document}function c(e,t){while(e&&!t(e)){e=u(e)}return e?e:null}function L(e,t,r){var n=te(t,r);var i=te(t,"hx-disinherit");if(e!==t&&i&&(i==="*"||i.split(" ").indexOf(r)>=0)){return"unset"}else{return n}}function ne(t,r){var 
n=null;c(t,function(e){return n=L(t,e,r)});if(n!=="unset"){return n}}function h(e,t){var r=e.matches||e.matchesSelector||e.msMatchesSelector||e.mozMatchesSelector||e.webkitMatchesSelector||e.oMatchesSelector;return r&&r.call(e,t)}function A(e){var t=/<([a-z][^\/\0>\x20\t\r\n\f]*)/i;var r=t.exec(e);if(r){return r[1].toLowerCase()}else{return""}}function s(e,t){var r=new DOMParser;var n=r.parseFromString(e,"text/html");var i=n.body;while(t>0){t--;i=i.firstChild}if(i==null){i=re().createDocumentFragment()}return i}function N(e){return/"+n+"",0);var a=i.querySelector("template").content;if(Q.config.allowScriptTags){oe(a.querySelectorAll("script"),function(e){if(Q.config.inlineScriptNonce){e.nonce=Q.config.inlineScriptNonce}e.htmxExecuted=navigator.userAgent.indexOf("Firefox")===-1})}else{oe(a.querySelectorAll("script"),function(e){_(e)})}return a}switch(r){case"thead":case"tbody":case"tfoot":case"colgroup":case"caption":return s("
+
+
+ + Provider: {{ provider.name }} • + Region: {{ session.config.region }} +
+Select the region where your VPN server will be deployed
+.conf file into your WireGuard app, or scan the QR code
+ .mobileconfig (Apple) or .p12 certificate
+ + Download individual files or get everything as a ZIP +
+
+ Run ./algo update-users to add or remove VPN users
+
+ ssh -F configs/*/ssh_config algo
+