From 73a5c7c370b70bca285516b4afe03a042761e058 Mon Sep 17 00:00:00 2001
From: zap
Date: Wed, 4 Mar 2026 19:15:17 +0000
Subject: [PATCH] feat(ops): add backup and LiteLLM sync maintenance scripts

---
 litellm-config.patched.yaml    |  53 +++++++++++++
 scripts/backup-to-minio.sh     |  54 +++++++++++++
 scripts/sync-litellm-models.py | 134 +++++++++++++++++++++++++++++++++
 3 files changed, 241 insertions(+)
 create mode 100644 litellm-config.patched.yaml
 create mode 100755 scripts/backup-to-minio.sh
 create mode 100644 scripts/sync-litellm-models.py

diff --git a/litellm-config.patched.yaml b/litellm-config.patched.yaml
new file mode 100644
index 0000000..2074c3f
--- /dev/null
+++ b/litellm-config.patched.yaml
@@ -0,0 +1,53 @@
+model_list:
+  - model_name: gpt-4o
+    litellm_params:
+      model: openai/gpt-4o
+      api_base: "os.environ/OPENAI_BASE_URL"
+      api_key: "os.environ/OPENAI_API_KEY"
+
+  - model_name: gpt-4-turbo
+    litellm_params:
+      model: openai/gpt-4-turbo
+      api_base: "os.environ/OPENAI_BASE_URL"
+      api_key: "os.environ/OPENAI_API_KEY"
+
+  - model_name: claude-3-5-sonnet
+    litellm_params:
+      model: anthropic/claude-3-5-sonnet-20240620
+      api_key: "os.environ/ANTHROPIC_API_KEY"
+
+  - model_name: claude-3-opus
+    litellm_params:
+      model: anthropic/claude-3-opus-20240229
+      api_key: "os.environ/ANTHROPIC_API_KEY"
+
+  - model_name: gemini-1.5-pro
+    litellm_params:
+      model: gemini/gemini-1.5-pro
+      api_key: "os.environ/GOOGLE_API_KEY"
+
+  - model_name: gemini-1.5-flash
+    litellm_params:
+      model: gemini/gemini-1.5-flash
+      api_key: "os.environ/GOOGLE_API_KEY"
+
+  - model_name: openrouter-claude-3.5-sonnet
+    litellm_params:
+      model: openrouter/anthropic/claude-3.5-sonnet
+      api_key: "os.environ/OPENROUTER_API_KEY"
+
+  - model_name: opencode-gpt-5
+    litellm_params:
+      model: openai/opencode/gpt-5
+      api_base: "https://opencode.ai/zen/v1"
+      api_key: "os.environ/ZEN_API_KEY"
+
+  - model_name: opencode-gpt-5-codex
+    litellm_params:
+      model: openai/opencode/gpt-5-codex
+      api_base: "https://opencode.ai/zen/v1"
+      api_key: "os.environ/ZEN_API_KEY"
+
+litellm_settings:
+  drop_params: true
+  set_verbose: false
diff --git a/scripts/backup-to-minio.sh b/scripts/backup-to-minio.sh
new file mode 100755
index 0000000..f15f85d
--- /dev/null
+++ b/scripts/backup-to-minio.sh
@@ -0,0 +1,54 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+CREDS_FILE="${CREDS_FILE:-$HOME/.openclaw/credentials/minio-zap.env}"
+WORKSPACE="${WORKSPACE:-$HOME/.openclaw/workspace}"
+STAMP="$(date -u +%Y%m%dT%H%M%SZ)"
+PREFIX="${PREFIX:-workspace-backups/$STAMP}"
+
+if [[ ! -f "$CREDS_FILE" ]]; then
+  echo "Missing creds file: $CREDS_FILE" >&2
+  exit 1
+fi
+
+# shellcheck disable=SC1090
+source "$CREDS_FILE"
+
+if [[ -z "${MINIO_ENDPOINT:-}" || -z "${MINIO_ACCESS_KEY:-}" || -z "${MINIO_SECRET_KEY:-}" || -z "${MINIO_BUCKET:-}" ]]; then
+  echo "Missing required MinIO env vars in $CREDS_FILE" >&2
+  exit 1
+fi
+
+# Backup only durable assistant context files
+INCLUDE=(
+  "MEMORY.md"
+  "USER.md"
+  "TOOLS.md"
+  "HEARTBEAT.md"
+  "memory"
+)
+
+TMPDIR="$(mktemp -d)"
+trap 'rm -rf "$TMPDIR"' EXIT
+
+for p in "${INCLUDE[@]}"; do
+  if [[ -e "$WORKSPACE/$p" ]]; then
+    cp -a "$WORKSPACE/$p" "$TMPDIR/"
+  fi
+done
+
+MC_BIN="${MC_BIN:-$WORKSPACE/bin/mc}"
+if [[ ! -x "$MC_BIN" ]]; then
+  MC_BIN="$(command -v mc || true)"
+fi
+if [[ -z "$MC_BIN" || ! -x "$MC_BIN" ]]; then
+  echo "MinIO client not found. Set MC_BIN or install mc." >&2
+  exit 1
+fi
+
+"$MC_BIN" alias set minio "$MINIO_ENDPOINT" "$MINIO_ACCESS_KEY" "$MINIO_SECRET_KEY" >/dev/null
+"$MC_BIN" mb --ignore-existing "minio/$MINIO_BUCKET" >/dev/null
+"$MC_BIN" mirror --overwrite "$TMPDIR" "minio/$MINIO_BUCKET/$PREFIX" >/dev/null
+"$MC_BIN" ls --recursive "minio/$MINIO_BUCKET/$PREFIX"
+
+echo "Backup complete: s3://$MINIO_BUCKET/$PREFIX"
diff --git a/scripts/sync-litellm-models.py b/scripts/sync-litellm-models.py
new file mode 100644
index 0000000..23212c7
--- /dev/null
+++ b/scripts/sync-litellm-models.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python3
+import json
+import os
+import re
+import shutil
+import sys
+import urllib.request
+from datetime import datetime, timezone
+from pathlib import Path
+
+CONFIG_PATH = Path.home() / ".openclaw" / "openclaw.json"
+TIMEOUT = 12
+
+
+def die(msg: str, code: int = 1):
+    print(f"ERROR: {msg}", file=sys.stderr)
+    raise SystemExit(code)
+
+
+def normalize_base(url: str) -> str:
+    u = (url or "").rstrip("/")
+    if not u:
+        return u
+    return u
+
+
+def fetch_models(base_url: str, api_key: str | None):
+    url = normalize_base(base_url)
+    if not url:
+        die("litellm.baseUrl is empty")
+    if not url.endswith("/v1"):
+        url = f"{url}/v1"
+    models_url = f"{url}/models"
+
+    req = urllib.request.Request(models_url, method="GET")
+    req.add_header("Accept", "application/json")
+    if api_key:
+        req.add_header("Authorization", f"Bearer {api_key}")
+
+    with urllib.request.urlopen(req, timeout=TIMEOUT) as resp:
+        payload = json.loads(resp.read().decode("utf-8", errors="replace"))
+
+    # OpenAI-style: {"data": [{"id": "..."}, ...]}
+    if isinstance(payload, dict) and isinstance(payload.get("data"), list):
+        rows = payload["data"]
+    elif isinstance(payload, list):
+        rows = payload
+    else:
+        die(f"Unexpected /models payload shape: {type(payload).__name__}")
+
+    ids = []
+    for row in rows:
+        if isinstance(row, dict):
+            mid = row.get("id") or row.get("model")
+        else:
+            mid = None
+        if isinstance(mid, str) and mid.strip():
+            ids.append(mid.strip())
+
+    # stable unique preserve order
+    seen = set()
+    out = []
+    for mid in ids:
+        if mid not in seen:
+            seen.add(mid)
+            out.append(mid)
+    return out
+
+
+def main():
+    if not CONFIG_PATH.exists():
+        die(f"Config not found: {CONFIG_PATH}")
+
+    raw = CONFIG_PATH.read_text(encoding="utf-8")
+    cfg = json.loads(raw)
+
+    providers = (((cfg.get("models") or {}).get("providers") or {}))
+    litellm = providers.get("litellm")
+    if not isinstance(litellm, dict):
+        die("models.providers.litellm not found")
+
+    base_url = litellm.get("baseUrl")
+    api_key = litellm.get("apiKey") or os.environ.get("LITELLM_API_KEY")
+
+    model_ids = fetch_models(base_url, api_key)
+    if not model_ids:
+        die("No models returned from LiteLLM /v1/models")
+
+    existing_models = litellm.get("models") if isinstance(litellm.get("models"), list) else []
+    existing_by_id = {
+        m.get("id"): m
+        for m in existing_models
+        if isinstance(m, dict) and isinstance(m.get("id"), str)
+    }
+
+    new_models = []
+    for mid in model_ids:
+        if mid in existing_by_id:
+            m = dict(existing_by_id[mid])
+            m["id"] = mid
+            m.setdefault("name", mid)
+            m.setdefault("input", ["text"])
+            new_models.append(m)
+        else:
+            new_models.append({"id": mid, "name": mid, "input": ["text"]})
+
+    litellm["models"] = new_models
+
+    # Sync agents.defaults.models entries: keep non-litellm, rebuild litellm/* only.
+    defaults = ((cfg.get("agents") or {}).get("defaults") or {})
+    model_map = defaults.get("models") if isinstance(defaults.get("models"), dict) else {}
+
+    preserved = {k: v for k, v in model_map.items() if not k.startswith("litellm/")}
+
+    # preserve any existing per-model settings for still-present models
+    for mid in model_ids:
+        key = f"litellm/{mid}"
+        preserved[key] = model_map.get(key, {})
+
+    defaults["models"] = preserved
+
+    # write backup + updated config
+    ts = datetime.now(timezone.utc).strftime("%Y%m%dT%H%M%SZ")
+    backup = CONFIG_PATH.with_suffix(f".json.bak-{ts}")
+    shutil.copy2(CONFIG_PATH, backup)
+
+    CONFIG_PATH.write_text(json.dumps(cfg, indent=2) + "\n", encoding="utf-8")
+
+    print(f"Synced {len(model_ids)} LiteLLM models")
+    print(f"Backup: {backup}")
+
+
+if __name__ == "__main__":
+    main()