Files
claude-code/mcp/llm-router/delegate.py
OpenCode Test df6cf94dae feat(external-llm): add external LLM integration (fc-004)
Implements external LLM routing via opencode CLI for:
- GitHub Copilot (gpt-5.2, claude-sonnet-4.5, claude-haiku-4.5, o3, gemini-3-pro)
- Z.AI (glm-4.7 for code generation)
- OpenCode native (big-pickle)

Components:
- mcp/llm-router/invoke.py: Main router with task-based model selection
- mcp/llm-router/delegate.py: Agent delegation helper (respects external mode)
- mcp/llm-router/toggle.py: Enable/disable external-only mode
- mcp/llm-router/providers/: CLI wrappers for opencode and gemini

Features:
- Persistent toggle via state/external-mode.json
- Task routing: reasoning -> gpt-5.2, code-gen -> glm-4.7, long-context -> gemini
- Claude tier mapping: opus -> gpt-5.2, sonnet -> claude-sonnet-4.5, haiku -> claude-haiku-4.5
- Session-start hook announces when external mode is active
- Natural language toggle support via component registry

Plan: gleaming-routing-mercury

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-08 13:34:35 -08:00

126 lines
3.3 KiB
Python
Executable File

#!/usr/bin/env python3
"""
Agent delegation helper. Routes to external or Claude based on mode.
Usage:
delegate.py --tier sonnet -p "prompt"
delegate.py --tier opus -p "complex reasoning task" --json
"""
import argparse
import json
import subprocess
import sys
from pathlib import Path
# Where persistent router state (mode toggle, model policy) lives.
STATE_DIR = Path.home() / ".claude/state"
# Directory containing this script and its sibling invoke.py.
ROUTER_DIR = Path(__file__).parent


def is_external_mode() -> bool:
    """Return True when external-only routing is enabled.

    Reads ``STATE_DIR/external-mode.json`` and returns its ``enabled``
    flag. A missing file means the toggle was never turned on, so the
    default is False.
    """
    mode_file = STATE_DIR / "external-mode.json"
    # EAFP: open directly instead of exists()-then-open — avoids a
    # TOCTOU race if the toggle script removes the file concurrently.
    try:
        with open(mode_file) as f:
            data = json.load(f)
    except FileNotFoundError:
        return False
    return data.get("enabled", False)
def get_external_model(tier: str) -> str:
    """Return the external model mapped to a Claude *tier*.

    Looks up ``claude_to_external_map`` in ``STATE_DIR/model-policy.json``.

    Raises:
        ValueError: if the policy has no mapping for *tier*.
    """
    policy = json.loads((STATE_DIR / "model-policy.json").read_text())
    tier_map = policy.get("claude_to_external_map", {})
    try:
        return tier_map[tier]
    except KeyError:
        raise ValueError(f"No external mapping for tier: {tier}") from None
def delegate(tier: str, prompt: str, use_json: bool = False) -> str:
    """Send *prompt* to the model appropriate for the current mode.

    Args:
        tier: Claude tier (opus, sonnet, haiku).
        prompt: The prompt text.
        use_json: Return JSON output.

    Returns:
        Model response as string.

    Raises:
        RuntimeError: if the underlying CLI invocation exits non-zero.
    """
    if not is_external_mode():
        # Default path: shell out to the Claude CLI directly.
        proc = subprocess.run(
            ["claude", "--print", "--model", tier, prompt],
            capture_output=True,
            text=True,
        )
        if proc.returncode != 0:
            raise RuntimeError(f"Claude failed: {proc.stderr}")
        answer = proc.stdout.strip()
        if not use_json:
            return answer
        # Wrap the plain-text answer in the same JSON envelope invoke.py
        # produces for external models.
        return json.dumps(
            {
                "model": f"claude/{tier}",
                "response": answer,
                "success": True,
            },
            indent=2,
        )

    # External mode: route through invoke.py with the mapped model.
    command = [
        sys.executable,
        str(ROUTER_DIR / "invoke.py"),
        "--model",
        get_external_model(tier),
        "-p",
        prompt,
    ]
    if use_json:
        command.append("--json")
    proc = subprocess.run(command, capture_output=True, text=True)
    if proc.returncode != 0:
        raise RuntimeError(f"External invoke failed: {proc.stderr}")
    return proc.stdout.strip()
def main():
    """CLI entry point: parse arguments, delegate, and print the response."""
    parser = argparse.ArgumentParser(
        description="Delegate to Claude or external model based on mode"
    )
    parser.add_argument(
        "--tier",
        required=True,
        choices=["opus", "sonnet", "haiku"],
        help="Claude tier (maps to external equivalent when in external mode)",
    )
    parser.add_argument("-p", "--prompt", required=True, help="Prompt text")
    parser.add_argument("--json", action="store_true", help="Output as JSON")
    args = parser.parse_args()

    try:
        print(delegate(args.tier, args.prompt, args.json))
    except Exception as exc:  # top-level boundary: report the error, exit 1
        if args.json:
            print(json.dumps({"error": str(exc), "success": False}, indent=2))
        else:
            print(f"Error: {exc}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()