feat(external-llm): add external LLM integration (fc-004)

Implements external LLM routing via the opencode CLI (plus a gemini CLI wrapper for long-context tasks) for:
- GitHub Copilot (gpt-5.2, claude-sonnet-4.5, claude-haiku-4.5, o3, gemini-3-pro)
- Z.AI (glm-4.7 for code generation)
- OpenCode native (big-pickle)

Components:
- mcp/llm-router/invoke.py: Main router with task-based model selection
- mcp/llm-router/delegate.py: Agent delegation helper (respects external mode)
- mcp/llm-router/toggle.py: Enable/disable external-only mode
- mcp/llm-router/providers/: CLI wrappers for opencode and gemini

Features:
- Persistent toggle via state/external-mode.json
- Task routing: reasoning -> gpt-5.2, code-gen -> glm-4.7, long-context -> gemini
- Claude tier mapping: opus -> gpt-5.2, sonnet -> claude-sonnet-4.5, haiku -> claude-haiku-4.5
- Session-start hook announces when external mode is active
- Natural language toggle support via component registry

Plan: gleaming-routing-mercury

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
Committed by OpenCode Test on 2026-01-08 13:34:35 -08:00
parent 7dcb8af1bb
commit df6cf94dae
14 changed files with 1831 additions and 5 deletions

mcp/llm-router/delegate.py (new executable file, 125 lines)

@@ -0,0 +1,125 @@
#!/usr/bin/env python3
"""
Agent delegation helper. Routes to external or Claude based on mode.
Usage:
delegate.py --tier sonnet -p "prompt"
delegate.py --tier opus -p "complex reasoning task" --json
"""
import argparse
import json
import subprocess
import sys
from pathlib import Path
STATE_DIR = Path.home() / ".claude/state"
ROUTER_DIR = Path(__file__).parent
def is_external_mode() -> bool:
"""Check if external-only mode is enabled."""
mode_file = STATE_DIR / "external-mode.json"
if mode_file.exists():
with open(mode_file) as f:
data = json.load(f)
return data.get("enabled", False)
return False
def get_external_model(tier: str) -> str:
"""Get the external model equivalent for a Claude tier."""
policy_file = STATE_DIR / "model-policy.json"
with open(policy_file) as f:
policy = json.load(f)
mapping = policy.get("claude_to_external_map", {})
if tier not in mapping:
raise ValueError(f"No external mapping for tier: {tier}")
return mapping[tier]
def delegate(tier: str, prompt: str, use_json: bool = False) -> str:
"""
Delegate to appropriate model based on mode.
Args:
tier: Claude tier (opus, sonnet, haiku)
prompt: The prompt text
use_json: Return JSON output
Returns:
Model response as string
"""
if is_external_mode():
# Use external model
model = get_external_model(tier)
invoke_script = ROUTER_DIR / "invoke.py"
cmd = [sys.executable, str(invoke_script), "--model", model, "-p", prompt]
if use_json:
cmd.append("--json")
result = subprocess.run(cmd, capture_output=True, text=True)
if result.returncode != 0:
raise RuntimeError(f"External invoke failed: {result.stderr}")
return result.stdout.strip()
else:
# Use Claude
cmd = ["claude", "--print", "--model", tier, prompt]
result = subprocess.run(cmd, capture_output=True, text=True)
if result.returncode != 0:
raise RuntimeError(f"Claude failed: {result.stderr}")
response = result.stdout.strip()
if use_json:
return json.dumps({
"model": f"claude/{tier}",
"response": response,
"success": True
}, indent=2)
return response
def main():
parser = argparse.ArgumentParser(
description="Delegate to Claude or external model based on mode"
)
parser.add_argument(
"--tier",
required=True,
choices=["opus", "sonnet", "haiku"],
help="Claude tier (maps to external equivalent when in external mode)"
)
parser.add_argument(
"-p", "--prompt",
required=True,
help="Prompt text"
)
parser.add_argument(
"--json",
action="store_true",
help="Output as JSON"
)
args = parser.parse_args()
try:
result = delegate(args.tier, args.prompt, args.json)
print(result)
except Exception as e:
if args.json:
print(json.dumps({"error": str(e), "success": False}, indent=2))
sys.exit(1)
else:
print(f"Error: {e}", file=sys.stderr)
sys.exit(1)
if __name__ == "__main__":
main()
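delegate.py assumes two state files under ~/.claude/state that are not part of this excerpt: external-mode.json (written by toggle.py, shown later in this diff) and model-policy.json with a claude_to_external_map section. A minimal sketch of the shape the reader functions expect follows; the exact model identifiers are assumptions derived from the tier mapping in the commit message.

# Illustrative only -- the structures is_external_mode() and get_external_model()
# expect; the model identifiers below are assumptions, not values from this commit.

# ~/.claude/state/external-mode.json
EXTERNAL_MODE_EXAMPLE = {
    "enabled": True,
    "activated_at": "2026-01-08T13:00:00",
    "reason": "user-requested",
}

# ~/.claude/state/model-policy.json (only the section get_external_model() reads)
MODEL_POLICY_TIER_MAP_EXAMPLE = {
    "claude_to_external_map": {
        "opus": "copilot/gpt-5.2",
        "sonnet": "copilot/claude-sonnet-4.5",
        "haiku": "copilot/claude-haiku-4.5",
    },
}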

mcp/llm-router/invoke.py (new executable file, 127 lines)

@@ -0,0 +1,127 @@
#!/usr/bin/env python3
"""
Invoke external LLM via configured provider.
Usage:
invoke.py --model copilot/gpt-5.2 -p "prompt"
invoke.py --task reasoning -p "prompt"
invoke.py --task code-generation -p "prompt" --json
Model selection priority:
1. Explicit --model flag
2. Task-based routing (--task flag)
3. Default from policy
"""
import argparse
import json
import sys
from pathlib import Path
STATE_DIR = Path.home() / ".claude/state"
ROUTER_DIR = Path(__file__).parent
def load_policy() -> dict:
"""Load model policy from state file."""
policy_file = STATE_DIR / "model-policy.json"
with open(policy_file) as f:
return json.load(f)
def resolve_model(args: argparse.Namespace, policy: dict) -> str:
"""Determine which model to use based on args and policy."""
if args.model:
return args.model
if args.task and args.task in policy.get("task_routing", {}):
return policy["task_routing"][args.task]
return policy.get("task_routing", {}).get("default", "copilot/sonnet-4.5")
def invoke(model: str, prompt: str, policy: dict, timeout: int = 300) -> str:
"""Invoke the appropriate provider for the given model."""
external_models = policy.get("external_models", {})
if model not in external_models:
raise ValueError(f"Unknown model: {model}. Available: {list(external_models.keys())}")
model_config = external_models[model]
cli = model_config["cli"]
cli_args = model_config.get("cli_args", [])
# Import and invoke appropriate provider
if cli == "opencode":
sys.path.insert(0, str(ROUTER_DIR))
from providers.opencode import invoke as opencode_invoke
        return opencode_invoke(cli_args, prompt, timeout)
elif cli == "gemini":
sys.path.insert(0, str(ROUTER_DIR))
from providers.gemini import invoke as gemini_invoke
        return gemini_invoke(cli_args, prompt, timeout)
else:
raise ValueError(f"Unknown CLI: {cli}")
def main():
parser = argparse.ArgumentParser(
description="Invoke external LLM via configured provider"
)
parser.add_argument(
"-p", "--prompt",
required=True,
help="Prompt text"
)
parser.add_argument(
"--model",
help="Explicit model (e.g., copilot/gpt-5.2)"
)
parser.add_argument(
"--task",
choices=["reasoning", "code-generation", "long-context", "general"],
help="Task type for automatic model routing"
)
parser.add_argument(
"--json",
action="store_true",
help="Output as JSON with model info"
)
parser.add_argument(
"--timeout",
type=int,
default=300,
help="Timeout in seconds (default: 300)"
)
args = parser.parse_args()
try:
policy = load_policy()
model = resolve_model(args, policy)
        result = invoke(model, args.prompt, policy, args.timeout)
if args.json:
output = {
"model": model,
"response": result,
"success": True
}
print(json.dumps(output, indent=2))
else:
print(result)
except Exception as e:
if args.json:
output = {
"model": args.model or "unknown",
"error": str(e),
"success": False
}
print(json.dumps(output, indent=2))
sys.exit(1)
else:
print(f"Error: {e}", file=sys.stderr)
sys.exit(1)
if __name__ == "__main__":
main()
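Beyond the tier map used by delegate.py, resolve_model() and invoke() read the task_routing and external_models sections of model-policy.json. The policy file is not included in this diff, so the sketch below only illustrates the structure those functions expect; the provider keys and cli_args values are assumptions pieced together from the provider wrappers and the models named in the commit message.

# Illustrative only -- sections of ~/.claude/state/model-policy.json consumed by
# resolve_model() and invoke(); keys and values below are assumptions.
MODEL_POLICY_ROUTING_EXAMPLE = {
    "task_routing": {
        "reasoning": "copilot/gpt-5.2",
        "code-generation": "zai/glm-4.7",
        "long-context": "gemini/gemini-3-pro",
        "general": "copilot/claude-sonnet-4.5",
        "default": "copilot/claude-sonnet-4.5",
    },
    "external_models": {
        "copilot/gpt-5.2": {
            "cli": "opencode",
            "cli_args": ["-m", "github-copilot/gpt-5.2"],
        },
        "zai/glm-4.7": {
            "cli": "opencode",
            "cli_args": ["-m", "zai/glm-4.7"],
        },
        "gemini/gemini-3-pro": {
            "cli": "gemini",
            "cli_args": ["-m", "gemini-3-pro"],
        },
    },
}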

mcp/llm-router/providers/gemini.py (new file, 49 lines)

@@ -0,0 +1,49 @@
#!/usr/bin/env python3
"""Gemini CLI wrapper for Google models."""
import subprocess
from typing import List
def invoke(cli_args: List[str], prompt: str, timeout: int = 300) -> str:
"""
Invoke gemini CLI with given args and prompt.
Args:
cli_args: Model args like ["-m", "gemini-3-pro"]
prompt: The prompt text
timeout: Timeout in seconds (default 5 minutes)
Returns:
Model response as string
Raises:
RuntimeError: If gemini CLI fails
TimeoutError: If request exceeds timeout
"""
cmd = ["gemini"] + cli_args + ["-p", prompt]
try:
result = subprocess.run(
cmd,
capture_output=True,
text=True,
timeout=timeout
)
except subprocess.TimeoutExpired:
raise TimeoutError(f"gemini timed out after {timeout}s")
if result.returncode != 0:
raise RuntimeError(f"gemini failed (exit {result.returncode}): {result.stderr}")
return result.stdout.strip()
if __name__ == "__main__":
# Quick test
import sys
if len(sys.argv) > 1:
response = invoke(["-m", "gemini-3-pro"], sys.argv[1])
print(response)
else:
print("Usage: gemini.py 'prompt'")

mcp/llm-router/providers/opencode.py (new file, 52 lines)

@@ -0,0 +1,52 @@
#!/usr/bin/env python3
"""OpenCode CLI wrapper for GitHub Copilot, Z.AI, and other providers."""
import subprocess
from typing import List
def invoke(cli_args: List[str], prompt: str, timeout: int = 300) -> str:
"""
Invoke opencode CLI with given args and prompt.
Args:
cli_args: Model args like ["-m", "github-copilot/gpt-5.2"]
prompt: The prompt text
timeout: Timeout in seconds (default 5 minutes)
Returns:
Model response as string
Raises:
RuntimeError: If opencode CLI fails
TimeoutError: If request exceeds timeout
Example invocation:
opencode -m github-copilot/gpt-5.2 -p "Hello world"
"""
cmd = ["opencode"] + cli_args + ["-p", prompt]
try:
result = subprocess.run(
cmd,
capture_output=True,
text=True,
timeout=timeout
)
except subprocess.TimeoutExpired:
raise TimeoutError(f"opencode timed out after {timeout}s")
if result.returncode != 0:
raise RuntimeError(f"opencode failed (exit {result.returncode}): {result.stderr}")
return result.stdout.strip()
if __name__ == "__main__":
# Quick test
import sys
if len(sys.argv) > 1:
response = invoke(["-m", "github-copilot/gpt-5.2"], sys.argv[1])
print(response)
else:
print("Usage: opencode.py 'prompt'")

mcp/llm-router/toggle.py (new executable file, 98 lines)

@@ -0,0 +1,98 @@
#!/usr/bin/env python3
"""
Toggle external-only mode.
Usage:
toggle.py on [--reason "user requested"]
toggle.py off
toggle.py status
"""
import argparse
import json
from datetime import datetime
from pathlib import Path
from typing import Optional
STATE_FILE = Path.home() / ".claude/state/external-mode.json"
def load_state() -> dict:
"""Load current state."""
if STATE_FILE.exists():
with open(STATE_FILE) as f:
return json.load(f)
return {"enabled": False, "activated_at": None, "reason": None}
def save_state(state: dict):
    """Save state to file, creating ~/.claude/state if needed."""
    STATE_FILE.parent.mkdir(parents=True, exist_ok=True)
    with open(STATE_FILE, "w") as f:
        json.dump(state, f, indent=2)
def enable(reason: Optional[str] = None):
"""Enable external-only mode."""
state = {
"enabled": True,
"activated_at": datetime.now().isoformat(),
"reason": reason or "user-requested"
}
save_state(state)
print("External-only mode ENABLED")
print(f" Activated: {state['activated_at']}")
print(f" Reason: {state['reason']}")
print("\nAll agent requests will now use external LLMs.")
print("Run 'toggle.py off' or '/pa --external off' to disable.")
def disable():
"""Disable external-only mode."""
state = {
"enabled": False,
"activated_at": None,
"reason": None
}
save_state(state)
print("External-only mode DISABLED")
print("\nAll agent requests will now use Claude.")
def status():
"""Show current mode status."""
state = load_state()
if state.get("enabled"):
print("External-only mode: ENABLED")
print(f" Activated: {state.get('activated_at', 'unknown')}")
print(f" Reason: {state.get('reason', 'unknown')}")
else:
print("External-only mode: DISABLED")
print(" Using Claude for all requests.")
def main():
parser = argparse.ArgumentParser(description="Toggle external-only mode")
subparsers = parser.add_subparsers(dest="command", required=True)
# on command
on_parser = subparsers.add_parser("on", help="Enable external-only mode")
on_parser.add_argument("--reason", help="Reason for enabling")
# off command
subparsers.add_parser("off", help="Disable external-only mode")
# status command
subparsers.add_parser("status", help="Show current mode")
args = parser.parse_args()
if args.command == "on":
enable(args.reason)
elif args.command == "off":
disable()
elif args.command == "status":
status()
if __name__ == "__main__":
main()
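The commit message mentions a session-start hook that announces when external mode is active; that hook is not among the files shown here. A minimal sketch of what such a check could look like, reading the same state file toggle.py writes (file name, wording, and hook wiring are assumptions):

#!/usr/bin/env python3
# Hypothetical session-start check -- not the hook shipped in this commit.
import json
from pathlib import Path

STATE_FILE = Path.home() / ".claude/state/external-mode.json"

def announce_if_external() -> None:
    """Print a one-line notice when external-only mode is enabled."""
    if not STATE_FILE.exists():
        return
    state = json.loads(STATE_FILE.read_text())
    if state.get("enabled"):
        print(
            f"External-only mode is ON (since {state.get('activated_at', 'unknown')}); "
            "agent requests will route to external LLMs."
        )

if __name__ == "__main__":
    announce_if_external()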