- OpenCode: use `opencode run -m MODEL "prompt"` syntax
- OpenCode: set correct binary path (/home/linuxbrew/.linuxbrew/bin/opencode)
- Gemini: route long-context to gemini-2.5-pro (gemini-3 not available yet)

Tested working:
- opencode/big-pickle
- github-copilot/claude-sonnet-4.5
- zai-coding-plan/glm-4.7
- gemini/gemini-2.5-pro
- gemini/gemini-2.5-flash

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
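A minimal sketch (not part of the commit) of what the long-context routing noted above could look like on top of the invoke() wrapper defined in the file below. The helper name invoke_routed, the 200,000-character cutoff, and the choice of github-copilot/claude-sonnet-4.5 as the default model are illustrative assumptions, not behavior from the commit.

# Hypothetical routing helper; assumes the file below is saved as opencode.py
from opencode import invoke

def invoke_routed(prompt: str,
                  default_model: str = "github-copilot/claude-sonnet-4.5",
                  long_context_chars: int = 200_000) -> str:
    # Assumed cutoff: very long prompts go to gemini-2.5-pro (gemini-3 is not
    # available yet); everything else uses the caller's default model.
    model = "gemini/gemini-2.5-pro" if len(prompt) > long_context_chars else default_model
    return invoke(["-m", model], prompt)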
57 lines
1.5 KiB
Python
Executable File
#!/usr/bin/env python3
"""OpenCode CLI wrapper for GitHub Copilot, Z.AI, and other providers."""

import subprocess
from typing import List

# OpenCode binary path (linuxbrew installation)
OPENCODE_BIN = "/home/linuxbrew/.linuxbrew/bin/opencode"


def invoke(cli_args: List[str], prompt: str, timeout: int = 300) -> str:
    """
    Invoke opencode CLI with given args and prompt.

    Args:
        cli_args: Model args like ["-m", "github-copilot/gpt-5.2"]
        prompt: The prompt text
        timeout: Timeout in seconds (default 5 minutes)

    Returns:
        Model response as string

    Raises:
        RuntimeError: If opencode CLI fails
        TimeoutError: If request exceeds timeout

    Example invocation:
        opencode run -m github-copilot/gpt-5.2 "Hello world"
    """
    # Build command: opencode run -m MODEL "prompt"
    cmd = [OPENCODE_BIN, "run"] + cli_args + [prompt]

    try:
        result = subprocess.run(
            cmd,
            capture_output=True,
            text=True,
            timeout=timeout
        )
    except subprocess.TimeoutExpired:
        raise TimeoutError(f"opencode timed out after {timeout}s")

    if result.returncode != 0:
        raise RuntimeError(f"opencode failed (exit {result.returncode}): {result.stderr}")

    return result.stdout.strip()


if __name__ == "__main__":
    # Quick test
    import sys
    if len(sys.argv) > 1:
        response = invoke(["-m", "github-copilot/gpt-5.2"], sys.argv[1])
        print(response)
    else:
        print("Usage: opencode.py 'prompt'")
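A minimal smoke-test sketch for the models the commit message lists as tested working, using the invoke() wrapper above. It assumes the file is saved as opencode.py and importable; the prompt text, the 120-second timeout, and the idea of looping over the tested models are illustrative assumptions.

# Hypothetical smoke test (not part of the file above)
from opencode import invoke

TESTED_MODELS = [
    "opencode/big-pickle",
    "github-copilot/claude-sonnet-4.5",
    "zai-coding-plan/glm-4.7",
    "gemini/gemini-2.5-pro",
    "gemini/gemini-2.5-flash",
]

if __name__ == "__main__":
    for model in TESTED_MODELS:
        try:
            reply = invoke(["-m", model], "Reply with the single word OK.", timeout=120)
            print(f"{model}: {reply[:60]}")
        except (RuntimeError, TimeoutError) as exc:
            print(f"{model}: FAILED ({exc})")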