Implement component registry for PA session awareness
Components:
- state/component-registry.json: Registry with all skills, commands, agents, workflows
- automation/generate-registry.py: Auto-generate from directory scan
- automation/validate-registry.py: Check for drift and TODO placeholders
- system-instructions.json: Added component-lifecycle process

Registry includes:
- 6 skills with routing triggers
- 10 commands with aliases
- 12 agents with model info
- 10 workflows with triggers
- 2 delegation helpers

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
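For illustration only (not part of this commit): a minimal sketch of how a session-start step could load the registry and route a user request by trigger keywords. The helper name match_components and the example message are hypothetical; the registry path and field names come from the files below.

import json
from pathlib import Path

REGISTRY = Path.home() / ".claude" / "state" / "component-registry.json"


def match_components(user_text: str) -> list[str]:
    """Return registry entries whose trigger keywords appear in the message."""
    registry = json.loads(REGISTRY.read_text())
    text = user_text.lower()
    hits = []
    for kind in ("skills", "commands", "agents", "workflows"):
        for name, meta in registry.get(kind, {}).items():
            if meta.get("status") == "removed":
                continue
            triggers = [t for t in meta.get("triggers", []) if t != "TODO"]
            if any(t.lower() in text for t in triggers):
                hits.append(f"{kind}:{name}")
    return hits


if __name__ == "__main__":
    # With the registry below, this would surface skill:gcal among the matches.
    print(match_components("what's on my calendar tomorrow?"))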
automation/generate-registry.py (260 lines, executable file)
@@ -0,0 +1,260 @@
#!/usr/bin/env python3
"""
Generate Component Registry

Scans component directories and generates/updates the registry.
Preserves existing manual hints (triggers, descriptions) on regeneration.

Usage:
    python3 generate-registry.py [--dry-run]
"""

import json
import re
import sys
from datetime import datetime
from pathlib import Path
from zoneinfo import ZoneInfo

LOCAL_TZ = ZoneInfo('America/Los_Angeles')
CLAUDE_DIR = Path.home() / ".claude"
REGISTRY_PATH = CLAUDE_DIR / "state" / "component-registry.json"

# Scan paths
SCAN_PATHS = {
    "skills": CLAUDE_DIR / "skills",
    "commands": CLAUDE_DIR / "commands",
    "agents": CLAUDE_DIR / "agents",
    "workflows": CLAUDE_DIR / "workflows",
}


def parse_frontmatter(file_path: Path) -> dict:
    """Extract YAML frontmatter from markdown file."""
    try:
        content = file_path.read_text()
        if content.startswith('---'):
            end = content.find('---', 3)
            if end != -1:
                frontmatter = content[3:end].strip()
                result = {}
                for line in frontmatter.split('\n'):
                    if ':' in line:
                        key, value = line.split(':', 1)
                        result[key.strip()] = value.strip().strip('"\'')
                return result
    except Exception:
        pass
    return {}


def scan_skills() -> dict:
    """Scan skills directory for SKILL.md files."""
    skills = {}
    skills_dir = SCAN_PATHS["skills"]
    if not skills_dir.exists():
        return skills

    for skill_dir in skills_dir.iterdir():
        if skill_dir.is_dir():
            skill_file = skill_dir / "SKILL.md"
            if skill_file.exists():
                fm = parse_frontmatter(skill_file)
                skills[skill_dir.name] = {
                    "description": fm.get("description", "TODO"),
                    "triggers": ["TODO"]
                }
    return skills


def scan_commands() -> dict:
    """Scan commands directory for .md files."""
    commands = {}
    commands_dir = SCAN_PATHS["commands"]
    if not commands_dir.exists():
        return commands

    for cmd_file in commands_dir.rglob("*.md"):
        rel_path = cmd_file.relative_to(commands_dir)
        # Convert path to command name
        cmd_name = "/" + str(rel_path).replace(".md", "").replace("/", ":")

        fm = parse_frontmatter(cmd_file)
        aliases = fm.get("aliases", "")
        if aliases.startswith("["):
            # Parse array format
            aliases = [a.strip().strip('"\'') for a in aliases[1:-1].split(",") if a.strip()]
            aliases = ["/" + a if not a.startswith("/") else a for a in aliases]
        else:
            aliases = []

        commands[cmd_name] = {
            "description": fm.get("description", "TODO"),
            "aliases": aliases,
            "invokes": fm.get("invokes", "")
        }
    return commands


def scan_agents() -> dict:
    """Scan agents directory for .md files."""
    agents = {}
    agents_dir = SCAN_PATHS["agents"]
    if not agents_dir.exists():
        return agents

    for agent_file in agents_dir.glob("*.md"):
        agent_name = agent_file.stem
        fm = parse_frontmatter(agent_file)
        agents[agent_name] = {
            "description": fm.get("description", "TODO"),
            "model": fm.get("model", "sonnet"),
            "triggers": ["TODO"]
        }
    return agents


def scan_workflows() -> dict:
    """Scan workflows directory for .yaml/.yml files."""
    workflows = {}
    workflows_dir = SCAN_PATHS["workflows"]
    if not workflows_dir.exists():
        return workflows

    for wf_file in workflows_dir.rglob("*.yaml"):
        rel_path = wf_file.relative_to(workflows_dir)
        wf_name = str(rel_path).replace(".yaml", "")

        # Try to parse name from YAML
        try:
            content = wf_file.read_text()
            for line in content.split('\n'):
                if line.startswith('name:'):
                    desc = line.split(':', 1)[1].strip().strip('"\'')
                    break
                if line.startswith('description:'):
                    desc = line.split(':', 1)[1].strip().strip('"\'')
                    break
            else:
                desc = "TODO"
        except Exception:
            desc = "TODO"

        workflows[wf_name] = {
            "description": desc,
            "triggers": ["TODO"]
        }

    # Also check .yml
    for wf_file in workflows_dir.rglob("*.yml"):
        rel_path = wf_file.relative_to(workflows_dir)
        wf_name = str(rel_path).replace(".yml", "")
        workflows[wf_name] = {
            "description": "TODO",
            "triggers": ["TODO"]
        }

    return workflows


def merge_with_existing(scanned: dict, existing: dict, component_type: str) -> dict:
    """Merge scanned components with existing registry, preserving manual hints."""
    merged = {}

    # Process scanned components
    for name, data in scanned.items():
        if name in existing:
            # Preserve existing manual hints
            merged[name] = existing[name].copy()
            # Update auto-generated fields if they were TODO
            if merged[name].get("description") == "TODO" and data.get("description") != "TODO":
                merged[name]["description"] = data["description"]
        else:
            # New component
            merged[name] = data
            print(f" + NEW: {component_type}/{name}")

    # Check for removed components
    for name in existing:
        if name not in scanned:
            merged[name] = existing[name].copy()
            merged[name]["status"] = "removed"
            print(f" - REMOVED: {component_type}/{name}")

    return merged


def generate_registry(dry_run: bool = False) -> dict:
    """Generate the component registry."""
    print("Scanning components...")

    # Load existing registry
    existing = {"skills": {}, "commands": {}, "agents": {}, "workflows": {}}
    if REGISTRY_PATH.exists():
        try:
            with open(REGISTRY_PATH) as f:
                existing_data = json.load(f)
            existing = {
                "skills": existing_data.get("skills", {}),
                "commands": existing_data.get("commands", {}),
                "agents": existing_data.get("agents", {}),
                "workflows": existing_data.get("workflows", {}),
            }
        except Exception as e:
            print(f"Warning: Could not load existing registry: {e}")

    # Scan directories
    scanned_skills = scan_skills()
    scanned_commands = scan_commands()
    scanned_agents = scan_agents()
    scanned_workflows = scan_workflows()

    print(f"\nFound: {len(scanned_skills)} skills, {len(scanned_commands)} commands, "
          f"{len(scanned_agents)} agents, {len(scanned_workflows)} workflows")

    # Merge with existing
    print("\nMerging with existing registry...")
    merged_skills = merge_with_existing(scanned_skills, existing["skills"], "skills")
    merged_commands = merge_with_existing(scanned_commands, existing["commands"], "commands")
    merged_agents = merge_with_existing(scanned_agents, existing["agents"], "agents")
    merged_workflows = merge_with_existing(scanned_workflows, existing["workflows"], "workflows")

    # Build registry
    registry = {
        "version": "1.0",
        "generated": datetime.now(LOCAL_TZ).isoformat(),
        "description": "Component registry for PA session awareness. Read at session start for routing.",
        "skills": merged_skills,
        "commands": merged_commands,
        "agents": merged_agents,
        "workflows": merged_workflows,
    }

    # Preserve delegation_helpers if exists
    if REGISTRY_PATH.exists():
        try:
            with open(REGISTRY_PATH) as f:
                existing_data = json.load(f)
            if "delegation_helpers" in existing_data:
                registry["delegation_helpers"] = existing_data["delegation_helpers"]
        except Exception:
            pass

    if dry_run:
        print("\n[DRY RUN] Would write:")
        print(json.dumps(registry, indent=2))
    else:
        with open(REGISTRY_PATH, 'w') as f:
            json.dump(registry, f, indent=2)
        print(f"\nRegistry written to {REGISTRY_PATH}")

    return registry


def main():
    dry_run = "--dry-run" in sys.argv
    generate_registry(dry_run)


if __name__ == "__main__":
    main()
automation/validate-registry.py (155 lines, executable file)
@@ -0,0 +1,155 @@
#!/usr/bin/env python3
"""
Validate Component Registry

Checks that the registry is in sync with actual component files.

Usage:
    python3 validate-registry.py

Exit codes:
    0 - All valid
    1 - Warnings (stale entries or TODO placeholders)
    2 - Errors (components missing from the registry)
"""

import json
import sys
from pathlib import Path

CLAUDE_DIR = Path.home() / ".claude"
REGISTRY_PATH = CLAUDE_DIR / "state" / "component-registry.json"

# Scan paths
SCAN_PATHS = {
    "skills": (CLAUDE_DIR / "skills", "*/SKILL.md"),
    "commands": (CLAUDE_DIR / "commands", "**/*.md"),
    "agents": (CLAUDE_DIR / "agents", "*.md"),
    "workflows": (CLAUDE_DIR / "workflows", "**/*.yaml"),
}


def get_actual_components() -> dict:
    """Get actual components from filesystem."""
    actual = {
        "skills": set(),
        "commands": set(),
        "agents": set(),
        "workflows": set(),
    }

    # Skills
    skills_dir = SCAN_PATHS["skills"][0]
    if skills_dir.exists():
        for skill_dir in skills_dir.iterdir():
            if skill_dir.is_dir() and (skill_dir / "SKILL.md").exists():
                actual["skills"].add(skill_dir.name)

    # Commands
    commands_dir = SCAN_PATHS["commands"][0]
    if commands_dir.exists():
        for cmd_file in commands_dir.rglob("*.md"):
            rel_path = cmd_file.relative_to(commands_dir)
            cmd_name = "/" + str(rel_path).replace(".md", "").replace("/", ":")
            actual["commands"].add(cmd_name)

    # Agents
    agents_dir = SCAN_PATHS["agents"][0]
    if agents_dir.exists():
        for agent_file in agents_dir.glob("*.md"):
            actual["agents"].add(agent_file.stem)

    # Workflows
    workflows_dir = SCAN_PATHS["workflows"][0]
    if workflows_dir.exists():
        for wf_file in workflows_dir.rglob("*.yaml"):
            rel_path = wf_file.relative_to(workflows_dir)
            actual["workflows"].add(str(rel_path).replace(".yaml", ""))
        for wf_file in workflows_dir.rglob("*.yml"):
            rel_path = wf_file.relative_to(workflows_dir)
            actual["workflows"].add(str(rel_path).replace(".yml", ""))

    return actual


def validate_registry() -> int:
    """Validate the registry against actual components."""
    print("Registry Validation")
    print("=" * 40)

    if not REGISTRY_PATH.exists():
        print("✗ Registry file not found!")
        print(f" Run: python3 generate-registry.py")
        return 2

    # Load registry
    with open(REGISTRY_PATH) as f:
        registry = json.load(f)

    # Get actual components
    actual = get_actual_components()

    errors = 0
    warnings = 0

    for component_type in ["skills", "commands", "agents", "workflows"]:
        registered = set(registry.get(component_type, {}).keys())
        registered_active = {
            k for k, v in registry.get(component_type, {}).items()
            if v.get("status") != "removed"
        }
        actual_set = actual[component_type]

        # Check for missing in registry
        missing = actual_set - registered
        if missing:
            print(f"✗ {component_type}: {len(missing)} missing from registry")
            for name in sorted(missing):
                print(f" + {name}")
            errors += len(missing)

        # Check for stale entries
        stale = registered_active - actual_set
        if stale:
            print(f"⚠ {component_type}: {len(stale)} stale entries")
            for name in sorted(stale):
                print(f" - {name}")
            warnings += len(stale)

        # Check for TODO placeholders
        for name, data in registry.get(component_type, {}).items():
            if data.get("status") == "removed":
                continue
            if data.get("description") == "TODO":
                print(f"⚠ {component_type}/{name}: description is TODO")
                warnings += 1
            if "triggers" in data and data["triggers"] == ["TODO"]:
                print(f"⚠ {component_type}/{name}: triggers is TODO")
                warnings += 1

        # Success message if all good
        if not missing and not stale:
            count = len(actual_set)
            print(f"✓ {component_type}: {count} components, all present")

    print("=" * 40)

    if errors > 0:
        print(f"\n✗ {errors} error(s), {warnings} warning(s)")
        print(" Run: python3 generate-registry.py")
        return 2
    elif warnings > 0:
        print(f"\n⚠ {warnings} warning(s)")
        print(" Consider updating registry with manual hints")
        return 1
    else:
        print("\n✓ Registry is valid")
        return 0


def main():
    sys.exit(validate_registry())


if __name__ == "__main__":
    main()
@@ -184,10 +184,10 @@ Invoke skill:gcal with context "tomorrow"

 ## Implementation Checklist

-- [ ] Create `~/.claude/state/component-registry.json` (initial)
+- [x] Create `~/.claude/state/component-registry.json` (initial)
-- [ ] Create `~/.claude/automation/generate-registry.py`
+- [x] Create `~/.claude/automation/generate-registry.py`
-- [ ] Create `~/.claude/automation/validate-registry.py`
+- [x] Create `~/.claude/automation/validate-registry.py`
-- [ ] Update `system-instructions.json` with component-lifecycle
+- [x] Update `system-instructions.json` with component-lifecycle
 - [ ] Update PA agent instructions to read registry
 - [ ] Test routing with registry
 - [ ] Add future consideration for registry improvements
state/component-registry.json (325 lines, new file)
@@ -0,0 +1,325 @@
{
  "version": "1.0",
  "generated": "2026-01-01T00:07:25.873943-08:00",
  "description": "Component registry for PA session awareness. Read at session start for routing.",
  "skills": {
    "sysadmin-health": {
      "description": "System health checks",
      "triggers": [
        "system health",
        "disk space",
        "memory",
        "cpu"
      ]
    },
    "k8s-quick-status": {
      "description": "Quick Kubernetes cluster status",
      "triggers": [
        "k8s status",
        "cluster status",
        "kubernetes"
      ]
    },
    "programmer-add-project": {
      "description": "Add projects to programmer agent",
      "triggers": [
        "add project",
        "new project",
        "programmer project"
      ]
    },
    "gmail": {
      "description": "Gmail read access",
      "triggers": [
        "email",
        "gmail",
        "inbox",
        "unread",
        "mail",
        "message"
      ]
    },
    "gcal": {
      "description": "Google Calendar read access",
      "triggers": [
        "calendar",
        "gcal",
        "schedule",
        "meeting",
        "event",
        "agenda",
        "busy",
        "free"
      ]
    },
    "usage": {
      "description": "Session usage tracking and stats",
      "triggers": [
        "usage",
        "stats",
        "sessions",
        "how much",
        "tracking"
      ]
    }
  },
  "commands": {
    "/pa": {
      "description": "Personal assistant entrypoint",
      "aliases": [
        "/assistant",
        "/ask"
      ],
      "invokes": "agent:personal-assistant"
    },
    "/programmer": {
      "description": "Code development tasks",
      "aliases": [],
      "invokes": "agent:programmer-orchestrator"
    },
    "/gcal": {
      "description": "Google Calendar access",
      "aliases": [
        "/calendar",
        "/cal"
      ],
      "invokes": "skill:gcal"
    },
    "/usage": {
      "description": "View usage statistics",
      "aliases": [
        "/stats"
      ],
      "invokes": "skill:usage"
    },
    "/sysadmin:health": {
      "description": "System health check",
      "aliases": [],
      "invokes": "skill:sysadmin-health"
    },
    "/sysadmin:update": {
      "description": "System package updates",
      "aliases": [],
      "invokes": "workflow:sysadmin/system-update"
    },
    "/sysadmin:autonomy": {
      "description": "Set sysadmin autonomy level",
      "aliases": [],
      "modifies": "state:sysadmin/session-autonomy"
    },
    "/k8s:cluster-status": {
      "description": "Kubernetes cluster status",
      "aliases": [],
      "invokes": "skill:k8s-quick-status"
    },
    "/k8s:deploy": {
      "description": "Deploy applications to Kubernetes",
      "aliases": [],
      "invokes": "workflow:deploy/deploy-app"
    },
    "/k8s:diagnose": {
      "description": "Diagnose Kubernetes issues",
      "aliases": [],
      "invokes": "agent:k8s-diagnostician"
    }
  },
  "agents": {
    "linux-sysadmin": {
      "description": "Workstation management",
      "model": "sonnet",
      "triggers": [
        "system",
        "linux",
        "package",
        "service",
        "disk",
        "process"
      ]
    },
    "k8s-orchestrator": {
      "description": "Kubernetes cluster management",
      "model": "opus",
      "triggers": [
        "kubernetes",
        "k8s",
        "cluster",
        "deploy"
      ]
    },
    "k8s-diagnostician": {
      "description": "Kubernetes troubleshooting",
      "model": "sonnet",
      "triggers": [
        "pod issue",
        "crashloop",
        "k8s error",
        "deployment failed"
      ]
    },
    "argocd-operator": {
      "description": "ArgoCD GitOps operations",
      "model": "sonnet",
      "triggers": [
        "argocd",
        "gitops",
        "sync",
        "app sync"
      ]
    },
    "prometheus-analyst": {
      "description": "Metrics and alerting analysis",
      "model": "sonnet",
      "triggers": [
        "metrics",
        "prometheus",
        "alert",
        "grafana"
      ]
    },
    "git-operator": {
      "description": "Git repository operations",
      "model": "sonnet",
      "triggers": [
        "git",
        "commit",
        "branch",
        "merge",
        "repo"
      ]
    },
    "programmer-orchestrator": {
      "description": "Code development coordination",
      "model": "opus",
      "triggers": [
        "code",
        "develop",
        "implement",
        "program"
      ]
    },
    "code-planner": {
      "description": "Code planning and design",
      "model": "sonnet",
      "triggers": [
        "plan code",
        "design",
        "architecture"
      ]
    },
    "code-implementer": {
      "description": "Code implementation",
      "model": "sonnet",
      "triggers": [
        "write code",
        "implement",
        "build"
      ]
    },
    "code-reviewer": {
      "description": "Code review",
      "model": "sonnet",
      "triggers": [
        "review",
        "code review",
        "check code"
      ]
    },
    "master-orchestrator": {
      "description": "Coordinate and enforce policies",
      "model": "opus",
      "triggers": []
    },
    "personal-assistant": {
      "description": "User interface, ultimate oversight",
      "model": "opus",
      "triggers": [
        "help",
        "assist",
        "question"
      ]
    }
  },
  "workflows": {
    "validate-agent-format": {
      "description": "Validate agent file format",
      "triggers": ["validate agent", "check agent format"]
    },
    "health/cluster-health-check": {
      "description": "Kubernetes cluster health check",
      "triggers": [
        "cluster health",
        "k8s health"
      ]
    },
    "health/cluster-daily-summary": {
      "description": "Daily cluster health summary",
      "triggers": [
        "daily summary",
        "cluster summary"
      ]
    },
    "deploy/deploy-app": {
      "description": "Deploy application to Kubernetes",
      "triggers": [
        "deploy app",
        "deploy to k8s"
      ]
    },
    "incidents/pod-crashloop": {
      "description": "Handle pod crashloop",
      "triggers": [
        "crashloop",
        "pod crashing",
        "restart loop"
      ]
    },
    "incidents/node-issue-response": {
      "description": "Respond to node issues",
      "triggers": [
        "node issue",
        "node down",
        "node problem"
      ]
    },
    "incidents/resource-pressure-response": {
      "description": "Handle resource pressure",
      "triggers": [
        "resource pressure",
        "out of memory",
        "disk full"
      ]
    },
    "incidents/argocd-sync-failure": {
      "description": "Handle ArgoCD sync failures",
      "triggers": [
        "sync failed",
        "argocd error"
      ]
    },
    "sysadmin/health-check": {
      "description": "System health check workflow",
      "triggers": [
        "system check",
        "health check"
      ]
    },
    "sysadmin/system-update": {
      "description": "System update workflow",
      "triggers": [
        "system update",
        "update packages",
        "upgrade"
      ]
    }
  },
  "delegation_helpers": {
    "gmail_delegate.py": {
      "description": "Gmail API with tiered delegation",
      "location": "~/.claude/mcp/delegation/gmail_delegate.py"
    },
    "gcal_delegate.py": {
      "description": "Calendar API with tiered delegation",
      "location": "~/.claude/mcp/delegation/gcal_delegate.py"
    }
  }
}
system-instructions.json (modified; the single minified line below is replaced, old version first, new version second)
@@ -1 +1 @@
{"version":"1.0.0","description":"Central process definitions for all agents","processes":{"model-selection":{"description":"Cost-efficient model selection","rules":["Start with lowest capable model","Escalate only when task complexity requires","Agents may request model upgrade from orchestrator","Log model usage for cost analysis"]},"cross-agent-communication":{"description":"How agents interact","flow":"Agent A → Master Orchestrator → Personal Assistant (if escalation needed)","hierarchy":"Personal Assistant → Master Orchestrator → Domain Agents","requirements":["All cross-agent requests routed through master orchestrator","Master validates, routes, and logs all requests","Personal assistant has ultimate escalation authority","No direct agent-to-agent communication"]},"autonomy-enforcement":{"description":"How autonomy levels are applied","default":"conservative","session_override_file":"~/.claude/state/sysadmin/session-autonomy.json","rules":["Master orchestrator enforces autonomy levels","Session overrides apply only for current session","Forbidden actions always blocked regardless of autonomy level"]},"file-management":{"description":"File and directory ownership","state-files":{"system-instructions.json":"master-orchestrator","future-considerations.json":"master-orchestrator","model-policy.json":"master-orchestrator","autonomy-levels.json":"master-orchestrator","session-autonomy.json":"user/CLI"},"directories":{"master-orchestrator":["agents/","state/","skills/","commands/","workflows/"],"linux-sysadmin":["state/sysadmin/","automation/"]},"override_authority":"personal-assistant"},"agent-lifecycle":{"add":["Create agents/<name>.md","Update supervisor hierarchy","Update CLAUDE.md diagram","Update model-policy.json"],"remove":["Remove agents/<name>.md","Update supervisor hierarchy","Update CLAUDE.md diagram","Update model-policy.json","Clean up state"]},"content-principles":{"no-redundancy":"Information lives in one authoritative location","lean-files":"Keep files concise - no verbose examples or unnecessary prose"},"deferral-capture":{"description":"Capture deferrals to future-considerations.json","trigger":"User says 'revisit later' or similar","behavior":"Watch user messages for items worth adding to future-considerations.json - both explicit deferrals and implicit mentions of future work"}}}
{"version":"1.0.0","description":"Central process definitions for all agents","processes":{"model-selection":{"description":"Cost-efficient model selection","rules":["Start with lowest capable model","Escalate only when task complexity requires","Agents may request model upgrade from orchestrator","Log model usage for cost analysis"]},"cross-agent-communication":{"description":"How agents interact","flow":"Agent A → Master Orchestrator → Personal Assistant (if escalation needed)","hierarchy":"Personal Assistant → Master Orchestrator → Domain Agents","requirements":["All cross-agent requests routed through master orchestrator","Master validates, routes, and logs all requests","Personal assistant has ultimate escalation authority","No direct agent-to-agent communication"]},"autonomy-enforcement":{"description":"How autonomy levels are applied","default":"conservative","session_override_file":"~/.claude/state/sysadmin/session-autonomy.json","rules":["Master orchestrator enforces autonomy levels","Session overrides apply only for current session","Forbidden actions always blocked regardless of autonomy level"]},"file-management":{"description":"File and directory ownership","state-files":{"system-instructions.json":"master-orchestrator","future-considerations.json":"master-orchestrator","model-policy.json":"master-orchestrator","autonomy-levels.json":"master-orchestrator","session-autonomy.json":"user/CLI"},"directories":{"master-orchestrator":["agents/","state/","skills/","commands/","workflows/"],"linux-sysadmin":["state/sysadmin/","automation/"]},"override_authority":"personal-assistant"},"agent-lifecycle":{"add":["Create agents/<name>.md","Update supervisor hierarchy","Update CLAUDE.md diagram","Update model-policy.json","Run generate-registry.py and add triggers"],"remove":["Remove agents/<name>.md","Update supervisor hierarchy","Update CLAUDE.md diagram","Update model-policy.json","Run generate-registry.py","Clean up state"]},"component-lifecycle":{"description":"Process for adding/removing skills, commands, workflows","add":["Create component file","Run automation/generate-registry.py","Add triggers and description to registry","Commit both files"],"remove":["Remove component file","Run automation/generate-registry.py (marks as removed)","Commit updated registry"],"validate":["Run automation/validate-registry.py","Fix any errors or TODO placeholders before commit"]},"content-principles":{"no-redundancy":"Information lives in one authoritative location","lean-files":"Keep files concise - no verbose examples or unnecessary prose"},"deferral-capture":{"description":"Capture deferrals to future-considerations.json","trigger":"User says 'revisit later' or similar","behavior":"Watch user messages for items worth adding to future-considerations.json - both explicit deferrals and implicit mentions of future work"}}}