Add /log and /debug commands for troubleshooting

- /log command to view and analyze automation logs
  - Filter by pattern, date, or log type
  - List available log files
- /debug command generates comprehensive debug report
  - Version, core files, state, memory, scripts status
  - Environment info (Python, kubectl)
  - Disk usage by directory
  - JSON output mode for scripting
- Updated shell completions with 13 aliases total
- Test suite now covers 21 tests

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
OpenCode Test
2026-01-01 18:44:40 -08:00
parent 125bb4904b
commit c1a3c1812c
9 changed files with 596 additions and 6 deletions

213
automation/log-viewer.py Executable file
View File

@@ -0,0 +1,213 @@
#!/usr/bin/env python3
"""
View and analyze Claude Code logs.
Usage: python3 log-viewer.py [--tail N] [--grep PATTERN] [--since DATE] [--type TYPE]
"""
import argparse
import json
import os
import re
import sys
from datetime import datetime, timedelta
from pathlib import Path
from typing import List, Optional
CLAUDE_DIR = Path.home() / ".claude"
LOG_DIR = CLAUDE_DIR / "logs"
def get_log_files(log_type: Optional[str] = None) -> List[Path]:
    """Return the *.log files under LOG_DIR, newest-modified first.

    When *log_type* is given, keep only files whose name contains it.
    Returns an empty list if the log directory does not exist.
    """
    if not LOG_DIR.exists():
        return []
    candidates = [
        path for path in LOG_DIR.glob("*.log")
        if not log_type or log_type in path.name
    ]
    candidates.sort(key=lambda path: path.stat().st_mtime, reverse=True)
    return candidates
def parse_log_line(line: str) -> Optional[dict]:
    """Turn one raw log line into structured data, or None for blank lines.

    Formats are tried in order:
      1. JSON -- parsed and returned as-is.
      2. "[YYYY-MM-DD HH:MM:SS] message" -- split into timestamp/message.
      3. Anything else -- {"message": <stripped text>}.
    """
    try:
        return json.loads(line)
    except json.JSONDecodeError:
        pass

    timestamped = re.match(
        r'\[(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})\] (.+)', line
    )
    if timestamped:
        return {
            "timestamp": timestamped.group(1),
            "message": timestamped.group(2),
        }

    stripped = line.strip()
    return {"message": stripped} if stripped else None
def filter_by_date(entries: List[dict], since: str) -> List[dict]:
    """Keep entries whose timestamp is at or after *since*.

    *since* may be an absolute date ("YYYY-MM-DD") or a relative offset
    such as "7d" (days) or "12h" (hours).  If *since* cannot be
    interpreted, all entries are returned unchanged; entries without a
    parseable timestamp are always kept.
    """
    try:
        cutoff = datetime.strptime(since, "%Y-%m-%d")
    except ValueError:
        try:
            if since.endswith("d"):
                cutoff = datetime.now() - timedelta(days=int(since[:-1]))
            elif since.endswith("h"):
                cutoff = datetime.now() - timedelta(hours=int(since[:-1]))
            else:
                return entries
        except ValueError:
            return entries

    kept = []
    for entry in entries:
        stamp = entry.get("timestamp", "")
        if not stamp:
            kept.append(entry)
            continue
        try:
            when = datetime.strptime(stamp[:19], "%Y-%m-%d %H:%M:%S")
        except ValueError:
            kept.append(entry)  # unparseable timestamps are not dropped
        else:
            if when >= cutoff:
                kept.append(entry)
    return kept
def grep_entries(entries: List[dict], pattern: str) -> List[dict]:
    """Return entries whose message matches *pattern* (case-insensitive regex)."""
    matcher = re.compile(pattern, re.IGNORECASE)
    return [
        entry for entry in entries
        if matcher.search(entry.get("message", ""))
    ]
def tail_file(path: Path, n: int = 50) -> List[str]:
    """Return the last *n* lines of *path*, or [] if it cannot be read."""
    try:
        with open(path) as handle:
            return handle.readlines()[-n:]
    except Exception:
        # Best-effort: missing/unreadable files simply yield no lines.
        return []
def format_entry(entry: dict) -> str:
    """Render an entry as "[timestamp] message", or just the message."""
    message = entry.get("message", "")
    timestamp = entry.get("timestamp", "")
    return f"[{timestamp}] {message}" if timestamp else message
def list_logs():
    """Print a table of available log files: name, human-readable size, mtime."""
    files = get_log_files()
    if not files:
        print("No log files found.")
        return

    print(f"\n📋 Log Files ({len(files)})\n")
    print(f"{'File':<40} {'Size':<10} {'Modified'}")
    print("-" * 70)
    for log_file in files:
        info = log_file.stat()
        size = info.st_size
        # Human-readable size: M above 1 MiB, K above 1 KiB, else bytes.
        if size > 1024 * 1024:
            size_text = f"{size / 1024 / 1024:.1f}M"
        elif size > 1024:
            size_text = f"{size / 1024:.1f}K"
        else:
            size_text = f"{size}B"
        modified = datetime.fromtimestamp(info.st_mtime).strftime("%Y-%m-%d %H:%M")
        print(f"{log_file.name:<40} {size_text:<10} {modified}")
    print("")
def view_log(filename: str, tail: int = 50, grep: Optional[str] = None,
             since: Optional[str] = None):
    """View a log file from LOG_DIR, with optional filtering.

    Args:
        filename: Log file name inside LOG_DIR; a ".log" extension is
            appended as a fallback when the bare name does not exist.
        tail: Maximum number of entries to display.
        grep: Optional case-insensitive regex applied to entry messages.
        since: Optional date filter ("YYYY-MM-DD" or relative "1d"/"7d"/"1h").
    """
    log_path = LOG_DIR / filename
    if not log_path.exists():
        # Fix: interpolate the requested filename (was a literal
        # "(unknown)" placeholder, which made both the .log fallback
        # and the error message useless).
        log_path = LOG_DIR / f"{filename}.log"
        if not log_path.exists():
            print(f"Log file not found: {filename}")
            return

    # Read extra lines when grepping so the post-filter tail is still full.
    lines = tail_file(log_path, tail * 2 if grep else tail)
    entries = []
    for line in lines:
        entry = parse_log_line(line)
        if entry:
            entries.append(entry)

    if since:
        entries = filter_by_date(entries, since)
    if grep:
        entries = grep_entries(entries, grep)

    # Limit to tail after filtering.
    entries = entries[-tail:]

    print(f"\n📜 {log_path.name} (last {len(entries)} entries)\n")
    print("-" * 70)
    for entry in entries:
        print(format_entry(entry))
    print("")
def main():
    """CLI entry point: parse arguments and dispatch to list/view."""
    parser = argparse.ArgumentParser(description="View Claude Code logs")
    parser.add_argument("file", nargs="?", help="Log file to view")
    parser.add_argument("--list", "-l", action="store_true", help="List log files")
    parser.add_argument("--tail", "-n", type=int, default=50,
                        help="Number of lines to show (default: 50)")
    parser.add_argument("--grep", "-g", type=str, help="Filter by pattern")
    parser.add_argument("--since", "-s", type=str,
                        help="Show entries since date (YYYY-MM-DD or 1d/7d/1h)")
    parser.add_argument("--type", "-t", type=str,
                        help="Filter log files by type (maintenance, etc.)")
    args = parser.parse_args()

    if args.list:
        list_logs()
        return
    if args.file:
        view_log(args.file, tail=args.tail, grep=args.grep, since=args.since)
        return

    # No file given: fall back to the most recently modified log.
    recent = get_log_files(args.type)
    if recent:
        view_log(recent[0].name, tail=args.tail, grep=args.grep, since=args.since)
    else:
        print("No log files found. Run some automation scripts first.")
# Run the CLI only when executed as a script, not when imported.
if __name__ == "__main__":
    main()