Initial commit

This commit is contained in:
Zhongwei Li
2025-11-29 17:51:51 +08:00
commit aaa5832fbf
40 changed files with 3362 additions and 0 deletions

View File

@@ -0,0 +1,111 @@
#!/usr/bin/env python3
"""
Create workflow checkpoint before compaction for resumability.
Only runs when WARPIO_LOG=true.
"""
import json
import sys
import os
from datetime import datetime
from pathlib import Path
def create_checkpoint(transcript_path, trigger):
    """Build a resumable checkpoint dict from the current session state.

    Args:
        transcript_path: Path to the JSONL transcript to mine for state.
        trigger: Why the checkpoint was taken ('manual' or 'auto').

    Returns:
        dict with timestamp, trigger, environment info, extracted state,
        and human-readable resume instructions. Parsing is best-effort:
        an unreadable transcript still yields a checkpoint with empty state.
    """
    checkpoint = {
        'timestamp': datetime.now().isoformat(),
        'trigger': trigger,  # 'manual' or 'auto'
        'transcript': transcript_path,
        'environment': {
            'warpio_version': os.getenv('WARPIO_VERSION', '1.0.0'),
            'working_dir': os.getcwd(),
            'python_env': sys.executable
        },
        'resume_instructions': [],
        # Always present so consumers can rely on the schema even when the
        # transcript could not be read at all.
        'state': {'experts_active': [], 'recent_files': []}
    }
    experts_used = set()
    last_files = []
    try:
        with open(transcript_path, 'r') as f:
            lines = f.readlines()
        # Walk backwards: recent state is more relevant.
        for line in reversed(lines):
            try:
                data = json.loads(line)
            except ValueError:
                continue  # Skip malformed lines instead of aborting the scan.
            if not isinstance(data, dict):
                continue
            # Track which expert subagents were active.
            expert = data.get('subagent_type')
            if expert:
                experts_used.add(expert)
            # Capture up to the 5 most recently touched files.
            if len(last_files) < 5 and isinstance(data.get('tool_input'), dict):
                file_path = data['tool_input'].get('file_path', '')
                if file_path:
                    last_files.append(file_path)
        checkpoint['state'] = {
            'experts_active': list(experts_used),
            'recent_files': last_files
        }
        # Turn the extracted state into actionable resume hints.
        if experts_used:
            checkpoint['resume_instructions'].append(
                f"Resume with experts: {', '.join(experts_used)}"
            )
        if last_files:
            checkpoint['resume_instructions'].append(
                f"Continue processing: {last_files[0]}"
            )
    except OSError:
        pass  # Best-effort: a missing transcript still yields a checkpoint.
    return checkpoint
def main():
    """PreCompact hook entry point: write a checkpoint with minimal overhead.

    Reads the hook payload (JSON) from stdin, writes a timestamped checkpoint
    file under WARPIO_LOG_DIR, and always exits 0 so a logging failure can
    never block compaction.
    """
    # Opt-in only: skip entirely unless logging is enabled.
    if not os.getenv('WARPIO_LOG'):
        sys.exit(0)
    try:
        input_data = json.load(sys.stdin)
        session_id = input_data.get('session_id', '')
        transcript = input_data.get('transcript_path', '')
        trigger = input_data.get('trigger', 'manual')
        # Build the checkpoint payload from the transcript.
        checkpoint = create_checkpoint(transcript, trigger)
        checkpoint['session_id'] = session_id
        # Write the checkpoint under a per-session directory.
        log_dir = Path(os.getenv('WARPIO_LOG_DIR', '.warpio-logs'))
        session_dir = log_dir / f"session-{datetime.now().strftime('%Y%m%d-%H%M%S')}"
        session_dir.mkdir(parents=True, exist_ok=True)
        checkpoint_file = session_dir / f"checkpoint-{datetime.now().strftime('%H%M%S')}.json"
        with open(checkpoint_file, 'w') as f:
            json.dump(checkpoint, f, indent=2)
        # Maintain a 'latest' symlink for quick discovery of the newest
        # checkpoint. is_symlink() also catches a dangling link, for which
        # exists() returns False but symlink_to() would still fail.
        latest = session_dir / 'latest-checkpoint.json'
        if latest.exists() or latest.is_symlink():
            latest.unlink()
        latest.symlink_to(checkpoint_file.name)
        # Provide feedback
        print(f"✓ Checkpoint created: {checkpoint_file.name}")
    except Exception:
        pass  # Silent fail: a hook must never break the main workflow.
    sys.exit(0)


if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,27 @@
#!/bin/bash
# Warpio SessionStart Hook - update the statusLine path, then print the banner.
PLUGIN_ROOT="${CLAUDE_PLUGIN_ROOT}"

# Update statusLine path if needed (for installed-via-curl users whose
# settings still point at an old plugin location).
if [ -f ".claude/settings.local.json" ] && [ -n "$PLUGIN_ROOT" ]; then
    if command -v jq &>/dev/null; then
        # -F: match the plugin path literally, not as a regex.
        if ! grep -qF "${PLUGIN_ROOT}" ".claude/settings.local.json" 2>/dev/null; then
            # Only replace the settings file if jq succeeded; an unconditional
            # mv would clobber the user's settings with a partial/empty file
            # whenever jq fails (e.g. on malformed JSON).
            if jq --arg path "${PLUGIN_ROOT}/scripts/warpio-status.sh" \
                '.statusLine.command = $path' \
                .claude/settings.local.json > .claude/settings.local.json.tmp; then
                mv .claude/settings.local.json.tmp .claude/settings.local.json
            else
                rm -f .claude/settings.local.json.tmp
            fi
        fi
    fi
fi

# === NORMAL STARTUP: Warpio active ===
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "🚀 WARPIO Scientific Computing Platform"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "✅ 13 Expert Agents | 19 Commands | 17 MCP Tools"
echo "🔬 Powered by IOWarp.ai"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo
echo "📖 /warpio-help | /warpio-expert-list | /warpio-status"
echo

View File

@@ -0,0 +1,128 @@
#!/usr/bin/env python3
"""
Generate comprehensive session summary at workflow completion.
Only runs when WARPIO_LOG=true.
"""
import json
import sys
import os
from datetime import datetime
from pathlib import Path
from collections import defaultdict
def parse_transcript(transcript_path):
    """Parse a JSONL transcript into a workflow summary dict.

    Args:
        transcript_path: Path to the transcript file (one JSON object per line).

    Returns:
        dict with experts used, MCP tool calls grouped by likely expert,
        total MCP call count, orchestration pattern, and files processed.
        All collections are plain lists, so the result is always
        JSON-serializable, even when the transcript could not be read.
    """
    summary = {
        'experts_used': set(),
        'mcps_by_expert': defaultdict(set),
        'total_mcp_calls': 0,
        'orchestration_pattern': 'single',
        'files_processed': set(),
        'performance_metrics': {}
    }
    # Heuristic mapping from MCP server name to the expert that owns it.
    server_to_expert = {
        'hdf5': 'data-expert', 'adios': 'data-expert', 'parquet': 'data-expert',
        'plot': 'analysis-expert', 'pandas': 'analysis-expert',
        'darshan': 'hpc-expert', 'node_hardware': 'hpc-expert',
        'arxiv': 'research-expert', 'context7': 'research-expert',
    }
    try:
        with open(transcript_path, 'r') as f:
            for line in f:
                try:
                    data = json.loads(line)
                except ValueError:
                    continue  # Skip malformed lines, keep scanning.
                if not isinstance(data, dict):
                    continue
                # Track expert usage.
                expert = data.get('subagent_type')
                if expert:
                    summary['experts_used'].add(expert)
                # Track MCP usage (tool names look like mcp__<server>__<tool>).
                tool = data.get('tool_name', '')
                if isinstance(tool, str) and tool.startswith('mcp__'):
                    summary['total_mcp_calls'] += 1
                    parts = tool.split('__')
                    if len(parts) >= 2:
                        owner = server_to_expert.get(parts[1])
                        if owner:
                            summary['mcps_by_expert'][owner].add(tool)
                # Track files touched via tool inputs.
                tool_input = data.get('tool_input')
                if isinstance(tool_input, dict):
                    file_path = tool_input.get('file_path', '')
                    if file_path:
                        summary['files_processed'].add(file_path)
        # More than one expert means a multi-expert orchestration.
        if len(summary['experts_used']) > 1:
            summary['orchestration_pattern'] = 'multi-expert'
    except OSError:
        pass  # Best-effort: an unreadable transcript yields an empty summary.
    # Convert sets to lists for JSON serialization. Done outside the try so
    # the result is serializable even when the read above failed.
    summary['experts_used'] = list(summary['experts_used'])
    summary['files_processed'] = list(summary['files_processed'])
    summary['mcps_by_expert'] = {k: list(v) for k, v in summary['mcps_by_expert'].items()}
    return summary
def main():
    """Stop hook entry point: generate a session summary with minimal overhead.

    Reads the hook payload (JSON) from stdin, writes both a machine-readable
    JSON summary and a human-readable markdown summary under WARPIO_LOG_DIR,
    and always exits 0 so a logging failure can never disrupt the workflow.
    """
    # Opt-in only: skip entirely unless logging is enabled.
    if not os.getenv('WARPIO_LOG'):
        sys.exit(0)
    try:
        input_data = json.load(sys.stdin)
        session_id = input_data.get('session_id', '')
        transcript = input_data.get('transcript_path', '')
        # Parse transcript for summary
        summary = parse_transcript(transcript)
        # Add metadata
        summary['session_id'] = session_id
        summary['timestamp'] = datetime.now().isoformat()
        summary['warpio_version'] = os.getenv('WARPIO_VERSION', '1.0.0')
        # Write the JSON summary under a per-session directory.
        log_dir = Path(os.getenv('WARPIO_LOG_DIR', '.warpio-logs'))
        session_dir = log_dir / f"session-{datetime.now().strftime('%Y%m%d-%H%M%S')}"
        session_dir.mkdir(parents=True, exist_ok=True)
        with open(session_dir / 'session-summary.json', 'w') as f:
            json.dump(summary, f, indent=2)
        # Also create a human-readable markdown summary.
        with open(session_dir / 'summary.md', 'w') as f:
            f.write("# Warpio Session Summary\n\n")
            f.write(f"**Session ID**: {session_id}\n")
            f.write(f"**Timestamp**: {summary['timestamp']}\n\n")
            f.write("## Orchestration\n")
            f.write(f"- Pattern: {summary['orchestration_pattern']}\n")
            f.write(f"- Experts Used: {', '.join(summary['experts_used'])}\n")
            f.write(f"- Total MCP Calls: {summary['total_mcp_calls']}\n\n")
            f.write("## Files Processed\n")
            for file_path in summary['files_processed'][:10]:  # First 10 only
                f.write(f"- {file_path}\n")
            if len(summary['files_processed']) > 10:
                f.write(f"- ... and {len(summary['files_processed']) - 10} more\n")
    except Exception:
        pass  # Silent fail: a hook must never break the main workflow.
    sys.exit(0)


if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,72 @@
#!/usr/bin/env python3
"""
Log expert results and MCP usage at subagent completion.
Only runs when WARPIO_LOG=true.
"""
import json
import sys
import os
from datetime import datetime
from pathlib import Path
def extract_expert_info(transcript_path):
    """Extract the expert type and MCP tools used from a JSONL transcript.

    Args:
        transcript_path: Path to the transcript file (one JSON object per line).

    Returns:
        Tuple (expert_name, mcps_used): expert_name is the last
        'subagent_type' value seen (or 'unknown'), mcps_used is a
        de-duplicated list of 'mcp__*' tool names. Parsing is best-effort:
        missing files and malformed lines fall back to defaults.
    """
    expert_name = "unknown"
    mcps_used = set()
    try:
        with open(transcript_path, 'r') as f:
            for line in f:
                # Parse each line exactly once; skip malformed JSON instead of
                # letting one bad line abort the whole scan.
                try:
                    data = json.loads(line)
                except ValueError:
                    continue
                if not isinstance(data, dict):
                    continue
                if data.get('subagent_type'):
                    expert_name = data['subagent_type']
                tool = data.get('tool_name', '')
                if isinstance(tool, str) and tool.startswith('mcp__'):
                    mcps_used.add(tool)
    except OSError:
        pass  # Missing/unreadable transcript: return the defaults.
    return expert_name, list(mcps_used)
def main():
    """SubagentStop hook entry point: log expert completion with minimal overhead.

    Reads the hook payload (JSON) from stdin, appends one JSONL record of the
    expert's identity and MCP usage under WARPIO_LOG_DIR, and always exits 0
    so a logging failure can never disrupt the workflow.
    """
    # Opt-in only: skip entirely unless logging is enabled.
    if not os.getenv('WARPIO_LOG'):
        sys.exit(0)
    try:
        input_data = json.load(sys.stdin)
        session_id = input_data.get('session_id', '')
        transcript = input_data.get('transcript_path', '')
        # Extract expert info from transcript
        expert_name, mcps_used = extract_expert_info(transcript)
        # Create log entry
        log_entry = {
            'timestamp': datetime.now().isoformat(),
            'session_id': session_id,
            'expert': expert_name,
            'mcps_used': mcps_used,
            'mcp_count': len(mcps_used)
        }
        # Append to the per-session JSONL log (one record per subagent stop).
        log_dir = Path(os.getenv('WARPIO_LOG_DIR', '.warpio-logs'))
        session_dir = log_dir / f"session-{datetime.now().strftime('%Y%m%d-%H%M%S')}"
        session_dir.mkdir(parents=True, exist_ok=True)
        with open(session_dir / 'expert-results.jsonl', 'a') as f:
            f.write(json.dumps(log_entry) + '\n')
    except Exception:
        pass  # Silent fail to not disrupt workflow
    sys.exit(0)


if __name__ == '__main__':
    main()

47
hooks/hooks.json Normal file
View File

@@ -0,0 +1,47 @@
{
"SessionStart": [
{
"matcher": "startup|resume|clear",
"hooks": [
{
"type": "command",
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/SessionStart/warpio-init.sh"
}
]
}
],
"SubagentStop": [
{
"hooks": [
{
"type": "command",
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/SubagentStop/expert-result-logger.py",
"timeout": 5
}
]
}
],
"Stop": [
{
"hooks": [
{
"type": "command",
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/Stop/session-summary-logger.py",
"timeout": 10
}
]
}
],
"PreCompact": [
{
"matcher": "manual|auto",
"hooks": [
{
"type": "command",
"command": "${CLAUDE_PLUGIN_ROOT}/hooks/PreCompact/workflow-checkpoint.py",
"timeout": 5
}
]
}
]
}