Initial commit

This commit is contained in:
Zhongwei Li
2025-11-30 08:44:01 +08:00
commit f39a842409
10 changed files with 1497 additions and 0 deletions

View File

@@ -0,0 +1,239 @@
#!/bin/bash
# Analyze project and suggest CLAUDE.md improvements
# Usage: bash scripts/analyze-claude-md.sh [project-path]
#
# Prints a single JSON object on stdout describing the project: package
# manager, testing framework, app framework, notable files/directories,
# and a list of suggested CLAUDE.md additions.
set -e
PROJECT_PATH="${1:-.}"
if [ ! -d "$PROJECT_PATH" ]; then
  echo "Error: Project path not found: $PROJECT_PATH"
  exit 1
fi
cd "$PROJECT_PATH"
# Open the JSON object; every later echo appends keys to it, so the
# statement order below is load-bearing.
echo "{"
echo ' "project_path": "'"$(pwd)"'",'
echo ' "timestamp": "'"$(date -u +"%Y-%m-%dT%H:%M:%SZ")"'",'
# Check if CLAUDE.md exists
# NOTE(review): only .claude/CLAUDE.md is probed; a root-level CLAUDE.md
# would be reported as absent — confirm the intended location.
if [ -f ".claude/CLAUDE.md" ]; then
  echo ' "claude_md_exists": true,'
  echo ' "claude_md_location": ".claude/CLAUDE.md",'
  echo ' "claude_md_size": '"$(wc -l < .claude/CLAUDE.md)"','
else
  echo ' "claude_md_exists": false,'
fi
# Detect package manager and commands
# Emits a "detected_package_manager" object keyed off whichever manifest
# file is present (package.json > Cargo.toml > Python > go.mod).
echo ' "detected_package_manager": {'
if [ -f "package.json" ]; then
  echo ' "type": "npm",'
  echo ' "has_package_json": true,'
  # Extract scripts
  # List up to 20 npm script names (requires jq; silently skipped otherwise).
  if command -v jq &> /dev/null && [ -f "package.json" ]; then
    SCRIPTS=$(jq -r '.scripts | keys[]' package.json 2>/dev/null | head -20)
    if [ -n "$SCRIPTS" ]; then
      echo ' "scripts": ['
      echo "$SCRIPTS" | while IFS= read -r script; do
        echo " \"$script\","
      # The sed strips the trailing comma from the last array element.
      # NOTE(review): script names containing a double quote would break
      # the JSON here — confirm whether that case matters.
      done | sed '$ s/,$//'
      echo ' ],'
    fi
  fi
  # Infer the package manager from whichever lockfile is present;
  # pnpm wins over yarn, yarn over npm.
  if [ -f "pnpm-lock.yaml" ]; then
    echo ' "lockfile": "pnpm"'
  elif [ -f "yarn.lock" ]; then
    echo ' "lockfile": "yarn"'
  elif [ -f "package-lock.json" ]; then
    echo ' "lockfile": "npm"'
  else
    echo ' "lockfile": "none"'
  fi
elif [ -f "Cargo.toml" ]; then
  echo ' "type": "cargo",'
  echo ' "has_cargo_toml": true'
elif [ -f "requirements.txt" ] || [ -f "pyproject.toml" ]; then
  echo ' "type": "python",'
  if [ -f "pyproject.toml" ]; then
    echo ' "has_pyproject": true'
  else
    echo ' "has_requirements": true'
  fi
elif [ -f "go.mod" ]; then
  echo ' "type": "go",'
  echo ' "has_go_mod": true'
else
  echo ' "type": "unknown"'
fi
echo ' },'
# Detect testing framework
# Emits a "testing" object with the framework inferred from the project's
# manifest and config files.
echo ' "testing": {'
if [ -f "package.json" ]; then
  if grep -q '"vitest"' package.json 2>/dev/null; then
    echo ' "framework": "vitest"'
  elif grep -q '"jest"' package.json 2>/dev/null; then
    echo ' "framework": "jest"'
  elif grep -q '"mocha"' package.json 2>/dev/null; then
    echo ' "framework": "mocha"'
  else
    echo ' "framework": "unknown"'
  fi
# FIXED: use grep -q directly. The previous form,
# `grep -r "pytest" . | head -1 &> /dev/null`, ALWAYS succeeded because
# the condition's status is head's (0 even with no input), so every
# non-npm project was misreported as using pytest. -q also stops at the
# first match instead of scanning the whole tree.
elif [ -f "pytest.ini" ] || grep -rq "pytest" . 2>/dev/null; then
  echo ' "framework": "pytest"'
elif [ -f "Cargo.toml" ]; then
  echo ' "framework": "cargo test"'
elif [ -f "go.mod" ]; then
  echo ' "framework": "go test"'
else
  echo ' "framework": "unknown"'
fi
echo ' },'
# Detect framework
# Emits a "framework" object identifying the app framework, checked in
# priority order (more specific frameworks first).
echo ' "framework": {'
if [ -f "package.json" ]; then
  if grep -q '"next"' package.json 2>/dev/null; then
    echo ' "type": "nextjs"'
  elif grep -q '"react"' package.json 2>/dev/null; then
    echo ' "type": "react"'
  elif grep -q '"vue"' package.json 2>/dev/null; then
    echo ' "type": "vue"'
  elif grep -q '"@angular/core"' package.json 2>/dev/null; then
    echo ' "type": "angular"'
  elif grep -q '"express"' package.json 2>/dev/null; then
    echo ' "type": "express"'
  elif grep -q '"fastify"' package.json 2>/dev/null; then
    echo ' "type": "fastify"'
  else
    echo ' "type": "unknown"'
  fi
elif [ -f "Cargo.toml" ]; then
  if grep -q "actix-web" Cargo.toml 2>/dev/null; then
    echo ' "type": "actix-web"'
  elif grep -q "rocket" Cargo.toml 2>/dev/null; then
    echo ' "type": "rocket"'
  else
    echo ' "type": "rust"'
  fi
elif [ -f "requirements.txt" ] || [ -f "pyproject.toml" ]; then
  # FIXED: use grep -rq. The previous `grep -r ... | head -1 &>/dev/null`
  # always succeeded (head exits 0 even with empty input), so every
  # Python project was reported as django regardless of contents.
  if grep -rq "django" . 2>/dev/null; then
    echo ' "type": "django"'
  elif grep -rq "flask" . 2>/dev/null; then
    echo ' "type": "flask"'
  elif grep -rq "fastapi" . 2>/dev/null; then
    echo ' "type": "fastapi"'
  else
    echo ' "type": "python"'
  fi
else
  echo ' "type": "unknown"'
fi
echo ' },'
# Check for common files
echo ' "project_files": {'
echo ' "has_readme": '"$([ -f "README.md" ] && echo "true" || echo "false")"','
echo ' "has_dockerfile": '"$([ -f "Dockerfile" ] && echo "true" || echo "false")"','
# The ||/&& chain below evaluates left-to-right: if any of the three CI
# checks succeeds, "true" is echoed; only if all three fail does the
# final || print "false". (Safe here because echo cannot fail.)
echo ' "has_ci": '"$([ -d ".github/workflows" ] || [ -f ".gitlab-ci.yml" ] || [ -f ".circleci/config.yml" ] && echo "true" || echo "false")"','
echo ' "has_gitignore": '"$([ -f ".gitignore" ] && echo "true" || echo "false")"','
echo ' "has_editorconfig": '"$([ -f ".editorconfig" ] && echo "true" || echo "false")"''
echo ' },'
# Directory structure
echo ' "directory_structure": {'
DIRS_TO_CHECK=("src" "lib" "app" "pages" "components" "api" "tests" "test" "__tests__" "scripts" "docs")
# Accumulate found directories as a comma-separated list of quoted names.
FOUND_DIRS=""
for dir in "${DIRS_TO_CHECK[@]}"; do
  if [ -d "$dir" ]; then
    FOUND_DIRS="$FOUND_DIRS\"$dir\", "
  fi
done
if [ -n "$FOUND_DIRS" ]; then
  # ${FOUND_DIRS%, } strips the trailing ", " from the last element.
  echo ' "key_directories": ['"${FOUND_DIRS%, }"']'
else
  echo ' "key_directories": []'
fi
echo ' },'
# Suggest CLAUDE.md improvements
# Each check appends a pre-quoted JSON string to SUGGESTIONS; they are
# printed at the end with commas between (but not after) entries.
echo ' "claude_md_suggestions": ['
SUGGESTIONS=()
# Check for package.json scripts
if [ -f "package.json" ] && command -v jq &> /dev/null; then
  SCRIPTS=$(jq -r '.scripts | keys[]' package.json 2>/dev/null)
  if [ -n "$SCRIPTS" ]; then
    SUGGESTIONS+=(' "Document npm scripts (dev, build, test, lint) in CLAUDE.md"')
  fi
fi
# Check for testing
if [ -f "package.json" ]; then
  if grep -q '"vitest"' package.json 2>/dev/null || grep -q '"jest"' package.json 2>/dev/null; then
    SUGGESTIONS+=(' "Document testing approach and test commands"')
  fi
fi
# Check for TypeScript
if [ -f "tsconfig.json" ]; then
  SUGGESTIONS+=(' "Document TypeScript configuration and type checking commands"')
fi
# Check for linting
if [ -f ".eslintrc" ] || [ -f ".eslintrc.js" ] || [ -f ".eslintrc.json" ]; then
  SUGGESTIONS+=(' "Document linting rules and lint commands"')
fi
# Check for Docker
if [ -f "Dockerfile" ]; then
  SUGGESTIONS+=(' "Document Docker build and run commands"')
fi
# Check for environment variables
if [ -f ".env.example" ] || [ -f ".env.local.example" ]; then
  SUGGESTIONS+=(' "Document required environment variables and setup"')
fi
# Check for monorepo
if [ -f "pnpm-workspace.yaml" ] || [ -f "lerna.json" ]; then
  SUGGESTIONS+=(' "Document monorepo structure and workspace commands"')
fi
# Check for CI/CD
if [ -d ".github/workflows" ]; then
  SUGGESTIONS+=(' "Document CI/CD workflow and deployment process"')
fi
# Check for database
# FIXED: use grep -rq. The previous `grep -r "prisma" . | head -1 &>/dev/null`
# always succeeded (head exits 0 with no input), so EVERY project received
# the database suggestion.
if grep -rq "prisma" . 2>/dev/null || [ -f "prisma/schema.prisma" ]; then
  SUGGESTIONS+=(' "Document database schema and migration commands"')
fi
# Print suggestions, comma-separating all but the last entry.
for i in "${!SUGGESTIONS[@]}"; do
  if [ $i -eq $((${#SUGGESTIONS[@]} - 1)) ]; then
    echo "${SUGGESTIONS[$i]}"
  else
    echo "${SUGGESTIONS[$i]},"
  fi
done
echo ' ]'
echo "}"

View File

@@ -0,0 +1,274 @@
#!/usr/bin/env bash
set -euo pipefail
# Claude Code History Analyzer - Fixed Version
# Key fixes:
# 1. Simplified jq syntax (remove != null checks)
# 2. Better error handling for empty results
# 3. Validate temp files before JSON generation
# 4. More robust file processing
# Parse arguments.
#   --current-project   restrict analysis to history for the current directory
#   --file PATH         analyze a single .jsonl conversation file
#   --discover-github   also run github-discovery.sh (currently on by default)
CURRENT_PROJECT_ONLY=false
SINGLE_FILE=""
SETTINGS_FILE="${HOME}/.claude/settings.json"
DISCOVER_GITHUB=true
while [[ $# -gt 0 ]]; do
  case $1 in
    --current-project)
      CURRENT_PROJECT_ONLY=true
      shift
      ;;
    --file)
      # FIXED: ${2:?...} aborts with a clear message when --file is the
      # last argument, instead of the opaque "unbound variable" error
      # that set -u raised on a bare "$2".
      SINGLE_FILE="${2:?Error: --file requires a path argument}"
      shift 2
      ;;
    --discover-github)
      DISCOVER_GITHUB=true
      shift
      ;;
    *)
      # Unrecognized arguments are deliberately ignored.
      shift
      ;;
  esac
done
CLAUDE_PROJECTS_DIR="${HOME}/.claude/projects"
# Colors for status output (to stderr)
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
# Check if jq is installed
if ! command -v jq &>/dev/null; then
echo -e "${RED}Error: jq is required but not installed${NC}" >&2
echo "Install with: brew install jq (macOS) or apt install jq (Linux)" >&2
exit 1
fi
# Create temporary files for aggregation
TEMP_DIR=$(mktemp -d)
TOOLS_FILE="$TEMP_DIR/tools.txt"
PROJECTS_FILE="$TEMP_DIR/projects.txt"
MODELS_FILE="$TEMP_DIR/models.txt"
AUTO_ALLOWED_FILE="$TEMP_DIR/auto_allowed.txt"
# Cleanup on exit
trap "rm -rf '$TEMP_DIR'" EXIT
# Read existing auto-allowed tools from settings
if [ -f "$SETTINGS_FILE" ]; then
jq -r '.autoAllowedTools[]? // empty' "$SETTINGS_FILE" 2>/dev/null | sort >"$AUTO_ALLOWED_FILE" || touch "$AUTO_ALLOWED_FILE"
else
touch "$AUTO_ALLOWED_FILE"
fi
echo -e "${BLUE}Claude Code History Analyzer${NC}" >&2
echo -e "${BLUE}=============================${NC}\n" >&2
# Determine which files to analyze
if [ -n "$SINGLE_FILE" ]; then
if [ ! -f "$SINGLE_FILE" ]; then
echo -e "${RED}Error: File not found: $SINGLE_FILE${NC}" >&2
exit 1
fi
echo -e "Analyzing single file: ${GREEN}$(basename "$SINGLE_FILE")${NC}\n" >&2
FILES_TO_ANALYZE=("$SINGLE_FILE")
SCOPE="single_file"
SCOPE_DETAIL="$(basename "$SINGLE_FILE")"
elif [ "$CURRENT_PROJECT_ONLY" = true ]; then
CURRENT_DIR=$(pwd)
PROJECT_PATH=$(echo "$CURRENT_DIR" | sed 's/\//-/g')
PROJECT_DIR="${CLAUDE_PROJECTS_DIR}/${PROJECT_PATH}"
if [ ! -d "$PROJECT_DIR" ]; then
echo -e "${RED}Error: No Claude Code history found for current project${NC}" >&2
echo -e "Looking for: $PROJECT_DIR" >&2
exit 1
fi
echo -e "Analyzing current project: ${GREEN}${CURRENT_DIR}${NC}\n" >&2
mapfile -t FILES_TO_ANALYZE < <(find "$PROJECT_DIR" -name "*.jsonl" 2>/dev/null)
SCOPE="current_project"
SCOPE_DETAIL="$CURRENT_DIR"
else
if [ ! -d "$CLAUDE_PROJECTS_DIR" ]; then
echo -e "${RED}Error: Claude Code projects directory not found at $CLAUDE_PROJECTS_DIR${NC}" >&2
exit 1
fi
mapfile -t FILES_TO_ANALYZE < <(find "$CLAUDE_PROJECTS_DIR" -name "*.jsonl" 2>/dev/null)
SCOPE="all_projects"
SCOPE_DETAIL=""
fi
TOTAL_FILES=${#FILES_TO_ANALYZE[@]}
echo -e "Found ${GREEN}${TOTAL_FILES}${NC} conversation file(s)\n" >&2
if [ "$TOTAL_FILES" -eq 0 ]; then
echo -e "${RED}No conversation files found${NC}" >&2
exit 1
fi
# Extract all tool usage - FIXED: Simplified jq syntax
echo -e "${YELLOW}Extracting tool usage...${NC}" >&2
for file in "${FILES_TO_ANALYZE[@]}"; do
  # Process one file at a time to avoid memory issues
  jq -r 'select(.message.content) | .message.content[] | select(.type == "tool_use") | .name' "$file" 2>/dev/null || true
done | sort | uniq -c | sort -rn >"$TOOLS_FILE"
# Check if we got any results
if [ ! -s "$TOOLS_FILE" ]; then
  echo -e "${YELLOW}Warning: No tool usage found in conversation files${NC}" >&2
  touch "$TOOLS_FILE" # Create empty file to continue
fi
# Extract project paths (only if analyzing all projects)
if [ "$CURRENT_PROJECT_ONLY" = false ] && [ -z "$SINGLE_FILE" ]; then
  echo -e "${YELLOW}Extracting project distribution...${NC}" >&2
  # Count conversations per project directory.
  # FIXED: the global options -mindepth/-maxdepth must precede tests
  # like -type, otherwise GNU find prints a warning to stderr.
  find "$CLAUDE_PROJECTS_DIR" -mindepth 1 -maxdepth 1 -type d 2>/dev/null |
    while read -r dir; do
      count=$(find "$dir" -name "*.jsonl" 2>/dev/null | wc -l | tr -d ' ')
      basename=$(basename "$dir")
      echo "$count $basename"
    done | sort -rn >"$PROJECTS_FILE"
fi
# Extract model usage - FIXED: Simplified jq syntax
echo -e "${YELLOW}Extracting model usage...${NC}" >&2
for file in "${FILES_TO_ANALYZE[@]}"; do
  jq -r 'select(.message.model) | .message.model' "$file" 2>/dev/null || true
done | sort | uniq -c | sort -rn >"$MODELS_FILE"
# Check if we got any results
if [ ! -s "$MODELS_FILE" ]; then
  echo -e "${YELLOW}Warning: No model usage found in conversation files${NC}" >&2
  touch "$MODELS_FILE"
fi
# Output structured data as JSON for Claude to interpret
cat <<EOF
{
"metadata": {
"generated_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null || date +"%Y-%m-%dT%H:%M:%SZ")",
"scope": "$SCOPE",
"scope_detail": "$SCOPE_DETAIL",
"total_conversations": $TOTAL_FILES
},
"tool_usage": [
EOF
# Tool usage array - FIXED: Handle empty file
if [ -s "$TOOLS_FILE" ]; then
first=true
while read -r count tool; do
if [ "$first" = true ]; then
first=false
else
echo ","
fi
# Escape tool name for JSON
tool_escaped=$(echo "$tool" | sed 's/"/\\"/g')
printf ' {"tool": "%s", "count": %d}' "$tool_escaped" "$count"
done <"$TOOLS_FILE"
echo ""
fi
cat <<EOF
],
"auto_allowed_tools": [
EOF
# Auto-allowed tools array - FIXED: Handle empty file
if [ -s "$AUTO_ALLOWED_FILE" ]; then
first=true
while read -r tool; do
if [ "$first" = true ]; then
first=false
else
echo ","
fi
tool_escaped=$(echo "$tool" | sed 's/"/\\"/g')
usage=$(grep -w "$tool" "$TOOLS_FILE" 2>/dev/null | awk '{print $1}' || echo "0")
printf ' {"tool": "%s", "usage_count": %d}' "$tool_escaped" "$usage"
done <"$AUTO_ALLOWED_FILE"
echo ""
fi
cat <<EOF
],
"model_usage": [
EOF
# Model usage array - FIXED: Handle empty file
if [ -s "$MODELS_FILE" ]; then
first=true
while read -r count model; do
if [ "$first" = true ]; then
first=false
else
echo ","
fi
model_escaped=$(echo "$model" | sed 's/"/\\"/g')
printf ' {"model": "%s", "count": %d}' "$model_escaped" "$count"
done <"$MODELS_FILE"
echo ""
fi
echo " ]"
# Add project activity if available (all-projects scope only)
if [ "$CURRENT_PROJECT_ONLY" = false ] && [ -z "$SINGLE_FILE" ] && [ -s "$PROJECTS_FILE" ]; then
  cat <<EOF
,
  "project_activity": [
EOF
  first=true
  while read -r count project; do
    if [ "$first" = true ]; then
      first=false
    else
      echo ","
    fi
    # Decode project path ("-" back to "/", prefixed with "~").
    # NOTE(review): this is lossy — hyphens that were part of the original
    # directory names are also converted to "/".
    project_decoded=$(echo "$project" | sed 's/-/\//g' | sed 's/^/~/')
    project_escaped=$(echo "$project_decoded" | sed 's/"/\\"/g')
    printf ' {"project": "%s", "conversations": %d}' "$project_escaped" "$count"
  done <"$PROJECTS_FILE"
  echo ""
  echo " ]"
fi
# Add GitHub discovery data if requested.
# FIXED: this must happen BEFORE the object is closed. The previous
# version printed "}" first and then appended ',"github_discovery": ...',
# producing invalid JSON whenever discovery ran (and it runs by default).
if [ "$DISCOVER_GITHUB" = true ]; then
  SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
  echo -e "\n${BLUE}Running GitHub discovery...${NC}" >&2
  # Run GitHub discovery and capture output
  if [ -f "$SCRIPT_DIR/github-discovery.sh" ]; then
    # Only add the JSON continuation if GitHub discovery succeeds
    if output=$(bash "$SCRIPT_DIR/github-discovery.sh" all 2>/dev/null); then
      echo ","
      echo '"github_discovery": '
      echo "$output"
    else
      echo -e "${YELLOW}Warning: GitHub discovery failed or returned no results${NC}" >&2
    fi
  else
    echo -e "${YELLOW}Warning: github-discovery.sh not found${NC}" >&2
  fi
fi
# Close the top-level JSON object last, after every optional key.
echo "}"

View File

@@ -0,0 +1,69 @@
#!/usr/bin/env bash
set -euo pipefail
# Claude Code Feature Analyzer
# Fetches latest docs and compares with user's actual usage patterns
CLAUDE_DOCS_URL="https://docs.claude.com/en/docs/claude-code"
TEMP_DIR=$(mktemp -d)
DOCS_FILE="$TEMP_DIR/claude-code-docs.html"
FEATURES_FILE="$TEMP_DIR/features.txt"
# FIXED: remove the temp dir on ANY exit path. Under set -e the script
# can abort early; the previous version only cleaned up at the very end
# and leaked $TEMP_DIR on failure.
trap 'rm -rf "$TEMP_DIR"' EXIT
# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
echo -e "${BLUE}Fetching latest Claude Code documentation...${NC}\n"
# Try to fetch docs
# NOTE(review): the fetched HTML is stored but never parsed; the curated
# fallback list below is always what gets printed. Either wire $DOCS_FILE
# into the output or drop the fetch.
if command -v curl &> /dev/null; then
  if curl -s -L "$CLAUDE_DOCS_URL" -o "$DOCS_FILE" 2>/dev/null; then
    echo -e "${GREEN}✓ Documentation fetched${NC}\n"
  else
    echo -e "${YELLOW}⚠ Could not fetch docs, using fallback feature list${NC}\n"
    DOCS_FILE=""
  fi
else
  echo -e "${YELLOW}⚠ curl not found, using fallback feature list${NC}\n"
  DOCS_FILE=""
fi
# Known Claude Code features (fallback + common features)
# Format: "Name: description" — one feature per line.
cat > "$FEATURES_FILE" <<'EOF'
Agents: Custom AI assistants for specific tasks
Slash Commands: Quick shortcuts for common operations
Skills: Modular capabilities you can add
Tool Auto-Allow: Automatically approve specific tools
Project Context: Automatic codebase understanding
Git Integration: Native git operations
Terminal Integration: Execute commands directly
File Watching: Automatic file change detection
Multi-file Editing: Edit multiple files simultaneously
Code Generation: Create new files and boilerplate
Refactoring: Automated code improvements
Testing: Generate and run tests
Documentation: Auto-generate docs from code
Search: Advanced codebase search
Context Memory: Remember past interactions
Web Search: Look up current information
API Integration: Connect to external services
Custom Tools: Build your own tools
Workflows: Chain multiple operations
Templates: Reusable code patterns
EOF
echo -e "${BLUE}Available Claude Code Features${NC}"
echo -e "${BLUE}==============================${NC}\n"
# Render each "Feature: description" line as a small block.
while IFS=: read -r feature description; do
  echo "📌 $feature"
  echo " $description"
  echo ""
done < "$FEATURES_FILE"
# Cleanup is handled by the EXIT trap above.
echo -e "${GREEN}Feature list complete${NC}"

View File

@@ -0,0 +1,178 @@
#!/usr/bin/env bash
set -euo pipefail
# GitHub Claude Code Discovery
# Searches GitHub for skills, agents, and slash commands based on usage patterns
#
# Usage: github-discovery.sh [all|agents|skills|commands|context] [query]
# Emits a JSON object on stdout; all progress/status text goes to stderr.
# Parse arguments
SEARCH_TYPE="${1:-all}" # all, agents, skills, commands, context
QUERY="${2:-}"
# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
RED='\033[0;31m'
NC='\033[0m'
# Check if gh CLI is installed; without it we only emit web search URLs.
HAS_GH=false
if command -v gh &>/dev/null; then
  HAS_GH=true
fi
echo -e "${BLUE}GitHub Claude Code Discovery${NC}" >&2
echo -e "${BLUE}============================${NC}\n" >&2
search_with_gh() {
local path="$1"
local query="$2"
local limit="${3:-10}"
if [ -z "$query" ]; then
gh search code "path:$path" --limit "$limit" --json path,repository,url 2>/dev/null
else
gh search code "$query path:$path" --limit "$limit" --json path,repository,url 2>/dev/null
fi
}
# Function to build a GitHub web search URL (fallback when gh is absent)
#   $1 - path filter (emitted verbatim in the q= parameter)
#   $2 - optional free-text query (percent-encoded)
get_search_url() {
  local path="$1"
  local query="$2"
  if [ -z "$query" ]; then
    echo "https://github.com/search?type=code&q=path:${path}"
    return
  fi
  # FIXED: percent-encode the query. The original only mapped spaces to
  # '+', so characters such as '&' or '#' silently corrupted the URL.
  # Spaces still become '+' (form-style encoding); unreserved characters
  # pass through unchanged.
  local encoded="" i ch
  for ((i = 0; i < ${#query}; i++)); do
    ch="${query:i:1}"
    case "$ch" in
      [a-zA-Z0-9.~_-]) encoded+="$ch" ;;
      ' ') encoded+="+" ;;
      *) encoded+=$(printf '%%%02X' "'$ch") ;;
    esac
  done
  echo "https://github.com/search?type=code&q=${encoded}+path:${path}"
}
# Emit one entry of the JSON "searches" array.
#   $1 - JSON "type" label, $2 - repo path to search,
#   $3 - human-readable label for the stderr progress message.
# Uses globals: first_search (read/write), HAS_GH, QUERY.
# The four search sections were previously four copy-pasted blocks; this
# helper produces byte-identical output for each.
emit_search_section() {
  local label="$1" search_path="$2" display="$3"
  # Comma-separate consecutive array entries.
  if [ "$first_search" = false ]; then
    echo ","
  fi
  first_search=false
  echo -e "${YELLOW}Searching for ${display}...${NC}" >&2
  cat <<EOF
  {
    "type": "$label",
    "path": "$search_path",
    "web_url": "$(get_search_url "$search_path" "$QUERY")",
EOF
  if [ "$HAS_GH" = true ]; then
    echo ' "results": '
    search_with_gh "$search_path" "$QUERY" 20 || echo '[]'
  else
    echo ' "results": []'
  fi
  # No trailing newline: the next entry's comma (or the closing bracket)
  # continues the JSON stream.
  echo -n " }"
}
# Output JSON structure
cat <<EOF
{
  "has_gh_cli": $HAS_GH,
  "searches": [
EOF
first_search=true
# Search for agents
if [ "$SEARCH_TYPE" = "all" ] || [ "$SEARCH_TYPE" = "agents" ]; then
  emit_search_section "agents" ".claude/agents" "agents"
fi
# Search for skills
if [ "$SEARCH_TYPE" = "all" ] || [ "$SEARCH_TYPE" = "skills" ]; then
  emit_search_section "skills" ".claude/skills" "skills"
fi
# Search for slash commands
if [ "$SEARCH_TYPE" = "all" ] || [ "$SEARCH_TYPE" = "commands" ]; then
  emit_search_section "slash_commands" ".claude/commands" "slash commands"
fi
# Search for CLAUDE.md project context files
if [ "$SEARCH_TYPE" = "all" ] || [ "$SEARCH_TYPE" = "context" ]; then
  emit_search_section "project_context" ".claude/CLAUDE.md" "project context files"
fi
cat <<EOF
  ]
}
EOF
echo -e "\n${GREEN}✓ Search complete!${NC}" >&2
if [ "$HAS_GH" = false ]; then
  echo -e "${YELLOW}Note: Install 'gh' CLI for direct search results${NC}" >&2
  echo -e "Install: ${BLUE}brew install gh${NC} (macOS) or see https://cli.github.com" >&2
fi