Initial commit

This commit is contained in:
Zhongwei Li
2025-11-30 08:43:30 +08:00
commit 58302db858
24 changed files with 6096 additions and 0 deletions

71
scripts/add-agents-md.sh Executable file
View File

@@ -0,0 +1,71 @@
#!/usr/bin/env bash
#
# Add AGENTS.md to Documentation/ folder
#
# This script creates an AGENTS.md file in the Documentation/ directory
# to provide context for AI assistants working with TYPO3 documentation.
#
# Usage: run from the root of a TYPO3 extension (the directory that
# contains Documentation/). Prompts before overwriting an existing file.
#
set -e
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
# Script configuration:
#   SCRIPT_DIR  - directory containing this script
#   SKILL_DIR   - parent of SCRIPT_DIR, expected to hold templates/
#   PROJECT_DIR - current working directory (the extension root)
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
SKILL_DIR="$(cd "${SCRIPT_DIR}/.." && pwd)"
PROJECT_DIR="$(pwd)"
DOC_DIR="${PROJECT_DIR}/Documentation"
AGENTS_FILE="${DOC_DIR}/AGENTS.md"
TEMPLATE_FILE="${SKILL_DIR}/templates/AGENTS.md"
echo -e "${GREEN}=== Add AGENTS.md to Documentation/ ===${NC}"
echo
# The script must be invoked from the extension root, next to Documentation/
if [ ! -d "${DOC_DIR}" ]; then
  echo -e "${RED}Error: Documentation/ directory not found${NC}"
  echo "Current directory: ${PROJECT_DIR}"
  echo "Please run this script from your TYPO3 extension root directory"
  exit 1
fi
# Fail early with a clear message instead of letting the later cp fail
# cryptically when the skill's template file is missing.
if [ ! -f "${TEMPLATE_FILE}" ]; then
  echo -e "${RED}Error: template not found: ${TEMPLATE_FILE}${NC}"
  exit 1
fi
# Ask before overwriting an existing AGENTS.md (single keypress, raw read)
if [ -f "${AGENTS_FILE}" ]; then
  echo -e "${YELLOW}⚠ AGENTS.md already exists in Documentation/${NC}"
  echo
  read -p "Do you want to overwrite it? (y/N) " -n 1 -r
  echo
  if [[ ! $REPLY =~ ^[Yy]$ ]]; then
    echo "Aborted."
    exit 0
  fi
fi
# Copy AGENTS.md template into place
echo -e "${YELLOW}Creating AGENTS.md from template...${NC}"
cp "${TEMPLATE_FILE}" "${AGENTS_FILE}"
echo -e "${GREEN}✓ Created ${AGENTS_FILE}${NC}"
echo
echo "Next steps:"
echo "1. Edit Documentation/AGENTS.md to customize:"
echo " - Documentation Strategy section"
echo " - Target Audience"
echo " - Main Topics"
echo
echo "2. The AGENTS.md file provides context for AI assistants:"
echo " - TYPO3 RST syntax and directives"
echo " - Documentation structure patterns"
echo " - Rendering and validation procedures"
echo " - Cross-reference patterns"
echo
echo "3. This file helps AI assistants:"
echo " - Understand documentation purpose and audience"
echo " - Apply correct RST syntax and TYPO3 directives"
echo " - Follow documentation best practices"
echo " - Navigate the documentation structure"

357
scripts/analyze-docs.sh Executable file
View File

@@ -0,0 +1,357 @@
#!/usr/bin/env bash
#
# Analyze Documentation Coverage
#
# Compares extracted data with existing Documentation/ to identify:
# - Missing documentation
# - Outdated documentation
# - Inconsistencies
#
# Generates: Documentation/ANALYSIS.md
#
set -e
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
# Configuration
# DATA_DIR holds the JSON files produced by scripts/extract-all.sh;
# ANALYSIS_FILE is the markdown report this script writes.
PROJECT_DIR="$(pwd)"
DATA_DIR="${PROJECT_DIR}/.claude/docs-extraction/data"
DOC_DIR="${PROJECT_DIR}/Documentation"
ANALYSIS_FILE="${DOC_DIR}/ANALYSIS.md"
# TYPO3 Official Architecture Weights (from typo3-extension-architecture.md)
# BaseWeight for gap priority calculation: Priority = BaseWeight * Severity * UserImpact
# File Type Weights
# NOTE: associative array requires bash 4+ (relevant on macOS, whose
# system /bin/bash is 3.2 — the env-based shebang is what makes this work).
declare -A BASE_WEIGHTS=(
["ext_conf_template"]=10 # User-facing configuration
["controller"]=9 # Core application logic
["model"]=9 # Domain entities
["tca"]=8 # Database configuration
["ext_emconf"]=8 # Extension metadata
["service"]=7 # Business logic
["repository"]=6 # Data access
["viewhelper"]=5 # Template helpers
["utility"]=4 # Helper functions
["other"]=3 # Miscellaneous
)
# Severity Multipliers
SEVERITY_MISSING=3 # Completely undocumented
SEVERITY_OUTDATED=2 # Exists but wrong/incomplete
SEVERITY_INCOMPLETE=1 # Partial documentation
# User Impact Multipliers
IMPACT_USER=3 # End users, editors
IMPACT_INTEGRATOR=2 # TypoScript, TSconfig
IMPACT_DEVELOPER=1 # API, internal code
# Function to calculate gap priority
# Usage: calculate_priority <base_weight> <severity> <impact>
# Prints the product BaseWeight * Severity * UserImpact to stdout.
calculate_priority() {
  printf '%s\n' "$(( $1 * $2 * $3 ))"
}
# Function to determine class type from file path
# Maps a PHP file path to its coarse architectural category. Patterns
# are tested in the same order as the original if/elif chain, so a path
# matching several patterns (e.g. "Controller" and "Service") resolves
# to the first one listed.
get_class_type() {
  case "$1" in
    *Controller*)        echo "controller" ;;
    *Domain/Model*)      echo "model" ;;
    *Domain/Repository*) echo "repository" ;;
    *Service*)           echo "service" ;;
    *ViewHelper*)        echo "viewhelper" ;;
    *Utility*)           echo "utility" ;;
    *)                   echo "other" ;;
  esac
}
echo -e "${GREEN}=== Documentation Coverage Analysis ===${NC}"
echo
# The analysis needs both an existing Documentation/ tree and the data
# produced by scripts/extract-all.sh.
if [ ! -d "${DOC_DIR}" ]; then
  echo -e "${YELLOW}No Documentation/ directory found${NC}"
  echo "Run this from a TYPO3 extension root directory"
  exit 1
fi
if [ ! -d "${DATA_DIR}" ]; then
  echo -e "${YELLOW}No extraction data found${NC}"
  echo "Run scripts/extract-all.sh first"
  exit 1
fi
echo "Project: ${PROJECT_DIR}"
echo "Documentation: ${DOC_DIR}"
echo "Extraction Data: ${DATA_DIR}"
echo
# Start the analysis report. The header uses an *unquoted* here-doc so
# $(date ...) expands right here; the previous version wrote a literal
# placeholder and then patched it with GNU-specific "sed -i" (which
# breaks on BSD/macOS sed and relied on brittle pattern escaping).
cat > "${ANALYSIS_FILE}" <<EOF
# Documentation Analysis Report
**Generated:** $(date -u +"%Y-%m-%d %H:%M:%S UTC")
## Summary
This report compares extracted project data with existing documentation to identify gaps and inconsistencies.
EOF
# Analyze PHP APIs: compare extracted classes against ".. php:class::"
# directives found in Documentation/, then list undocumented classes
# sorted by priority score.
echo -e "${BLUE}Analyzing PHP APIs...${NC}"
if [ -f "${DATA_DIR}/php_apis.json" ]; then
  total_classes=$(jq '.classes | length' "${DATA_DIR}/php_apis.json" 2>/dev/null || echo 0)
  # Count documented classes (look for .rst files in API/ or Developer/)
  documented_classes=0
  if [ -d "${DOC_DIR}/API" ] || [ -d "${DOC_DIR}/Developer" ]; then
    documented_classes=$(find "${DOC_DIR}" -name "*.rst" -type f -exec grep -l ".. php:class::" {} \; 2>/dev/null | wc -l)
  fi
  missing_classes=$((total_classes - documented_classes))
  cat >> "${ANALYSIS_FILE}" <<EOF
### PHP Classes
- **Total Classes:** ${total_classes}
- **Documented Classes:** ${documented_classes}
- **Missing Documentation:** ${missing_classes}
EOF
  if [ "$missing_classes" -gt 0 ]; then
    {
      echo "## Missing Class Documentation"
      echo
      echo "Classes listed by **priority score** (Priority = BaseWeight × Severity × Impact)"
      echo
    } >> "${ANALYSIS_FILE}"
    # Create temporary file with priority calculations.
    temp_classes=$(mktemp)
    # Each record is "priority|class_type|json". class_type must travel
    # through the file: both while loops run in pipeline subshells, so a
    # variable assigned in the first loop is NOT visible in the second.
    # (Previously the "Type" field was rendered from a variable that was
    # never set in the formatting loop's subshell, so it came out empty.)
    jq -r '.classes[] | @json' "${DATA_DIR}/php_apis.json" 2>/dev/null | while IFS= read -r class_json; do
      file_path=$(echo "$class_json" | jq -r '.file')
      class_type=$(get_class_type "$file_path")
      base_weight=${BASE_WEIGHTS[$class_type]}
      priority=$(calculate_priority "$base_weight" "$SEVERITY_MISSING" "$IMPACT_DEVELOPER")
      echo "$priority|$class_type|$class_json" >> "$temp_classes"
    done
    # Sort by priority (descending) and emit one report section per class.
    # class_json is the LAST read field, so read's rest-of-line behavior
    # keeps any '|' characters embedded in the JSON intact.
    sort -t'|' -k1 -rn "$temp_classes" 2>/dev/null | while IFS='|' read -r priority class_type class_json; do
      namespace=$(echo "$class_json" | jq -r '.namespace')
      name=$(echo "$class_json" | jq -r '.name')
      file=$(echo "$class_json" | jq -r '.file')
      desc=$(echo "$class_json" | jq -r '.description')
      cat >> "${ANALYSIS_FILE}" <<CLASSEOF
### ${namespace}\\${name}
- **Priority Score:** ${priority} ⚠️
- **File:** \`${file}\`
- **Type:** ${class_type}
- **Description:** ${desc}
- **Suggested Location:** \`Documentation/API/${name}.rst\`
CLASSEOF
    done
    rm -f "$temp_classes"
  fi
  echo " Classes: ${documented_classes}/${total_classes} documented"
fi
# Analyze Configuration Options
# Compares extracted ext_conf_template.txt options against ".. confval::"
# directives found in the rendered documentation.
echo -e "${BLUE}Analyzing Configuration Options...${NC}"
if [ -f "${DATA_DIR}/config_options.json" ]; then
total_options=$(jq '.config_options | length' "${DATA_DIR}/config_options.json" 2>/dev/null || echo 0)
# Count documented confval directives
# NOTE(review): this counts confval *occurrences* across all .rst files,
# not distinct option keys — a key documented twice counts twice; verify
# this is the intended coverage metric.
documented_options=0
if find "${DOC_DIR}" -name "*.rst" -type f -exec grep -q ".. confval::" {} \; 2>/dev/null; then
documented_options=$(find "${DOC_DIR}" -name "*.rst" -type f -exec grep ".. confval::" {} \; 2>/dev/null | wc -l)
fi
missing_options=$((total_options - documented_options))
cat >> "${ANALYSIS_FILE}" <<EOF
### Configuration Options
- **Total Options:** ${total_options}
- **Documented Options:** ${documented_options}
- **Missing Documentation:** ${missing_options}
EOF
if [ $missing_options -gt 0 ]; then
echo "## Missing Configuration Documentation" >> "${ANALYSIS_FILE}"
echo >> "${ANALYSIS_FILE}"
echo "Configuration options listed by **priority score** (Priority = BaseWeight × Severity × Impact)" >> "${ANALYSIS_FILE}"
echo >> "${ANALYSIS_FILE}"
# Configuration options are user-facing (HIGH priority)
base_weight=${BASE_WEIGHTS["ext_conf_template"]}
priority=$(calculate_priority $base_weight $SEVERITY_MISSING $IMPACT_USER)
# List undocumented options with priority.
# The jq program below formats each option as a markdown section plus a
# ready-to-paste ".. confval::" RST template. The '"'"' sequences embed
# literal single quotes (for $GLOBALS array keys) inside the
# single-quoted jq program — do not touch the quoting.
jq -r --arg priority "$priority" '.config_options[] |
"### " + .key + "\n\n" +
"- **Priority Score:** " + $priority + " 🚨\n" +
"- **Type:** " + .type + "\n" +
"- **Default:** `" + .default + "`\n" +
"- **Description:** " + .description + "\n" +
(if .security_warning then "- **⚠️ Security Warning:** " + .security_warning + "\n" else "" end) +
"- **Suggested Location:** `Documentation/Integration/Configuration.rst`\n\n" +
"**Template:**\n\n```rst\n" +
".. confval:: " + .key + "\n\n" +
" :type: " + .type + "\n" +
" :Default: " + .default + "\n" +
" :Path: $GLOBALS['"'"'TYPO3_CONF_VARS'"'"']['"'"'EXTENSIONS'"'"']['"'"'ext_key'"'"']['"'"'" + .key + "'"'"']\n\n" +
" " + .description + "\n" +
(if .security_warning then "\n .. warning::\n " + .security_warning + "\n" else "" end) +
"```\n"' \
"${DATA_DIR}/config_options.json" 2>/dev/null >> "${ANALYSIS_FILE}" || true
fi
echo " Options: ${documented_options}/${total_options} documented"
fi
# Check Extension Metadata: pull title/version from the extracted
# ext_emconf.php data and remind the maintainer where it must match.
echo -e "${BLUE}Analyzing Extension Metadata...${NC}"
if [ -f "${DATA_DIR}/extension_meta.json" ]; then
  ext_version=$(jq -r '.metadata.version // "unknown"' "${DATA_DIR}/extension_meta.json" 2>/dev/null)
  ext_title=$(jq -r '.metadata.title // "unknown"' "${DATA_DIR}/extension_meta.json" 2>/dev/null)
  # The here-doc must stay unquoted so ${ext_title}/${ext_version} expand,
  # which means literal markdown backticks have to be escaped: in an
  # unquoted here-doc, an unescaped `...` is a command substitution and
  # previously tried to *execute* "Documentation/Index.rst" as a command,
  # emitting an error and dropping the text from the report.
  cat >> "${ANALYSIS_FILE}" <<EOF
### Extension Metadata
- **Title:** ${ext_title}
- **Version:** ${ext_version}
- **Location:** Check \`Documentation/Index.rst\` and \`Documentation/Settings.cfg\`
**Recommended Actions:**
- Verify version number in Settings.cfg matches ext_emconf.php
- Ensure extension title is documented in Index.rst
- Check TYPO3/PHP version constraints are in Installation requirements
EOF
  echo " Extension: ${ext_title} v${ext_version}"
fi
# Priority Score Explanation
# Static reference text: the 'EOF' delimiter is quoted, so backticks,
# the formula, and code fences are written literally with no expansion.
cat >> "${ANALYSIS_FILE}" <<'EOF'
## Priority Score System
Documentation gaps are ranked using **TYPO3 Official Architecture Weighting**:
```
Priority = BaseWeight × Severity × UserImpact
```
### Base Weights (by file type)
- **Configuration (ext_conf_template.txt):** 10 - User-facing settings
- **Controllers:** 9 - Core application logic
- **Models:** 9 - Domain entities
- **TCA:** 8 - Database configuration
- **Services:** 7 - Business logic
- **Repositories:** 6 - Data access
- **ViewHelpers:** 5 - Template helpers
- **Utilities:** 4 - Helper functions
### Severity Multipliers
- **Missing (3):** No documentation exists
- **Outdated (2):** Documentation exists but incorrect
- **Incomplete (1):** Partial documentation
### User Impact Multipliers
- **End Users (3):** Editors, content creators
- **Integrators (2):** TypoScript, TSconfig users
- **Developers (1):** PHP API users
### Example Calculations
- Missing config option: `10 × 3 × 3 = 90` 🚨 (HIGHEST)
- Missing controller: `9 × 3 × 1 = 27` ⚠️
- Missing utility: `4 × 3 × 1 = 12`
**Reference:** See `references/typo3-extension-architecture.md`
## Recommendations
Items above are **already sorted by priority score**. Focus on highest scores first.
### Immediate Actions (Priority Score ≥50)
1. **Document configuration options** (Score: 90) - Critical for users
2. **Document controllers and models** (Score: 27) - Essential for developers
### Quality Improvements
1. Run validation: `scripts/validate_docs.sh`
2. Render locally: `scripts/render_docs.sh`
3. Fix any rendering warnings or broken cross-references
### Enhancements
1. Add usage examples for all configuration options
2. Add code examples for all API methods
3. Consider adding screenshots for user-facing features
## Next Steps
1. **Review this analysis** - Focus on highest priority scores first
2. **Manual documentation** - Create missing RST files using provided templates
3. **Validate** - `scripts/validate_docs.sh`
4. **Render** - `scripts/render_docs.sh`
5. **Commit** - Add new documentation to version control
EOF
# Footer: unquoted here-doc so the timestamp expands in place (literal
# backticks escaped). This replaces the old placeholder-plus-"sed -i"
# post-processing step, which was GNU-sed-only and fragile to escape.
cat >> "${ANALYSIS_FILE}" <<EOF
---
**Analysis Date:** $(date -u +"%Y-%m-%d %H:%M:%S UTC")
**Extraction Data:** See \`.claude/docs-extraction/data/\`
**Weighting Source:** TYPO3 Official Extension Architecture
EOF
echo
echo -e "${GREEN}=== Analysis Complete ===${NC}"
echo
echo "Report generated: ${ANALYSIS_FILE}"
echo
echo -e "${YELLOW}Review the analysis report for documentation gaps and recommendations.${NC}"
echo

158
scripts/extract-all.sh Executable file
View File

@@ -0,0 +1,158 @@
#!/usr/bin/env bash
#
# Extract All Documentation Data
#
# Orchestrates extraction from all available sources:
# - PHP source code
# - Extension configuration
# - TYPO3 configuration
# - Composer dependencies
# - Project files (README, CHANGELOG)
# - Build configurations (optional)
# - Repository metadata (optional)
#
set -e
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
# Script configuration
# SCRIPT_DIR points at scripts/; the sibling extract-*.sh helpers are
# invoked from it below. All output lands under .claude/docs-extraction/.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
SKILL_DIR="$(cd "${SCRIPT_DIR}/.." && pwd)"
PROJECT_DIR="$(pwd)"
EXTRACTION_DIR="${PROJECT_DIR}/.claude/docs-extraction"
DATA_DIR="${EXTRACTION_DIR}/data"
CACHE_DIR="${EXTRACTION_DIR}/cache"
echo -e "${GREEN}=== TYPO3 Documentation Extraction ===${NC}"
echo
echo "Project: ${PROJECT_DIR}"
echo "Extraction Directory: ${EXTRACTION_DIR}"
echo
# Create directories
mkdir -p "${DATA_DIR}"
mkdir -p "${CACHE_DIR}"
# Extraction flags
# Build-config and repo-metadata extraction are opt-in (--build / --repo /
# --all); everything else always runs.
EXTRACT_BUILD=false
EXTRACT_REPO=false
# Parse arguments
while [[ $# -gt 0 ]]; do
case $1 in
--build)
EXTRACT_BUILD=true
shift
;;
--repo)
EXTRACT_REPO=true
shift
;;
--all)
# Shorthand for --build --repo
EXTRACT_BUILD=true
EXTRACT_REPO=true
shift
;;
-h|--help)
echo "Usage: $0 [OPTIONS]"
echo
echo "Options:"
echo " --build Include build configuration extraction (.github, phpunit.xml, etc.)"
echo " --repo Include repository metadata extraction (requires network)"
echo " --all Extract everything (build + repo)"
echo " -h, --help Show this help message"
exit 0
;;
*)
echo -e "${RED}Unknown option: $1${NC}"
exit 1
;;
esac
done
# Core extractions (always run)
# Each extractor is invoked inside an "if"; under `set -e` this keeps a
# failing extractor from aborting the whole run — the failure is reported
# and the remaining extractors still execute.
echo -e "${BLUE}Running core extractions...${NC}"
echo
# 1. PHP Code
echo -e "${YELLOW}→ Extracting PHP code...${NC}"
if "${SCRIPT_DIR}/extract-php.sh"; then
echo -e "${GREEN}✓ PHP extraction complete${NC}"
else
echo -e "${RED}✗ PHP extraction failed${NC}"
fi
echo
# 2. Extension Configuration
echo -e "${YELLOW}→ Extracting extension configuration...${NC}"
if "${SCRIPT_DIR}/extract-extension-config.sh"; then
echo -e "${GREEN}✓ Extension config extraction complete${NC}"
else
echo -e "${RED}✗ Extension config extraction failed${NC}"
fi
echo
# 3. Composer Dependencies
echo -e "${YELLOW}→ Extracting composer dependencies...${NC}"
if "${SCRIPT_DIR}/extract-composer.sh"; then
echo -e "${GREEN}✓ Composer extraction complete${NC}"
else
echo -e "${RED}✗ Composer extraction failed${NC}"
fi
echo
# 4. Project Files
echo -e "${YELLOW}→ Extracting project files...${NC}"
if "${SCRIPT_DIR}/extract-project-files.sh"; then
echo -e "${GREEN}✓ Project files extraction complete${NC}"
else
echo -e "${RED}✗ Project files extraction failed${NC}"
fi
echo
# Optional extractions
# Enabled by --build / --repo / --all (see argument parsing above).
if [ "$EXTRACT_BUILD" = true ]; then
echo -e "${BLUE}Running build configuration extraction...${NC}"
echo
echo -e "${YELLOW}→ Extracting build configs...${NC}"
if "${SCRIPT_DIR}/extract-build-configs.sh"; then
echo -e "${GREEN}✓ Build config extraction complete${NC}"
else
echo -e "${RED}✗ Build config extraction failed${NC}"
fi
echo
fi
if [ "$EXTRACT_REPO" = true ]; then
echo -e "${BLUE}Running repository metadata extraction...${NC}"
echo
echo -e "${YELLOW}→ Extracting repository metadata...${NC}"
if "${SCRIPT_DIR}/extract-repo-metadata.sh"; then
echo -e "${GREEN}✓ Repository metadata extraction complete${NC}"
else
echo -e "${RED}✗ Repository metadata extraction failed (network or auth issue?)${NC}"
fi
echo
fi
# Summary
echo -e "${GREEN}=== Extraction Complete ===${NC}"
echo
echo "Extracted data saved to: ${DATA_DIR}"
echo
echo "Next steps:"
echo "1. Review extracted data: ls -lh ${DATA_DIR}"
echo "2. Run gap analysis: ${SCRIPT_DIR}/analyze-docs.sh"
echo "3. Generate RST templates: ${SCRIPT_DIR}/generate-templates.sh"
echo "4. Review templates: Documentation/GENERATED/"
echo

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env bash
#
# Extract Build Configuration
#
# Extracts configuration from:
# - .github/workflows/*.yml (GitHub Actions)
# - .gitlab-ci.yml (GitLab CI)
# - phpunit.xml (PHPUnit config)
# - phpstan.neon (PHPStan config)
#
set -e
# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
# Configuration
PROJECT_DIR="$(pwd)"
DATA_DIR="${PROJECT_DIR}/.claude/docs-extraction/data"
OUTPUT_FILE="${DATA_DIR}/build_configs.json"
mkdir -p "${DATA_DIR}"
echo "Extracting build configurations..."
# Start JSON
# The JSON document is assembled line-by-line with echo; values are not
# escaped, so this relies on file paths containing no quotes/backslashes.
echo '{' > "${OUTPUT_FILE}"
echo ' "extraction_date": "'$(date -u +"%Y-%m-%dT%H:%M:%SZ")'",' >> "${OUTPUT_FILE}"
# GitHub Actions
if [ -d "${PROJECT_DIR}/.github/workflows" ]; then
workflow_files=$(find "${PROJECT_DIR}/.github/workflows" -name "*.yml" -o -name "*.yaml" 2>/dev/null || true)
if [ -n "$workflow_files" ]; then
echo ' "github_actions": {' >> "${OUTPUT_FILE}"
echo ' "exists": true,' >> "${OUTPUT_FILE}"
echo ' "files": [' >> "${OUTPUT_FILE}"
# "first" gates the leading comma so the array stays valid JSON.
first=true
# NOTE(review): $workflow_files is word-split by this for loop, so a
# workflow path containing spaces would be split into bogus entries —
# confirm that is acceptable for this repo layout.
for wf in $workflow_files; do
if [ "$first" = false ]; then echo ' ,' >> "${OUTPUT_FILE}"; fi
first=false
# Strip the project prefix to record a repo-relative path.
rel_path="${wf#$PROJECT_DIR/}"
echo ' "'${rel_path}'"' >> "${OUTPUT_FILE}"
done
echo ' ]' >> "${OUTPUT_FILE}"
echo ' },' >> "${OUTPUT_FILE}"
else
echo ' "github_actions": { "exists": false },' >> "${OUTPUT_FILE}"
fi
else
echo ' "github_actions": { "exists": false },' >> "${OUTPUT_FILE}"
fi
# GitLab CI
if [ -f "${PROJECT_DIR}/.gitlab-ci.yml" ]; then
echo ' "gitlab_ci": { "exists": true, "file": ".gitlab-ci.yml" },' >> "${OUTPUT_FILE}"
else
echo ' "gitlab_ci": { "exists": false },' >> "${OUTPUT_FILE}"
fi
# PHPUnit
# -maxdepth 2 catches phpunit.xml(.dist) at the root or one level down.
phpunit_files=$(find "${PROJECT_DIR}" -maxdepth 2 -name "phpunit.xml*" 2>/dev/null || true)
if [ -n "$phpunit_files" ]; then
echo ' "phpunit": { "exists": true, "files": [' >> "${OUTPUT_FILE}"
first=true
for pf in $phpunit_files; do
if [ "$first" = false ]; then echo ' ,' >> "${OUTPUT_FILE}"; fi
first=false
rel_path="${pf#$PROJECT_DIR/}"
echo ' "'${rel_path}'"' >> "${OUTPUT_FILE}"
done
echo ' ] },' >> "${OUTPUT_FILE}"
else
echo ' "phpunit": { "exists": false },' >> "${OUTPUT_FILE}"
fi
# PHPStan
# Last key: no trailing comma, keeping the document valid JSON.
if [ -f "${PROJECT_DIR}/phpstan.neon" ] || [ -f "${PROJECT_DIR}/phpstan.neon.dist" ]; then
echo ' "phpstan": { "exists": true }' >> "${OUTPUT_FILE}"
else
echo ' "phpstan": { "exists": false }' >> "${OUTPUT_FILE}"
fi
# Close JSON
echo '}' >> "${OUTPUT_FILE}"
echo -e "${GREEN}✓ Build configs extracted: ${OUTPUT_FILE}${NC}"

61
scripts/extract-composer.sh Executable file
View File

@@ -0,0 +1,61 @@
#!/usr/bin/env bash
#
# Extract Composer Dependencies
#
# Extracts dependency information from composer.json
#
set -e
# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
# Configuration
PROJECT_DIR="$(pwd)"
DATA_DIR="${PROJECT_DIR}/.claude/docs-extraction/data"
COMPOSER_FILE="${PROJECT_DIR}/composer.json"
OUTPUT_FILE="${DATA_DIR}/dependencies.json"
mkdir -p "${DATA_DIR}"
# Missing composer.json is not an error: write an empty result and exit 0
# so the extract-all.sh orchestrator reports success.
if [ ! -f "${COMPOSER_FILE}" ]; then
echo -e "${YELLOW}No composer.json found, skipping${NC}"
echo '{"dependencies": {}}' > "${OUTPUT_FILE}"
exit 0
fi
echo "Extracting composer.json..."
# Extract relevant sections using jq if available, otherwise use PHP
if command -v jq &> /dev/null; then
jq '{
extraction_date: now | todate,
name: .name,
description: .description,
type: .type,
require: .require,
"require-dev": (."require-dev" // {}),
autoload: .autoload,
scripts: (.scripts // {})
}' "${COMPOSER_FILE}" > "${OUTPUT_FILE}"
else
# Fallback to PHP
# JSON_PRETTY_PRINT | JSON_UNESCAPED_SLASHES is a PHP bitwise OR of the
# two json_encode flags (intentional, not a shell pipe).
# NOTE(review): ${COMPOSER_FILE} is interpolated into the PHP source; a
# project path containing a single quote would break the snippet —
# confirm paths are sane or pass the path via argv instead.
php -r "
\$data = json_decode(file_get_contents('${COMPOSER_FILE}'), true);
echo json_encode([
'extraction_date' => date('c'),
'name' => \$data['name'] ?? '',
'description' => \$data['description'] ?? '',
'type' => \$data['type'] ?? '',
'require' => \$data['require'] ?? [],
'require-dev' => \$data['require-dev'] ?? [],
'autoload' => \$data['autoload'] ?? [],
'scripts' => \$data['scripts'] ?? []
], JSON_PRETTY_PRINT | JSON_UNESCAPED_SLASHES);
" > "${OUTPUT_FILE}"
fi
echo -e "${GREEN}✓ composer.json extracted: ${OUTPUT_FILE}${NC}"

View File

@@ -0,0 +1,123 @@
#!/usr/bin/env bash
#
# Extract Extension Configuration
#
# Extracts metadata and configuration from:
# - ext_emconf.php (extension metadata)
# - ext_conf_template.txt (configuration options)
#
set -e
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
# Configuration
PROJECT_DIR="$(pwd)"
DATA_DIR="${PROJECT_DIR}/.claude/docs-extraction/data"
EXT_EMCONF="${PROJECT_DIR}/ext_emconf.php"
EXT_CONF_TEMPLATE="${PROJECT_DIR}/ext_conf_template.txt"
mkdir -p "${DATA_DIR}"
# Extract ext_emconf.php
if [ -f "${EXT_EMCONF}" ]; then
echo "Extracting ext_emconf.php..."
OUTPUT_FILE="${DATA_DIR}/extension_meta.json"
# Use PHP to parse ext_emconf.php properly
# ext_emconf.php populates $EM_CONF[$_EXTKEY]; a dummy key is enough to
# read the metadata back out.
php -r "
\$_EXTKEY = 'temp';
include '${EXT_EMCONF}';
echo json_encode([
'extraction_date' => date('c'),
'metadata' => \$EM_CONF[\$_EXTKEY] ?? []
], JSON_PRETTY_PRINT | JSON_UNESCAPED_SLASHES);
" > "${OUTPUT_FILE}"
echo -e "${GREEN}✓ ext_emconf.php extracted: ${OUTPUT_FILE}${NC}"
else
echo -e "${YELLOW}No ext_emconf.php found, skipping${NC}"
fi
# Extract ext_conf_template.txt
if [ -f "${EXT_CONF_TEMPLATE}" ]; then
echo "Extracting ext_conf_template.txt..."
OUTPUT_FILE="${DATA_DIR}/config_options.json"
# Parse ext_conf_template.txt format:
# # cat=category/subcategory; type=type; label=Label: Description
# settingName = defaultValue
echo '{' > "${OUTPUT_FILE}"
echo ' "extraction_date": "'$(date -u +"%Y-%m-%dT%H:%M:%SZ")'",' >> "${OUTPUT_FILE}"
echo ' "config_options": [' >> "${OUTPUT_FILE}"
# "first" gates the comma between array entries.
first=true
while IFS= read -r line; do
# Check if comment line with metadata
if [[ $line =~ ^#\ cat= ]]; then
# Extract metadata from comment
category=$(echo "$line" | sed -n 's/.*cat=\([^/;]*\).*/\1/p')
subcategory=$(echo "$line" | sed -n 's/.*cat=[^/]*\/\([^;]*\).*/\1/p')
type=$(echo "$line" | sed -n 's/.*type=\([^;]*\).*/\1/p')
label_and_desc=$(echo "$line" | sed -n 's/.*label=\(.*\)/\1/p')
# Label and description are separated by the first ':' in the label text.
label=$(echo "$label_and_desc" | cut -d':' -f1)
description=$(echo "$label_and_desc" | cut -d':' -f2- | sed 's/^ *//')
# Check for WARNING in description
security_warning=""
if echo "$description" | grep -qi "WARNING:"; then
security_warning=$(echo "$description" | sed -n 's/.*WARNING: \(.*\)/\1/p')
description=$(echo "$description" | sed 's/WARNING:.*//' | sed 's/ *$//')
fi
# Read next line for setting name and default
# This 'read' consumes the following line from the SAME input file as
# the outer while loop — the "key = value" line that belongs to the
# comment just parsed. Do not reorder.
read -r next_line
if [[ $next_line =~ ^([^=]+)\ =\ (.+)$ ]]; then
setting_name="${BASH_REMATCH[1]}"
setting_name=$(echo "$setting_name" | sed 's/ *$//')
default_value="${BASH_REMATCH[2]}"
default_value=$(echo "$default_value" | sed 's/^ *//;s/ *$//')
# Add comma for non-first entries
if [ "$first" = false ]; then
echo ' ,' >> "${OUTPUT_FILE}"
fi
first=false
# Write JSON entry
# NOTE(review): values are interpolated without JSON escaping; a label
# or description containing a double quote or backslash would produce
# invalid JSON — confirm or route through jq for escaping.
echo ' {' >> "${OUTPUT_FILE}"
echo ' "key": "'${setting_name}'",' >> "${OUTPUT_FILE}"
echo ' "category": "'${category}'",' >> "${OUTPUT_FILE}"
echo ' "subcategory": "'${subcategory}'",' >> "${OUTPUT_FILE}"
echo ' "type": "'${type}'",' >> "${OUTPUT_FILE}"
echo ' "label": "'"${label}"'",' >> "${OUTPUT_FILE}"
echo ' "description": "'"${description}"'",' >> "${OUTPUT_FILE}"
echo ' "default": "'"${default_value}"'"' >> "${OUTPUT_FILE}"
if [ -n "$security_warning" ]; then
echo ' ,' >> "${OUTPUT_FILE}"
echo ' "security_warning": "'"${security_warning}"'"' >> "${OUTPUT_FILE}"
fi
echo -n ' }' >> "${OUTPUT_FILE}"
fi
fi
done < "${EXT_CONF_TEMPLATE}"
# Close JSON
echo >> "${OUTPUT_FILE}"
echo ' ]' >> "${OUTPUT_FILE}"
echo '}' >> "${OUTPUT_FILE}"
echo -e "${GREEN}✓ ext_conf_template.txt extracted: ${OUTPUT_FILE}${NC}"
else
echo -e "${YELLOW}No ext_conf_template.txt found, skipping${NC}"
fi

152
scripts/extract-php.sh Executable file
View File

@@ -0,0 +1,152 @@
#!/usr/bin/env bash
#
# Extract PHP Code Documentation
#
# Parses PHP files in Classes/ directory to extract:
# - Class names, namespaces, descriptions
# - Method signatures and docblocks
# - Constants with descriptions
# - Security-critical comments
#
set -e
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
# Configuration
# Scans PHP classes under Classes/ and writes a summary JSON for the
# downstream analyze-docs.sh gap analysis.
PROJECT_DIR="$(pwd)"
DATA_DIR="${PROJECT_DIR}/.claude/docs-extraction/data"
OUTPUT_FILE="${DATA_DIR}/php_apis.json"
CLASSES_DIR="${PROJECT_DIR}/Classes"
# TYPO3 Architecture Documentation Priorities
# Based on references/typo3-extension-architecture.md
# Map a PHP file path to its documentation priority label.
# Patterns are checked in the original if/elif order; anything that
# matches none of them falls through to "MEDIUM".
get_doc_priority() {
  case "$1" in
    *Controller*)        echo "HIGH" ;;
    *Domain/Model*)      echo "HIGH" ;;
    *Domain/Repository*) echo "MEDIUM-HIGH" ;;
    *Service*)           echo "MEDIUM-HIGH" ;;
    *ViewHelper*)        echo "MEDIUM" ;;
    *Utility*)           echo "MEDIUM" ;;
    *)                   echo "MEDIUM" ;;
  esac
}
# Map a PHP file path to a machine-readable category keyword for the
# extracted JSON. Same pattern order as the original if/elif chain.
get_class_category() {
  case "$1" in
    *Controller*)        echo "controller" ;;
    *Domain/Model*)      echo "model" ;;
    *Domain/Repository*) echo "repository" ;;
    *Service*)           echo "service" ;;
    *ViewHelper*)        echo "viewhelper" ;;
    *Utility*)           echo "utility" ;;
    *)                   echo "other" ;;
  esac
}
# Check if Classes/ exists
# Missing Classes/ is not an error: write an empty result and exit 0 so
# the extract-all.sh orchestrator reports success.
if [ ! -d "${CLASSES_DIR}" ]; then
echo -e "${YELLOW}No Classes/ directory found, skipping PHP extraction${NC}"
echo '{"classes": []}' > "${OUTPUT_FILE}"
exit 0
fi
# Create output directory
mkdir -p "${DATA_DIR}"
echo "Scanning PHP files in: ${CLASSES_DIR}"
# Initialize JSON output
echo '{' > "${OUTPUT_FILE}"
echo ' "extraction_date": "'$(date -u +"%Y-%m-%dT%H:%M:%SZ")'",' >> "${OUTPUT_FILE}"
echo ' "classes": [' >> "${OUTPUT_FILE}"
# Find all PHP files
# NOTE(review): the unquoted $php_files in the for loop word-splits on
# whitespace, so file paths containing spaces would break — confirm
# Classes/ paths never contain spaces, or switch to find -print0.
php_files=$(find "${CLASSES_DIR}" -type f -name "*.php" | sort)
file_count=$(echo "$php_files" | wc -l)
current=0
for php_file in $php_files; do
current=$((current + 1))
rel_path="${php_file#$PROJECT_DIR/}"
echo " Processing: ${rel_path} (${current}/${file_count})"
# Extract class information using grep and sed
# This is a simplified extraction - full parsing would require PHP parser
# Get namespace
namespace=$(grep -m 1 '^namespace ' "$php_file" | sed 's/namespace //;s/;//' || echo "")
# Get class name
# Handles plain/final/abstract class declarations at column 0 and strips
# extends/implements clauses and stray braces/spaces.
class_name=$(grep -m 1 '^class \|^final class \|^abstract class ' "$php_file" | \
sed 's/^class //;s/^final class //;s/^abstract class //;s/ extends.*//;s/ implements.*//;s/{//;s/ //g' || echo "")
# Files without a top-level class declaration (interfaces, traits,
# procedural files) are skipped entirely.
if [ -z "$class_name" ]; then
continue
fi
# Get class docblock (simplified - just get the first /** */ block)
# Collapses the first docblock into one line, dropping @-tag lines.
class_desc=$(awk '/\/\*\*/{flag=1;next}/\*\//{flag=0}flag' "$php_file" | head -20 | grep -v '^ \* @' | sed 's/^ \* //;s/^ \*$//' | tr '\n' ' ' | sed 's/ */ /g;s/^ //;s/ $//')
# Get author
author=$(grep '@author' "$php_file" | head -1 | sed 's/.*@author //;s/ *$//' || echo "")
# Get license
license=$(grep '@license' "$php_file" | head -1 | sed 's/.*@license //;s/ *$//' || echo "")
# Get documentation priority
doc_priority=$(get_doc_priority "$rel_path")
class_category=$(get_class_category "$rel_path")
# Build JSON entry (simplified structure)
# NOTE(review): values (especially the docblock description) are not
# JSON-escaped; an embedded double quote would corrupt the output file.
if [ $current -gt 1 ]; then
echo ' ,' >> "${OUTPUT_FILE}"
fi
echo ' {' >> "${OUTPUT_FILE}"
echo ' "name": "'${class_name}'",' >> "${OUTPUT_FILE}"
echo ' "namespace": "'${namespace}'",' >> "${OUTPUT_FILE}"
echo ' "file": "'${rel_path}'",' >> "${OUTPUT_FILE}"
echo ' "description": "'${class_desc}'",' >> "${OUTPUT_FILE}"
echo ' "author": "'${author}'",' >> "${OUTPUT_FILE}"
echo ' "license": "'${license}'",' >> "${OUTPUT_FILE}"
echo ' "documentation_priority": "'${doc_priority}'",' >> "${OUTPUT_FILE}"
echo ' "category": "'${class_category}'"' >> "${OUTPUT_FILE}"
echo -n ' }' >> "${OUTPUT_FILE}"
done
# Close JSON
echo >> "${OUTPUT_FILE}"
echo ' ]' >> "${OUTPUT_FILE}"
echo '}' >> "${OUTPUT_FILE}"
echo -e "${GREEN}✓ PHP extraction complete: ${OUTPUT_FILE}${NC}"
echo " Found ${file_count} PHP files"
# NOTE: This is a simplified extractor using bash/grep/sed
# For production use, consider using:
# - nikic/php-parser (PHP)
# - phpdocumentor/reflection-docblock (PHP)
# - Python script with libcst or ast

View File

@@ -0,0 +1,71 @@
#!/usr/bin/env bash
#
# Extract Project Files
#
# Extracts content from:
# - README.md
# - CHANGELOG.md
# - CONTRIBUTING.md (if exists)
#
set -e
# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
# Configuration
PROJECT_DIR="$(pwd)"
DATA_DIR="${PROJECT_DIR}/.claude/docs-extraction/data"
OUTPUT_FILE="${DATA_DIR}/project_files.json"
mkdir -p "${DATA_DIR}"
echo "Extracting project files..."
# Start JSON
echo '{' > "${OUTPUT_FILE}"
echo ' "extraction_date": "'$(date -u +"%Y-%m-%dT%H:%M:%SZ")'",' >> "${OUTPUT_FILE}"
# Extract README.md
if [ -f "${PROJECT_DIR}/README.md" ]; then
echo ' "readme": {' >> "${OUTPUT_FILE}"
echo ' "exists": true,' >> "${OUTPUT_FILE}"
echo ' "path": "README.md",' >> "${OUTPUT_FILE}"
# Get first 100 lines as preview
# The sed pair escapes double quotes and folds newlines into literal \n.
# NOTE(review): backslashes and control characters are NOT escaped, so a
# README containing e.g. a literal backslash-quote sequence could still
# yield invalid JSON — confirm or escape with jq -Rs instead.
readme_content=$(head -100 "${PROJECT_DIR}/README.md" | sed 's/"/\\"/g' | sed ':a;N;$!ba;s/\n/\\n/g')
echo ' "content_preview": "'${readme_content}'"' >> "${OUTPUT_FILE}"
echo ' },' >> "${OUTPUT_FILE}"
else
echo ' "readme": { "exists": false },' >> "${OUTPUT_FILE}"
fi
# Extract CHANGELOG.md
if [ -f "${PROJECT_DIR}/CHANGELOG.md" ]; then
echo ' "changelog": {' >> "${OUTPUT_FILE}"
echo ' "exists": true,' >> "${OUTPUT_FILE}"
echo ' "path": "CHANGELOG.md",' >> "${OUTPUT_FILE}"
# Get first 50 lines as preview
changelog_content=$(head -50 "${PROJECT_DIR}/CHANGELOG.md" | sed 's/"/\\"/g' | sed ':a;N;$!ba;s/\n/\\n/g')
echo ' "content_preview": "'${changelog_content}'"' >> "${OUTPUT_FILE}"
echo ' },' >> "${OUTPUT_FILE}"
else
echo ' "changelog": { "exists": false },' >> "${OUTPUT_FILE}"
fi
# Extract CONTRIBUTING.md
# Last key in the object: no trailing comma in either branch.
if [ -f "${PROJECT_DIR}/CONTRIBUTING.md" ]; then
echo ' "contributing": {' >> "${OUTPUT_FILE}"
echo ' "exists": true,' >> "${OUTPUT_FILE}"
echo ' "path": "CONTRIBUTING.md"' >> "${OUTPUT_FILE}"
echo ' }' >> "${OUTPUT_FILE}"
else
echo ' "contributing": { "exists": false }' >> "${OUTPUT_FILE}"
fi
# Close JSON
echo '}' >> "${OUTPUT_FILE}"
echo -e "${GREEN}✓ Project files extracted: ${OUTPUT_FILE}${NC}"

141
scripts/extract-repo-metadata.sh Executable file
View File

@@ -0,0 +1,141 @@
#!/usr/bin/env bash
#
# Extract Repository Metadata
#
# Extracts metadata from GitHub or GitLab using CLI tools:
# - Repository description, topics, stats
# - Recent releases
# - Contributors
# - Open issues (optionally)
#
# Output: .claude/docs-extraction/data/repo_metadata.json
# Results are cached for 24 hours in .claude/docs-extraction/cache/.

set -e

# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m'

# Configuration
PROJECT_DIR="$(pwd)"
DATA_DIR="${PROJECT_DIR}/.claude/docs-extraction/data"
CACHE_DIR="${PROJECT_DIR}/.claude/docs-extraction/cache"
OUTPUT_FILE="${DATA_DIR}/repo_metadata.json"
CACHE_FILE="${CACHE_DIR}/repo_metadata.json"

mkdir -p "${DATA_DIR}" "${CACHE_DIR}"

# Check cache (24 hour TTL). `stat -c %Y` is GNU/Linux, `stat -f %m` is
# BSD/macOS; fall back to 0 (epoch) which forces a refresh.
if [ -f "${CACHE_FILE}" ]; then
  cache_age=$(($(date +%s) - $(stat -c %Y "${CACHE_FILE}" 2>/dev/null || stat -f %m "${CACHE_FILE}" 2>/dev/null || echo 0)))
  if [ "${cache_age}" -lt 86400 ]; then
    echo -e "${YELLOW}Using cached repository metadata (${cache_age}s old)${NC}"
    cp "${CACHE_FILE}" "${OUTPUT_FILE}"
    exit 0
  fi
fi

# repo_path <host>: print "owner/repo" for the first remote on <host>.
# Handles both SSH (git@host:owner/repo.git) and HTTPS
# (https://host/owner/repo) remotes. The previous sed pattern required a
# ".git" suffix and silently produced garbage for remotes without one.
repo_path() {
  git remote -v | grep "$1" | head -1 | awk '{print $2}' \
    | sed -e "s#.*$1[:/]##" -e 's#\.git$##'
}

# Detect repository type
if git remote -v 2>/dev/null | grep -q github.com; then
  REPO_TYPE="github"
  REPO_URL=$(repo_path github.com)
elif git remote -v 2>/dev/null | grep -q gitlab.com; then
  REPO_TYPE="gitlab"
  REPO_URL=$(repo_path gitlab.com)
else
  echo -e "${YELLOW}No GitHub/GitLab remote found, skipping repository metadata${NC}"
  echo '{"repository": {"exists": false}}' > "${OUTPUT_FILE}"
  exit 0
fi

echo "Detected ${REPO_TYPE} repository: ${REPO_URL}"

# Extract based on repository type
if [ "$REPO_TYPE" = "github" ]; then
  # gh provides the API access; jq is needed below to merge the
  # follow-up queries into the output file (it was used unchecked before).
  if ! command -v gh &> /dev/null; then
    echo -e "${YELLOW}gh CLI not found, skipping GitHub metadata${NC}"
    echo '{"repository": {"exists": false, "reason": "gh CLI not installed"}}' > "${OUTPUT_FILE}"
    exit 0
  fi
  if ! command -v jq &> /dev/null; then
    echo -e "${YELLOW}jq not found, skipping GitHub metadata${NC}"
    echo '{"repository": {"exists": false, "reason": "jq not installed"}}' > "${OUTPUT_FILE}"
    exit 0
  fi

  echo "Extracting GitHub metadata..."

  # Unpredictable temp directory instead of fixed /tmp/releases.json etc.
  # (fixed names in /tmp can be clobbered or pre-created by other users);
  # cleaned up on every exit path via the trap.
  tmpdir=$(mktemp -d)
  trap 'rm -rf -- "${tmpdir}"' EXIT

  # Get repository info
  gh api "repos/${REPO_URL}" --jq '{
    extraction_date: now | todate,
    repository: {
      type: "github",
      name: .name,
      full_name: .full_name,
      description: .description,
      topics: .topics,
      stars: .stargazers_count,
      forks: .forks_count,
      open_issues: .open_issues_count,
      created_at: .created_at,
      updated_at: .updated_at,
      homepage: .homepage
    }
  }' > "${OUTPUT_FILE}"

  # Get last 5 releases
  gh api "repos/${REPO_URL}/releases?per_page=5" --jq 'map({
    tag: .tag_name,
    name: .name,
    published_at: .published_at,
    prerelease: .prerelease
  })' > "${tmpdir}/releases.json"

  # Get top 10 contributors
  gh api "repos/${REPO_URL}/contributors?per_page=10" --jq 'map({
    login: .login,
    contributions: .contributions
  })' > "${tmpdir}/contributors.json"

  # Merge releases + contributors into the output file
  jq --slurpfile releases "${tmpdir}/releases.json" --slurpfile contributors "${tmpdir}/contributors.json" \
    '. + {releases: $releases[0], contributors: $contributors[0]}' "${OUTPUT_FILE}" > "${tmpdir}/merged.json"
  mv "${tmpdir}/merged.json" "${OUTPUT_FILE}"

  echo -e "${GREEN}✓ GitHub metadata extracted: ${OUTPUT_FILE}${NC}"

elif [ "$REPO_TYPE" = "gitlab" ]; then
  # Check if glab CLI is available
  if ! command -v glab &> /dev/null; then
    echo -e "${YELLOW}glab CLI not found, skipping GitLab metadata${NC}"
    echo '{"repository": {"exists": false, "reason": "glab CLI not installed"}}' > "${OUTPUT_FILE}"
    exit 0
  fi

  echo "Extracting GitLab metadata..."

  # Get repository info. The project path must be URL-encoded (slashes
  # become %2F) for the GitLab API; parameter expansion replaces the
  # old unquoted echo|sed round-trip.
  glab api "projects/${REPO_URL//\//%2F}" --jq '{
    extraction_date: now | todate,
    repository: {
      type: "gitlab",
      name: .name,
      full_name: .path_with_namespace,
      description: .description,
      topics: .topics,
      stars: .star_count,
      forks: .forks_count,
      open_issues: .open_issues_count,
      created_at: .created_at,
      updated_at: .last_activity_at
    }
  }' > "${OUTPUT_FILE}"

  echo -e "${GREEN}✓ GitLab metadata extracted: ${OUTPUT_FILE}${NC}"
fi

# Cache the result
cp "${OUTPUT_FILE}" "${CACHE_FILE}"
echo "Cached metadata for 24 hours"

41
scripts/render_docs.sh Executable file
View File

@@ -0,0 +1,41 @@
#!/usr/bin/env bash
#
# Render TYPO3 documentation locally using Docker
#
# Runs the official TYPO3 render-guides container against the project's
# Documentation/ directory and reports where the generated HTML landed.
#
# Usage: ./render_docs.sh [project_root]
#

set -e

PROJECT_ROOT="${1:-.}"

if [ ! -d "$PROJECT_ROOT/Documentation" ]; then
  echo "Error: Documentation/ directory not found at $PROJECT_ROOT"
  echo "Usage: $0 [project_root]"
  exit 1
fi

echo "🚀 Rendering TYPO3 documentation..."
echo "   Project: $PROJECT_ROOT"

# Guard the docker call: without `|| true`, `set -e` would abort here on
# a render failure and the explicit "❌ Rendering failed" report below
# would be unreachable for the most common failure mode.
docker run --rm \
    -v "$(cd "$PROJECT_ROOT" && pwd)":/project \
    ghcr.io/typo3-documentation/render-guides:latest \
    --config=Documentation || true

OUTPUT_DIR="$PROJECT_ROOT/Documentation-GENERATED-temp"

# Success is judged by the presence of the generated entry point,
# not by docker's exit status.
if [ -f "$OUTPUT_DIR/Index.html" ]; then
  echo ""
  echo "✅ Documentation rendered successfully!"
  echo "   Output: $OUTPUT_DIR/Index.html"
  echo ""
  echo "To view:"
  echo "  open $OUTPUT_DIR/Index.html"
  echo "  # or"
  echo "  xdg-open $OUTPUT_DIR/Index.html"
else
  echo ""
  echo "❌ Rendering failed or output not found"
  exit 1
fi

121
scripts/validate_docs.sh Executable file
View File

@@ -0,0 +1,121 @@
#!/usr/bin/env bash
#
# Validate TYPO3 documentation RST files
#
# Checks for required files (Index.rst, Settings.cfg), optionally runs
# rst2html.py strict syntax validation, and scans for common issues
# (unlabelled :ref: usage, non-UTF-8 encoding, trailing whitespace).
#
# Usage: ./validate_docs.sh [project_root]
#

set -e

PROJECT_ROOT="${1:-.}"
DOC_DIR="$PROJECT_ROOT/Documentation"

if [ ! -d "$DOC_DIR" ]; then
  echo "Error: Documentation/ directory not found at $PROJECT_ROOT"
  echo "Usage: $0 [project_root]"
  exit 1
fi

echo "🔍 Validating TYPO3 documentation..."
echo "   Directory: $DOC_DIR"
echo ""

# Check for RST files
RST_FILES=$(find "$DOC_DIR" -name "*.rst" 2>/dev/null | wc -l)
if [ "$RST_FILES" -eq 0 ]; then
  echo "❌ No RST files found in Documentation/"
  exit 1
fi
echo "Found $RST_FILES RST files"
echo ""

# Settings.cfg is needed by TYPO3 Intercept builds; absence is a warning only.
if [ ! -f "$DOC_DIR/Settings.cfg" ]; then
  echo "⚠️  Warning: Settings.cfg not found"
  echo "   This file is required for TYPO3 Intercept builds"
fi

# Index.rst is the mandatory entry point; missing means hard failure.
if [ ! -f "$DOC_DIR/Index.rst" ]; then
  echo "❌ Error: Index.rst not found"
  echo "   This is the main entry point and is required"
  exit 1
fi
echo "✅ Index.rst found"

# Validate RST syntax if rst2html.py is available
if command -v rst2html.py &> /dev/null; then
  echo ""
  echo "Checking RST syntax..."
  ERRORS=0
  while IFS= read -r -d '' file; do
    if ! rst2html.py --strict "$file" > /dev/null 2>&1; then
      echo "❌ Syntax error in: $file"
      # NOT ((ERRORS++)): under `set -e` the post-increment of 0
      # evaluates to 0 (exit status 1), which aborted the script on the
      # very first file with a syntax error.
      ERRORS=$((ERRORS + 1))
    fi
  done < <(find "$DOC_DIR" -name "*.rst" -print0)
  if [ "$ERRORS" -eq 0 ]; then
    echo "✅ All RST files have valid syntax"
  else
    echo ""
    echo "❌ Found $ERRORS files with syntax errors"
    exit 1
  fi
else
  echo "⚠️  rst2html.py not found - skipping syntax validation"
  echo "   Install with: pip install docutils"
fi

# Check for common issues
echo ""
echo "Checking for common issues..."

WARNINGS=0

# Report :ref: usage per file (informational). Counting occurrences
# directly makes the previous grep -q pre-check redundant; the pipeline's
# status is wc's, so a no-match grep does not trip `set -e`.
while IFS= read -r -d '' file; do
  REF_COUNT=$(grep -o ':ref:`[^`]*`' "$file" | wc -l)
  if [ "$REF_COUNT" -gt 0 ]; then
    echo "   Found $REF_COUNT :ref: references in $(basename "$file")"
  fi
done < <(find "$DOC_DIR" -name "*.rst" -print0)

# Check for UTF-8 encoding
while IFS= read -r -d '' file; do
  if ! file -b --mime-encoding "$file" | grep -q utf-8; then
    echo "⚠️  Non-UTF-8 encoding in: $file"
    # Same `set -e` + post-increment trap as ERRORS above.
    WARNINGS=$((WARNINGS + 1))
  fi
done < <(find "$DOC_DIR" -name "*.rst" -print0)

# Check for trailing whitespace
while IFS= read -r -d '' file; do
  if grep -q '[[:space:]]$' "$file"; then
    echo "⚠️  Trailing whitespace in: $file"
    WARNINGS=$((WARNINGS + 1))
  fi
done < <(find "$DOC_DIR" -name "*.rst" -print0)

echo ""
if [ "$WARNINGS" -eq 0 ]; then
  echo "✅ No common issues found"
else
  echo "⚠️  Found $WARNINGS warnings (not blocking)"
fi

echo ""
echo "Validation summary:"
echo "  RST files: $RST_FILES"
echo "  Warnings: $WARNINGS"
echo ""
echo "✅ Documentation validation complete"
echo ""
echo "Next step: Render locally to check for broken references"
echo "  ./render_docs.sh $PROJECT_ROOT"