Initial commit
562
skills/start-right/scripts/generate_workflows.py
Executable file
@@ -0,0 +1,562 @@
#!/usr/bin/env python3
"""
Generate GitHub Actions workflows for CI/CD

This script creates:
- PR validation workflow (runs on feature branches)
- Main branch validation workflow (runs on merge to main)
- Release workflow (versioning, tagging, deployment)
"""

import subprocess
import sys
import json
from pathlib import Path


def run_command(cmd, check=True, capture_output=True):
    """Run a shell command and return the result."""
    result = subprocess.run(
        cmd,
        shell=True,
        check=check,
        capture_output=capture_output,
        text=True
    )
    return result


def detect_project_type():
    """Detect project type for workflow generation."""
    cwd = Path(".")

    if (cwd / "package.json").exists():
        with open("package.json") as f:
            pkg = json.load(f)
        deps = {**pkg.get("dependencies", {}), **pkg.get("devDependencies", {})}

        if "react" in deps or "next" in deps:
            return "react"
        elif "vite" in deps:
            return "vite"
        else:
            return "node"

    elif (cwd / "Cargo.toml").exists():
        return "rust"

    elif (cwd / "go.mod").exists():
        return "go"

    elif (cwd / "Dockerfile").exists():
        return "docker"

    elif any(cwd.glob("*.py")):
        return "python"

    return "generic"


def create_pr_workflow(project_type, checks):
    """Create PR validation workflow."""
    workflow = {
        "name": "PR Validation",
        "on": {
            "pull_request": {
                "branches": ["main"]
            }
        },
        "jobs": {
            "validate": {
                "runs-on": "ubuntu-latest",
                "steps": [
                    {
                        "name": "Checkout code",
                        "uses": "actions/checkout@v4"
                    }
                ]
            }
        }
    }

    # Add project-specific setup
    if project_type in ["node", "react", "vite"]:
        workflow["jobs"]["validate"]["steps"].extend([
            {
                "name": "Setup Node.js",
                "uses": "actions/setup-node@v4",
                "with": {
                    "node-version": "20",
                    "cache": "npm"
                }
            },
            {
                "name": "Install dependencies",
                "run": "npm ci"
            }
        ])
    elif project_type == "rust":
        workflow["jobs"]["validate"]["steps"].extend([
            {
                "name": "Setup Rust",
                "uses": "actions-rs/toolchain@v1",
                "with": {
                    "toolchain": "stable",
                    "override": True
                }
            },
            {
                "name": "Cache cargo",
                "uses": "actions/cache@v4",
                "with": {
                    "path": "~/.cargo\ntarget",
                    "key": "${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}"
                }
            }
        ])
    elif project_type == "python":
        workflow["jobs"]["validate"]["steps"].extend([
            {
                "name": "Setup Python",
                "uses": "actions/setup-python@v5",
                "with": {
                    "python-version": "3.11",
                    "cache": "pip"
                }
            },
            {
                "name": "Install dependencies",
                "run": "pip install -r requirements.txt"
            }
        ])
    elif project_type == "go":
        workflow["jobs"]["validate"]["steps"].extend([
            {
                "name": "Setup Go",
                "uses": "actions/setup-go@v5",
                "with": {
                    "go-version": "1.21",
                    "cache": True
                }
            },
            {
                "name": "Install dependencies",
                "run": "go mod download"
            }
        ])

    # Add validation checks based on project type
    if "format" in checks:
        if project_type in ["node", "react", "vite"]:
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Check formatting",
                "run": "npm run format:check"
            })
        elif project_type == "rust":
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Check formatting",
                "run": "cargo fmt --check"
            })
        elif project_type == "python":
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Check formatting",
                "run": "black --check ."
            })

    if "lint" in checks:
        if project_type in ["node", "react", "vite"]:
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Lint",
                "run": "npm run lint"
            })
        elif project_type == "rust":
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Lint",
                "run": "cargo clippy -- -D warnings"
            })
        elif project_type == "python":
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Lint",
                "run": "flake8 ."
            })

    if "type-check" in checks:
        if project_type in ["node", "react", "vite"]:
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Type check",
                "run": "npm run type-check"
            })
        elif project_type == "python":
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Type check",
                "run": "mypy ."
            })

    if "test" in checks:
        if project_type in ["node", "react", "vite"]:
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Run tests",
                "run": "npm test"
            })
        elif project_type == "rust":
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Run tests",
                "run": "cargo test"
            })
        elif project_type == "python":
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Run tests",
                "run": "pytest"
            })
        elif project_type == "go":
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Run tests",
                "run": "go test ./..."
            })

    if "build" in checks:
        if project_type in ["node", "react", "vite"]:
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Build",
                "run": "npm run build"
            })
        elif project_type == "rust":
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Build",
                "run": "cargo build --release"
            })
        elif project_type == "go":
            workflow["jobs"]["validate"]["steps"].append({
                "name": "Build",
                "run": "go build -o bin/ ./..."
            })

    return workflow
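

# For reference, a rough sketch of the YAML create_pr_workflow() yields for a Node
# project with the default checks (abridged; exact content follows the dicts above
# and the yaml.dump settings in write_workflow()):
#
# name: PR Validation
# on:
#   pull_request:
#     branches:
#       - main
# jobs:
#   validate:
#     runs-on: ubuntu-latest
#     steps:
#       - name: Checkout code
#         uses: actions/checkout@v4
#       - name: Setup Node.js
#         uses: actions/setup-node@v4
#         with:
#           node-version: "20"
#           cache: npm
#       - name: Install dependencies
#         run: npm ci
#       - name: Run tests
#         run: npm test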


def create_main_workflow(project_type, checks):
    """Create main branch workflow with versioning and release."""
    workflow = {
        "name": "Main Branch CI/CD",
        "on": {
            "push": {
                "branches": ["main"]
            }
        },
        "jobs": {
            "validate": {
                "runs-on": "ubuntu-latest",
                "steps": [
                    {
                        "name": "Checkout code",
                        "uses": "actions/checkout@v4",
                        "with": {
                            "fetch-depth": 0  # Full history for versioning
                        }
                    }
                ]
            }
        }
    }

    # Reuse PR validation steps
    pr_workflow = create_pr_workflow(project_type, checks)
    workflow["jobs"]["validate"]["steps"].extend(pr_workflow["jobs"]["validate"]["steps"][1:])

    # Add versioning and tagging
    workflow["jobs"]["validate"]["steps"].extend([
        {
            "name": "Bump version and push tag",
            "id": "version",
            "uses": "anothrNick/github-tag-action@1.67.0",
            "env": {
                "GITHUB_TOKEN": "${{ secrets.GITHUB_TOKEN }}",
                "WITH_V": "true",
                "DEFAULT_BUMP": "patch"
            }
        }
    ])

    # Add release job. A reusable workflow is invoked with `uses` at the job level
    # (plus `with`/`secrets`), not as a step inside a regular job, so this job has
    # no runs-on/steps of its own; release.yml does its own checkout.
    workflow["jobs"]["release"] = {
        "needs": "validate",
        "permissions": {
            "contents": "write"
        },
        "uses": "./.github/workflows/release.yml",
        "with": {
            "version": "${{ needs.validate.outputs.new_tag }}"
        },
        "secrets": "inherit"
    }

    # Set output
    workflow["jobs"]["validate"]["outputs"] = {
        "new_tag": "${{ steps.version.outputs.new_tag }}"
    }

    return workflow
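

# Sketch of the caller job this adds to main-ci-cd.yml (abridged); because a
# reusable workflow is referenced at the job level, the job carries only
# needs/permissions/uses/with/secrets:
#
# release:
#   needs: validate
#   permissions:
#     contents: write
#   uses: ./.github/workflows/release.yml
#   with:
#     version: ${{ needs.validate.outputs.new_tag }}
#   secrets: inherit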


def create_release_workflow(project_type, release_type):
    """Create reusable release workflow based on project and release type."""
    workflow = {
        "name": "Release",
        "on": {
            "workflow_call": {
                "inputs": {
                    "version": {
                        "required": True,
                        "type": "string"
                    }
                }
            }
        },
        "jobs": {
            "release": {
                "runs-on": "ubuntu-latest",
                "permissions": {
                    "contents": "write",
                    "packages": "write"
                },
                "steps": [
                    {
                        "name": "Checkout code",
                        "uses": "actions/checkout@v4"
                    }
                ]
            }
        }
    }

    if release_type == "npm":
        workflow["jobs"]["release"]["steps"].extend([
            {
                "name": "Setup Node.js",
                "uses": "actions/setup-node@v4",
                "with": {
                    "node-version": "20",
                    "registry-url": "https://registry.npmjs.org/"
                }
            },
            {
                "name": "Install dependencies",
                "run": "npm ci"
            },
            {
                "name": "Build",
                "run": "npm run build"
            },
            {
                "name": "Publish to npm",
                "run": "npm publish",
                "env": {
                    "NODE_AUTH_TOKEN": "${{ secrets.NPM_TOKEN }}"
                }
            },
            {
                "name": "Create GitHub Release",
                "uses": "softprops/action-gh-release@v1",
                "with": {
                    "tag_name": "${{ inputs.version }}",
                    "name": "Release ${{ inputs.version }}",
                    "generate_release_notes": True
                }
            }
        ])

    elif release_type == "github-pages":
        workflow["jobs"]["release"]["steps"].extend([
            {
                "name": "Setup Node.js",
                "uses": "actions/setup-node@v4",
                "with": {
                    "node-version": "20"
                }
            },
            {
                "name": "Install dependencies",
                "run": "npm ci"
            },
            {
                "name": "Build",
                "run": "npm run build"
            },
            {
                "name": "Deploy to GitHub Pages",
                "uses": "peaceiris/actions-gh-pages@v3",
                "with": {
                    "github_token": "${{ secrets.GITHUB_TOKEN }}",
                    "publish_dir": "./dist"
                }
            },
            {
                "name": "Create GitHub Release",
                "uses": "softprops/action-gh-release@v1",
                "with": {
                    "tag_name": "${{ inputs.version }}",
                    "body": "Deployed to GitHub Pages: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}",
                    "generate_release_notes": True
                }
            }
        ])

    elif release_type == "docker":
        workflow["jobs"]["release"]["steps"].extend([
            {
                "name": "Set up Docker Buildx",
                "uses": "docker/setup-buildx-action@v3"
            },
            {
                "name": "Login to GitHub Container Registry",
                "uses": "docker/login-action@v3",
                "with": {
                    "registry": "ghcr.io",
                    "username": "${{ github.actor }}",
                    "password": "${{ secrets.GITHUB_TOKEN }}"
                }
            },
            {
                "name": "Build and push",
                "uses": "docker/build-push-action@v5",
                "with": {
                    "context": ".",
                    "push": True,
                    "tags": "ghcr.io/${{ github.repository }}:${{ inputs.version }},ghcr.io/${{ github.repository }}:latest"
                }
            },
            {
                "name": "Create GitHub Release",
                "uses": "softprops/action-gh-release@v1",
                "with": {
                    "tag_name": "${{ inputs.version }}",
                    "body": "Docker image: `ghcr.io/${{ github.repository }}:${{ inputs.version }}`",
                    "generate_release_notes": True
                }
            }
        ])

    elif release_type == "binary":
        # For Rust, Go, or other compiled languages
        workflow["jobs"]["release"]["steps"].extend([
            {
                "name": "Build binaries",
                "run": "# Add build commands for your project"
            },
            {
                "name": "Create GitHub Release",
                "uses": "softprops/action-gh-release@v1",
                "with": {
                    "tag_name": "${{ inputs.version }}",
                    "files": "bin/*",  # Adjust path as needed
                    "generate_release_notes": True
                }
            }
        ])

    elif release_type == "skill":
        # For Claude Code skills
        workflow["jobs"]["release"]["steps"].extend([
            {
                "name": "Create GitHub Release",
                "uses": "softprops/action-gh-release@v1",
                "with": {
                    "tag_name": "${{ inputs.version }}",
                    "generate_release_notes": True
                }
            }
        ])

    return workflow


def write_workflow(workflow, filename):
    """Write workflow to .github/workflows directory."""
    workflows_dir = Path(".github/workflows")
    workflows_dir.mkdir(parents=True, exist_ok=True)

    filepath = workflows_dir / filename

    # Import yaml inside the try so a missing PyYAML actually triggers the fallback
    try:
        import yaml
        with open(filepath, "w") as f:
            yaml.dump(workflow, f, default_flow_style=False, sort_keys=False)
    except ImportError:
        # PyYAML not available: write JSON instead, which is still valid YAML
        with open(filepath, "w") as f:
            json.dump(workflow, f, indent=2)

    print(f"✅ Created {filepath}")


def main():
    if len(sys.argv) < 2:
        print("Usage: generate_workflows.py [--checks format,lint,test,build] [--release npm|github-pages|docker|binary|skill]")
        print("\nOptions:")
        print("  --checks <list>    Comma-separated validation checks")
        print("  --release <type>   Release strategy")
        print("\nRelease types:")
        print("  npm           Publish to npm registry")
        print("  github-pages  Deploy to GitHub Pages")
        print("  docker        Build and push Docker image")
        print("  binary        Build and release binary artifacts")
        print("  skill         Claude Code skill (no deployment needed)")
        sys.exit(1)

    # Parse arguments
    checks = ["format", "lint", "test", "build"]  # defaults
    if "--checks" in sys.argv:
        idx = sys.argv.index("--checks")
        if idx + 1 < len(sys.argv):
            checks = sys.argv[idx + 1].split(",")

    release_type = None
    if "--release" in sys.argv:
        idx = sys.argv.index("--release")
        if idx + 1 < len(sys.argv):
            release_type = sys.argv[idx + 1]

    project_type = detect_project_type()

    print("⚙️ Generating GitHub Actions workflows...")
    print(f"   Project type: {project_type}")
    print(f"   Checks: {', '.join(checks)}")
    if release_type:
        print(f"   Release type: {release_type}")
    print()

    # Create PR workflow
    pr_workflow = create_pr_workflow(project_type, checks)
    write_workflow(pr_workflow, "pr-validation.yml")

    # Create main branch workflow
    main_workflow = create_main_workflow(project_type, checks)
    write_workflow(main_workflow, "main-ci-cd.yml")

    # Create release workflow if specified
    if release_type:
        release_workflow = create_release_workflow(project_type, release_type)
        write_workflow(release_workflow, "release.yml")

    print("\n✅ GitHub Actions workflows created!")
    print("\nWorkflows:")
    print("  - pr-validation.yml: Runs on PRs to main")
    print("  - main-ci-cd.yml: Runs on merge to main, handles versioning")
    if release_type:
        print(f"  - release.yml: Handles {release_type} deployment")


if __name__ == "__main__":
    main()
215
skills/start-right/scripts/init_git_repo.py
Executable file
@@ -0,0 +1,215 @@
#!/usr/bin/env python3
"""
Initialize git repository and create GitHub remote

This script handles:
- Git initialization with main as default branch
- GitHub repository creation (public/private)
- Remote configuration
- Initial commit setup
"""

import subprocess
import sys
import json
from pathlib import Path


def run_command(cmd, check=True, capture_output=True):
    """Run a shell command and return the result."""
    result = subprocess.run(
        cmd,
        shell=True,
        check=check,
        capture_output=capture_output,
        text=True
    )
    return result


def check_prerequisites():
    """Verify git and gh CLI are installed and authenticated."""
    errors = []

    # Check git
    try:
        run_command("git --version")
    except subprocess.CalledProcessError:
        errors.append("Git is not installed")

    # Check gh CLI
    try:
        run_command("gh --version")
    except subprocess.CalledProcessError:
        errors.append("GitHub CLI (gh) is not installed")
    else:
        # Check gh authentication
        try:
            run_command("gh auth status")
        except subprocess.CalledProcessError:
            errors.append("GitHub CLI is not authenticated (run: gh auth login)")

    return errors


def init_git():
    """Initialize git repository with main as default branch."""
    if Path(".git").exists():
        print("⚠️ Git repository already initialized")
        return False

    run_command("git init -b main")
    print("✅ Initialized git repository with 'main' as default branch")
    return True


def create_github_repo(repo_name, visibility="public", org=None):
    """Create GitHub repository and set as remote."""
    # Build gh repo create command; organization repos are created as <org>/<name>
    target = f"{org}/{repo_name}" if org else repo_name
    cmd = f"gh repo create {target} --{visibility} --source=."

    try:
        run_command(cmd)
        print(f"✅ Created GitHub repository: {target} ({visibility})")
        return True
    except subprocess.CalledProcessError as e:
        print(f"❌ Failed to create GitHub repository: {e.stderr}")
        return False


def create_gitignore(project_type=None):
    """Create an appropriate .gitignore file."""
    common_patterns = [
        "# Dependencies",
        "node_modules/",
        ".pnp",
        ".pnp.js",
        "",
        "# Testing",
        "coverage/",
        "*.log",
        "",
        "# Environment",
        ".env",
        ".env.local",
        ".env.*.local",
        "",
        "# IDE",
        ".vscode/",
        ".idea/",
        "*.swp",
        "*.swo",
        "*~",
        "",
        "# OS",
        ".DS_Store",
        "Thumbs.db",
        "",
        "# Build outputs",
        "dist/",
        "build/",
        "*.exe",
        "*.dll",
        "*.so",
        "*.dylib",
        ""
    ]

    type_specific = {
        "node": [
            "# Node specific",
            "npm-debug.log*",
            "yarn-debug.log*",
            "yarn-error.log*",
            ".npm",
        ],
        "python": [
            "# Python specific",
            "__pycache__/",
            "*.py[cod]",
            "*$py.class",
            ".Python",
            "venv/",
            "ENV/",
            ".venv/",
            "*.egg-info/",
        ],
        "rust": [
            "# Rust specific",
            "target/",
            "Cargo.lock",
            "**/*.rs.bk",
        ],
        "go": [
            "# Go specific",
            "*.exe",
            "*.exe~",
            "*.test",
            "*.out",
            "vendor/",
        ],
    }

    gitignore_content = common_patterns
    if project_type and project_type in type_specific:
        gitignore_content.extend([""] + type_specific[project_type])

    with open(".gitignore", "w") as f:
        f.write("\n".join(gitignore_content))

    print("✅ Created .gitignore")


def main():
    if len(sys.argv) < 2:
        print("Usage: init_git_repo.py <repo-name> [--private] [--org <org-name>] [--type <type>]")
        print("\nOptions:")
        print("  --private       Create private repository (default: public)")
        print("  --org <name>    Create under organization")
        print("  --type <type>   Project type for .gitignore (node|python|rust|go)")
        sys.exit(1)

    repo_name = sys.argv[1]
    visibility = "private" if "--private" in sys.argv else "public"
    org = sys.argv[sys.argv.index("--org") + 1] if "--org" in sys.argv else None
    project_type = sys.argv[sys.argv.index("--type") + 1] if "--type" in sys.argv else None

    print("🚀 Initializing repository setup...")
    print(f"   Repository: {repo_name}")
    print(f"   Visibility: {visibility}")
    if org:
        print(f"   Organization: {org}")
    if project_type:
        print(f"   Type: {project_type}")
    print()

    # Check prerequisites
    errors = check_prerequisites()
    if errors:
        print("❌ Prerequisites not met:")
        for error in errors:
            print(f"   - {error}")
        sys.exit(1)

    # Initialize git
    init_git()

    # Create .gitignore
    create_gitignore(project_type)

    # Create GitHub repo
    if not create_github_repo(repo_name, visibility, org):
        sys.exit(1)

    print("\n✅ Repository setup complete!")
    print("\nNext steps:")
    print("  1. Configure branch protection rules")
    print("  2. Set up CI/CD workflows")
    print("  3. Configure git hooks")


if __name__ == "__main__":
    main()
133
skills/start-right/scripts/setup_branch_protection.py
Executable file
@@ -0,0 +1,133 @@
#!/usr/bin/env python3
"""
Configure GitHub branch protection rules

This script sets up branch protection for the main branch to:
- Prevent direct pushes to main
- Require pull request reviews
- Require status checks to pass
- Automatically delete head branches after merge
"""

import subprocess
import sys
import json


def run_command(cmd, check=True, capture_output=True):
    """Run a shell command and return the result."""
    result = subprocess.run(
        cmd,
        shell=True,
        check=check,
        capture_output=capture_output,
        text=True
    )
    return result


def get_repo_info():
    """Get current repository owner and name."""
    try:
        result = run_command("gh repo view --json owner,name")
        repo_data = json.loads(result.stdout)
        return repo_data["owner"]["login"], repo_data["name"]
    except Exception as e:
        print(f"❌ Failed to get repository info: {e}")
        sys.exit(1)


def enable_branch_protection(branch="main", required_checks=None):
    """Enable branch protection rules."""
    owner, repo = get_repo_info()

    # Base protection rules, sent as a JSON request body so that booleans, the
    # null restrictions value, and the configured status-check contexts keep
    # their proper types.
    protection_rules = {
        "required_pull_request_reviews": {
            "required_approving_review_count": 0,  # Solo dev doesn't need reviews
            "dismiss_stale_reviews": True,
        },
        "enforce_admins": False,  # Allow admins to bypass for solo dev
        "required_status_checks": {
            "strict": True,
            "contexts": required_checks or []
        },
        "restrictions": None,  # No push restrictions for solo dev
        "allow_force_pushes": False,
        "allow_deletions": False,
    }

    # Use gh api to set branch protection, reading the JSON body from stdin
    cmd = (
        f"gh api repos/{owner}/{repo}/branches/{branch}/protection "
        "-X PUT "
        '-H "Accept: application/vnd.github+json" '
        "--input -"
    )

    try:
        subprocess.run(
            cmd,
            shell=True,
            check=True,
            capture_output=True,
            text=True,
            input=json.dumps(protection_rules),
        )
        print(f"✅ Enabled branch protection for '{branch}'")
        return True
    except subprocess.CalledProcessError as e:
        print(f"❌ Failed to enable branch protection: {e.stderr}")
        return False


def configure_repo_settings():
    """Configure repository settings for PR workflow."""
    owner, repo = get_repo_info()

    # Enable auto-delete of head branches (-F so true/false are sent as booleans)
    cmd = f'''gh api repos/{owner}/{repo} \\
        -X PATCH \\
        -H "Accept: application/vnd.github+json" \\
        -F delete_branch_on_merge=true \\
        -F allow_squash_merge=true \\
        -F allow_merge_commit=false \\
        -F allow_rebase_merge=false'''

    try:
        run_command(cmd)
        print("✅ Configured repository settings:")
        print("   - Auto-delete head branches after merge: enabled")
        print("   - Squash merging: enabled")
        print("   - Merge commits: disabled")
        print("   - Rebase merging: disabled")
        return True
    except subprocess.CalledProcessError as e:
        print(f"❌ Failed to configure repository settings: {e.stderr}")
        return False


def main():
    required_checks = []
    if len(sys.argv) > 1:
        # Accept comma-separated list of required status checks
        required_checks = sys.argv[1].split(",")

    print("🔒 Configuring branch protection...")
    print()

    # Configure repository settings
    configure_repo_settings()
    print()

    # Enable branch protection
    enable_branch_protection("main", required_checks)

    print("\n✅ Branch protection configured!")
    print("\nProtection rules applied:")
    print("  - Direct pushes to 'main' blocked")
    print("  - Pull requests required")
    print("  - Status checks required (if configured)")
    print("  - Feature branches auto-deleted after merge")
    print("  - Squash merge enforced")


if __name__ == "__main__":
    main()
269
skills/start-right/scripts/setup_git_hooks.py
Executable file
@@ -0,0 +1,269 @@
#!/usr/bin/env python3
"""
Set up git hooks using husky (Node.js) or lefthook (universal)

This script:
- Detects project type
- Installs and configures appropriate git hooks tool
- Sets up pre-commit and pre-push hooks with validation checks
"""

import subprocess
import sys
import json
from pathlib import Path


def run_command(cmd, check=True, capture_output=True):
    """Run a shell command and return the result."""
    result = subprocess.run(
        cmd,
        shell=True,
        check=check,
        capture_output=capture_output,
        text=True
    )
    return result


def is_node_project():
    """Check if this is a Node.js project."""
    return Path("package.json").exists()


def setup_husky(checks):
    """Set up husky for Node.js projects."""
    print("📦 Installing husky...")

    # Install husky
    try:
        run_command("npm install --save-dev husky")
        run_command("npx husky init")
        print("✅ Husky installed and initialized")
    except subprocess.CalledProcessError as e:
        print(f"❌ Failed to install husky: {e}")
        return False

    # Create pre-commit hook. Husky v9 hooks are plain shell scripts; the old
    # '. "$(dirname "$0")/_/husky.sh"' header is deprecated and no longer needed.
    pre_commit_commands = []
    if "format" in checks:
        pre_commit_commands.append("npm run format:check || (echo '❌ Format check failed. Run npm run format' && exit 1)")
    if "lint" in checks:
        pre_commit_commands.append("npm run lint")
    if "type-check" in checks:
        pre_commit_commands.append("npm run type-check")

    if pre_commit_commands:
        hook_content = "#!/bin/sh\n\n" + "\n".join(pre_commit_commands) + "\n"

        with open(".husky/pre-commit", "w") as f:
            f.write(hook_content)
        Path(".husky/pre-commit").chmod(0o755)
        print("✅ Created pre-commit hook")

    # Create pre-push hook
    pre_push_commands = []
    if "test" in checks:
        pre_push_commands.append("npm run test")
    if "build" in checks:
        pre_push_commands.append("npm run build")

    if pre_push_commands:
        hook_content = "#!/bin/sh\n\n" + "\n".join(pre_push_commands) + "\n"

        with open(".husky/pre-push", "w") as f:
            f.write(hook_content)
        Path(".husky/pre-push").chmod(0o755)
        print("✅ Created pre-push hook")

    # Update package.json with scripts if they don't exist
    update_package_json_scripts(checks)

    return True


def setup_lefthook(checks):
    """Set up lefthook for any project type."""
    print("📦 Installing lefthook...")

    # Check if lefthook is installed
    try:
        run_command("lefthook version")
    except subprocess.CalledProcessError:
        print("Installing lefthook globally...")
        # Try common package managers; ignore failures here and re-check below,
        # since check=False means a failed install never raises.
        run_command("brew install lefthook", check=False)
        run_command("go install github.com/evilmartians/lefthook@latest", check=False)
        try:
            run_command("lefthook version")
        except subprocess.CalledProcessError:
            print("❌ Could not install lefthook. Please install manually:")
            print("   brew install lefthook")
            print("   OR")
            print("   go install github.com/evilmartians/lefthook@latest")
            return False

    # Create lefthook.yml configuration
    config = {
        "pre-commit": {
            "parallel": True,
            "commands": {}
        },
        "pre-push": {
            "parallel": False,
            "commands": {}
        }
    }

    # Pre-commit checks
    if "format" in checks:
        config["pre-commit"]["commands"]["format-check"] = {
            "run": "npm run format:check || echo 'Run: npm run format'",
        }

    if "lint" in checks:
        config["pre-commit"]["commands"]["lint"] = {
            "run": "npm run lint" if is_node_project() else "echo 'Configure linting for your project'",
        }

    if "type-check" in checks:
        config["pre-commit"]["commands"]["type-check"] = {
            "run": "npm run type-check",
        }

    # Pre-push checks
    if "test" in checks:
        config["pre-push"]["commands"]["test"] = {
            "run": "npm run test" if is_node_project() else "echo 'Configure tests for your project'",
        }

    if "build" in checks:
        config["pre-push"]["commands"]["build"] = {
            "run": "npm run build" if is_node_project() else "echo 'Configure build for your project'",
        }

    # Write configuration (import yaml inside the try so a missing PyYAML
    # actually triggers the fallback below)
    try:
        import yaml
        with open("lefthook.yml", "w") as f:
            yaml.dump(config, f, default_flow_style=False)
    except ImportError:
        # Fallback to manual YAML writing if pyyaml not available
        with open("lefthook.yml", "w") as f:
            f.write("pre-commit:\n")
            f.write("  parallel: true\n")
            f.write("  commands:\n")
            for cmd_name, cmd_config in config["pre-commit"]["commands"].items():
                f.write(f"    {cmd_name}:\n")
                f.write(f"      run: {cmd_config['run']}\n")

            f.write("\npre-push:\n")
            f.write("  parallel: false\n")
            f.write("  commands:\n")
            for cmd_name, cmd_config in config["pre-push"]["commands"].items():
                f.write(f"    {cmd_name}:\n")
                f.write(f"      run: {cmd_config['run']}\n")

    print("✅ Created lefthook.yml")

    # Install git hooks
    try:
        run_command("lefthook install")
        print("✅ Installed git hooks")
    except subprocess.CalledProcessError:
        print("⚠️ Run 'lefthook install' to activate hooks")

    return True
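

# For reference, a rough sketch of the lefthook.yml this writes for a Node project
# with the default checks (format, lint, test); key order may differ when PyYAML
# does the dumping:
#
# pre-commit:
#   parallel: true
#   commands:
#     format-check:
#       run: npm run format:check || echo 'Run: npm run format'
#     lint:
#       run: npm run lint
#
# pre-push:
#   parallel: false
#   commands:
#     test:
#       run: npm run test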


def update_package_json_scripts(checks):
    """Update package.json with necessary npm scripts if they don't exist."""
    if not is_node_project():
        return

    with open("package.json", "r") as f:
        pkg = json.load(f)

    scripts = pkg.get("scripts", {})
    modified = False

    suggested_scripts = {
        "format": "prettier --write .",
        "format:check": "prettier --check .",
        "lint": "eslint .",
        "type-check": "tsc --noEmit",
        "test": "jest",
        "build": "tsc"
    }

    for script_name, script_cmd in suggested_scripts.items():
        if script_name not in scripts:
            # Only add if the check is enabled
            check_type = script_name.split(":")[0] if ":" in script_name else script_name
            if check_type in checks:
                scripts[script_name] = script_cmd
                modified = True
                print(f"ℹ️ Added npm script: {script_name}")

    if modified:
        pkg["scripts"] = scripts
        with open("package.json", "w") as f:
            json.dump(pkg, f, indent=2)
        print("✅ Updated package.json scripts")


def main():
    if len(sys.argv) < 2:
        print("Usage: setup_git_hooks.py [--husky|--lefthook] [--checks format,lint,type-check,test,build]")
        print("\nOptions:")
        print("  --husky          Use husky (Node.js projects only)")
        print("  --lefthook       Use lefthook (universal)")
        print("  --checks <list>  Comma-separated list of checks to enable")
        print("\nExample:")
        print("  setup_git_hooks.py --husky --checks format,lint,test")
        sys.exit(1)

    # Parse arguments
    use_husky = "--husky" in sys.argv
    use_lefthook = "--lefthook" in sys.argv

    # Get checks list
    checks = ["format", "lint", "test"]  # defaults
    if "--checks" in sys.argv:
        idx = sys.argv.index("--checks")
        if idx + 1 < len(sys.argv):
            checks = sys.argv[idx + 1].split(",")

    # Auto-detect if not specified
    if not use_husky and not use_lefthook:
        if is_node_project():
            use_husky = True
        else:
            use_lefthook = True

    print("🪝 Setting up git hooks...")
    print(f"   Tool: {'husky' if use_husky else 'lefthook'}")
    print(f"   Checks: {', '.join(checks)}")
    print()

    if use_husky:
        if not is_node_project():
            print("❌ Husky requires a Node.js project (package.json)")
            print("   Use --lefthook for non-Node projects")
            sys.exit(1)
        success = setup_husky(checks)
    else:
        success = setup_lefthook(checks)

    if success:
        print("\n✅ Git hooks configured!")
        print("\nHooks will run:")
        print("  Pre-commit:", ", ".join([c for c in checks if c in ["format", "lint", "type-check"]]))
        print("  Pre-push:", ", ".join([c for c in checks if c in ["test", "build"]]))


if __name__ == "__main__":
    main()
231
skills/start-right/scripts/setup_tooling.py
Executable file
@@ -0,0 +1,231 @@
#!/usr/bin/env python3
"""
Detect project type and set up appropriate tooling configuration

This script:
- Detects project type from files/directories
- Creates appropriate configuration files for linting, formatting, type checking
- Sets up test frameworks
"""

import subprocess
import sys
import json
from pathlib import Path


def run_command(cmd, check=True, capture_output=True):
    """Run a shell command and return the result."""
    result = subprocess.run(
        cmd,
        shell=True,
        check=check,
        capture_output=capture_output,
        text=True
    )
    return result


def detect_project_type():
    """Detect project type from existing files."""
    cwd = Path(".")

    if (cwd / "package.json").exists():
        with open("package.json") as f:
            pkg = json.load(f)
        deps = {**pkg.get("dependencies", {}), **pkg.get("devDependencies", {})}

        if "react" in deps or "next" in deps:
            return "react"
        elif "vue" in deps:
            return "vue"
        elif "typescript" in deps or (cwd / "tsconfig.json").exists():
            return "typescript"
        else:
            return "node"

    elif (cwd / "Cargo.toml").exists():
        return "rust"

    elif (cwd / "go.mod").exists():
        return "go"

    elif any(cwd.glob("*.py")) or (cwd / "requirements.txt").exists() or (cwd / "pyproject.toml").exists():
        return "python"

    elif (cwd / "Dockerfile").exists():
        return "docker"

    return "unknown"


def setup_node_tooling():
    """Set up tooling for Node.js projects."""
    configs = {}

    # ESLint configuration
    configs[".eslintrc.json"] = {
        "env": {
            "node": True,
            "es2021": True
        },
        "extends": "eslint:recommended",
        "parserOptions": {
            "ecmaVersion": "latest",
            "sourceType": "module"
        },
        "rules": {}
    }

    # Prettier configuration
    configs[".prettierrc.json"] = {
        "semi": True,
        "singleQuote": True,
        "tabWidth": 2,
        "trailingComma": "es5"
    }

    # Prettier ignore
    configs[".prettierignore"] = """node_modules
dist
build
coverage
.next
"""

    return configs


def setup_typescript_tooling():
    """Set up tooling for TypeScript projects."""
    configs = setup_node_tooling()

    # Update ESLint for TypeScript
    configs[".eslintrc.json"] = {
        "env": {
            "node": True,
            "es2021": True
        },
        "extends": [
            "eslint:recommended",
            "plugin:@typescript-eslint/recommended"
        ],
        "parser": "@typescript-eslint/parser",
        "parserOptions": {
            "ecmaVersion": "latest",
            "sourceType": "module"
        },
        "plugins": ["@typescript-eslint"],
        "rules": {}
    }

    # Basic TypeScript config if not exists
    if not Path("tsconfig.json").exists():
        configs["tsconfig.json"] = {
            "compilerOptions": {
                "target": "ES2020",
                "module": "commonjs",
                "lib": ["ES2020"],
                "outDir": "./dist",
                "rootDir": "./src",
                "strict": True,
                "esModuleInterop": True,
                "skipLibCheck": True,
                "forceConsistentCasingInFileNames": True
            },
            "include": ["src/**/*"],
            "exclude": ["node_modules", "dist"]
        }

    return configs


def setup_python_tooling():
    """Set up tooling for Python projects."""
    configs = {}

    # Black configuration. Note: black only reads [tool.black] from pyproject.toml,
    # so merge this snippet into pyproject.toml rather than relying on .black.toml.
    configs[".black.toml"] = """[tool.black]
line-length = 88
target-version = ['py39', 'py310', 'py311']
include = '\\.pyi?$'
"""

    # Flake8 configuration
    configs[".flake8"] = """[flake8]
max-line-length = 88
extend-ignore = E203, W503
exclude = .git,__pycache__,venv,.venv,build,dist
"""

    # MyPy configuration
    configs["mypy.ini"] = """[mypy]
python_version = 3.9
warn_return_any = True
warn_unused_configs = True
disallow_untyped_defs = True
"""

    return configs


def setup_rust_tooling():
    """Set up tooling for Rust projects."""
    configs = {}

    # Rustfmt configuration
    configs["rustfmt.toml"] = """edition = "2021"
max_width = 100
hard_tabs = false
tab_spaces = 4
"""

    # Clippy configuration (in Cargo.toml, return as string for manual addition)
    return configs


def write_configs(configs):
    """Write configuration files to disk."""
    for filename, content in configs.items():
        if isinstance(content, dict):
            with open(filename, "w") as f:
                json.dump(content, f, indent=2)
        else:
            with open(filename, "w") as f:
                f.write(content)
        print(f"✅ Created {filename}")


def main():
    project_type = sys.argv[1] if len(sys.argv) > 1 else detect_project_type()

    print(f"🔧 Setting up tooling for {project_type} project...")
    print()

    configs = {}

    if project_type in ["node", "javascript"]:
        configs = setup_node_tooling()
    elif project_type in ["typescript", "react", "vue"]:
        configs = setup_typescript_tooling()
    elif project_type == "python":
        configs = setup_python_tooling()
    elif project_type == "rust":
        configs = setup_rust_tooling()
    else:
        print(f"⚠️ Unknown project type: {project_type}")
        print("Skipping tooling setup.")
        return

    if configs:
        write_configs(configs)
        print(f"\n✅ Tooling configuration complete for {project_type}!")

    print("\nNext steps:")
    print("  1. Install dependencies for linting/formatting tools")
    print("  2. Set up pre-commit hooks")
    print("  3. Configure GitHub Actions workflows")


if __name__ == "__main__":
    main()