Initial commit
This commit is contained in:
0
skills/web-frameworks/scripts/__init__.py
Normal file
0
skills/web-frameworks/scripts/__init__.py
Normal file
547
skills/web-frameworks/scripts/nextjs_init.py
Normal file
547
skills/web-frameworks/scripts/nextjs_init.py
Normal file
@@ -0,0 +1,547 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Next.js Project Initialization Script
|
||||
|
||||
Initialize new Next.js project with best practices, TypeScript, and optimized configuration.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class NextJSInitializer:
    """Initialize a Next.js project with best practices.

    Creates the directory layout, starter source files, and configuration
    files (package.json, next.config.js, optional tsconfig / ESLint /
    Tailwind) purely on the filesystem — no external tools are invoked.
    """

    def __init__(
        self,
        name: str,
        directory: Optional[Path] = None,
        typescript: bool = True,
        app_router: bool = True,
        src_dir: bool = False,
        tailwind: bool = False,
        eslint: bool = True,
        import_alias: str = "@/*",
    ):
        """
        Initialize NextJSInitializer.

        Args:
            name: Project name
            directory: Target directory (default: current directory / name)
            typescript: Enable TypeScript
            app_router: Use App Router (recommended)
            src_dir: Use src/ directory
            tailwind: Include Tailwind CSS
            eslint: Include ESLint
            import_alias: Import alias pattern
        """
        self.name = name
        self.directory = directory or Path.cwd() / name
        self.typescript = typescript
        self.app_router = app_router
        self.src_dir = src_dir
        self.tailwind = tailwind
        self.eslint = eslint
        self.import_alias = import_alias

    def validate_name(self) -> None:
        """Validate the project name.

        Raises:
            ValueError: If the name is empty, contains characters other than
                letters, digits, hyphens, and underscores, or starts with a
                digit.
        """
        if not self.name:
            raise ValueError("Project name cannot be empty")

        # Strip the two allowed separators; whatever remains must be
        # alphanumeric. (A name of only separators reduces to "" and fails.)
        if not self.name.replace("-", "").replace("_", "").isalnum():
            raise ValueError(
                "Project name can only contain letters, numbers, hyphens, and underscores"
            )

        if self.name[0].isdigit():
            raise ValueError("Project name cannot start with a number")

    def check_directory(self) -> None:
        """Check that the target directory does not already exist.

        Raises:
            FileExistsError: If the target directory is already present.
        """
        if self.directory.exists():
            raise FileExistsError(f"Directory '{self.directory}' already exists")

    def create_directory_structure(self) -> None:
        """Create the project directory structure and starter source files."""
        print(f"Creating directory structure in {self.directory}...")

        # Create base directories
        self.directory.mkdir(parents=True, exist_ok=True)

        # App/pages live under src/ when src_dir is enabled, else at the root.
        base_dir = self.directory / "src" if self.src_dir else self.directory

        if self.app_router:
            app_dir = base_dir / "app"
            app_dir.mkdir(parents=True, exist_ok=True)
            (app_dir / "favicon.ico").touch()
            self._create_app_router_files(app_dir)
        else:
            pages_dir = base_dir / "pages"
            pages_dir.mkdir(parents=True, exist_ok=True)
            self._create_pages_router_files(pages_dir)

        # Create additional directories
        (self.directory / "public").mkdir(exist_ok=True)
        (base_dir / "components").mkdir(parents=True, exist_ok=True)
        (base_dir / "lib").mkdir(parents=True, exist_ok=True)

    def _create_app_router_files(self, app_dir: Path) -> None:
        """Create App Router starter files (layout, page, global styles)."""
        ext = "tsx" if self.typescript else "jsx"

        # Create layout
        layout_content = self._get_layout_content()
        (app_dir / f"layout.{ext}").write_text(layout_content)

        # Create page
        page_content = self._get_page_content()
        (app_dir / f"page.{ext}").write_text(page_content)

        # Create global styles (Tailwind directives or a plain CSS reset)
        if self.tailwind:
            globals_content = self._get_tailwind_globals()
        else:
            globals_content = self._get_basic_globals()
        (app_dir / "globals.css").write_text(globals_content)

    def _create_pages_router_files(self, pages_dir: Path) -> None:
        """Create Pages Router starter files (_app and index)."""
        ext = "tsx" if self.typescript else "jsx"

        # Create _app
        app_content = self._get_app_content()
        (pages_dir / f"_app.{ext}").write_text(app_content)

        # Create index
        index_content = self._get_index_content()
        (pages_dir / f"index.{ext}").write_text(index_content)

    def create_config_files(self) -> None:
        """Create configuration files in the project root."""
        print("Creating configuration files...")

        # package.json
        package_json = self._get_package_json()
        (self.directory / "package.json").write_text(
            json.dumps(package_json, indent=2)
        )

        # next.config.js
        next_config = self._get_next_config()
        (self.directory / "next.config.js").write_text(next_config)

        # tsconfig.json
        if self.typescript:
            tsconfig = self._get_tsconfig()
            (self.directory / "tsconfig.json").write_text(
                json.dumps(tsconfig, indent=2)
            )

        # .eslintrc.json
        if self.eslint:
            eslint_config = self._get_eslint_config()
            (self.directory / ".eslintrc.json").write_text(
                json.dumps(eslint_config, indent=2)
            )

        # tailwind.config (+ postcss.config.js, which Tailwind requires)
        if self.tailwind:
            tailwind_config = self._get_tailwind_config()
            ext = "ts" if self.typescript else "js"
            (self.directory / f"tailwind.config.{ext}").write_text(tailwind_config)

            postcss_config = self._get_postcss_config()
            (self.directory / "postcss.config.js").write_text(postcss_config)

        # .gitignore
        gitignore = self._get_gitignore()
        (self.directory / ".gitignore").write_text(gitignore)

        # README.md
        readme = self._get_readme()
        (self.directory / "README.md").write_text(readme)

    def _get_package_json(self) -> dict:
        """Generate package.json content as a dict (serialized by caller)."""
        dependencies = {
            "next": "latest",
            "react": "latest",
            "react-dom": "latest",
        }

        dev_dependencies = {}

        if self.typescript:
            dev_dependencies.update(
                {
                    "typescript": "^5.0.0",
                    "@types/node": "^20.0.0",
                    "@types/react": "^18.0.0",
                    "@types/react-dom": "^18.0.0",
                }
            )

        if self.eslint:
            dev_dependencies["eslint"] = "^8.0.0"
            dev_dependencies["eslint-config-next"] = "latest"

        if self.tailwind:
            dependencies["tailwindcss"] = "^3.3.0"
            dependencies["autoprefixer"] = "^10.0.0"
            dependencies["postcss"] = "^8.0.0"

        scripts = {
            "dev": "next dev",
            "build": "next build",
            "start": "next start",
        }
        # BUG FIX: the original wrote `"lint": None` when ESLint was disabled,
        # which json.dumps serializes as `"lint": null` — npm requires script
        # values to be strings, so the key must be omitted entirely.
        if self.eslint:
            scripts["lint"] = "next lint"

        return {
            "name": self.name,
            "version": "0.1.0",
            "private": True,
            "scripts": scripts,
            "dependencies": dependencies,
            "devDependencies": dev_dependencies,
        }

    def _get_layout_content(self) -> str:
        """Generate root layout (layout.tsx / layout.jsx) content."""
        # The same stylesheet is imported whether or not Tailwind is enabled;
        # the original had a conditional here with two identical branches.
        import_css = "import './globals.css'\n"

        if self.typescript:
            return f"""{import_css}
export const metadata = {{
  title: '{self.name}',
  description: 'Generated by Next.js',
}}

export default function RootLayout({{
  children,
}}: {{
  children: React.ReactNode
}}) {{
  return (
    <html lang="en">
      <body>{{children}}</body>
    </html>
  )
}}
"""
        return f"""{import_css}
export const metadata = {{
  title: '{self.name}',
  description: 'Generated by Next.js',
}}

export default function RootLayout({{ children }}) {{
  return (
    <html lang="en">
      <body>{{children}}</body>
    </html>
  )
}}
"""

    def _get_page_content(self) -> str:
        """Generate the App Router home page (page.tsx) content."""
        return """export default function Home() {
  return (
    <main>
      <h1>Welcome to Next.js!</h1>
      <p>Get started by editing this page.</p>
    </main>
  )
}
"""

    def _get_next_config(self) -> str:
        """Generate next.config.js content."""
        return """/** @type {import('next').NextConfig} */
const nextConfig = {
  reactStrictMode: true,
  images: {
    remotePatterns: [
      // Add your image domains here
    ],
  },
}

module.exports = nextConfig
"""

    def _get_tsconfig(self) -> dict:
        """Generate tsconfig.json content as a dict (serialized by caller)."""
        return {
            "compilerOptions": {
                "target": "ES2020",
                "lib": ["dom", "dom.iterable", "esnext"],
                "allowJs": True,
                "skipLibCheck": True,
                "strict": True,
                "noEmit": True,
                "esModuleInterop": True,
                "module": "esnext",
                "moduleResolution": "bundler",
                "resolveJsonModule": True,
                "isolatedModules": True,
                "jsx": "preserve",
                "incremental": True,
                "plugins": [{"name": "next"}],
                # Map the user-chosen alias (e.g. "@/*") onto the project root.
                "paths": {self.import_alias: ["./*"]},
            },
            "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
            "exclude": ["node_modules"],
        }

    def _get_eslint_config(self) -> dict:
        """Generate .eslintrc.json content."""
        return {"extends": "next/core-web-vitals"}

    def _get_tailwind_config(self) -> str:
        """Generate tailwind.config content (TS or JS flavor)."""
        if self.typescript:
            return """import type { Config } from 'tailwindcss'

const config: Config = {
  content: [
    './pages/**/*.{js,ts,jsx,tsx,mdx}',
    './components/**/*.{js,ts,jsx,tsx,mdx}',
    './app/**/*.{js,ts,jsx,tsx,mdx}',
  ],
  theme: {
    extend: {},
  },
  plugins: [],
}
export default config
"""
        return """/** @type {import('tailwindcss').Config} */
module.exports = {
  content: [
    './pages/**/*.{js,ts,jsx,tsx,mdx}',
    './components/**/*.{js,ts,jsx,tsx,mdx}',
    './app/**/*.{js,ts,jsx,tsx,mdx}',
  ],
  theme: {
    extend: {},
  },
  plugins: [],
}
"""

    def _get_postcss_config(self) -> str:
        """Generate postcss.config.js content."""
        return """module.exports = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
}
"""

    def _get_tailwind_globals(self) -> str:
        """Generate globals.css containing the Tailwind directives."""
        return """@tailwind base;
@tailwind components;
@tailwind utilities;
"""

    def _get_basic_globals(self) -> str:
        """Generate a basic globals.css reset (used when Tailwind is off)."""
        return """* {
  box-sizing: border-box;
  padding: 0;
  margin: 0;
}

html,
body {
  max-width: 100vw;
  overflow-x: hidden;
}

a {
  color: inherit;
  text-decoration: none;
}
"""

    def _get_gitignore(self) -> str:
        """Generate .gitignore content."""
        return """# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env*.local

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts
"""

    def _get_readme(self) -> str:
        """Generate README.md content."""
        return f"""# {self.name}

This is a [Next.js](https://nextjs.org/) project bootstrapped with next.js initialization script.

## Getting Started

First, install dependencies:

```bash
npm install
# or
yarn install
# or
pnpm install
```

Then, run the development server:

```bash
npm run dev
# or
yarn dev
# or
pnpm dev
```

Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.

## Learn More

To learn more about Next.js, take a look at the following resources:

- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.

## Deploy on Vercel

The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new).

Check out the [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details.
"""

    def _get_app_content(self) -> str:
        """Generate _app content for the Pages Router."""
        return """export default function App({ Component, pageProps }) {
  return <Component {...pageProps} />
}
"""

    def _get_index_content(self) -> str:
        """Generate index page content for the Pages Router."""
        return """export default function Home() {
  return (
    <main>
      <h1>Welcome to Next.js!</h1>
      <p>Get started by editing this page.</p>
    </main>
  )
}
"""

    def initialize(self) -> None:
        """Run the full initialization process.

        Prints progress to stdout; on any failure prints the error to stderr
        and exits the process with status 1 (CLI-oriented behavior).
        """
        try:
            print(f"Initializing Next.js project: {self.name}")
            print(f"TypeScript: {self.typescript}")
            print(f"App Router: {self.app_router}")
            print(f"Tailwind CSS: {self.tailwind}")
            print(f"ESLint: {self.eslint}")
            print()

            self.validate_name()
            self.check_directory()
            self.create_directory_structure()
            self.create_config_files()

            print()
            print("✓ Project initialized successfully!")
            print()
            print("Next steps:")
            print(f"  cd {self.name}")
            print("  npm install")
            print("  npm run dev")
            print()

        except Exception as e:
            print(f"Error: {e}", file=sys.stderr)
            sys.exit(1)
|
||||
|
||||
|
||||
def main():
    """CLI entry point: parse command-line flags and run the initializer."""
    parser = argparse.ArgumentParser(
        description="Initialize Next.js project with best practices"
    )
    parser.add_argument("name", help="Project name")
    parser.add_argument(
        "--directory", type=Path, help="Target directory (default: ./<name>)"
    )
    parser.add_argument(
        "--no-typescript", action="store_true", help="Disable TypeScript"
    )
    parser.add_argument(
        "--pages-router",
        action="store_true",
        help="Use Pages Router instead of App Router",
    )
    parser.add_argument("--src-dir", action="store_true", help="Use src/ directory")
    parser.add_argument("--tailwind", action="store_true", help="Include Tailwind CSS")
    parser.add_argument("--no-eslint", action="store_true", help="Disable ESLint")
    parser.add_argument(
        "--import-alias", default="@/*", help="Import alias pattern (default: @/*)"
    )

    opts = parser.parse_args()

    # The --no-* flags are negative switches, so invert them to get the
    # positive defaults the initializer expects.
    NextJSInitializer(
        name=opts.name,
        directory=opts.directory,
        typescript=not opts.no_typescript,
        app_router=not opts.pages_router,
        src_dir=opts.src_dir,
        tailwind=opts.tailwind,
        eslint=not opts.no_eslint,
        import_alias=opts.import_alias,
    ).initialize()


if __name__ == "__main__":
    main()
|
||||
16
skills/web-frameworks/scripts/requirements.txt
Normal file
16
skills/web-frameworks/scripts/requirements.txt
Normal file
@@ -0,0 +1,16 @@
|
||||
# Web Frameworks Skill Dependencies
|
||||
# Python 3.10+ required
|
||||
|
||||
# No Python package dependencies - uses only standard library
|
||||
|
||||
# Testing dependencies (dev)
|
||||
pytest>=8.0.0
|
||||
pytest-cov>=4.1.0
|
||||
pytest-mock>=3.12.0
|
||||
|
||||
# Note: This skill works with Node.js frameworks
|
||||
# Requires Node.js and package managers:
|
||||
# - Node.js 18+: https://nodejs.org/
|
||||
# - npm (comes with Node.js)
|
||||
# - pnpm: npm install -g pnpm
|
||||
# - yarn: npm install -g yarn
|
||||
1
skills/web-frameworks/scripts/tests/coverage-web.json
Normal file
1
skills/web-frameworks/scripts/tests/coverage-web.json
Normal file
File diff suppressed because one or more lines are too long
3
skills/web-frameworks/scripts/tests/requirements.txt
Normal file
3
skills/web-frameworks/scripts/tests/requirements.txt
Normal file
@@ -0,0 +1,3 @@
|
||||
pytest>=7.0.0
|
||||
pytest-cov>=4.0.0
|
||||
pytest-mock>=3.10.0
|
||||
319
skills/web-frameworks/scripts/tests/test_nextjs_init.py
Normal file
319
skills/web-frameworks/scripts/tests/test_nextjs_init.py
Normal file
@@ -0,0 +1,319 @@
|
||||
"""Tests for nextjs-init.py script."""
|
||||
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
# Add parent directory to path to import the script
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from nextjs_init import NextJSInitializer
|
||||
|
||||
|
||||
class TestNextJSInitializer:
    """Test suite for NextJSInitializer.

    Uses pytest's tmp_path fixture so every test writes into an isolated
    temporary directory; nothing touches the real working directory.
    """

    def test_init_with_defaults(self, tmp_path) -> None:
        """Test initialization with default parameters."""
        initializer = NextJSInitializer(
            name="test-app",
            directory=tmp_path / "test-app"
        )

        # Defaults: TypeScript + App Router on; src dir, Tailwind off; ESLint on.
        assert initializer.name == "test-app"
        assert initializer.typescript is True
        assert initializer.app_router is True
        assert initializer.src_dir is False
        assert initializer.tailwind is False
        assert initializer.eslint is True

    def test_validate_name_valid(self, tmp_path) -> None:
        """Test name validation with valid names."""
        valid_names = ["my-app", "my_app", "myapp123", "test-app-1"]

        for name in valid_names:
            initializer = NextJSInitializer(
                name=name,
                directory=tmp_path / name
            )
            initializer.validate_name()  # Should not raise

    def test_validate_name_invalid(self, tmp_path) -> None:
        """Test name validation with invalid names."""
        # (name, expected exception, human-readable reason for the failure)
        invalid_cases = [
            ("", ValueError, "empty"),
            ("123app", ValueError, "starts with number"),
            ("my app", ValueError, "contains space"),
            ("my@app", ValueError, "contains special char"),
        ]

        for name, expected_error, reason in invalid_cases:
            initializer = NextJSInitializer(
                name=name,
                # An empty name cannot form a path component, so substitute.
                directory=tmp_path / (name or "empty")
            )

            with pytest.raises(expected_error):
                initializer.validate_name()

    def test_check_directory_exists(self, tmp_path) -> None:
        """Test directory existence check."""
        existing_dir = tmp_path / "existing"
        existing_dir.mkdir()

        initializer = NextJSInitializer(
            name="test-app",
            directory=existing_dir
        )

        with pytest.raises(FileExistsError):
            initializer.check_directory()

    def test_create_directory_structure_app_router(self, tmp_path) -> None:
        """Test directory structure creation with App Router."""
        project_dir = tmp_path / "test-app"
        initializer = NextJSInitializer(
            name="test-app",
            directory=project_dir,
            app_router=True
        )

        initializer.create_directory_structure()

        # Check directories
        assert (project_dir / "app").exists()
        assert (project_dir / "public").exists()
        assert (project_dir / "components").exists()
        assert (project_dir / "lib").exists()

        # Check App Router files (TypeScript default => .tsx extensions)
        assert (project_dir / "app" / "layout.tsx").exists()
        assert (project_dir / "app" / "page.tsx").exists()
        assert (project_dir / "app" / "globals.css").exists()

    def test_create_directory_structure_with_src(self, tmp_path) -> None:
        """Test directory structure with src/ directory."""
        project_dir = tmp_path / "test-app"
        initializer = NextJSInitializer(
            name="test-app",
            directory=project_dir,
            src_dir=True
        )

        initializer.create_directory_structure()

        # Check src structure: app/components/lib all nest under src/
        assert (project_dir / "src" / "app").exists()
        assert (project_dir / "src" / "components").exists()
        assert (project_dir / "src" / "lib").exists()

    def test_package_json_generation(self, tmp_path) -> None:
        """Test package.json generation."""
        initializer = NextJSInitializer(
            name="test-app",
            directory=tmp_path / "test-app",
            typescript=True,
            tailwind=True,
            eslint=True
        )

        package_json = initializer._get_package_json()

        assert package_json["name"] == "test-app"
        assert package_json["version"] == "0.1.0"
        assert package_json["private"] is True

        # Check scripts
        assert "dev" in package_json["scripts"]
        assert "build" in package_json["scripts"]
        assert "start" in package_json["scripts"]
        assert "lint" in package_json["scripts"]

        # Check dependencies
        assert "next" in package_json["dependencies"]
        assert "react" in package_json["dependencies"]
        assert "react-dom" in package_json["dependencies"]

        # Check TypeScript dependencies
        assert "typescript" in package_json["devDependencies"]
        assert "@types/node" in package_json["devDependencies"]
        assert "@types/react" in package_json["devDependencies"]

        # Check Tailwind dependencies
        assert "tailwindcss" in package_json["dependencies"]

        # Check ESLint dependencies
        assert "eslint" in package_json["devDependencies"]

    def test_tsconfig_generation(self, tmp_path) -> None:
        """Test tsconfig.json generation."""
        initializer = NextJSInitializer(
            name="test-app",
            directory=tmp_path / "test-app",
            typescript=True,
            import_alias="@/*"
        )

        tsconfig = initializer._get_tsconfig()

        assert "compilerOptions" in tsconfig
        assert tsconfig["compilerOptions"]["strict"] is True
        assert tsconfig["compilerOptions"]["jsx"] == "preserve"
        # The configured import alias must appear as a path mapping key.
        assert "@/*" in tsconfig["compilerOptions"]["paths"]
        assert "next-env.d.ts" in tsconfig["include"]

    def test_layout_content_typescript(self, tmp_path) -> None:
        """Test layout.tsx content generation."""
        initializer = NextJSInitializer(
            name="test-app",
            directory=tmp_path / "test-app",
            typescript=True
        )

        content = initializer._get_layout_content()

        assert "import './globals.css'" in content
        assert "export const metadata" in content
        assert "children: React.ReactNode" in content
        assert "<html lang=\"en\">" in content

    def test_layout_content_javascript(self, tmp_path) -> None:
        """Test layout.jsx content generation."""
        initializer = NextJSInitializer(
            name="test-app",
            directory=tmp_path / "test-app",
            typescript=False
        )

        content = initializer._get_layout_content()

        assert "import './globals.css'" in content
        assert "export const metadata" in content
        assert "React.ReactNode" not in content  # No TypeScript types

    def test_tailwind_config_typescript(self, tmp_path) -> None:
        """Test Tailwind config generation with TypeScript."""
        initializer = NextJSInitializer(
            name="test-app",
            directory=tmp_path / "test-app",
            typescript=True,
            tailwind=True
        )

        config = initializer._get_tailwind_config()

        # TS flavor uses a typed ESM export rather than module.exports.
        assert "import type { Config }" in config
        assert "const config: Config" in config
        assert "content:" in config

    def test_tailwind_config_javascript(self, tmp_path) -> None:
        """Test Tailwind config generation with JavaScript."""
        initializer = NextJSInitializer(
            name="test-app",
            directory=tmp_path / "test-app",
            typescript=False,
            tailwind=True
        )

        config = initializer._get_tailwind_config()

        assert "module.exports" in config
        assert "content:" in config

    def test_gitignore_generation(self, tmp_path) -> None:
        """Test .gitignore generation."""
        initializer = NextJSInitializer(
            name="test-app",
            directory=tmp_path / "test-app"
        )

        gitignore = initializer._get_gitignore()

        assert "/node_modules" in gitignore
        assert "/.next/" in gitignore
        assert ".env*.local" in gitignore
        assert ".DS_Store" in gitignore

    def test_readme_generation(self, tmp_path) -> None:
        """Test README.md generation."""
        initializer = NextJSInitializer(
            name="test-app",
            directory=tmp_path / "test-app"
        )

        readme = initializer._get_readme()

        assert "# test-app" in readme
        assert "Next.js" in readme
        assert "npm run dev" in readme

    def test_create_config_files(self, tmp_path) -> None:
        """Test configuration files creation."""
        project_dir = tmp_path / "test-app"
        initializer = NextJSInitializer(
            name="test-app",
            directory=project_dir,
            typescript=True,
            tailwind=True,
            eslint=True
        )

        initializer.create_directory_structure()
        initializer.create_config_files()

        # Check all config files exist
        assert (project_dir / "package.json").exists()
        assert (project_dir / "next.config.js").exists()
        assert (project_dir / "tsconfig.json").exists()
        assert (project_dir / ".eslintrc.json").exists()
        assert (project_dir / "tailwind.config.ts").exists()
        assert (project_dir / "postcss.config.js").exists()
        assert (project_dir / ".gitignore").exists()
        assert (project_dir / "README.md").exists()

        # Verify package.json is valid JSON
        with open(project_dir / "package.json") as f:
            package_json = json.load(f)
            assert package_json["name"] == "test-app"

    def test_full_initialization(self, tmp_path) -> None:
        """Test full initialization process."""
        project_dir = tmp_path / "test-app"
        initializer = NextJSInitializer(
            name="test-app",
            directory=project_dir,
            typescript=True,
            app_router=True,
            tailwind=True
        )

        # NOTE(review): initialize() calls sys.exit(1) on failure, so a bug
        # here surfaces as SystemExit rather than a plain assertion error.
        initializer.initialize()

        # Verify directory exists
        assert project_dir.exists()

        # Verify structure
        assert (project_dir / "app").exists()
        assert (project_dir / "public").exists()

        # Verify config files
        assert (project_dir / "package.json").exists()
        assert (project_dir / "tsconfig.json").exists()
        assert (project_dir / "next.config.js").exists()

    def test_pages_router_structure(self, tmp_path) -> None:
        """Test Pages Router directory structure."""
        project_dir = tmp_path / "test-app"
        initializer = NextJSInitializer(
            name="test-app",
            directory=project_dir,
            app_router=False  # Use Pages Router
        )

        initializer.create_directory_structure()

        # Check Pages Router files
        assert (project_dir / "pages" / "_app.tsx").exists()
        assert (project_dir / "pages" / "index.tsx").exists()
|
||||
374
skills/web-frameworks/scripts/tests/test_turborepo_migrate.py
Normal file
374
skills/web-frameworks/scripts/tests/test_turborepo_migrate.py
Normal file
@@ -0,0 +1,374 @@
|
||||
"""Tests for turborepo-migrate.py script."""
|
||||
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
# Add parent directory to path to import the script
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from turborepo_migrate import TurborepoMigrator
|
||||
|
||||
|
||||
@pytest.fixture
def mock_monorepo(tmp_path):
    """Create a mock monorepo structure.

    Builds, under pytest's temporary directory:
      - a root package.json declaring npm workspaces (apps/*, packages/*)
      - apps/web with scripts (dev/build/test/lint) depending on @repo/ui,
        plus an empty .next/ build-output directory
      - packages/ui (@repo/ui) with scripts (build/test/lint), plus an
        empty dist/ build-output directory

    Returns:
        The tmp_path root of the fake monorepo.
    """
    # Root package.json
    root_pkg = {
        "name": "test-monorepo",
        "private": True,
        "workspaces": ["apps/*", "packages/*"],
        "scripts": {
            "build": "npm run build --workspaces",
            "test": "npm run test --workspaces"
        }
    }

    (tmp_path / "package.json").write_text(json.dumps(root_pkg, indent=2))

    # Create apps
    apps_dir = tmp_path / "apps"
    apps_dir.mkdir()

    web_dir = apps_dir / "web"
    web_dir.mkdir()
    (web_dir / "package.json").write_text(json.dumps({
        "name": "web",
        "version": "1.0.0",
        "scripts": {
            "dev": "next dev",
            "build": "next build",
            "test": "jest",
            "lint": "eslint ."
        },
        "dependencies": {
            # "*" marks an internal workspace dependency on packages/ui.
            "@repo/ui": "*",
            "next": "latest"
        }
    }, indent=2))

    # Create Next.js output directory
    (web_dir / ".next").mkdir()

    # Create packages
    packages_dir = tmp_path / "packages"
    packages_dir.mkdir()

    ui_dir = packages_dir / "ui"
    ui_dir.mkdir()
    (ui_dir / "package.json").write_text(json.dumps({
        "name": "@repo/ui",
        "version": "0.0.0",
        "scripts": {
            "build": "tsc",
            "test": "jest",
            "lint": "eslint ."
        },
        "dependencies": {
            "react": "latest"
        }
    }, indent=2))

    # Create dist directory
    (ui_dir / "dist").mkdir()

    return tmp_path
|
||||
|
||||
|
||||
class TestTurborepoMigrator:
    """Test suite for TurborepoMigrator.

    Relies on the module-level ``mock_monorepo`` fixture, which builds a
    temporary npm/yarn workspace with two packages ("web" under apps/ and
    "@repo/ui" under packages/) plus their build output directories.
    """

    def test_init(self, tmp_path):
        """Test migrator initialization."""
        migrator = TurborepoMigrator(
            path=tmp_path,
            dry_run=True,
            package_manager="npm"
        )

        # __init__ resolves the path to an absolute path.
        assert migrator.path == tmp_path.resolve()
        assert migrator.dry_run is True
        assert migrator.package_manager == "npm"

    def test_validate_path_exists(self, mock_monorepo):
        """Test path validation with valid monorepo."""
        migrator = TurborepoMigrator(path=mock_monorepo)
        migrator.validate_path()  # Should not raise

    def test_validate_path_not_exists(self, tmp_path):
        """Test path validation with non-existent path."""
        migrator = TurborepoMigrator(path=tmp_path / "nonexistent")

        with pytest.raises(FileNotFoundError):
            migrator.validate_path()

    def test_validate_path_not_directory(self, tmp_path):
        """Test path validation with file instead of directory."""
        file_path = tmp_path / "file.txt"
        file_path.touch()

        migrator = TurborepoMigrator(path=file_path)

        with pytest.raises(NotADirectoryError):
            migrator.validate_path()

    def test_validate_path_no_package_json(self, tmp_path):
        """Test path validation without package.json."""
        empty_dir = tmp_path / "empty"
        empty_dir.mkdir()

        migrator = TurborepoMigrator(path=empty_dir)

        # A directory without a root package.json is not a valid monorepo.
        with pytest.raises(FileNotFoundError):
            migrator.validate_path()

    def test_analyze_workspace_npm(self, mock_monorepo):
        """Test workspace analysis for npm/yarn workspaces."""
        migrator = TurborepoMigrator(path=mock_monorepo)
        migrator.analyze_workspace()

        assert migrator.workspace_config["type"] == "npm/yarn"
        assert "apps/*" in migrator.workspace_config["patterns"]
        assert "packages/*" in migrator.workspace_config["patterns"]

    def test_analyze_workspace_pnpm(self, tmp_path):
        """Test workspace analysis for pnpm workspaces."""
        # Create root package.json without workspaces
        (tmp_path / "package.json").write_text(json.dumps({
            "name": "test-monorepo",
            "private": True
        }))

        # Create pnpm-workspace.yaml
        (tmp_path / "pnpm-workspace.yaml").write_text("""packages:
  - 'apps/*'
  - 'packages/*'
""")

        migrator = TurborepoMigrator(path=tmp_path)
        migrator.analyze_workspace()

        assert migrator.workspace_config["type"] == "pnpm"
        assert migrator.workspace_config["file"] == "pnpm-workspace.yaml"

    def test_discover_packages(self, mock_monorepo):
        """Test package discovery."""
        migrator = TurborepoMigrator(path=mock_monorepo)
        migrator.analyze_workspace()
        migrator.discover_packages()

        assert len(migrator.packages) == 2

        package_names = {pkg["name"] for pkg in migrator.packages}
        assert "web" in package_names
        assert "@repo/ui" in package_names

    def test_analyze_scripts(self, mock_monorepo):
        """Test script analysis."""
        migrator = TurborepoMigrator(path=mock_monorepo)
        migrator.analyze_workspace()
        migrator.discover_packages()

        common_scripts = migrator.analyze_scripts()

        # All packages have build, test, lint
        assert "build" in common_scripts
        assert "test" in common_scripts
        assert "lint" in common_scripts

        # Check package counts
        assert len(common_scripts["build"]) == 2
        assert len(common_scripts["test"]) == 2

    def test_infer_build_outputs(self, mock_monorepo):
        """Test build output inference."""
        migrator = TurborepoMigrator(path=mock_monorepo)
        migrator.analyze_workspace()
        migrator.discover_packages()

        outputs = migrator._infer_build_outputs()

        # Should detect .next and dist directories
        assert ".next/**" in outputs
        assert "!.next/cache/**" in outputs
        assert "dist/**" in outputs

    def test_generate_turbo_config(self, mock_monorepo):
        """Test turbo.json generation."""
        migrator = TurborepoMigrator(path=mock_monorepo)
        migrator.analyze_workspace()
        migrator.discover_packages()

        common_scripts = migrator.analyze_scripts()
        turbo_config = migrator.generate_turbo_config(common_scripts)

        assert "$schema" in turbo_config
        assert "pipeline" in turbo_config

        # Check build task
        assert "build" in turbo_config["pipeline"]
        assert turbo_config["pipeline"]["build"]["dependsOn"] == ["^build"]
        assert "outputs" in turbo_config["pipeline"]["build"]

        # Check test task
        assert "test" in turbo_config["pipeline"]
        assert "coverage/**" in turbo_config["pipeline"]["test"]["outputs"]

        # Check lint task
        assert "lint" in turbo_config["pipeline"]

        # Note: dev task won't be in pipeline because it's only in 1 package
        # (needs to be in 2+ packages to be considered "common")
        # This is correct behavior - only truly common scripts are included

    def test_update_root_package_json(self, mock_monorepo):
        """Test root package.json update."""
        migrator = TurborepoMigrator(path=mock_monorepo)
        migrator.analyze_workspace()
        migrator.discover_packages()

        updated_package_json = migrator.update_root_package_json()

        # Check turbo added to devDependencies
        assert "turbo" in updated_package_json["devDependencies"]
        assert updated_package_json["devDependencies"]["turbo"] == "latest"

        # Check scripts updated (only common scripts are added)
        assert updated_package_json["scripts"]["build"] == "turbo run build"
        assert updated_package_json["scripts"]["test"] == "turbo run test"
        assert updated_package_json["scripts"]["lint"] == "turbo run lint"
        # dev is only in one package, so it won't be added

    def test_generate_migration_report(self, mock_monorepo):
        """Test migration report generation."""
        migrator = TurborepoMigrator(path=mock_monorepo)
        migrator.analyze_workspace()
        migrator.discover_packages()

        common_scripts = migrator.analyze_scripts()
        turbo_config = migrator.generate_turbo_config(common_scripts)
        updated_package_json = migrator.update_root_package_json()

        report = migrator.generate_migration_report(turbo_config, updated_package_json)

        # Report should contain every major section header.
        assert "TURBOREPO MIGRATION REPORT" in report
        assert "PACKAGES:" in report
        assert "TURBO.JSON PIPELINE:" in report
        assert "ROOT PACKAGE.JSON SCRIPTS:" in report
        assert "RECOMMENDATIONS:" in report

        # Check package names appear
        assert "web" in report
        assert "@repo/ui" in report

    def test_write_files_dry_run(self, mock_monorepo, capsys):
        """Test file writing in dry-run mode."""
        migrator = TurborepoMigrator(path=mock_monorepo, dry_run=True)
        migrator.analyze_workspace()
        migrator.discover_packages()

        common_scripts = migrator.analyze_scripts()
        turbo_config = migrator.generate_turbo_config(common_scripts)
        updated_package_json = migrator.update_root_package_json()

        migrator.write_files(turbo_config, updated_package_json)

        # Check files not created
        assert not (mock_monorepo / "turbo.json").exists()

        # Check output
        captured = capsys.readouterr()
        assert "DRY RUN" in captured.out

    def test_write_files_actual(self, mock_monorepo):
        """Test actual file writing."""
        migrator = TurborepoMigrator(path=mock_monorepo, dry_run=False)
        migrator.analyze_workspace()
        migrator.discover_packages()

        common_scripts = migrator.analyze_scripts()
        turbo_config = migrator.generate_turbo_config(common_scripts)
        updated_package_json = migrator.update_root_package_json()

        migrator.write_files(turbo_config, updated_package_json)

        # Check turbo.json created
        assert (mock_monorepo / "turbo.json").exists()

        # Verify content
        with open(mock_monorepo / "turbo.json") as f:
            saved_config = json.load(f)
            assert saved_config["$schema"] == turbo_config["$schema"]
            assert "pipeline" in saved_config

        # Check package.json updated
        with open(mock_monorepo / "package.json") as f:
            saved_package = json.load(f)
            assert "turbo" in saved_package["devDependencies"]

    def test_full_migration_dry_run(self, mock_monorepo):
        """Test full migration process in dry-run mode."""
        migrator = TurborepoMigrator(path=mock_monorepo, dry_run=True)
        migrator.migrate()

        # Files should not be created in dry-run
        assert not (mock_monorepo / "turbo.json").exists()

        # Original package.json should be unchanged
        with open(mock_monorepo / "package.json") as f:
            package_json = json.load(f)
            assert "turbo" not in package_json.get("devDependencies", {})

    def test_full_migration_actual(self, mock_monorepo):
        """Test full migration process."""
        migrator = TurborepoMigrator(path=mock_monorepo, dry_run=False)
        migrator.migrate()

        # Check turbo.json created
        assert (mock_monorepo / "turbo.json").exists()

        with open(mock_monorepo / "turbo.json") as f:
            turbo_config = json.load(f)
            assert "$schema" in turbo_config
            assert "pipeline" in turbo_config
            assert "build" in turbo_config["pipeline"]

        # Check package.json updated
        with open(mock_monorepo / "package.json") as f:
            package_json = json.load(f)
            assert "turbo" in package_json["devDependencies"]
            assert package_json["scripts"]["build"] == "turbo run build"

    def test_parse_pnpm_workspace(self, tmp_path):
        """Test pnpm-workspace.yaml parsing."""
        yaml_content = """packages:
  - 'apps/*'
  - 'packages/*'
  - 'tools/*'
"""
        yaml_file = tmp_path / "pnpm-workspace.yaml"
        yaml_file.write_text(yaml_content)

        migrator = TurborepoMigrator(path=tmp_path)
        patterns = migrator._parse_pnpm_workspace(yaml_file)

        assert len(patterns) == 3
        assert "apps/*" in patterns
        assert "packages/*" in patterns
        assert "tools/*" in patterns

    def test_monorepo_without_workspaces(self, tmp_path):
        """Test migration fails for non-workspace monorepo."""
        # Create package.json without workspaces
        (tmp_path / "package.json").write_text(json.dumps({
            "name": "not-a-monorepo",
            "version": "1.0.0"
        }))

        migrator = TurborepoMigrator(path=tmp_path)

        # migrate() calls sys.exit(1) on error, so we catch SystemExit
        with pytest.raises(SystemExit):
            migrator.migrate()
|
||||
394
skills/web-frameworks/scripts/turborepo_migrate.py
Normal file
394
skills/web-frameworks/scripts/turborepo_migrate.py
Normal file
@@ -0,0 +1,394 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Turborepo Migration Script
|
||||
|
||||
Convert existing monorepo to Turborepo with intelligent pipeline generation.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional, Set
|
||||
|
||||
|
||||
class TurborepoMigrator:
    """Migrate existing monorepo to Turborepo.

    Workflow (driven by :meth:`migrate`):

    1. Validate the target path contains a root ``package.json``.
    2. Detect the workspace layout: an npm/yarn ``workspaces`` field or a
       ``pnpm-workspace.yaml`` file.
    3. Discover member packages and their scripts.
    4. Generate a ``turbo.json`` pipeline from scripts shared by 2+ packages.
    5. Update the root ``package.json`` with ``turbo run ...`` scripts.
    """

    def __init__(
        self,
        path: Path,
        dry_run: bool = False,
        package_manager: str = "npm",
    ):
        """
        Initialize TurborepoMigrator.

        Args:
            path: Path to existing monorepo
            dry_run: Preview changes without writing files
            package_manager: Package manager (npm, yarn, pnpm, bun)
        """
        self.path = path.resolve()
        self.dry_run = dry_run
        self.package_manager = package_manager
        # Filled by discover_packages(): one dict per workspace package
        # (name, path, scripts, dependencies, devDependencies).
        self.packages: List[Dict] = []
        # Filled by analyze_workspace(): workspace type plus patterns/file.
        self.workspace_config: Dict = {}

    def validate_path(self) -> None:
        """Validate monorepo path.

        Raises:
            FileNotFoundError: If the path does not exist or lacks a root
                package.json.
            NotADirectoryError: If the path exists but is not a directory.
        """
        if not self.path.exists():
            raise FileNotFoundError(f"Path '{self.path}' does not exist")

        if not self.path.is_dir():
            raise NotADirectoryError(f"Path '{self.path}' is not a directory")

        package_json = self.path / "package.json"
        if not package_json.exists():
            raise FileNotFoundError(
                f"No package.json found in '{self.path}'. Not a valid monorepo."
            )

    def analyze_workspace(self) -> None:
        """Analyze existing workspace configuration.

        Sets ``self.workspace_config`` to either an npm/yarn config (with the
        raw ``workspaces`` patterns) or a pnpm config (file reference only).

        Raises:
            ValueError: If no workspace configuration can be found.
        """
        print("Analyzing workspace...")

        package_json = self.path / "package.json"
        with open(package_json) as f:
            root_config = json.load(f)

        # Detect workspace configuration. The npm/yarn "workspaces" field
        # takes precedence over a pnpm-workspace.yaml file.
        if "workspaces" in root_config:
            self.workspace_config = {
                "type": "npm/yarn",
                "patterns": root_config["workspaces"],
            }
        elif (self.path / "pnpm-workspace.yaml").exists():
            self.workspace_config = {
                "type": "pnpm",
                "file": "pnpm-workspace.yaml",
            }
        else:
            raise ValueError(
                "No workspace configuration found. Monorepo structure not detected."
            )

        print(f"  Workspace type: {self.workspace_config['type']}")

    def discover_packages(self) -> None:
        """Discover all packages in workspace.

        Requires :meth:`analyze_workspace` to have run first. Appends an
        entry to ``self.packages`` for every directory matching a workspace
        pattern that contains a package.json.
        """
        print("Discovering packages...")

        if self.workspace_config["type"] == "npm/yarn":
            patterns = self.workspace_config["patterns"]
            # yarn also allows {"packages": [...], "nohoist": [...]}.
            if isinstance(patterns, dict):
                patterns = patterns.get("packages", [])
        else:
            # Parse pnpm-workspace.yaml
            yaml_file = self.path / "pnpm-workspace.yaml"
            patterns = self._parse_pnpm_workspace(yaml_file)

        # Find all packages matching patterns
        for pattern in patterns:
            self._find_packages_by_pattern(pattern)

        print(f"  Found {len(self.packages)} packages")
        for pkg in self.packages:
            print(f"    - {pkg['name']} ({pkg['path'].relative_to(self.path)})")

    def _parse_pnpm_workspace(self, yaml_file: Path) -> List[str]:
        """Parse pnpm-workspace.yaml file.

        Naive line-based parser: collects ``- pattern`` entries that follow a
        ``packages:`` key. Assumes ``packages:`` is the only list in the file
        (sufficient for typical pnpm workspaces; avoids a YAML dependency).
        """
        patterns = []
        with open(yaml_file) as f:
            in_packages = False
            for line in f:
                line = line.strip()
                if line.startswith("packages:"):
                    in_packages = True
                    continue
                if in_packages and line.startswith("- "):
                    # Strip the leading "- " and any surrounding quotes.
                    pattern = line[2:].strip().strip("'\"")
                    patterns.append(pattern)
        return patterns

    def _find_packages_by_pattern(self, pattern: str) -> None:
        """Find packages matching glob pattern.

        Appends matches to ``self.packages``. Matches are sorted so that
        discovery order (and therefore reports and inferred outputs) is
        deterministic across filesystems.
        """
        import glob

        # Convert pattern to absolute path
        search_pattern = str(self.path / pattern)

        # glob.glob() order is filesystem-dependent; sort for determinism.
        for match in sorted(glob.glob(search_pattern)):
            match_path = Path(match)
            package_json = match_path / "package.json"

            if package_json.exists():
                with open(package_json) as f:
                    pkg_data = json.load(f)

                self.packages.append(
                    {
                        "name": pkg_data.get("name", match_path.name),
                        "path": match_path,
                        "scripts": pkg_data.get("scripts", {}),
                        "dependencies": pkg_data.get("dependencies", {}),
                        "devDependencies": pkg_data.get("devDependencies", {}),
                    }
                )

    def analyze_scripts(self) -> Dict[str, Set[str]]:
        """Analyze common scripts across packages.

        Returns:
            Mapping of script name -> set of package names defining it, for
            scripts present in at least 2 packages (the pipeline candidates).
        """
        print("Analyzing scripts...")

        script_map: Dict[str, Set[str]] = {}

        for pkg in self.packages:
            for script_name in pkg["scripts"]:
                if script_name not in script_map:
                    script_map[script_name] = set()
                script_map[script_name].add(pkg["name"])

        common_scripts = {
            name: packages
            for name, packages in script_map.items()
            if len(packages) >= 2  # Present in at least 2 packages
        }

        print(f"  Found {len(common_scripts)} common scripts:")
        for script, packages in common_scripts.items():
            print(f"    - {script} ({len(packages)} packages)")

        return common_scripts

    def generate_turbo_config(self, common_scripts: Dict[str, Set[str]]) -> Dict:
        """Generate turbo.json configuration.

        Args:
            common_scripts: Output of :meth:`analyze_scripts`.

        Returns:
            A dict ready to be serialized as turbo.json, with a task entry
            for each recognized common script.
        """
        print("Generating turbo.json configuration...")

        pipeline = {}

        # Build task: depends on upstream builds, caches inferred outputs.
        if "build" in common_scripts:
            pipeline["build"] = {
                "dependsOn": ["^build"],
                "outputs": self._infer_build_outputs(),
            }

        # Test task: runs after the package's own build.
        if "test" in common_scripts:
            pipeline["test"] = {
                "dependsOn": ["build"],
                "outputs": ["coverage/**"],
            }

        # Lint task
        if "lint" in common_scripts:
            pipeline["lint"] = {"dependsOn": ["^build"]}

        # Typecheck task: support both common spellings.
        if "typecheck" in common_scripts or "type-check" in common_scripts:
            task_name = "typecheck" if "typecheck" in common_scripts else "type-check"
            pipeline[task_name] = {"dependsOn": ["^build"]}

        # Dev task: long-running watcher, never cached.
        if "dev" in common_scripts or "start" in common_scripts:
            dev_task = "dev" if "dev" in common_scripts else "start"
            pipeline[dev_task] = {"cache": False, "persistent": True}

        # Clean task
        if "clean" in common_scripts:
            pipeline["clean"] = {"cache": False}

        turbo_config = {
            "$schema": "https://turbo.build/schema.json",
            "globalDependencies": ["**/.env.*local"],
            "pipeline": pipeline,
        }

        return turbo_config

    def _infer_build_outputs(self) -> List[str]:
        """Infer build output directories from packages.

        Scans each discovered package for well-known output directories.
        Returns a sorted list of glob patterns, defaulting to ["dist/**"]
        when nothing is found.
        """
        outputs = set()

        for pkg in self.packages:
            pkg_path = pkg["path"]

            # Check common output directories
            if (pkg_path / "dist").exists():
                outputs.add("dist/**")
            if (pkg_path / "build").exists():
                outputs.add("build/**")
            if (pkg_path / ".next").exists():
                outputs.add(".next/**")
                # Next.js cache must be excluded from turbo's cached outputs.
                outputs.add("!.next/cache/**")
            if (pkg_path / "out").exists():
                outputs.add("out/**")

        return sorted(list(outputs)) or ["dist/**"]

    def update_root_package_json(self) -> Dict:
        """Update root package.json with Turborepo scripts.

        Returns:
            The updated package.json dict (not yet written to disk); adds
            ``turbo`` to devDependencies and rewrites well-known scripts as
            ``turbo run <task>`` when any package defines the task.
        """
        print("Updating root package.json...")

        package_json_path = self.path / "package.json"
        with open(package_json_path) as f:
            package_json = json.load(f)

        # Add turbo to devDependencies
        if "devDependencies" not in package_json:
            package_json["devDependencies"] = {}

        package_json["devDependencies"]["turbo"] = "latest"

        # Update scripts to use turbo
        if "scripts" not in package_json:
            package_json["scripts"] = {}

        common_tasks = ["build", "dev", "test", "lint", "typecheck", "clean"]
        for task in common_tasks:
            # Check if task exists in any package
            if any(task in pkg["scripts"] for pkg in self.packages):
                package_json["scripts"][task] = f"turbo run {task}"

        return package_json

    def generate_migration_report(
        self, turbo_config: Dict, updated_package_json: Dict
    ) -> str:
        """Generate migration report.

        Args:
            turbo_config: Generated turbo.json configuration.
            updated_package_json: Updated root package.json dict.

        Returns:
            Human-readable multi-line report string.
        """
        report = []

        report.append("=" * 60)
        report.append("TURBOREPO MIGRATION REPORT")
        report.append("=" * 60)
        report.append("")

        report.append(f"Monorepo Path: {self.path}")
        report.append(f"Package Manager: {self.package_manager}")
        report.append(f"Total Packages: {len(self.packages)}")
        report.append("")

        report.append("PACKAGES:")
        for pkg in self.packages:
            rel_path = pkg["path"].relative_to(self.path)
            report.append(f"  - {pkg['name']} ({rel_path})")
        report.append("")

        report.append("TURBO.JSON PIPELINE:")
        for task, config in turbo_config["pipeline"].items():
            report.append(f"  {task}:")
            for key, value in config.items():
                report.append(f"    {key}: {value}")
        report.append("")

        report.append("ROOT PACKAGE.JSON SCRIPTS:")
        for script, command in updated_package_json.get("scripts", {}).items():
            report.append(f"  {script}: {command}")
        report.append("")

        report.append("RECOMMENDATIONS:")
        report.append("  1. Review generated turbo.json pipeline configuration")
        report.append("  2. Adjust output directories based on your build tools")
        report.append("  3. Configure remote caching: turbo login && turbo link")
        report.append("  4. Run 'npm install' to install Turborepo")
        report.append("  5. Test with: turbo run build --dry-run")
        report.append("")

        if self.dry_run:
            report.append("DRY RUN MODE: No files were modified")
        else:
            report.append("FILES CREATED/MODIFIED:")
            report.append(f"  - {self.path / 'turbo.json'}")
            report.append(f"  - {self.path / 'package.json'}")

        report.append("")
        report.append("=" * 60)

        return "\n".join(report)

    def write_files(self, turbo_config: Dict, updated_package_json: Dict) -> None:
        """Write configuration files.

        In dry-run mode only prints the files that would change; otherwise
        writes turbo.json and overwrites the root package.json.
        """
        if self.dry_run:
            print("\nDRY RUN - Files that would be created/modified:")
            print(f"  - {self.path / 'turbo.json'}")
            print(f"  - {self.path / 'package.json'}")
            return

        print("Writing files...")

        # Write turbo.json
        turbo_json_path = self.path / "turbo.json"
        with open(turbo_json_path, "w") as f:
            json.dump(turbo_config, f, indent=2)
        print(f"  ✓ Created {turbo_json_path}")

        # Write updated package.json
        package_json_path = self.path / "package.json"
        with open(package_json_path, "w") as f:
            json.dump(updated_package_json, f, indent=2)
        print(f"  ✓ Updated {package_json_path}")

    def migrate(self) -> None:
        """Run migration process.

        CLI-facing entry point: runs the full pipeline and prints a report.
        Any error is printed to stderr and terminates the process with
        exit code 1 (callers embedding this class should catch SystemExit
        or call the individual steps directly).
        """
        try:
            print(f"Migrating monorepo to Turborepo: {self.path}")
            print(f"Dry run: {self.dry_run}")
            print()

            self.validate_path()
            self.analyze_workspace()
            self.discover_packages()

            common_scripts = self.analyze_scripts()
            turbo_config = self.generate_turbo_config(common_scripts)
            updated_package_json = self.update_root_package_json()

            print()
            self.write_files(turbo_config, updated_package_json)

            print()
            report = self.generate_migration_report(turbo_config, updated_package_json)
            print(report)

        except Exception as e:
            print(f"Error: {e}", file=sys.stderr)
            sys.exit(1)
|
||||
|
||||
|
||||
def main():
    """CLI entry point.

    Parses command-line options and hands them straight to
    TurborepoMigrator.migrate(), which handles its own error reporting
    and exit codes.
    """
    arg_parser = argparse.ArgumentParser(
        description="Migrate existing monorepo to Turborepo"
    )
    arg_parser.add_argument(
        "--path",
        type=Path,
        default=Path.cwd(),
        help="Path to monorepo (default: current directory)",
    )
    arg_parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Preview changes without writing files",
    )
    arg_parser.add_argument(
        "--package-manager",
        choices=["npm", "yarn", "pnpm", "bun"],
        default="npm",
        help="Package manager (default: npm)",
    )

    options = arg_parser.parse_args()

    TurborepoMigrator(
        path=options.path,
        dry_run=options.dry_run,
        package_manager=options.package_manager,
    ).migrate()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
Reference in New Issue
Block a user