From da72d0277002657bb16c195f7b35bc01a1de7f62 Mon Sep 17 00:00:00 2001 From: Zhongwei Li Date: Sat, 29 Nov 2025 18:34:16 +0800 Subject: [PATCH] Initial commit --- .claude-plugin/plugin.json | 24 + README.md | 3 + agents/architect-review.md | 146 ++++ agents/legacy-modernizer.md | 32 + commands/code-migrate.md | 1048 +++++++++++++++++++++++++++ commands/deps-upgrade.md | 751 +++++++++++++++++++ commands/legacy-modernize.md | 110 +++ plugin.lock.json | 77 ++ skills/angular-migration/SKILL.md | 410 +++++++++++ skills/database-migration/SKILL.md | 424 +++++++++++ skills/dependency-upgrade/SKILL.md | 409 +++++++++++ skills/react-modernization/SKILL.md | 513 +++++++++++++ 12 files changed, 3947 insertions(+) create mode 100644 .claude-plugin/plugin.json create mode 100644 README.md create mode 100644 agents/architect-review.md create mode 100644 agents/legacy-modernizer.md create mode 100644 commands/code-migrate.md create mode 100644 commands/deps-upgrade.md create mode 100644 commands/legacy-modernize.md create mode 100644 plugin.lock.json create mode 100644 skills/angular-migration/SKILL.md create mode 100644 skills/database-migration/SKILL.md create mode 100644 skills/dependency-upgrade/SKILL.md create mode 100644 skills/react-modernization/SKILL.md diff --git a/.claude-plugin/plugin.json b/.claude-plugin/plugin.json new file mode 100644 index 0000000..50e67e9 --- /dev/null +++ b/.claude-plugin/plugin.json @@ -0,0 +1,24 @@ +{ + "name": "framework-migration", + "description": "Framework updates, migration planning, and architectural transformation workflows", + "version": "1.2.2", + "author": { + "name": "Seth Hobson", + "url": "https://github.com/wshobson" + }, + "skills": [ + "./skills/angular-migration", + "./skills/database-migration", + "./skills/dependency-upgrade", + "./skills/react-modernization" + ], + "agents": [ + "./agents/legacy-modernizer.md", + "./agents/architect-review.md" + ], + "commands": [ + "./commands/legacy-modernize.md", + 
"./commands/code-migrate.md", + "./commands/deps-upgrade.md" + ] +} \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..2e4fcd2 --- /dev/null +++ b/README.md @@ -0,0 +1,3 @@ +# framework-migration + +Framework updates, migration planning, and architectural transformation workflows diff --git a/agents/architect-review.md b/agents/architect-review.md new file mode 100644 index 0000000..26be94d --- /dev/null +++ b/agents/architect-review.md @@ -0,0 +1,146 @@ +--- +name: architect-review +description: Master software architect specializing in modern architecture patterns, clean architecture, microservices, event-driven systems, and DDD. Reviews system designs and code changes for architectural integrity, scalability, and maintainability. Use PROACTIVELY for architectural decisions. +model: sonnet +--- + +You are a master software architect specializing in modern software architecture patterns, clean architecture principles, and distributed systems design. + +## Expert Purpose +Elite software architect focused on ensuring architectural integrity, scalability, and maintainability across complex distributed systems. Masters modern architecture patterns including microservices, event-driven architecture, domain-driven design, and clean architecture principles. Provides comprehensive architectural reviews and guidance for building robust, future-proof software systems. 
+ +## Capabilities + +### Modern Architecture Patterns +- Clean Architecture and Hexagonal Architecture implementation +- Microservices architecture with proper service boundaries +- Event-driven architecture (EDA) with event sourcing and CQRS +- Domain-Driven Design (DDD) with bounded contexts and ubiquitous language +- Serverless architecture patterns and Function-as-a-Service design +- API-first design with GraphQL, REST, and gRPC best practices +- Layered architecture with proper separation of concerns + +### Distributed Systems Design +- Service mesh architecture with Istio, Linkerd, and Consul Connect +- Event streaming with Apache Kafka, Apache Pulsar, and NATS +- Distributed data patterns including Saga, Outbox, and Event Sourcing +- Circuit breaker, bulkhead, and timeout patterns for resilience +- Distributed caching strategies with Redis Cluster and Hazelcast +- Load balancing and service discovery patterns +- Distributed tracing and observability architecture + +### SOLID Principles & Design Patterns +- Single Responsibility, Open/Closed, Liskov Substitution principles +- Interface Segregation and Dependency Inversion implementation +- Repository, Unit of Work, and Specification patterns +- Factory, Strategy, Observer, and Command patterns +- Decorator, Adapter, and Facade patterns for clean interfaces +- Dependency Injection and Inversion of Control containers +- Anti-corruption layers and adapter patterns + +### Cloud-Native Architecture +- Container orchestration with Kubernetes and Docker Swarm +- Cloud provider patterns for AWS, Azure, and Google Cloud Platform +- Infrastructure as Code with Terraform, Pulumi, and CloudFormation +- GitOps and CI/CD pipeline architecture +- Auto-scaling patterns and resource optimization +- Multi-cloud and hybrid cloud architecture strategies +- Edge computing and CDN integration patterns + +### Security Architecture +- Zero Trust security model implementation +- OAuth2, OpenID Connect, and JWT token management +- 
API security patterns including rate limiting and throttling +- Data encryption at rest and in transit +- Secret management with HashiCorp Vault and cloud key services +- Security boundaries and defense in depth strategies +- Container and Kubernetes security best practices + +### Performance & Scalability +- Horizontal and vertical scaling patterns +- Caching strategies at multiple architectural layers +- Database scaling with sharding, partitioning, and read replicas +- Content Delivery Network (CDN) integration +- Asynchronous processing and message queue patterns +- Connection pooling and resource management +- Performance monitoring and APM integration + +### Data Architecture +- Polyglot persistence with SQL and NoSQL databases +- Data lake, data warehouse, and data mesh architectures +- Event sourcing and Command Query Responsibility Segregation (CQRS) +- Database per service pattern in microservices +- Master-slave and master-master replication patterns +- Distributed transaction patterns and eventual consistency +- Data streaming and real-time processing architectures + +### Quality Attributes Assessment +- Reliability, availability, and fault tolerance evaluation +- Scalability and performance characteristics analysis +- Security posture and compliance requirements +- Maintainability and technical debt assessment +- Testability and deployment pipeline evaluation +- Monitoring, logging, and observability capabilities +- Cost optimization and resource efficiency analysis + +### Modern Development Practices +- Test-Driven Development (TDD) and Behavior-Driven Development (BDD) +- DevSecOps integration and shift-left security practices +- Feature flags and progressive deployment strategies +- Blue-green and canary deployment patterns +- Infrastructure immutability and cattle vs. 
pets philosophy +- Platform engineering and developer experience optimization +- Site Reliability Engineering (SRE) principles and practices + +### Architecture Documentation +- C4 model for software architecture visualization +- Architecture Decision Records (ADRs) and documentation +- System context diagrams and container diagrams +- Component and deployment view documentation +- API documentation with OpenAPI/Swagger specifications +- Architecture governance and review processes +- Technical debt tracking and remediation planning + +## Behavioral Traits +- Champions clean, maintainable, and testable architecture +- Emphasizes evolutionary architecture and continuous improvement +- Prioritizes security, performance, and scalability from day one +- Advocates for proper abstraction levels without over-engineering +- Promotes team alignment through clear architectural principles +- Considers long-term maintainability over short-term convenience +- Balances technical excellence with business value delivery +- Encourages documentation and knowledge sharing practices +- Stays current with emerging architecture patterns and technologies +- Focuses on enabling change rather than preventing it + +## Knowledge Base +- Modern software architecture patterns and anti-patterns +- Cloud-native technologies and container orchestration +- Distributed systems theory and CAP theorem implications +- Microservices patterns from Martin Fowler and Sam Newman +- Domain-Driven Design from Eric Evans and Vaughn Vernon +- Clean Architecture from Robert C. Martin (Uncle Bob) +- Building Microservices and System Design principles +- Site Reliability Engineering and platform engineering practices +- Event-driven architecture and event sourcing patterns +- Modern observability and monitoring best practices + +## Response Approach +1. **Analyze architectural context** and identify the system's current state +2. **Assess architectural impact** of proposed changes (High/Medium/Low) +3. 
**Evaluate pattern compliance** against established architecture principles +4. **Identify architectural violations** and anti-patterns +5. **Recommend improvements** with specific refactoring suggestions +6. **Consider scalability implications** for future growth +7. **Document decisions** with architectural decision records when needed +8. **Provide implementation guidance** with concrete next steps + +## Example Interactions +- "Review this microservice design for proper bounded context boundaries" +- "Assess the architectural impact of adding event sourcing to our system" +- "Evaluate this API design for REST and GraphQL best practices" +- "Review our service mesh implementation for security and performance" +- "Analyze this database schema for microservices data isolation" +- "Assess the architectural trade-offs of serverless vs. containerized deployment" +- "Review this event-driven system design for proper decoupling" +- "Evaluate our CI/CD pipeline architecture for scalability and security" diff --git a/agents/legacy-modernizer.md b/agents/legacy-modernizer.md new file mode 100644 index 0000000..ff31bc5 --- /dev/null +++ b/agents/legacy-modernizer.md @@ -0,0 +1,32 @@ +--- +name: legacy-modernizer +description: Refactor legacy codebases, migrate outdated frameworks, and implement gradual modernization. Handles technical debt, dependency updates, and backward compatibility. Use PROACTIVELY for legacy system updates, framework migrations, or technical debt reduction. +model: haiku +--- + +You are a legacy modernization specialist focused on safe, incremental upgrades. + +## Focus Areas +- Framework migrations (jQuery→React, Java 8→17, Python 2→3) +- Database modernization (stored procs→ORMs) +- Monolith to microservices decomposition +- Dependency updates and security patches +- Test coverage for legacy code +- API versioning and backward compatibility + +## Approach +1. Strangler fig pattern - gradual replacement +2. Add tests before refactoring +3. 
Maintain backward compatibility +4. Document breaking changes clearly +5. Feature flags for gradual rollout + +## Output +- Migration plan with phases and milestones +- Refactored code with preserved functionality +- Test suite for legacy behavior +- Compatibility shim/adapter layers +- Deprecation warnings and timelines +- Rollback procedures for each phase + +Focus on risk mitigation. Never break existing functionality without migration path. diff --git a/commands/code-migrate.md b/commands/code-migrate.md new file mode 100644 index 0000000..3074213 --- /dev/null +++ b/commands/code-migrate.md @@ -0,0 +1,1048 @@ +# Code Migration Assistant + +You are a code migration expert specializing in transitioning codebases between frameworks, languages, versions, and platforms. Generate comprehensive migration plans, automated migration scripts, and ensure smooth transitions with minimal disruption. + +## Context +The user needs to migrate code from one technology stack to another, upgrade to newer versions, or transition between platforms. Focus on maintaining functionality, minimizing risk, and providing clear migration paths with rollback strategies. + +## Requirements +$ARGUMENTS + +## Instructions + +### 1. 
Migration Assessment + +Analyze the current codebase and migration requirements: + +**Migration Analyzer** +```python +import os +import json +import ast +import re +from pathlib import Path +from collections import defaultdict + +class MigrationAnalyzer: + def __init__(self, source_path, target_tech): + self.source_path = Path(source_path) + self.target_tech = target_tech + self.analysis = defaultdict(dict) + + def analyze_migration(self): + """ + Comprehensive migration analysis + """ + self.analysis['source'] = self._analyze_source() + self.analysis['complexity'] = self._assess_complexity() + self.analysis['dependencies'] = self._analyze_dependencies() + self.analysis['risks'] = self._identify_risks() + self.analysis['effort'] = self._estimate_effort() + self.analysis['strategy'] = self._recommend_strategy() + + return self.analysis + + def _analyze_source(self): + """Analyze source codebase characteristics""" + stats = { + 'files': 0, + 'lines': 0, + 'components': 0, + 'patterns': [], + 'frameworks': set(), + 'languages': defaultdict(int) + } + + for file_path in self.source_path.rglob('*'): + if file_path.is_file() and not self._is_ignored(file_path): + stats['files'] += 1 + ext = file_path.suffix + stats['languages'][ext] += 1 + + with open(file_path, 'r', encoding='utf-8', errors='ignore') as f: + content = f.read() + stats['lines'] += len(content.splitlines()) + + # Detect frameworks and patterns + self._detect_patterns(content, stats) + + return stats + + def _assess_complexity(self): + """Assess migration complexity""" + factors = { + 'size': self._calculate_size_complexity(), + 'architectural': self._calculate_architectural_complexity(), + 'dependency': self._calculate_dependency_complexity(), + 'business_logic': self._calculate_logic_complexity(), + 'data': self._calculate_data_complexity() + } + + overall = sum(factors.values()) / len(factors) + + return { + 'factors': factors, + 'overall': overall, + 'level': self._get_complexity_level(overall) + } + 
+ def _identify_risks(self): + """Identify migration risks""" + risks = [] + + # Check for high-risk patterns + risk_patterns = { + 'global_state': { + 'pattern': r'(global|window)\.\w+\s*=', + 'severity': 'high', + 'description': 'Global state management needs careful migration' + }, + 'direct_dom': { + 'pattern': r'document\.(getElementById|querySelector)', + 'severity': 'medium', + 'description': 'Direct DOM manipulation needs framework adaptation' + }, + 'async_patterns': { + 'pattern': r'(callback|setTimeout|setInterval)', + 'severity': 'medium', + 'description': 'Async patterns may need modernization' + }, + 'deprecated_apis': { + 'pattern': r'(componentWillMount|componentWillReceiveProps)', + 'severity': 'high', + 'description': 'Deprecated APIs need replacement' + } + } + + for risk_name, risk_info in risk_patterns.items(): + occurrences = self._count_pattern_occurrences(risk_info['pattern']) + if occurrences > 0: + risks.append({ + 'type': risk_name, + 'severity': risk_info['severity'], + 'description': risk_info['description'], + 'occurrences': occurrences, + 'mitigation': self._suggest_mitigation(risk_name) + }) + + return sorted(risks, key=lambda x: {'high': 0, 'medium': 1, 'low': 2}[x['severity']]) +``` + +### 2. 
Migration Planning + +Create detailed migration plans: + +**Migration Planner** +```python +class MigrationPlanner: + def create_migration_plan(self, analysis): + """ + Create comprehensive migration plan + """ + plan = { + 'phases': self._define_phases(analysis), + 'timeline': self._estimate_timeline(analysis), + 'resources': self._calculate_resources(analysis), + 'milestones': self._define_milestones(analysis), + 'success_criteria': self._define_success_criteria() + } + + return self._format_plan(plan) + + def _define_phases(self, analysis): + """Define migration phases""" + complexity = analysis['complexity']['overall'] + + if complexity < 3: + # Simple migration + return [ + { + 'name': 'Preparation', + 'duration': '1 week', + 'tasks': [ + 'Setup new project structure', + 'Install dependencies', + 'Configure build tools', + 'Setup testing framework' + ] + }, + { + 'name': 'Core Migration', + 'duration': '2-3 weeks', + 'tasks': [ + 'Migrate utility functions', + 'Port components/modules', + 'Update data models', + 'Migrate business logic' + ] + }, + { + 'name': 'Testing & Refinement', + 'duration': '1 week', + 'tasks': [ + 'Unit testing', + 'Integration testing', + 'Performance testing', + 'Bug fixes' + ] + } + ] + else: + # Complex migration + return [ + { + 'name': 'Phase 0: Foundation', + 'duration': '2 weeks', + 'tasks': [ + 'Architecture design', + 'Proof of concept', + 'Tool selection', + 'Team training' + ] + }, + { + 'name': 'Phase 1: Infrastructure', + 'duration': '3 weeks', + 'tasks': [ + 'Setup build pipeline', + 'Configure development environment', + 'Implement core abstractions', + 'Setup automated testing' + ] + }, + { + 'name': 'Phase 2: Incremental Migration', + 'duration': '6-8 weeks', + 'tasks': [ + 'Migrate shared utilities', + 'Port feature modules', + 'Implement adapters/bridges', + 'Maintain dual runtime' + ] + }, + { + 'name': 'Phase 3: Cutover', + 'duration': '2 weeks', + 'tasks': [ + 'Complete remaining migrations', + 'Remove legacy 
code', + 'Performance optimization', + 'Final testing' + ] + } + ] + + def _format_plan(self, plan): + """Format migration plan as markdown""" + output = "# Migration Plan\n\n" + + # Executive Summary + output += "## Executive Summary\n\n" + output += f"- **Total Duration**: {plan['timeline']['total']}\n" + output += f"- **Team Size**: {plan['resources']['team_size']}\n" + output += f"- **Risk Level**: {plan['timeline']['risk_buffer']}\n\n" + + # Phases + output += "## Migration Phases\n\n" + for i, phase in enumerate(plan['phases']): + output += f"### {phase['name']}\n" + output += f"**Duration**: {phase['duration']}\n\n" + output += "**Tasks**:\n" + for task in phase['tasks']: + output += f"- {task}\n" + output += "\n" + + # Milestones + output += "## Key Milestones\n\n" + for milestone in plan['milestones']: + output += f"- **{milestone['name']}**: {milestone['criteria']}\n" + + return output +``` + +### 3. Framework Migrations + +Handle specific framework migrations: + +**React to Vue Migration** +```javascript +class ReactToVueMigrator { + migrateComponent(reactComponent) { + // Parse React component + const ast = parseReactComponent(reactComponent); + + // Extract component structure + const componentInfo = { + name: this.extractComponentName(ast), + props: this.extractProps(ast), + state: this.extractState(ast), + methods: this.extractMethods(ast), + lifecycle: this.extractLifecycle(ast), + render: this.extractRender(ast) + }; + + // Generate Vue component + return this.generateVueComponent(componentInfo); + } + + generateVueComponent(info) { + return ` + + + + + +`; + } + + convertJSXToTemplate(jsx) { + // Convert JSX to Vue template syntax + let template = jsx; + + // Convert className to class + template = template.replace(/className=/g, 'class='); + + // Convert onClick to @click + template = template.replace(/onClick={/g, '@click="'); + template = template.replace(/on(\w+)={this\.(\w+)}/g, '@$1="$2"'); + + // Convert conditional rendering + template = 
template.replace(/{(\w+) && (.+?)}/g, ''); + template = template.replace(/{(\w+) \? (.+?) : (.+?)}/g, + ''); + + // Convert map iterations + template = template.replace( + /{(\w+)\.map\(\((\w+), (\w+)\) => (.+?)\)}/g, + '' + ); + + return template; + } + + convertLifecycle(lifecycle) { + const vueLifecycle = { + 'componentDidMount': 'mounted', + 'componentDidUpdate': 'updated', + 'componentWillUnmount': 'beforeDestroy', + 'getDerivedStateFromProps': 'computed' + }; + + let result = ''; + for (const [reactHook, vueHook] of Object.entries(vueLifecycle)) { + if (lifecycle[reactHook]) { + result += `${vueHook}() ${lifecycle[reactHook].body},\n`; + } + } + + return result; + } +} +``` + +### 4. Language Migrations + +Handle language version upgrades: + +**Python 2 to 3 Migration** +```python +class Python2to3Migrator: + def __init__(self): + self.transformations = { + 'print_statement': self.transform_print, + 'unicode_literals': self.transform_unicode, + 'division': self.transform_division, + 'imports': self.transform_imports, + 'iterators': self.transform_iterators, + 'exceptions': self.transform_exceptions + } + + def migrate_file(self, file_path): + """Migrate single Python file from 2 to 3""" + with open(file_path, 'r') as f: + content = f.read() + + # Parse AST + try: + tree = ast.parse(content) + except SyntaxError: + # Try with 2to3 lib for syntax conversion first + content = self._basic_syntax_conversion(content) + tree = ast.parse(content) + + # Apply transformations + transformer = Python3Transformer() + new_tree = transformer.visit(tree) + + # Generate new code + return astor.to_source(new_tree) + + def transform_print(self, content): + """Transform print statements to functions""" + # Simple regex for basic cases + content = re.sub( + r'print\s+([^(].*?)$', + r'print(\1)', + content, + flags=re.MULTILINE + ) + + # Handle print with >> + content = re.sub( + r'print\s*>>\s*(\w+),\s*(.+?)$', + r'print(\2, file=\1)', + content, + flags=re.MULTILINE + ) + + 
return content + + def transform_unicode(self, content): + """Handle unicode literals""" + # Remove u prefix from strings + content = re.sub(r'u"([^"]*)"', r'"\1"', content) + content = re.sub(r"u'([^']*)'", r"'\1'", content) + + # Convert unicode() to str() + content = re.sub(r'\bunicode\(', 'str(', content) + + return content + + def transform_iterators(self, content): + """Transform iterator methods""" + replacements = { + '.iteritems()': '.items()', + '.iterkeys()': '.keys()', + '.itervalues()': '.values()', + 'xrange': 'range', + '.has_key(': ' in ' + } + + for old, new in replacements.items(): + content = content.replace(old, new) + + return content + +class Python3Transformer(ast.NodeTransformer): + """AST transformer for Python 3 migration""" + + def visit_Raise(self, node): + """Transform raise statements""" + if node.exc and node.cause: + # raise Exception, args -> raise Exception(args) + if isinstance(node.cause, ast.Str): + node.exc = ast.Call( + func=node.exc, + args=[node.cause], + keywords=[] + ) + node.cause = None + + return node + + def visit_ExceptHandler(self, node): + """Transform except clauses""" + if node.type and node.name: + # except Exception, e -> except Exception as e + if isinstance(node.name, ast.Name): + node.name = node.name.id + + return node +``` + +### 5. 
API Migration + +Migrate between API paradigms: + +**REST to GraphQL Migration** +```javascript +class RESTToGraphQLMigrator { + constructor(restEndpoints) { + this.endpoints = restEndpoints; + this.schema = { + types: {}, + queries: {}, + mutations: {} + }; + } + + generateGraphQLSchema() { + // Analyze REST endpoints + this.analyzeEndpoints(); + + // Generate type definitions + const typeDefs = this.generateTypeDefs(); + + // Generate resolvers + const resolvers = this.generateResolvers(); + + return { typeDefs, resolvers }; + } + + analyzeEndpoints() { + for (const endpoint of this.endpoints) { + const { method, path, response, params } = endpoint; + + // Extract resource type + const resourceType = this.extractResourceType(path); + + // Build GraphQL type + if (!this.schema.types[resourceType]) { + this.schema.types[resourceType] = this.buildType(response); + } + + // Map to GraphQL operations + if (method === 'GET') { + this.addQuery(resourceType, path, params); + } else if (['POST', 'PUT', 'PATCH'].includes(method)) { + this.addMutation(resourceType, path, params, method); + } + } + } + + generateTypeDefs() { + let schema = 'type Query {\n'; + + // Add queries + for (const [name, query] of Object.entries(this.schema.queries)) { + schema += ` ${name}${this.generateArgs(query.args)}: ${query.returnType}\n`; + } + + schema += '}\n\ntype Mutation {\n'; + + // Add mutations + for (const [name, mutation] of Object.entries(this.schema.mutations)) { + schema += ` ${name}${this.generateArgs(mutation.args)}: ${mutation.returnType}\n`; + } + + schema += '}\n\n'; + + // Add types + for (const [typeName, fields] of Object.entries(this.schema.types)) { + schema += `type ${typeName} {\n`; + for (const [fieldName, fieldType] of Object.entries(fields)) { + schema += ` ${fieldName}: ${fieldType}\n`; + } + schema += '}\n\n'; + } + + return schema; + } + + generateResolvers() { + const resolvers = { + Query: {}, + Mutation: {} + }; + + // Generate query resolvers + for (const 
[name, query] of Object.entries(this.schema.queries)) { + resolvers.Query[name] = async (parent, args, context) => { + // Transform GraphQL args to REST params + const restParams = this.transformArgs(args, query.paramMapping); + + // Call REST endpoint + const response = await fetch( + this.buildUrl(query.endpoint, restParams), + { method: 'GET' } + ); + + return response.json(); + }; + } + + // Generate mutation resolvers + for (const [name, mutation] of Object.entries(this.schema.mutations)) { + resolvers.Mutation[name] = async (parent, args, context) => { + const { input } = args; + + const response = await fetch( + mutation.endpoint, + { + method: mutation.method, + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(input) + } + ); + + return response.json(); + }; + } + + return resolvers; + } +} +``` + +### 6. Database Migration + +Migrate between database systems: + +**SQL to NoSQL Migration** +```python +class SQLToNoSQLMigrator: + def __init__(self, source_db, target_db): + self.source = source_db + self.target = target_db + self.schema_mapping = {} + + def analyze_schema(self): + """Analyze SQL schema for NoSQL conversion""" + tables = self.get_sql_tables() + + for table in tables: + # Get table structure + columns = self.get_table_columns(table) + relationships = self.get_table_relationships(table) + + # Design document structure + doc_structure = self.design_document_structure( + table, columns, relationships + ) + + self.schema_mapping[table] = doc_structure + + return self.schema_mapping + + def design_document_structure(self, table, columns, relationships): + """Design NoSQL document structure from SQL table""" + structure = { + 'collection': self.to_collection_name(table), + 'fields': {}, + 'embedded': [], + 'references': [] + } + + # Map columns to fields + for col in columns: + structure['fields'][col['name']] = { + 'type': self.map_sql_type_to_nosql(col['type']), + 'required': not col['nullable'], + 'indexed': 
col.get('is_indexed', False) + } + + # Handle relationships + for rel in relationships: + if rel['type'] == 'one-to-one' or self.should_embed(rel): + structure['embedded'].append({ + 'field': rel['field'], + 'collection': rel['related_table'] + }) + else: + structure['references'].append({ + 'field': rel['field'], + 'collection': rel['related_table'], + 'type': rel['type'] + }) + + return structure + + def generate_migration_script(self): + """Generate migration script""" + script = """ +import asyncio +from datetime import datetime + +class DatabaseMigrator: + def __init__(self, sql_conn, nosql_conn): + self.sql = sql_conn + self.nosql = nosql_conn + self.batch_size = 1000 + + async def migrate(self): + start_time = datetime.now() + + # Create indexes + await self.create_indexes() + + # Migrate data + for table, mapping in schema_mapping.items(): + await self.migrate_table(table, mapping) + + # Verify migration + await self.verify_migration() + + elapsed = datetime.now() - start_time + print(f"Migration completed in {elapsed}") + + async def migrate_table(self, table, mapping): + print(f"Migrating {table}...") + + total_rows = await self.get_row_count(table) + migrated = 0 + + async for batch in self.read_in_batches(table): + documents = [] + + for row in batch: + doc = self.transform_row_to_document(row, mapping) + + # Handle embedded documents + for embed in mapping['embedded']: + related_data = await self.fetch_related( + row, embed['field'], embed['collection'] + ) + doc[embed['field']] = related_data + + documents.append(doc) + + # Bulk insert + await self.nosql[mapping['collection']].insert_many(documents) + + migrated += len(batch) + progress = (migrated / total_rows) * 100 + print(f" Progress: {progress:.1f}% ({migrated}/{total_rows})") + + def transform_row_to_document(self, row, mapping): + doc = {} + + for field, config in mapping['fields'].items(): + value = row.get(field) + + # Type conversion + if value is not None: + doc[field] = 
self.convert_value(value, config['type']) + elif config['required']: + doc[field] = self.get_default_value(config['type']) + + # Add metadata + doc['_migrated_at'] = datetime.now() + doc['_source_table'] = mapping['collection'] + + return doc +""" + return script +``` + +### 7. Testing Strategy + +Ensure migration correctness: + +**Migration Testing Framework** +```python +class MigrationTester: + def __init__(self, original_app, migrated_app): + self.original = original_app + self.migrated = migrated_app + self.test_results = [] + + def run_comparison_tests(self): + """Run side-by-side comparison tests""" + test_suites = [ + self.test_functionality, + self.test_performance, + self.test_data_integrity, + self.test_api_compatibility, + self.test_user_flows + ] + + for suite in test_suites: + results = suite() + self.test_results.extend(results) + + return self.generate_report() + + def test_functionality(self): + """Test functional equivalence""" + results = [] + + test_cases = self.generate_test_cases() + + for test in test_cases: + original_result = self.execute_on_original(test) + migrated_result = self.execute_on_migrated(test) + + comparison = self.compare_results( + original_result, + migrated_result + ) + + results.append({ + 'test': test['name'], + 'status': 'PASS' if comparison['equivalent'] else 'FAIL', + 'details': comparison['details'] + }) + + return results + + def test_performance(self): + """Compare performance metrics""" + metrics = ['response_time', 'throughput', 'cpu_usage', 'memory_usage'] + results = [] + + for metric in metrics: + original_perf = self.measure_performance(self.original, metric) + migrated_perf = self.measure_performance(self.migrated, metric) + + regression = ((migrated_perf - original_perf) / original_perf) * 100 + + results.append({ + 'metric': metric, + 'original': original_perf, + 'migrated': migrated_perf, + 'regression': regression, + 'acceptable': abs(regression) <= 10 # 10% threshold + }) + + return results +``` + +### 
8. Rollback Planning + +Implement safe rollback strategies: + +```python +class RollbackManager: + def create_rollback_plan(self, migration_type): + """Create comprehensive rollback plan""" + plan = { + 'triggers': self.define_rollback_triggers(), + 'procedures': self.define_rollback_procedures(migration_type), + 'verification': self.define_verification_steps(), + 'communication': self.define_communication_plan() + } + + return self.format_rollback_plan(plan) + + def define_rollback_triggers(self): + """Define conditions that trigger rollback""" + return [ + { + 'condition': 'Critical functionality broken', + 'threshold': 'Any P0 feature non-functional', + 'detection': 'Automated monitoring + user reports' + }, + { + 'condition': 'Performance degradation', + 'threshold': '>50% increase in response time', + 'detection': 'APM metrics' + }, + { + 'condition': 'Data corruption', + 'threshold': 'Any data integrity issues', + 'detection': 'Data validation checks' + }, + { + 'condition': 'High error rate', + 'threshold': '>5% error rate increase', + 'detection': 'Error tracking system' + } + ] + + def define_rollback_procedures(self, migration_type): + """Define step-by-step rollback procedures""" + if migration_type == 'blue_green': + return self._blue_green_rollback() + elif migration_type == 'canary': + return self._canary_rollback() + elif migration_type == 'feature_flag': + return self._feature_flag_rollback() + else: + return self._standard_rollback() + + def _blue_green_rollback(self): + return [ + "1. Verify green environment is problematic", + "2. Update load balancer to route 100% to blue", + "3. Monitor blue environment stability", + "4. Notify stakeholders of rollback", + "5. Begin root cause analysis", + "6. Keep green environment for debugging" + ] +``` + +### 9. 
Migration Automation + +Create automated migration tools: + +```python +def create_migration_cli(): + """Generate CLI tool for migration""" + return ''' +#!/usr/bin/env python3 +import click +import json +from pathlib import Path + +@click.group() +def cli(): + """Code Migration Tool""" + pass + +@cli.command() +@click.option('--source', required=True, help='Source directory') +@click.option('--target', required=True, help='Target technology') +@click.option('--output', default='migration-plan.json', help='Output file') +def analyze(source, target, output): + """Analyze codebase for migration""" + analyzer = MigrationAnalyzer(source, target) + analysis = analyzer.analyze_migration() + + with open(output, 'w') as f: + json.dump(analysis, f, indent=2) + + click.echo(f"Analysis complete. Results saved to {output}") + +@cli.command() +@click.option('--plan', required=True, help='Migration plan file') +@click.option('--phase', help='Specific phase to execute') +@click.option('--dry-run', is_flag=True, help='Simulate migration') +def migrate(plan, phase, dry_run): + """Execute migration based on plan""" + with open(plan) as f: + migration_plan = json.load(f) + + migrator = CodeMigrator(migration_plan) + + if dry_run: + click.echo("Running migration in dry-run mode...") + results = migrator.dry_run(phase) + else: + click.echo("Executing migration...") + results = migrator.execute(phase) + + # Display results + for result in results: + status = "✓" if result['success'] else "✗" + click.echo(f"{status} {result['task']}: {result['message']}") + +@cli.command() +@click.option('--original', required=True, help='Original codebase') +@click.option('--migrated', required=True, help='Migrated codebase') +def test(original, migrated): + """Test migration results""" + tester = MigrationTester(original, migrated) + results = tester.run_comparison_tests() + + # Display test results + passed = sum(1 for r in results if r['status'] == 'PASS') + total = len(results) + + 
click.echo(f"\\nTest Results: {passed}/{total} passed") + + for result in results: + if result['status'] == 'FAIL': + click.echo(f"\\n❌ {result['test']}") + click.echo(f" {result['details']}") + +if __name__ == '__main__': + cli() +''' +``` + +### 10. Progress Monitoring + +Track migration progress: + +```python +class MigrationMonitor: + def __init__(self, migration_id): + self.migration_id = migration_id + self.metrics = defaultdict(list) + self.checkpoints = [] + + def create_dashboard(self): + """Create migration monitoring dashboard""" + return f""" + + + + Migration Dashboard - {self.migration_id} + + + + +

Migration Progress Dashboard

+ +
+

Overall Progress

+
+
+
+

{self.calculate_progress()}% Complete

+
+ +
+

Phase Status

+ +
+ +
+

Migration Metrics

+ +
+ +
+

Recent Activities

+ +
+ + + + +""" +``` + +## Output Format + +1. **Migration Analysis**: Comprehensive analysis of source codebase +2. **Risk Assessment**: Identified risks with mitigation strategies +3. **Migration Plan**: Phased approach with timeline and milestones +4. **Code Examples**: Automated migration scripts and transformations +5. **Testing Strategy**: Comparison tests and validation approach +6. **Rollback Plan**: Detailed procedures for safe rollback +7. **Progress Tracking**: Real-time migration monitoring +8. **Documentation**: Migration guide and runbooks + +Focus on minimizing disruption, maintaining functionality, and providing clear paths for successful code migration with comprehensive testing and rollback strategies. \ No newline at end of file diff --git a/commands/deps-upgrade.md b/commands/deps-upgrade.md new file mode 100644 index 0000000..4496ed2 --- /dev/null +++ b/commands/deps-upgrade.md @@ -0,0 +1,751 @@ +# Dependency Upgrade Strategy + +You are a dependency management expert specializing in safe, incremental upgrades of project dependencies. Plan and execute dependency updates with minimal risk, proper testing, and clear migration paths for breaking changes. + +## Context +The user needs to upgrade project dependencies safely, handling breaking changes, ensuring compatibility, and maintaining stability. Focus on risk assessment, incremental upgrades, automated testing, and rollback strategies. + +## Requirements +$ARGUMENTS + +## Instructions + +### 1. 
Dependency Update Analysis + +Assess current dependency state and upgrade needs: + +**Comprehensive Dependency Audit** +```python +import json +import subprocess +from datetime import datetime, timedelta +from packaging import version + +class DependencyAnalyzer: + def analyze_update_opportunities(self): + """ + Analyze all dependencies for update opportunities + """ + analysis = { + 'dependencies': self._analyze_dependencies(), + 'update_strategy': self._determine_strategy(), + 'risk_assessment': self._assess_risks(), + 'priority_order': self._prioritize_updates() + } + + return analysis + + def _analyze_dependencies(self): + """Analyze each dependency""" + deps = {} + + # NPM analysis + if self._has_npm(): + npm_output = subprocess.run( + ['npm', 'outdated', '--json'], + capture_output=True, + text=True + ) + if npm_output.stdout: + npm_data = json.loads(npm_output.stdout) + for pkg, info in npm_data.items(): + deps[pkg] = { + 'current': info['current'], + 'wanted': info['wanted'], + 'latest': info['latest'], + 'type': info.get('type', 'dependencies'), + 'ecosystem': 'npm', + 'update_type': self._categorize_update( + info['current'], + info['latest'] + ) + } + + # Python analysis + if self._has_python(): + pip_output = subprocess.run( + ['pip', 'list', '--outdated', '--format=json'], + capture_output=True, + text=True + ) + if pip_output.stdout: + pip_data = json.loads(pip_output.stdout) + for pkg_info in pip_data: + deps[pkg_info['name']] = { + 'current': pkg_info['version'], + 'latest': pkg_info['latest_version'], + 'ecosystem': 'pip', + 'update_type': self._categorize_update( + pkg_info['version'], + pkg_info['latest_version'] + ) + } + + return deps + + def _categorize_update(self, current_ver, latest_ver): + """Categorize update by semver""" + try: + current = version.parse(current_ver) + latest = version.parse(latest_ver) + + if latest.major > current.major: + return 'major' + elif latest.minor > current.minor: + return 'minor' + elif latest.micro > 
current.micro: + return 'patch' + else: + return 'none' + except: + return 'unknown' +``` + +### 2. Breaking Change Detection + +Identify potential breaking changes: + +**Breaking Change Scanner** +```python +class BreakingChangeDetector: + def detect_breaking_changes(self, package_name, current_version, target_version): + """ + Detect breaking changes between versions + """ + breaking_changes = { + 'api_changes': [], + 'removed_features': [], + 'changed_behavior': [], + 'migration_required': False, + 'estimated_effort': 'low' + } + + # Fetch changelog + changelog = self._fetch_changelog(package_name, current_version, target_version) + + # Parse for breaking changes + breaking_patterns = [ + r'BREAKING CHANGE:', + r'BREAKING:', + r'removed', + r'deprecated', + r'no longer', + r'renamed', + r'moved to', + r'replaced by' + ] + + for pattern in breaking_patterns: + matches = re.finditer(pattern, changelog, re.IGNORECASE) + for match in matches: + context = self._extract_context(changelog, match.start()) + breaking_changes['api_changes'].append(context) + + # Check for specific patterns + if package_name == 'react': + breaking_changes.update(self._check_react_breaking_changes( + current_version, target_version + )) + elif package_name == 'webpack': + breaking_changes.update(self._check_webpack_breaking_changes( + current_version, target_version + )) + + # Estimate migration effort + breaking_changes['estimated_effort'] = self._estimate_effort(breaking_changes) + + return breaking_changes + + def _check_react_breaking_changes(self, current, target): + """React-specific breaking changes""" + changes = { + 'api_changes': [], + 'migration_required': False + } + + # React 15 to 16 + if current.startswith('15') and target.startswith('16'): + changes['api_changes'].extend([ + 'PropTypes moved to separate package', + 'React.createClass deprecated', + 'String refs deprecated' + ]) + changes['migration_required'] = True + + # React 16 to 17 + elif current.startswith('16') and 
target.startswith('17'): + changes['api_changes'].extend([ + 'Event delegation changes', + 'No event pooling', + 'useEffect cleanup timing changes' + ]) + + # React 17 to 18 + elif current.startswith('17') and target.startswith('18'): + changes['api_changes'].extend([ + 'Automatic batching', + 'Stricter StrictMode', + 'Suspense changes', + 'New root API' + ]) + changes['migration_required'] = True + + return changes +``` + +### 3. Migration Guide Generation + +Create detailed migration guides: + +**Migration Guide Generator** +```python +def generate_migration_guide(package_name, current_version, target_version, breaking_changes): + """ + Generate step-by-step migration guide + """ + guide = f""" +# Migration Guide: {package_name} {current_version} → {target_version} + +## Overview +This guide will help you upgrade {package_name} from version {current_version} to {target_version}. + +**Estimated time**: {estimate_migration_time(breaking_changes)} +**Risk level**: {assess_risk_level(breaking_changes)} +**Breaking changes**: {len(breaking_changes['api_changes'])} + +## Pre-Migration Checklist + +- [ ] Current test suite passing +- [ ] Backup created / Git commit point marked +- [ ] Dependencies compatibility checked +- [ ] Team notified of upgrade + +## Migration Steps + +### Step 1: Update Dependencies + +```bash +# Create a new branch +git checkout -b upgrade/{package_name}-{target_version} + +# Update package +npm install {package_name}@{target_version} + +# Update peer dependencies if needed +{generate_peer_deps_commands(package_name, target_version)} +``` + +### Step 2: Address Breaking Changes + +{generate_breaking_change_fixes(breaking_changes)} + +### Step 3: Update Code Patterns + +{generate_code_updates(package_name, current_version, target_version)} + +### Step 4: Run Codemods (if available) + +{generate_codemod_commands(package_name, target_version)} + +### Step 5: Test & Verify + +```bash +# Run linter to catch issues +npm run lint + +# Run tests +npm 
test + +# Run type checking +npm run type-check + +# Manual testing checklist +``` + +{generate_test_checklist(package_name, breaking_changes)} + +### Step 6: Performance Validation + +{generate_performance_checks(package_name)} + +## Rollback Plan + +If issues arise, follow these steps to rollback: + +```bash +# Revert package version +git checkout package.json package-lock.json +npm install + +# Or use the backup branch +git checkout main +git branch -D upgrade/{package_name}-{target_version} +``` + +## Common Issues & Solutions + +{generate_common_issues(package_name, target_version)} + +## Resources + +- [Official Migration Guide]({get_official_guide_url(package_name, target_version)}) +- [Changelog]({get_changelog_url(package_name, target_version)}) +- [Community Discussions]({get_community_url(package_name)}) +""" + + return guide +``` + +### 4. Incremental Upgrade Strategy + +Plan safe incremental upgrades: + +**Incremental Upgrade Planner** +```python +class IncrementalUpgrader: + def plan_incremental_upgrade(self, package_name, current, target): + """ + Plan incremental upgrade path + """ + # Get all versions between current and target + all_versions = self._get_versions_between(package_name, current, target) + + # Identify safe stopping points + safe_versions = self._identify_safe_versions(all_versions) + + # Create upgrade path + upgrade_path = self._create_upgrade_path(current, target, safe_versions) + + plan = f""" +## Incremental Upgrade Plan: {package_name} + +### Current State +- Version: {current} +- Target: {target} +- Total steps: {len(upgrade_path)} + +### Upgrade Path + +""" + for i, step in enumerate(upgrade_path, 1): + plan += f""" +#### Step {i}: Upgrade to {step['version']} + +**Risk Level**: {step['risk_level']} +**Breaking Changes**: {step['breaking_changes']} + +```bash +# Upgrade command +npm install {package_name}@{step['version']} + +# Test command +npm test -- --updateSnapshot + +# Verification +npm run integration-tests +``` + 
+**Key Changes**: +{self._summarize_changes(step)} + +**Testing Focus**: +{self._get_test_focus(step)} + +--- +""" + + return plan + + def _identify_safe_versions(self, versions): + """Identify safe intermediate versions""" + safe_versions = [] + + for v in versions: + # Safe versions are typically: + # - Last patch of each minor version + # - Versions with long stability period + # - Versions before major API changes + if (self._is_last_patch(v, versions) or + self._has_stability_period(v) or + self._is_pre_breaking_change(v)): + safe_versions.append(v) + + return safe_versions +``` + +### 5. Automated Testing Strategy + +Ensure upgrades don't break functionality: + +**Upgrade Test Suite** +```javascript +// upgrade-tests.js +const { runUpgradeTests } = require('./upgrade-test-framework'); + +async function testDependencyUpgrade(packageName, targetVersion) { + const testSuite = { + preUpgrade: async () => { + // Capture baseline + const baseline = { + unitTests: await runTests('unit'), + integrationTests: await runTests('integration'), + e2eTests: await runTests('e2e'), + performance: await capturePerformanceMetrics(), + bundleSize: await measureBundleSize() + }; + + return baseline; + }, + + postUpgrade: async (baseline) => { + // Run same tests after upgrade + const results = { + unitTests: await runTests('unit'), + integrationTests: await runTests('integration'), + e2eTests: await runTests('e2e'), + performance: await capturePerformanceMetrics(), + bundleSize: await measureBundleSize() + }; + + // Compare results + const comparison = compareResults(baseline, results); + + return { + passed: comparison.passed, + failures: comparison.failures, + regressions: comparison.regressions, + improvements: comparison.improvements + }; + }, + + smokeTests: [ + async () => { + // Critical path testing + await testCriticalUserFlows(); + }, + async () => { + // API compatibility + await testAPICompatibility(); + }, + async () => { + // Build process + await 
testBuildProcess(); + } + ] + }; + + return runUpgradeTests(testSuite); +} +``` + +### 6. Compatibility Matrix + +Check compatibility across dependencies: + +**Compatibility Checker** +```python +def generate_compatibility_matrix(dependencies): + """ + Generate compatibility matrix for dependencies + """ + matrix = {} + + for dep_name, dep_info in dependencies.items(): + matrix[dep_name] = { + 'current': dep_info['current'], + 'target': dep_info['latest'], + 'compatible_with': check_compatibility(dep_name, dep_info['latest']), + 'conflicts': find_conflicts(dep_name, dep_info['latest']), + 'peer_requirements': get_peer_requirements(dep_name, dep_info['latest']) + } + + # Generate report + report = """ +## Dependency Compatibility Matrix + +| Package | Current | Target | Compatible With | Conflicts | Action Required | +|---------|---------|--------|-----------------|-----------|-----------------| +""" + + for pkg, info in matrix.items(): + compatible = '✅' if not info['conflicts'] else '⚠️' + conflicts = ', '.join(info['conflicts']) if info['conflicts'] else 'None' + action = 'Safe to upgrade' if not info['conflicts'] else 'Resolve conflicts first' + + report += f"| {pkg} | {info['current']} | {info['target']} | {compatible} | {conflicts} | {action} |\n" + + return report + +def check_compatibility(package_name, version): + """Check what this package is compatible with""" + # Check package.json or requirements.txt + peer_deps = get_peer_dependencies(package_name, version) + compatible_packages = [] + + for peer_pkg, peer_version_range in peer_deps.items(): + if is_installed(peer_pkg): + current_peer_version = get_installed_version(peer_pkg) + if satisfies_version_range(current_peer_version, peer_version_range): + compatible_packages.append(f"{peer_pkg}@{current_peer_version}") + + return compatible_packages +``` + +### 7. 
Rollback Strategy + +Implement safe rollback procedures: + +**Rollback Manager** +```bash +#!/bin/bash +# rollback-dependencies.sh + +# Create rollback point +create_rollback_point() { + echo "📌 Creating rollback point..." + + # Save current state + cp package.json package.json.backup + cp package-lock.json package-lock.json.backup + + # Git tag + git tag -a "pre-upgrade-$(date +%Y%m%d-%H%M%S)" -m "Pre-upgrade snapshot" + + # Database snapshot if needed + if [ -f "database-backup.sh" ]; then + ./database-backup.sh + fi + + echo "✅ Rollback point created" +} + +# Perform rollback +rollback() { + echo "🔄 Performing rollback..." + + # Restore package files + mv package.json.backup package.json + mv package-lock.json.backup package-lock.json + + # Reinstall dependencies + rm -rf node_modules + npm ci + + # Run post-rollback tests + npm test + + echo "✅ Rollback complete" +} + +# Verify rollback +verify_rollback() { + echo "🔍 Verifying rollback..." + + # Check critical functionality + npm run test:critical + + # Check service health + curl -f http://localhost:3000/health || exit 1 + + echo "✅ Rollback verified" +} +``` + +### 8. 
Batch Update Strategy + +Handle multiple updates efficiently: + +**Batch Update Planner** +```python +def plan_batch_updates(dependencies): + """ + Plan efficient batch updates + """ + # Group by update type + groups = { + 'patch': [], + 'minor': [], + 'major': [], + 'security': [] + } + + for dep, info in dependencies.items(): + if info.get('has_security_vulnerability'): + groups['security'].append(dep) + else: + groups[info['update_type']].append(dep) + + # Create update batches + batches = [] + + # Batch 1: Security updates (immediate) + if groups['security']: + batches.append({ + 'priority': 'CRITICAL', + 'name': 'Security Updates', + 'packages': groups['security'], + 'strategy': 'immediate', + 'testing': 'full' + }) + + # Batch 2: Patch updates (safe) + if groups['patch']: + batches.append({ + 'priority': 'HIGH', + 'name': 'Patch Updates', + 'packages': groups['patch'], + 'strategy': 'grouped', + 'testing': 'smoke' + }) + + # Batch 3: Minor updates (careful) + if groups['minor']: + batches.append({ + 'priority': 'MEDIUM', + 'name': 'Minor Updates', + 'packages': groups['minor'], + 'strategy': 'incremental', + 'testing': 'regression' + }) + + # Batch 4: Major updates (planned) + if groups['major']: + batches.append({ + 'priority': 'LOW', + 'name': 'Major Updates', + 'packages': groups['major'], + 'strategy': 'individual', + 'testing': 'comprehensive' + }) + + return generate_batch_plan(batches) +``` + +### 9. 
Framework-Specific Upgrades + +Handle framework upgrades: + +**Framework Upgrade Guides** +```python +framework_upgrades = { + 'angular': { + 'upgrade_command': 'ng update', + 'pre_checks': [ + 'ng update @angular/core@{version} --dry-run', + 'npm audit', + 'ng lint' + ], + 'post_upgrade': [ + 'ng update @angular/cli', + 'npm run test', + 'npm run e2e' + ], + 'common_issues': { + 'ivy_renderer': 'Enable Ivy in tsconfig.json', + 'strict_mode': 'Update TypeScript configurations', + 'deprecated_apis': 'Use Angular migration schematics' + } + }, + 'react': { + 'upgrade_command': 'npm install react@{version} react-dom@{version}', + 'codemods': [ + 'npx react-codemod rename-unsafe-lifecycles', + 'npx react-codemod error-boundaries' + ], + 'verification': [ + 'npm run build', + 'npm test -- --coverage', + 'npm run analyze-bundle' + ] + }, + 'vue': { + 'upgrade_command': 'npm install vue@{version}', + 'migration_tool': 'npx @vue/migration-tool', + 'breaking_changes': { + '2_to_3': [ + 'Composition API', + 'Multiple root elements', + 'Teleport component', + 'Fragments' + ] + } + } +} +``` + +### 10. 
Post-Upgrade Monitoring + +Monitor application after upgrades: + +```javascript +// post-upgrade-monitoring.js +const monitoring = { + metrics: { + performance: { + 'page_load_time': { threshold: 3000, unit: 'ms' }, + 'api_response_time': { threshold: 500, unit: 'ms' }, + 'memory_usage': { threshold: 512, unit: 'MB' } + }, + errors: { + 'error_rate': { threshold: 0.01, unit: '%' }, + 'console_errors': { threshold: 0, unit: 'count' } + }, + bundle: { + 'size': { threshold: 5, unit: 'MB' }, + 'gzip_size': { threshold: 1.5, unit: 'MB' } + } + }, + + checkHealth: async function() { + const results = {}; + + for (const [category, metrics] of Object.entries(this.metrics)) { + results[category] = {}; + + for (const [metric, config] of Object.entries(metrics)) { + const value = await this.measureMetric(metric); + results[category][metric] = { + value, + threshold: config.threshold, + unit: config.unit, + status: value <= config.threshold ? 'PASS' : 'FAIL' + }; + } + } + + return results; + }, + + generateReport: function(results) { + let report = '## Post-Upgrade Health Check\n\n'; + + for (const [category, metrics] of Object.entries(results)) { + report += `### ${category}\n\n`; + report += '| Metric | Value | Threshold | Status |\n'; + report += '|--------|-------|-----------|--------|\n'; + + for (const [metric, data] of Object.entries(metrics)) { + const status = data.status === 'PASS' ? '✅' : '❌'; + report += `| ${metric} | ${data.value}${data.unit} | ${data.threshold}${data.unit} | ${status} |\n`; + } + + report += '\n'; + } + + return report; + } +}; +``` + +## Output Format + +1. **Upgrade Overview**: Summary of available updates with risk assessment +2. **Priority Matrix**: Ordered list of updates by importance and safety +3. **Migration Guides**: Step-by-step guides for each major upgrade +4. **Compatibility Report**: Dependency compatibility analysis +5. **Test Strategy**: Automated tests for validating upgrades +6. 
**Rollback Plan**: Clear procedures for reverting if needed +7. **Monitoring Dashboard**: Post-upgrade health metrics +8. **Timeline**: Realistic schedule for implementing upgrades + +Focus on safe, incremental upgrades that maintain system stability while keeping dependencies current and secure. \ No newline at end of file diff --git a/commands/legacy-modernize.md b/commands/legacy-modernize.md new file mode 100644 index 0000000..ab0491c --- /dev/null +++ b/commands/legacy-modernize.md @@ -0,0 +1,110 @@ +# Legacy Code Modernization Workflow + +Orchestrate a comprehensive legacy system modernization using the strangler fig pattern, enabling gradual replacement of outdated components while maintaining continuous business operations through expert agent coordination. + +[Extended thinking: The strangler fig pattern, named after the tropical fig tree that gradually envelops and replaces its host, represents the gold standard for risk-managed legacy modernization. This workflow implements a systematic approach where new functionality gradually replaces legacy components, allowing both systems to coexist during transition. By orchestrating specialized agents for assessment, testing, security, and implementation, we ensure each migration phase is validated before proceeding, minimizing disruption while maximizing modernization velocity.] + +## Phase 1: Legacy Assessment and Risk Analysis + +### 1. Comprehensive Legacy System Analysis +- Use Task tool with subagent_type="legacy-modernizer" +- Prompt: "Analyze the legacy codebase at $ARGUMENTS. Document technical debt inventory including: outdated dependencies, deprecated APIs, security vulnerabilities, performance bottlenecks, and architectural anti-patterns. Generate a modernization readiness report with component complexity scores (1-10), dependency mapping, and database coupling analysis. Identify quick wins vs complex refactoring targets." 
+- Expected output: Detailed assessment report with risk matrix and modernization priorities + +### 2. Dependency and Integration Mapping +- Use Task tool with subagent_type="architect-review" +- Prompt: "Based on the legacy assessment report, create a comprehensive dependency graph showing: internal module dependencies, external service integrations, shared database schemas, and cross-system data flows. Identify integration points that will require facade patterns or adapter layers during migration. Highlight circular dependencies and tight coupling that need resolution." +- Context from previous: Legacy assessment report, component complexity scores +- Expected output: Visual dependency map and integration point catalog + +### 3. Business Impact and Risk Assessment +- Use Task tool with subagent_type="business-analytics::business-analyst" +- Prompt: "Evaluate business impact of modernizing each component identified. Create risk assessment matrix considering: business criticality (revenue impact), user traffic patterns, data sensitivity, regulatory requirements, and fallback complexity. Prioritize components using a weighted scoring system: (Business Value × 0.4) + (Technical Risk × 0.3) + (Quick Win Potential × 0.3). Define rollback strategies for each component." +- Context from previous: Component inventory, dependency mapping +- Expected output: Prioritized migration roadmap with risk mitigation strategies + +## Phase 2: Test Coverage Establishment + +### 1. Legacy Code Test Coverage Analysis +- Use Task tool with subagent_type="unit-testing::test-automator" +- Prompt: "Analyze existing test coverage for legacy components at $ARGUMENTS. Use coverage tools to identify untested code paths, missing integration tests, and absent end-to-end scenarios. For components with <40% coverage, generate characterization tests that capture current behavior without modifying functionality. Create test harness for safe refactoring." 
+- Expected output: Test coverage report and characterization test suite + +### 2. Contract Testing Implementation +- Use Task tool with subagent_type="unit-testing::test-automator" +- Prompt: "Implement contract tests for all integration points identified in dependency mapping. Create consumer-driven contracts for APIs, message queue interactions, and database schemas. Set up contract verification in CI/CD pipeline. Generate performance baselines for response times and throughput to validate modernized components maintain SLAs." +- Context from previous: Integration point catalog, existing test coverage +- Expected output: Contract test suite with performance baselines + +### 3. Test Data Management Strategy +- Use Task tool with subagent_type="data-engineering::data-engineer" +- Prompt: "Design test data management strategy for parallel system operation. Create data generation scripts for edge cases, implement data masking for sensitive information, and establish test database refresh procedures. Set up monitoring for data consistency between legacy and modernized components during migration." +- Context from previous: Database schemas, test requirements +- Expected output: Test data pipeline and consistency monitoring + +## Phase 3: Incremental Migration Implementation + +### 1. Strangler Fig Infrastructure Setup +- Use Task tool with subagent_type="backend-development::backend-architect" +- Prompt: "Implement strangler fig infrastructure with API gateway for traffic routing. Configure feature flags for gradual rollout using environment variables or feature management service. Set up proxy layer with request routing rules based on: URL patterns, headers, or user segments. Implement circuit breakers and fallback mechanisms for resilience. Create observability dashboard for dual-system monitoring." +- Expected output: API gateway configuration, feature flag system, monitoring dashboard + +### 2. 
Component Modernization - First Wave +- Use Task tool with subagent_type="python-development::python-pro" or "golang-pro" (based on target stack) +- Prompt: "Modernize first-wave components (quick wins identified in assessment). For each component: extract business logic from legacy code, implement using modern patterns (dependency injection, SOLID principles), ensure backward compatibility through adapter patterns, maintain data consistency with event sourcing or dual writes. Follow 12-factor app principles. Components to modernize: [list from prioritized roadmap]" +- Context from previous: Characterization tests, contract tests, infrastructure setup +- Expected output: Modernized components with adapters + +### 3. Security Hardening +- Use Task tool with subagent_type="security-scanning::security-auditor" +- Prompt: "Audit modernized components for security vulnerabilities. Implement security improvements including: OAuth 2.0/JWT authentication, role-based access control, input validation and sanitization, SQL injection prevention, XSS protection, and secrets management. Verify OWASP top 10 compliance. Configure security headers and implement rate limiting." +- Context from previous: Modernized component code +- Expected output: Security audit report and hardened components + +## Phase 4: Performance Validation and Optimization + +### 1. Performance Testing and Optimization +- Use Task tool with subagent_type="application-performance::performance-engineer" +- Prompt: "Conduct performance testing comparing legacy vs modernized components. Run load tests simulating production traffic patterns, measure response times, throughput, and resource utilization. Identify performance regressions and optimize: database queries with indexing, caching strategies (Redis/Memcached), connection pooling, and async processing where applicable. Validate against SLA requirements." 
+- Context from previous: Performance baselines, modernized components +- Expected output: Performance test results and optimization recommendations + +### 2. Progressive Rollout and Monitoring +- Use Task tool with subagent_type="deployment-strategies::deployment-engineer" +- Prompt: "Implement progressive rollout strategy using feature flags. Start with 5% traffic to modernized components, monitor error rates, latency, and business metrics. Define automatic rollback triggers: error rate >1%, latency >2x baseline, or business metric degradation. Create runbook for traffic shifting: 5% → 25% → 50% → 100% with 24-hour observation periods." +- Context from previous: Feature flag configuration, monitoring dashboard +- Expected output: Rollout plan with automated safeguards + +## Phase 5: Migration Completion and Documentation + +### 1. Legacy Component Decommissioning +- Use Task tool with subagent_type="legacy-modernizer" +- Prompt: "Plan safe decommissioning of replaced legacy components. Verify no remaining dependencies through traffic analysis (minimum 30 days at 0% traffic). Archive legacy code with documentation of original functionality. Update CI/CD pipelines to remove legacy builds. Clean up unused database tables and remove deprecated API endpoints. Document any retained legacy components with sunset timeline." +- Context from previous: Traffic routing data, modernization status +- Expected output: Decommissioning checklist and timeline + +### 2. Documentation and Knowledge Transfer +- Use Task tool with subagent_type="documentation-generation::docs-architect" +- Prompt: "Create comprehensive modernization documentation including: architectural diagrams (before/after), API documentation with migration guides, runbooks for dual-system operation, troubleshooting guides for common issues, and lessons learned report. Generate developer onboarding guide for modernized system. Document technical decisions and trade-offs made during migration." 
+- Context from previous: All migration artifacts and decisions +- Expected output: Complete modernization documentation package + +## Configuration Options + +- **--parallel-systems**: Keep both systems running indefinitely (for gradual migration) +- **--big-bang**: Full cutover after validation (higher risk, faster completion) +- **--by-feature**: Migrate complete features rather than technical components +- **--database-first**: Prioritize database modernization before application layer +- **--api-first**: Modernize API layer while maintaining legacy backend + +## Success Criteria + +- All high-priority components modernized with >80% test coverage +- Zero unplanned downtime during migration +- Performance metrics maintained or improved (P95 latency within 110% of baseline) +- Security vulnerabilities reduced by >90% +- Technical debt score improved by >60% +- Successful operation for 30 days post-migration without rollbacks +- Complete documentation enabling new developer onboarding in <1 week + +Target: $ARGUMENTS \ No newline at end of file diff --git a/plugin.lock.json b/plugin.lock.json new file mode 100644 index 0000000..dfcfe24 --- /dev/null +++ b/plugin.lock.json @@ -0,0 +1,77 @@ +{ + "$schema": "internal://schemas/plugin.lock.v1.json", + "pluginId": "gh:HermeticOrmus/Alqvimia-Contador:plugins/framework-migration", + "normalized": { + "repo": null, + "ref": "refs/tags/v20251128.0", + "commit": "bacc0b8fdc5904afbfb54a21d5d1901d841cdfb8", + "treeHash": "2a84bdb5537f5a7b55bdb35674004f04b5de3c8bf530d1a973ec45f07a5ad5c5", + "generatedAt": "2025-11-28T10:10:39.360260Z", + "toolVersion": "publish_plugins.py@0.2.0" + }, + "origin": { + "remote": "git@github.com:zhongweili/42plugin-data.git", + "branch": "master", + "commit": "aa1497ed0949fd50e99e70d6324a29c5b34f9390", + "repoRoot": "/Users/zhongweili/projects/openmind/42plugin-data" + }, + "manifest": { + "name": "framework-migration", + "description": "Framework updates, migration planning, and architectural 
transformation workflows", + "version": "1.2.2" + }, + "content": { + "files": [ + { + "path": "README.md", + "sha256": "16ebc1777677a97bb5ad00dfa6ebd01fa5f4ba0438c74c4765d9325e1996a850" + }, + { + "path": "agents/legacy-modernizer.md", + "sha256": "ae2a786d8b1440b165efaa1e66fd4260e348e99dcbffc896a20e4529bd63c186" + }, + { + "path": "agents/architect-review.md", + "sha256": "6a6233381a800591833f22f568bc009eeb63b779222f97c62eef1dbbe5bbf125" + }, + { + "path": ".claude-plugin/plugin.json", + "sha256": "20c9a5dd3d125c28e38565b20ee5520bdad51ab1b5a6e69e839d8e5788a02b2a" + }, + { + "path": "commands/deps-upgrade.md", + "sha256": "6c013368f829ce1cdeeda733ed2644ceec6f0da81e78c7a9be911e4ca15d01fd" + }, + { + "path": "commands/code-migrate.md", + "sha256": "5db4c37bcd822ce296441092beb223554c4eca9a0e2c0e2a8b206253d940a1ab" + }, + { + "path": "commands/legacy-modernize.md", + "sha256": "072444ef599d7a7ae5164b6904aaa85c7e32bd7f8711b0142770900d147da9bb" + }, + { + "path": "skills/angular-migration/SKILL.md", + "sha256": "6b2236a0fe3661e76f9c10cdf4c2bd267d9059891a1e3bc06a1600c048bd9ea6" + }, + { + "path": "skills/react-modernization/SKILL.md", + "sha256": "d8be354a91a45471eb56fbfe4f15f7928e5614847f5a43ff818ba3125cc0c934" + }, + { + "path": "skills/dependency-upgrade/SKILL.md", + "sha256": "467079afe93fc912768c5662034f9d60dfaa005045aa0be930f3811185509223" + }, + { + "path": "skills/database-migration/SKILL.md", + "sha256": "9e532a59a7c58a96c39c4d36e2635db959db3602d71f8794563da37ac0d33368" + } + ], + "dirSha256": "2a84bdb5537f5a7b55bdb35674004f04b5de3c8bf530d1a973ec45f07a5ad5c5" + }, + "security": { + "scannedAt": null, + "scannerVersion": null, + "flags": [] + } +} \ No newline at end of file diff --git a/skills/angular-migration/SKILL.md b/skills/angular-migration/SKILL.md new file mode 100644 index 0000000..11a8327 --- /dev/null +++ b/skills/angular-migration/SKILL.md @@ -0,0 +1,410 @@ +--- +name: angular-migration +description: Migrate from AngularJS to Angular using hybrid 
mode, incremental component rewriting, and dependency injection updates. Use when upgrading AngularJS applications, planning framework migrations, or modernizing legacy Angular code. +--- + +# Angular Migration + +Master AngularJS to Angular migration, including hybrid apps, component conversion, dependency injection changes, and routing migration. + +## When to Use This Skill + +- Migrating AngularJS (1.x) applications to Angular (2+) +- Running hybrid AngularJS/Angular applications +- Converting directives to components +- Modernizing dependency injection +- Migrating routing systems +- Updating to latest Angular versions +- Implementing Angular best practices + +## Migration Strategies + +### 1. Big Bang (Complete Rewrite) +- Rewrite entire app in Angular +- Parallel development +- Switch over at once +- **Best for:** Small apps, green field projects + +### 2. Incremental (Hybrid Approach) +- Run AngularJS and Angular side-by-side +- Migrate feature by feature +- ngUpgrade for interop +- **Best for:** Large apps, continuous delivery + +### 3. 
Vertical Slice +- Migrate one feature completely +- New features in Angular, maintain old in AngularJS +- Gradually replace +- **Best for:** Medium apps, distinct features + +## Hybrid App Setup + +```typescript +// main.ts - Bootstrap hybrid app +import { platformBrowserDynamic } from '@angular/platform-browser-dynamic'; +import { UpgradeModule } from '@angular/upgrade/static'; +import { AppModule } from './app/app.module'; + +platformBrowserDynamic() + .bootstrapModule(AppModule) + .then(platformRef => { + const upgrade = platformRef.injector.get(UpgradeModule); + // Bootstrap AngularJS + upgrade.bootstrap(document.body, ['myAngularJSApp'], { strictDi: true }); + }); +``` + +```typescript +// app.module.ts +import { NgModule } from '@angular/core'; +import { BrowserModule } from '@angular/platform-browser'; +import { UpgradeModule } from '@angular/upgrade/static'; + +@NgModule({ + imports: [ + BrowserModule, + UpgradeModule + ] +}) +export class AppModule { + constructor(private upgrade: UpgradeModule) {} + + ngDoBootstrap() { + // Bootstrapped manually in main.ts + } +} +``` + +## Component Migration + +### AngularJS Controller → Angular Component +```javascript +// Before: AngularJS controller +angular.module('myApp').controller('UserController', function($scope, UserService) { + $scope.user = {}; + + $scope.loadUser = function(id) { + UserService.getUser(id).then(function(user) { + $scope.user = user; + }); + }; + + $scope.saveUser = function() { + UserService.saveUser($scope.user); + }; +}); +``` + +```typescript +// After: Angular component +import { Component, OnInit } from '@angular/core'; +import { UserService } from './user.service'; + +@Component({ + selector: 'app-user', + template: ` +
+    <div>
+      <h2>{{ user.name }}</h2>
+      <button (click)="saveUser()">Save</button>
+    </div>
+ ` +}) +export class UserComponent implements OnInit { + user: any = {}; + + constructor(private userService: UserService) {} + + ngOnInit() { + this.loadUser(1); + } + + loadUser(id: number) { + this.userService.getUser(id).subscribe(user => { + this.user = user; + }); + } + + saveUser() { + this.userService.saveUser(this.user); + } +} +``` + +### AngularJS Directive → Angular Component +```javascript +// Before: AngularJS directive +angular.module('myApp').directive('userCard', function() { + return { + restrict: 'E', + scope: { + user: '=', + onDelete: '&' + }, + template: ` +
+      <div class="user-card">
+        <h3>{{ user.name }}</h3>
+        <button ng-click="onDelete()">Delete</button>
+      </div>
+ ` + }; +}); +``` + +```typescript +// After: Angular component +import { Component, Input, Output, EventEmitter } from '@angular/core'; + +@Component({ + selector: 'app-user-card', + template: ` +
+    <div class="user-card">
+      <h3>{{ user.name }}</h3>
+      <button (click)="delete.emit(user.id)">Delete</button>
+    </div>
+ ` +}) +export class UserCardComponent { + @Input() user: any; + @Output() delete = new EventEmitter(); +} + +// Usage: +``` + +## Service Migration + +```javascript +// Before: AngularJS service +angular.module('myApp').factory('UserService', function($http) { + return { + getUser: function(id) { + return $http.get('/api/users/' + id); + }, + saveUser: function(user) { + return $http.post('/api/users', user); + } + }; +}); +``` + +```typescript +// After: Angular service +import { Injectable } from '@angular/core'; +import { HttpClient } from '@angular/common/http'; +import { Observable } from 'rxjs'; + +@Injectable({ + providedIn: 'root' +}) +export class UserService { + constructor(private http: HttpClient) {} + + getUser(id: number): Observable { + return this.http.get(`/api/users/${id}`); + } + + saveUser(user: any): Observable { + return this.http.post('/api/users', user); + } +} +``` + +## Dependency Injection Changes + +### Downgrading Angular → AngularJS +```typescript +// Angular service +import { Injectable } from '@angular/core'; + +@Injectable({ providedIn: 'root' }) +export class NewService { + getData() { + return 'data from Angular'; + } +} + +// Make available to AngularJS +import { downgradeInjectable } from '@angular/upgrade/static'; + +angular.module('myApp') + .factory('newService', downgradeInjectable(NewService)); + +// Use in AngularJS +angular.module('myApp').controller('OldController', function(newService) { + console.log(newService.getData()); +}); +``` + +### Upgrading AngularJS → Angular +```typescript +// AngularJS service +angular.module('myApp').factory('oldService', function() { + return { + getData: function() { + return 'data from AngularJS'; + } + }; +}); + +// Make available to Angular +import { InjectionToken } from '@angular/core'; + +export const OLD_SERVICE = new InjectionToken('oldService'); + +@NgModule({ + providers: [ + { + provide: OLD_SERVICE, + useFactory: (i: any) => i.get('oldService'), + deps: ['$injector'] + } + 
] +}) + +// Use in Angular +@Component({...}) +export class NewComponent { + constructor(@Inject(OLD_SERVICE) private oldService: any) { + console.log(this.oldService.getData()); + } +} +``` + +## Routing Migration + +```javascript +// Before: AngularJS routing +angular.module('myApp').config(function($routeProvider) { + $routeProvider + .when('/users', { + template: '' + }) + .when('/users/:id', { + template: '' + }); +}); +``` + +```typescript +// After: Angular routing +import { NgModule } from '@angular/core'; +import { RouterModule, Routes } from '@angular/router'; + +const routes: Routes = [ + { path: 'users', component: UserListComponent }, + { path: 'users/:id', component: UserDetailComponent } +]; + +@NgModule({ + imports: [RouterModule.forRoot(routes)], + exports: [RouterModule] +}) +export class AppRoutingModule {} +``` + +## Forms Migration + +```html + +
+<form name="userForm" ng-submit="saveUser()">
+  <input type="text" ng-model="user.name" required>
+  <input type="email" ng-model="user.email" required>
+  <button type="submit" ng-disabled="userForm.$invalid">Save</button>
+</form>
+``` + +```typescript +// After: Angular (Template-driven) +@Component({ + template: ` +
+    <form #userForm="ngForm" (ngSubmit)="saveUser()">
+      <input type="text" name="name" [(ngModel)]="user.name" required>
+      <input type="email" name="email" [(ngModel)]="user.email" required>
+      <button type="submit" [disabled]="userForm.invalid">Save</button>
+    </form>
+ ` +}) + +// Or Reactive Forms (preferred) +import { FormBuilder, FormGroup, Validators } from '@angular/forms'; + +@Component({ + template: ` +
+    <form [formGroup]="userForm" (ngSubmit)="saveUser()">
+      <input formControlName="name" placeholder="Name">
+      <input formControlName="email" placeholder="Email">
+      <button type="submit" [disabled]="userForm.invalid">Save</button>
+    </form>
+ ` +}) +export class UserFormComponent { + userForm: FormGroup; + + constructor(private fb: FormBuilder) { + this.userForm = this.fb.group({ + name: ['', Validators.required], + email: ['', [Validators.required, Validators.email]] + }); + } + + saveUser() { + console.log(this.userForm.value); + } +} +``` + +## Migration Timeline + +``` +Phase 1: Setup (1-2 weeks) +- Install Angular CLI +- Set up hybrid app +- Configure build tools +- Set up testing + +Phase 2: Infrastructure (2-4 weeks) +- Migrate services +- Migrate utilities +- Set up routing +- Migrate shared components + +Phase 3: Feature Migration (varies) +- Migrate feature by feature +- Test thoroughly +- Deploy incrementally + +Phase 4: Cleanup (1-2 weeks) +- Remove AngularJS code +- Remove ngUpgrade +- Optimize bundle +- Final testing +``` + +## Resources + +- **references/hybrid-mode.md**: Hybrid app patterns +- **references/component-migration.md**: Component conversion guide +- **references/dependency-injection.md**: DI migration strategies +- **references/routing.md**: Routing migration +- **assets/hybrid-bootstrap.ts**: Hybrid app template +- **assets/migration-timeline.md**: Project planning +- **scripts/analyze-angular-app.sh**: App analysis script + +## Best Practices + +1. **Start with Services**: Migrate services first (easier) +2. **Incremental Approach**: Feature-by-feature migration +3. **Test Continuously**: Test at every step +4. **Use TypeScript**: Migrate to TypeScript early +5. **Follow Style Guide**: Angular style guide from day 1 +6. **Optimize Later**: Get it working, then optimize +7. 
**Document**: Keep migration notes + +## Common Pitfalls + +- Not setting up hybrid app correctly +- Migrating UI before logic +- Ignoring change detection differences +- Not handling scope properly +- Mixing patterns (AngularJS + Angular) +- Inadequate testing diff --git a/skills/database-migration/SKILL.md b/skills/database-migration/SKILL.md new file mode 100644 index 0000000..2bb7ac5 --- /dev/null +++ b/skills/database-migration/SKILL.md @@ -0,0 +1,424 @@ +--- +name: database-migration +description: Execute database migrations across ORMs and platforms with zero-downtime strategies, data transformation, and rollback procedures. Use when migrating databases, changing schemas, performing data transformations, or implementing zero-downtime deployment strategies. +--- + +# Database Migration + +Master database schema and data migrations across ORMs (Sequelize, TypeORM, Prisma), including rollback strategies and zero-downtime deployments. + +## When to Use This Skill + +- Migrating between different ORMs +- Performing schema transformations +- Moving data between databases +- Implementing rollback procedures +- Zero-downtime deployments +- Database version upgrades +- Data model refactoring + +## ORM Migrations + +### Sequelize Migrations +```javascript +// migrations/20231201-create-users.js +module.exports = { + up: async (queryInterface, Sequelize) => { + await queryInterface.createTable('users', { + id: { + type: Sequelize.INTEGER, + primaryKey: true, + autoIncrement: true + }, + email: { + type: Sequelize.STRING, + unique: true, + allowNull: false + }, + createdAt: Sequelize.DATE, + updatedAt: Sequelize.DATE + }); + }, + + down: async (queryInterface, Sequelize) => { + await queryInterface.dropTable('users'); + } +}; + +// Run: npx sequelize-cli db:migrate +// Rollback: npx sequelize-cli db:migrate:undo +``` + +### TypeORM Migrations +```typescript +// migrations/1701234567-CreateUsers.ts +import { MigrationInterface, QueryRunner, Table } from 'typeorm'; + 
+export class CreateUsers1701234567 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.createTable( + new Table({ + name: 'users', + columns: [ + { + name: 'id', + type: 'int', + isPrimary: true, + isGenerated: true, + generationStrategy: 'increment' + }, + { + name: 'email', + type: 'varchar', + isUnique: true + }, + { + name: 'created_at', + type: 'timestamp', + default: 'CURRENT_TIMESTAMP' + } + ] + }) + ); + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.dropTable('users'); + } +} + +// Run: npm run typeorm migration:run +// Rollback: npm run typeorm migration:revert +``` + +### Prisma Migrations +```prisma +// schema.prisma +model User { + id Int @id @default(autoincrement()) + email String @unique + createdAt DateTime @default(now()) +} + +// Generate migration: npx prisma migrate dev --name create_users +// Apply: npx prisma migrate deploy +``` + +## Schema Transformations + +### Adding Columns with Defaults +```javascript +// Safe migration: add column with default +module.exports = { + up: async (queryInterface, Sequelize) => { + await queryInterface.addColumn('users', 'status', { + type: Sequelize.STRING, + defaultValue: 'active', + allowNull: false + }); + }, + + down: async (queryInterface) => { + await queryInterface.removeColumn('users', 'status'); + } +}; +``` + +### Renaming Columns (Zero Downtime) +```javascript +// Step 1: Add new column +module.exports = { + up: async (queryInterface, Sequelize) => { + await queryInterface.addColumn('users', 'full_name', { + type: Sequelize.STRING + }); + + // Copy data from old column + await queryInterface.sequelize.query( + 'UPDATE users SET full_name = name' + ); + }, + + down: async (queryInterface) => { + await queryInterface.removeColumn('users', 'full_name'); + } +}; + +// Step 2: Update application to use new column + +// Step 3: Remove old column +module.exports = { + up: async (queryInterface) => { + await 
queryInterface.removeColumn('users', 'name'); + }, + + down: async (queryInterface, Sequelize) => { + await queryInterface.addColumn('users', 'name', { + type: Sequelize.STRING + }); + } +}; +``` + +### Changing Column Types +```javascript +module.exports = { + up: async (queryInterface, Sequelize) => { + // For large tables, use multi-step approach + + // 1. Add new column + await queryInterface.addColumn('users', 'age_new', { + type: Sequelize.INTEGER + }); + + // 2. Copy and transform data + await queryInterface.sequelize.query(` + UPDATE users + SET age_new = CAST(age AS INTEGER) + WHERE age IS NOT NULL + `); + + // 3. Drop old column + await queryInterface.removeColumn('users', 'age'); + + // 4. Rename new column + await queryInterface.renameColumn('users', 'age_new', 'age'); + }, + + down: async (queryInterface, Sequelize) => { + await queryInterface.changeColumn('users', 'age', { + type: Sequelize.STRING + }); + } +}; +``` + +## Data Transformations + +### Complex Data Migration +```javascript +module.exports = { + up: async (queryInterface, Sequelize) => { + // Get all records + const [users] = await queryInterface.sequelize.query( + 'SELECT id, address_string FROM users' + ); + + // Transform each record + for (const user of users) { + const addressParts = user.address_string.split(','); + + await queryInterface.sequelize.query( + `UPDATE users + SET street = :street, + city = :city, + state = :state + WHERE id = :id`, + { + replacements: { + id: user.id, + street: addressParts[0]?.trim(), + city: addressParts[1]?.trim(), + state: addressParts[2]?.trim() + } + } + ); + } + + // Drop old column + await queryInterface.removeColumn('users', 'address_string'); + }, + + down: async (queryInterface, Sequelize) => { + // Reconstruct original column + await queryInterface.addColumn('users', 'address_string', { + type: Sequelize.STRING + }); + + await queryInterface.sequelize.query(` + UPDATE users + SET address_string = CONCAT(street, ', ', city, ', ', state) + 
`); + + await queryInterface.removeColumn('users', 'street'); + await queryInterface.removeColumn('users', 'city'); + await queryInterface.removeColumn('users', 'state'); + } +}; +``` + +## Rollback Strategies + +### Transaction-Based Migrations +```javascript +module.exports = { + up: async (queryInterface, Sequelize) => { + const transaction = await queryInterface.sequelize.transaction(); + + try { + await queryInterface.addColumn( + 'users', + 'verified', + { type: Sequelize.BOOLEAN, defaultValue: false }, + { transaction } + ); + + await queryInterface.sequelize.query( + 'UPDATE users SET verified = true WHERE email_verified_at IS NOT NULL', + { transaction } + ); + + await transaction.commit(); + } catch (error) { + await transaction.rollback(); + throw error; + } + }, + + down: async (queryInterface) => { + await queryInterface.removeColumn('users', 'verified'); + } +}; +``` + +### Checkpoint-Based Rollback +```javascript +module.exports = { + up: async (queryInterface, Sequelize) => { + // Create backup table + await queryInterface.sequelize.query( + 'CREATE TABLE users_backup AS SELECT * FROM users' + ); + + try { + // Perform migration + await queryInterface.addColumn('users', 'new_field', { + type: Sequelize.STRING + }); + + // Verify migration + const [result] = await queryInterface.sequelize.query( + "SELECT COUNT(*) as count FROM users WHERE new_field IS NULL" + ); + + if (result[0].count > 0) { + throw new Error('Migration verification failed'); + } + + // Drop backup + await queryInterface.dropTable('users_backup'); + } catch (error) { + // Restore from backup + await queryInterface.sequelize.query('DROP TABLE users'); + await queryInterface.sequelize.query( + 'CREATE TABLE users AS SELECT * FROM users_backup' + ); + await queryInterface.dropTable('users_backup'); + throw error; + } + } +}; +``` + +## Zero-Downtime Migrations + +### Blue-Green Deployment Strategy +```javascript +// Phase 1: Make changes backward compatible +module.exports = { + up: 
async (queryInterface, Sequelize) => { + // Add new column (both old and new code can work) + await queryInterface.addColumn('users', 'email_new', { + type: Sequelize.STRING + }); + } +}; + +// Phase 2: Deploy code that writes to both columns + +// Phase 3: Backfill data +module.exports = { + up: async (queryInterface) => { + await queryInterface.sequelize.query(` + UPDATE users + SET email_new = email + WHERE email_new IS NULL + `); + } +}; + +// Phase 4: Deploy code that reads from new column + +// Phase 5: Remove old column +module.exports = { + up: async (queryInterface) => { + await queryInterface.removeColumn('users', 'email'); + } +}; +``` + +## Cross-Database Migrations + +### PostgreSQL to MySQL +```javascript +// Handle differences +module.exports = { + up: async (queryInterface, Sequelize) => { + const dialectName = queryInterface.sequelize.getDialect(); + + if (dialectName === 'mysql') { + await queryInterface.createTable('users', { + id: { + type: Sequelize.INTEGER, + primaryKey: true, + autoIncrement: true + }, + data: { + type: Sequelize.JSON // MySQL JSON type + } + }); + } else if (dialectName === 'postgres') { + await queryInterface.createTable('users', { + id: { + type: Sequelize.INTEGER, + primaryKey: true, + autoIncrement: true + }, + data: { + type: Sequelize.JSONB // PostgreSQL JSONB type + } + }); + } + } +}; +``` + +## Resources + +- **references/orm-switching.md**: ORM migration guides +- **references/schema-migration.md**: Schema transformation patterns +- **references/data-transformation.md**: Data migration scripts +- **references/rollback-strategies.md**: Rollback procedures +- **assets/schema-migration-template.sql**: SQL migration templates +- **assets/data-migration-script.py**: Data migration utilities +- **scripts/test-migration.sh**: Migration testing script + +## Best Practices + +1. **Always Provide Rollback**: Every up() needs a down() +2. **Test Migrations**: Test on staging first +3. 
**Use Transactions**: Atomic migrations when possible +4. **Backup First**: Always backup before migration +5. **Small Changes**: Break into small, incremental steps +6. **Monitor**: Watch for errors during deployment +7. **Document**: Explain why and how +8. **Idempotent**: Migrations should be rerunnable + +## Common Pitfalls + +- Not testing rollback procedures +- Making breaking changes without downtime strategy +- Forgetting to handle NULL values +- Not considering index performance +- Ignoring foreign key constraints +- Migrating too much data at once diff --git a/skills/dependency-upgrade/SKILL.md b/skills/dependency-upgrade/SKILL.md new file mode 100644 index 0000000..7ccf637 --- /dev/null +++ b/skills/dependency-upgrade/SKILL.md @@ -0,0 +1,409 @@ +--- +name: dependency-upgrade +description: Manage major dependency version upgrades with compatibility analysis, staged rollout, and comprehensive testing. Use when upgrading framework versions, updating major dependencies, or managing breaking changes in libraries. +--- + +# Dependency Upgrade + +Master major dependency version upgrades, compatibility analysis, staged upgrade strategies, and comprehensive testing approaches. 
+ +## When to Use This Skill + +- Upgrading major framework versions +- Updating security-vulnerable dependencies +- Modernizing legacy dependencies +- Resolving dependency conflicts +- Planning incremental upgrade paths +- Testing compatibility matrices +- Automating dependency updates + +## Semantic Versioning Review + +``` +MAJOR.MINOR.PATCH (e.g., 2.3.1) + +MAJOR: Breaking changes +MINOR: New features, backward compatible +PATCH: Bug fixes, backward compatible + +^2.3.1 = >=2.3.1 <3.0.0 (minor updates) +~2.3.1 = >=2.3.1 <2.4.0 (patch updates) +2.3.1 = exact version +``` + +## Dependency Analysis + +### Audit Dependencies +```bash +# npm +npm outdated +npm audit +npm audit fix + +# yarn +yarn outdated +yarn audit + +# Check for major updates +npx npm-check-updates +npx npm-check-updates -u # Update package.json +``` + +### Analyze Dependency Tree +```bash +# See why a package is installed +npm ls package-name +yarn why package-name + +# Find duplicate packages +npm dedupe +yarn dedupe + +# Visualize dependencies +npx madge --image graph.png src/ +``` + +## Compatibility Matrix + +```javascript +// compatibility-matrix.js +const compatibilityMatrix = { + 'react': { + '16.x': { + 'react-dom': '^16.0.0', + 'react-router-dom': '^5.0.0', + '@testing-library/react': '^11.0.0' + }, + '17.x': { + 'react-dom': '^17.0.0', + 'react-router-dom': '^5.0.0 || ^6.0.0', + '@testing-library/react': '^12.0.0' + }, + '18.x': { + 'react-dom': '^18.0.0', + 'react-router-dom': '^6.0.0', + '@testing-library/react': '^13.0.0' + } + } +}; + +function checkCompatibility(packages) { + // Validate package versions against matrix +} +``` + +## Staged Upgrade Strategy + +### Phase 1: Planning +```bash +# 1. Identify current versions +npm list --depth=0 + +# 2. Check for breaking changes +# Read CHANGELOG.md and MIGRATION.md + +# 3. Create upgrade plan +echo "Upgrade order: +1. TypeScript +2. React +3. React Router +4. Testing libraries +5. 
Build tools" > UPGRADE_PLAN.md +``` + +### Phase 2: Incremental Updates +```bash +# Don't upgrade everything at once! + +# Step 1: Update TypeScript +npm install typescript@latest + +# Test +npm run test +npm run build + +# Step 2: Update React (one major version at a time) +npm install react@17 react-dom@17 + +# Test again +npm run test + +# Step 3: Continue with other packages +npm install react-router-dom@6 + +# And so on... +``` + +### Phase 3: Validation +```javascript +// tests/compatibility.test.js +describe('Dependency Compatibility', () => { + it('should have compatible React versions', () => { + const reactVersion = require('react/package.json').version; + const reactDomVersion = require('react-dom/package.json').version; + + expect(reactVersion).toBe(reactDomVersion); + }); + + it('should not have peer dependency warnings', () => { + // Run npm ls and check for warnings + }); +}); +``` + +## Breaking Change Handling + +### Identifying Breaking Changes +```bash +# Use changelog parsers +npx changelog-parser react 16.0.0 17.0.0 + +# Or manually check +curl https://raw.githubusercontent.com/facebook/react/main/CHANGELOG.md +``` + +### Codemod for Automated Fixes +```bash +# React upgrade codemods +npx react-codeshift + +# Example: Update lifecycle methods +npx react-codeshift \ + --parser tsx \ + --transform react-codeshift/transforms/rename-unsafe-lifecycles.js \ + src/ +``` + +### Custom Migration Script +```javascript +// migration-script.js +const fs = require('fs'); +const glob = require('glob'); + +glob('src/**/*.tsx', (err, files) => { + files.forEach(file => { + let content = fs.readFileSync(file, 'utf8'); + + // Replace old API with new API + content = content.replace( + /componentWillMount/g, + 'UNSAFE_componentWillMount' + ); + + // Update imports + content = content.replace( + /import { Component } from 'react'/g, + "import React, { Component } from 'react'" + ); + + fs.writeFileSync(file, content); + }); +}); +``` + +## Testing Strategy + +### 
Unit Tests +```javascript +// Ensure tests pass before and after upgrade +npm run test + +// Update test utilities if needed +npm install @testing-library/react@latest +``` + +### Integration Tests +```javascript +// tests/integration/app.test.js +describe('App Integration', () => { + it('should render without crashing', () => { + render(); + }); + + it('should handle navigation', () => { + const { getByText } = render(); + fireEvent.click(getByText('Navigate')); + expect(screen.getByText('New Page')).toBeInTheDocument(); + }); +}); +``` + +### Visual Regression Tests +```javascript +// visual-regression.test.js +describe('Visual Regression', () => { + it('should match snapshot', () => { + const { container } = render(); + expect(container.firstChild).toMatchSnapshot(); + }); +}); +``` + +### E2E Tests +```javascript +// cypress/e2e/app.cy.js +describe('E2E Tests', () => { + it('should complete user flow', () => { + cy.visit('/'); + cy.get('[data-testid="login"]').click(); + cy.get('input[name="email"]').type('user@example.com'); + cy.get('button[type="submit"]').click(); + cy.url().should('include', '/dashboard'); + }); +}); +``` + +## Automated Dependency Updates + +### Renovate Configuration +```json +// renovate.json +{ + "extends": ["config:base"], + "packageRules": [ + { + "matchUpdateTypes": ["minor", "patch"], + "automerge": true + }, + { + "matchUpdateTypes": ["major"], + "automerge": false, + "labels": ["major-update"] + } + ], + "schedule": ["before 3am on Monday"], + "timezone": "America/New_York" +} +``` + +### Dependabot Configuration +```yaml +# .github/dependabot.yml +version: 2 +updates: + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "weekly" + open-pull-requests-limit: 5 + reviewers: + - "team-leads" + commit-message: + prefix: "chore" + include: "scope" +``` + +## Rollback Plan + +```javascript +// rollback.sh +#!/bin/bash + +# Save current state +git stash +git checkout -b upgrade-branch + +# Attempt upgrade +npm install 
package@latest + +# Run tests +if npm run test; then + echo "Upgrade successful" + git add package.json package-lock.json + git commit -m "chore: upgrade package" +else + echo "Upgrade failed, rolling back" + git checkout main + git branch -D upgrade-branch + npm install # Restore from package-lock.json +fi +``` + +## Common Upgrade Patterns + +### Lock File Management +```bash +# npm +npm install --package-lock-only # Update lock file only +npm ci # Clean install from lock file + +# yarn +yarn install --frozen-lockfile # CI mode +yarn upgrade-interactive # Interactive upgrades +``` + +### Peer Dependency Resolution +```bash +# npm 7+: strict peer dependencies +npm install --legacy-peer-deps # Ignore peer deps + +# npm 8+: override peer dependencies +npm install --force +``` + +### Workspace Upgrades +```bash +# Update all workspace packages +npm install --workspaces + +# Update specific workspace +npm install package@latest --workspace=packages/app +``` + +## Resources + +- **references/semver.md**: Semantic versioning guide +- **references/compatibility-matrix.md**: Common compatibility issues +- **references/staged-upgrades.md**: Incremental upgrade strategies +- **references/testing-strategy.md**: Comprehensive testing approaches +- **assets/upgrade-checklist.md**: Step-by-step checklist +- **assets/compatibility-matrix.csv**: Version compatibility table +- **scripts/audit-dependencies.sh**: Dependency audit script + +## Best Practices + +1. **Read Changelogs**: Understand what changed +2. **Upgrade Incrementally**: One major version at a time +3. **Test Thoroughly**: Unit, integration, E2E tests +4. **Check Peer Dependencies**: Resolve conflicts early +5. **Use Lock Files**: Ensure reproducible installs +6. **Automate Updates**: Use Renovate or Dependabot +7. **Monitor**: Watch for runtime errors post-upgrade +8. 
**Document**: Keep upgrade notes + +## Upgrade Checklist + +```markdown +Pre-Upgrade: +- [ ] Review current dependency versions +- [ ] Read changelogs for breaking changes +- [ ] Create feature branch +- [ ] Backup current state (git tag) +- [ ] Run full test suite (baseline) + +During Upgrade: +- [ ] Upgrade one dependency at a time +- [ ] Update peer dependencies +- [ ] Fix TypeScript errors +- [ ] Update tests if needed +- [ ] Run test suite after each upgrade +- [ ] Check bundle size impact + +Post-Upgrade: +- [ ] Full regression testing +- [ ] Performance testing +- [ ] Update documentation +- [ ] Deploy to staging +- [ ] Monitor for errors +- [ ] Deploy to production +``` + +## Common Pitfalls + +- Upgrading all dependencies at once +- Not testing after each upgrade +- Ignoring peer dependency warnings +- Forgetting to update lock file +- Not reading breaking change notes +- Skipping major versions +- Not having rollback plan diff --git a/skills/react-modernization/SKILL.md b/skills/react-modernization/SKILL.md new file mode 100644 index 0000000..7241c76 --- /dev/null +++ b/skills/react-modernization/SKILL.md @@ -0,0 +1,513 @@ +--- +name: react-modernization +description: Upgrade React applications to latest versions, migrate from class components to hooks, and adopt concurrent features. Use when modernizing React codebases, migrating to React Hooks, or upgrading to latest React versions. +--- + +# React Modernization + +Master React version upgrades, class to hooks migration, concurrent features adoption, and codemods for automated transformation. 
+ +## When to Use This Skill + +- Upgrading React applications to latest versions +- Migrating class components to functional components with hooks +- Adopting concurrent React features (Suspense, transitions) +- Applying codemods for automated refactoring +- Modernizing state management patterns +- Updating to TypeScript +- Improving performance with React 18+ features + +## Version Upgrade Path + +### React 16 → 17 → 18 + +**Breaking Changes by Version:** + +**React 17:** +- Event delegation changes +- No event pooling +- Effect cleanup timing +- JSX transform (no React import needed) + +**React 18:** +- Automatic batching +- Concurrent rendering +- Strict Mode changes (double invocation) +- New root API +- Suspense on server + +## Class to Hooks Migration + +### State Management +```javascript +// Before: Class component +class Counter extends React.Component { + constructor(props) { + super(props); + this.state = { + count: 0, + name: '' + }; + } + + increment = () => { + this.setState({ count: this.state.count + 1 }); + } + + render() { + return ( +
+      <div>
+        <p>Count: {this.state.count}</p>
+        <input value={this.state.name} onChange={(e) => this.setState({ name: e.target.value })} />
+        <button onClick={this.increment}>Increment</button>
+      </div>
+ ); + } +} + +// After: Functional component with hooks +function Counter() { + const [count, setCount] = useState(0); + const [name, setName] = useState(''); + + const increment = () => { + setCount(count + 1); + }; + + return ( +
+      <div>
+        <p>Count: {count}</p>
+        <input value={name} onChange={(e) => setName(e.target.value)} />
+        <button onClick={increment}>Increment</button>
+      </div>
+ ); +} +``` + +### Lifecycle Methods to Hooks +```javascript +// Before: Lifecycle methods +class DataFetcher extends React.Component { + state = { data: null, loading: true }; + + componentDidMount() { + this.fetchData(); + } + + componentDidUpdate(prevProps) { + if (prevProps.id !== this.props.id) { + this.fetchData(); + } + } + + componentWillUnmount() { + this.cancelRequest(); + } + + fetchData = async () => { + const data = await fetch(`/api/${this.props.id}`); + this.setState({ data, loading: false }); + }; + + cancelRequest = () => { + // Cleanup + }; + + render() { + if (this.state.loading) return
<div>Loading...</div>;
+    return <div>{this.state.data}</div>
; + } +} + +// After: useEffect hook +function DataFetcher({ id }) { + const [data, setData] = useState(null); + const [loading, setLoading] = useState(true); + + useEffect(() => { + let cancelled = false; + + const fetchData = async () => { + try { + const response = await fetch(`/api/${id}`); + const result = await response.json(); + + if (!cancelled) { + setData(result); + setLoading(false); + } + } catch (error) { + if (!cancelled) { + console.error(error); + } + } + }; + + fetchData(); + + // Cleanup function + return () => { + cancelled = true; + }; + }, [id]); // Re-run when id changes + + if (loading) return
<div>Loading...</div>;
+  return <div>{data}</div>
; +} +``` + +### Context and HOCs to Hooks +```javascript +// Before: Context consumer and HOC +const ThemeContext = React.createContext(); + +class ThemedButton extends React.Component { + static contextType = ThemeContext; + + render() { + return ( + + ); + } +} + +// After: useContext hook +function ThemedButton({ children }) { + const { theme } = useContext(ThemeContext); + + return ( + + ); +} + +// Before: HOC for data fetching +function withUser(Component) { + return class extends React.Component { + state = { user: null }; + + componentDidMount() { + fetchUser().then(user => this.setState({ user })); + } + + render() { + return ; + } + }; +} + +// After: Custom hook +function useUser() { + const [user, setUser] = useState(null); + + useEffect(() => { + fetchUser().then(setUser); + }, []); + + return user; +} + +function UserProfile() { + const user = useUser(); + if (!user) return
<div>Loading...</div>;
+  return <div>{user.name}</div>
; +} +``` + +## React 18 Concurrent Features + +### New Root API +```javascript +// Before: React 17 +import ReactDOM from 'react-dom'; + +ReactDOM.render(, document.getElementById('root')); + +// After: React 18 +import { createRoot } from 'react-dom/client'; + +const root = createRoot(document.getElementById('root')); +root.render(); +``` + +### Automatic Batching +```javascript +// React 18: All updates are batched +function handleClick() { + setCount(c => c + 1); + setFlag(f => !f); + // Only one re-render (batched) +} + +// Even in async: +setTimeout(() => { + setCount(c => c + 1); + setFlag(f => !f); + // Still batched in React 18! +}, 1000); + +// Opt out if needed +import { flushSync } from 'react-dom'; + +flushSync(() => { + setCount(c => c + 1); +}); +// Re-render happens here +setFlag(f => !f); +// Another re-render +``` + +### Transitions +```javascript +import { useState, useTransition } from 'react'; + +function SearchResults() { + const [query, setQuery] = useState(''); + const [results, setResults] = useState([]); + const [isPending, startTransition] = useTransition(); + + const handleChange = (e) => { + // Urgent: Update input immediately + setQuery(e.target.value); + + // Non-urgent: Update results (can be interrupted) + startTransition(() => { + setResults(searchResults(e.target.value)); + }); + }; + + return ( + <> + + {isPending && } + + + ); +} +``` + +### Suspense for Data Fetching +```javascript +import { Suspense } from 'react'; + +// Resource-based data fetching (with React 18) +const resource = fetchProfileData(); + +function ProfilePage() { + return ( + }> + + }> + + + + ); +} + +function ProfileDetails() { + // This will suspend if data not ready + const user = resource.user.read(); + return
<div><h1>{user.name}</h1></div>
; +} + +function ProfileTimeline() { + const posts = resource.posts.read(); + return ; +} +``` + +## Codemods for Automation + +### Run React Codemods +```bash +# Install jscodeshift +npm install -g jscodeshift + +# React 16.9 codemod (rename unsafe lifecycle methods) +npx react-codeshift + +# Example: Rename UNSAFE_ methods +npx react-codeshift --parser=tsx \ + --transform=react-codeshift/transforms/rename-unsafe-lifecycles.js \ + src/ + +# Update to new JSX Transform (React 17+) +npx react-codeshift --parser=tsx \ + --transform=react-codeshift/transforms/new-jsx-transform.js \ + src/ + +# Class to Hooks (third-party) +npx codemod react/hooks/convert-class-to-function src/ +``` + +### Custom Codemod Example +```javascript +// custom-codemod.js +module.exports = function(file, api) { + const j = api.jscodeshift; + const root = j(file.source); + + // Find setState calls + root.find(j.CallExpression, { + callee: { + type: 'MemberExpression', + property: { name: 'setState' } + } + }).forEach(path => { + // Transform to useState + // ... 
transformation logic + }); + + return root.toSource(); +}; + +// Run: jscodeshift -t custom-codemod.js src/ +``` + +## Performance Optimization + +### useMemo and useCallback +```javascript +function ExpensiveComponent({ items, filter }) { + // Memoize expensive calculation + const filteredItems = useMemo(() => { + return items.filter(item => item.category === filter); + }, [items, filter]); + + // Memoize callback to prevent child re-renders + const handleClick = useCallback((id) => { + console.log('Clicked:', id); + }, []); // No dependencies, never changes + + return ( + + ); +} + +// Child component with memo +const List = React.memo(({ items, onClick }) => { + return items.map(item => ( + + )); +}); +``` + +### Code Splitting +```javascript +import { lazy, Suspense } from 'react'; + +// Lazy load components +const Dashboard = lazy(() => import('./Dashboard')); +const Settings = lazy(() => import('./Settings')); + +function App() { + return ( + }> + + } /> + } /> + + + ); +} +``` + +## TypeScript Migration + +```typescript +// Before: JavaScript +function Button({ onClick, children }) { + return ; +} + +// After: TypeScript +interface ButtonProps { + onClick: () => void; + children: React.ReactNode; +} + +function Button({ onClick, children }: ButtonProps) { + return ; +} + +// Generic components +interface ListProps { + items: T[]; + renderItem: (item: T) => React.ReactNode; +} + +function List({ items, renderItem }: ListProps) { + return <>{items.map(renderItem)}; +} +``` + +## Migration Checklist + +```markdown +### Pre-Migration +- [ ] Update dependencies incrementally (not all at once) +- [ ] Review breaking changes in release notes +- [ ] Set up testing suite +- [ ] Create feature branch + +### Class → Hooks Migration +- [ ] Identify class components to migrate +- [ ] Start with leaf components (no children) +- [ ] Convert state to useState +- [ ] Convert lifecycle to useEffect +- [ ] Convert context to useContext +- [ ] Extract custom hooks +- [ ] Test 
thoroughly + +### React 18 Upgrade +- [ ] Update to React 17 first (if needed) +- [ ] Update react and react-dom to 18 +- [ ] Update @types/react if using TypeScript +- [ ] Change to createRoot API +- [ ] Test with StrictMode (double invocation) +- [ ] Address concurrent rendering issues +- [ ] Adopt Suspense/Transitions where beneficial + +### Performance +- [ ] Identify performance bottlenecks +- [ ] Add React.memo where appropriate +- [ ] Use useMemo/useCallback for expensive operations +- [ ] Implement code splitting +- [ ] Optimize re-renders + +### Testing +- [ ] Update test utilities (React Testing Library) +- [ ] Test with React 18 features +- [ ] Check for warnings in console +- [ ] Performance testing +``` + +## Resources + +- **references/breaking-changes.md**: Version-specific breaking changes +- **references/codemods.md**: Codemod usage guide +- **references/hooks-migration.md**: Comprehensive hooks patterns +- **references/concurrent-features.md**: React 18 concurrent features +- **assets/codemod-config.json**: Codemod configurations +- **assets/migration-checklist.md**: Step-by-step checklist +- **scripts/apply-codemods.sh**: Automated codemod script + +## Best Practices + +1. **Incremental Migration**: Don't migrate everything at once +2. **Test Thoroughly**: Comprehensive testing at each step +3. **Use Codemods**: Automate repetitive transformations +4. **Start Simple**: Begin with leaf components +5. **Leverage StrictMode**: Catch issues early +6. **Monitor Performance**: Measure before and after +7. **Document Changes**: Keep migration log + +## Common Pitfalls + +- Forgetting useEffect dependencies +- Over-using useMemo/useCallback +- Not handling cleanup in useEffect +- Mixing class and functional patterns +- Ignoring StrictMode warnings +- Breaking change assumptions