Initial commit
15
.claude-plugin/plugin.json
Normal file
@@ -0,0 +1,15 @@
{
  "name": "api-migration-tool",
  "description": "Migrate APIs between versions with backward compatibility",
  "version": "1.0.0",
  "author": {
    "name": "Jeremy Longshore",
    "email": "[email protected]"
  },
  "skills": [
    "./skills"
  ],
  "commands": [
    "./commands"
  ]
}
3
README.md
Normal file
@@ -0,0 +1,3 @@
# api-migration-tool

Migrate APIs between versions with backward compatibility
928
commands/migrate-api.md
Normal file
@@ -0,0 +1,928 @@
---
description: Migrate API to new version with compatibility layers and automated scripts
shortcut: migrate
---

# Migrate API Version

Orchestrate comprehensive API version migrations with automated compatibility layers, breaking change detection, and zero-downtime deployment strategies. This command manages the complete lifecycle of API evolution from initial analysis through deployment and deprecation.

## Design Decisions

**Architecture Approach:**
- Version routing at the API gateway level for clean separation
- Adapter pattern for backward compatibility transformations
- Feature flags for gradual rollout control
- Automated test generation across all supported versions

**Alternatives Considered:**
- Hard cutover migration (rejected: high risk, no rollback)
- Separate API endpoints per version (rejected: operational complexity)
- GraphQL federation (only chosen for microservices architectures)
- BFF pattern for client-specific migrations

## When to Use

**USE when:**
- Introducing breaking changes to API contracts
- Deprecating legacy endpoints with controlled timelines
- Migrating between API paradigms (REST to GraphQL)
- Evolving data models with backward-incompatible changes
- Implementing new authentication mechanisms
- Consolidating multiple API versions

**DON'T USE when:**
- Adding backward-compatible endpoints (use versioned routes)
- Refactoring internals without contract changes
- Deploying hotfixes or security patches
- Making changes that affect only the implementation, not the interface

## Prerequisites

**Required:**
- Complete OpenAPI/GraphQL schema for both versions
- Comprehensive API test suite with >80% coverage
- Version control with tagged releases
- Deployment pipeline with rollback capability
- API gateway with routing rules support
- Monitoring and alerting infrastructure

**Recommended:**
- Consumer registry with contact information
- Deprecation policy documented and communicated
- Traffic analysis showing endpoint usage patterns
- Backward compatibility test matrix
- Canary deployment environment

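A consumer registry does not need dedicated tooling to be useful during impact analysis. A minimal sketch follows; the registry shape, consumer names, and helper function are illustrative assumptions, not a format this command requires:

```javascript
// Minimal consumer registry: which clients depend on which endpoints.
// The shape and entries here are assumptions for illustration only.
const consumerRegistry = [
  { name: "AcmeApp",  contact: "acme-dev@example.com",       endpoints: ["/api/v1/users", "/api/v1/users/{id}"] },
  { name: "BetaCorp", contact: "api-team@betacorp.example",  endpoints: ["/api/v1/users/{id}"] },
  { name: "GammaInc", contact: "integrations@gamma.example", endpoints: ["/api/v1/orders"] }
];

// List the consumers affected by a set of breaking endpoints, so later
// deprecation notices can be targeted instead of broadcast.
function affectedConsumers(registry, breakingEndpoints) {
  return registry.filter(c =>
    c.endpoints.some(e => breakingEndpoints.includes(e))
  );
}

console.log(affectedConsumers(consumerRegistry, ["/api/v1/users/{id}"]).map(c => c.name));
// -> [ 'AcmeApp', 'BetaCorp' ]
```
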
## Migration Process

**Step 1: Analysis and Impact Assessment**
- Scan API schemas to detect breaking changes
- Analyze usage patterns from API logs
- Identify affected consumers and endpoints
- Calculate complexity score for migration effort
- Generate compatibility matrix between versions

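The schema scan in Step 1 can be approximated with a structural diff between the two contract descriptions. A minimal sketch, assuming plain endpoint-to-field maps rather than a full OpenAPI parser:

```javascript
// Naive breaking-change scan: flag response fields present in v1 but missing in v2.
// The input shape (endpoint -> field list) is an assumption for illustration.
const v1 = { "/users/{id}": ["id", "name", "email", "street", "city", "state", "zip"] };
const v2 = { "/users/{id}": ["id", "name", "email", "address"] };

function detectRemovedFields(oldSpec, newSpec) {
  const changes = [];
  for (const [endpoint, oldFields] of Object.entries(oldSpec)) {
    const newFields = newSpec[endpoint] || [];
    for (const field of oldFields) {
      if (!newFields.includes(field)) {
        changes.push({ endpoint, change_type: "field_removed", field, severity: "high" });
      }
    }
  }
  return changes;
}

console.log(detectRemovedFields(v1, v2));
// Flags street, city, state and zip as removed from /users/{id}.
```
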
**Step 2: Compatibility Layer Generation**
- Create adapter functions for data transformation
- Generate request/response mappers automatically
- Build version-specific validation schemas
- Implement fallback logic for missing fields
- Create deprecation warning middleware

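The deprecation warning middleware from Step 2 can be a few lines on an Express-style stack. The framework signature and header names below are assumptions:

```javascript
// Deprecation warning middleware (Express-style signature assumed).
// Attaches sunset metadata to every response served through a legacy route.
function deprecationWarning({ sunsetDate, docsUrl }) {
  return (req, res, next) => {
    res.set("Deprecation", "true");
    res.set("Sunset", sunsetDate);                       // e.g. "2024-12-31"
    res.set("Link", `<${docsUrl}>; rel="deprecation"`);  // migration guide
    console.warn(`[DEPRECATED] ${req.method} ${req.path} served via v1`);
    next();
  };
}

// Usage (illustrative):
//   app.use("/api/v1", deprecationWarning({ sunsetDate: "2024-12-31", docsUrl: "https://docs.example.com/api-migration" }));
```
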
**Step 3: Migration Script Creation**
- Generate database migration scripts for schema changes
- Create data backfill scripts for new required fields
- Build rollback procedures for each migration step
- Generate test fixtures for both API versions
- Create automated smoke tests for critical paths

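Backfill scripts in Step 3 typically derive the new structure from the old columns. A sketch over an in-memory batch; a real script would stream rows from the database and write back in transactions:

```javascript
// Backfill sketch: derive the nested address object for rows that predate it.
// The row shape mirrors the v1 -> v2 user example later in this document.
const users = [
  { id: 1, street: "123 Main St", city: "San Francisco", state: "CA", zip: "94105", address: null },
  { id: 2, street: null, city: null, state: null, zip: null, address: null }
];

function backfillAddresses(rows) {
  return rows.map(row => {
    if (row.address || !row.street) return row; // already migrated, or nothing to copy
    return {
      ...row,
      address: { street: row.street, city: row.city, state: row.state, postalCode: row.zip }
    };
  });
}

console.log(backfillAddresses(users)[0].address.postalCode); // "94105"
```
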
**Step 4: Routing and Deployment Configuration**
- Configure API gateway version routing rules
- Set up feature flags for gradual rollout
- Implement traffic splitting for canary deployment
- Configure monitoring dashboards for version metrics
- Set up deprecation warning headers and logs

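Traffic splitting in Step 4 can live in the gateway (as in the Envoy example later in this document) or in application code. A sketch of deterministic, percentage-based version selection; the header name and weight are assumptions:

```javascript
// Weighted version routing sketch: send a configurable share of traffic to v2.
// The percentage would normally come from a feature-flag service, not a constant.
const V2_TRAFFIC_PERCENTAGE = 10;

function selectVersion(req) {
  // Hash a stable client key so a given consumer does not flip between
  // versions from one request to the next.
  const key = req.headers["x-client-id"] || req.ip || "anonymous";
  let hash = 0;
  for (const ch of key) hash = (hash * 31 + ch.charCodeAt(0)) % 100;
  return hash < V2_TRAFFIC_PERCENTAGE ? "v2" : "v1";
}

console.log(selectVersion({ headers: { "x-client-id": "acme-app" }, ip: "10.0.0.1" }));
```
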
**Step 5: Validation and Monitoring**
- Execute automated test suite across all versions
- Verify backward compatibility with consumer tests
- Monitor error rates and performance metrics
- Track adoption rates for new version
- Schedule deprecation timeline communications

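A sketch of the Step 5 health check that gates each rollout phase, using the same 5% rollback threshold described in the options below. The metric values are illustrative; real numbers would come from the monitoring system:

```javascript
// Rollout health check sketch: compare per-version error rates against the
// rollback threshold before advancing to the next canary phase.
const metrics = {
  v1: { requests: 90000, errors: 450 },
  v2: { requests: 10000, errors: 620 }
};

function rolloutDecision(versionMetrics, rollbackThresholdPct = 5) {
  const rate = v => (v.errors / v.requests) * 100;
  const v2ErrorRate = rate(versionMetrics.v2);
  if (v2ErrorRate > rollbackThresholdPct) {
    return { action: "rollback", reason: `v2 error rate ${v2ErrorRate.toFixed(1)}% exceeds ${rollbackThresholdPct}%` };
  }
  return { action: "advance", v2ErrorRate: v2ErrorRate.toFixed(1) };
}

console.log(rolloutDecision(metrics));
// -> { action: 'rollback', reason: 'v2 error rate 6.2% exceeds 5%' }
```
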
## Output Format
|
||||
|
||||
```yaml
|
||||
migration_plan:
|
||||
api_name: "User Service API"
|
||||
source_version: "v1"
|
||||
target_version: "v2"
|
||||
breaking_changes:
|
||||
- endpoint: "/users"
|
||||
change_type: "field_removed"
|
||||
field: "username"
|
||||
severity: "high"
|
||||
affected_consumers: 15
|
||||
- endpoint: "/users/{id}"
|
||||
change_type: "response_structure"
|
||||
details: "Nested address object"
|
||||
severity: "medium"
|
||||
affected_consumers: 8
|
||||
|
||||
compatibility_layer:
|
||||
adapters_generated: 12
|
||||
transformation_functions: 8
|
||||
fallback_strategies: 5
|
||||
|
||||
migration_scripts:
|
||||
- script: "001_add_email_unique_constraint.sql"
|
||||
type: "database"
|
||||
rollback: "001_rollback_email_constraint.sql"
|
||||
- script: "002_backfill_address_objects.js"
|
||||
type: "data_transformation"
|
||||
estimated_time: "15 minutes"
|
||||
|
||||
deployment_strategy:
|
||||
type: "canary"
|
||||
phases:
|
||||
- name: "internal_testing"
|
||||
traffic_percentage: 0
|
||||
duration: "3 days"
|
||||
- name: "early_adopters"
|
||||
traffic_percentage: 10
|
||||
duration: "1 week"
|
||||
- name: "general_rollout"
|
||||
traffic_percentage: 100
|
||||
duration: "2 weeks"
|
||||
|
||||
deprecation_timeline:
|
||||
warning_start: "2024-01-01"
|
||||
support_end: "2024-06-30"
|
||||
sunset_date: "2024-12-31"
|
||||
notification_plan: "Email + dashboard banner"
|
||||
|
||||
monitoring:
|
||||
dashboards:
|
||||
- "API Version Adoption Metrics"
|
||||
- "Error Rate by Version"
|
||||
- "Response Time Comparison"
|
||||
alerts:
|
||||
- "V2 error rate > 5%"
|
||||
- "V1 traffic spike (rollback indicator)"
|
||||
```
|
||||
|
||||
## Code Examples
|
||||
|
||||
### Example 1: REST API v1 to v2 Migration with Breaking Changes
|
||||
|
||||
```javascript
|
||||
// API v1 → v2 Migration: User endpoint restructure
|
||||
// BREAKING: Flattened user object to nested structure
|
||||
|
||||
// Source: /api/v1/users/{id}
|
||||
{
|
||||
"id": 123,
|
||||
"name": "John Doe",
|
||||
"email": "john@example.com",
|
||||
"street": "123 Main St",
|
||||
"city": "San Francisco",
|
||||
"state": "CA",
|
||||
"zip": "94105"
|
||||
}
|
||||
|
||||
// Target: /api/v2/users/{id}
|
||||
{
|
||||
"id": 123,
|
||||
"name": "John Doe",
|
||||
"email": "john@example.com",
|
||||
"address": {
|
||||
"street": "123 Main St",
|
||||
"city": "San Francisco",
|
||||
"state": "CA",
|
||||
"postalCode": "94105"
|
||||
},
|
||||
"metadata": {
|
||||
"createdAt": "2024-01-15T10:30:00Z",
|
||||
"version": "v2"
|
||||
}
|
||||
}
|
||||
|
||||
// Generated Compatibility Adapter
|
||||
class UserV1ToV2Adapter {
|
||||
transform(v1Response) {
|
||||
return {
|
||||
id: v1Response.id,
|
||||
name: v1Response.name,
|
||||
email: v1Response.email,
|
||||
address: {
|
||||
street: v1Response.street,
|
||||
city: v1Response.city,
|
||||
state: v1Response.state,
|
||||
postalCode: v1Response.zip // Field renamed
|
||||
},
|
||||
metadata: {
|
||||
createdAt: v1Response.created_at || new Date().toISOString(),
|
||||
version: "v2"
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
reverseTransform(v2Response) {
|
||||
// For backward compatibility when v1 clients hit v2
|
||||
return {
|
||||
id: v2Response.id,
|
||||
name: v2Response.name,
|
||||
email: v2Response.email,
|
||||
street: v2Response.address?.street || "",
|
||||
city: v2Response.address?.city || "",
|
||||
state: v2Response.address?.state || "",
|
||||
zip: v2Response.address?.postalCode || ""
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// API Gateway Routing Configuration
|
||||
const routingRules = {
|
||||
"/api/v1/users/:id": {
|
||||
target: "/api/v2/users/:id",
|
||||
adapter: "UserV1ToV2Adapter",
|
||||
deprecationWarning: {
|
||||
header: "Deprecation",
|
||||
value: "API v1 will be sunset on 2024-12-31. Migrate to v2.",
|
||||
link: "https://docs.example.com/api-migration"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Migration Script: Database Schema Evolution
|
||||
// 001_restructure_user_addresses.sql
|
||||
BEGIN;
|
||||
|
||||
-- Create new address table for normalized structure
|
||||
CREATE TABLE user_addresses (
|
||||
id SERIAL PRIMARY KEY,
|
||||
user_id INTEGER REFERENCES users(id),
|
||||
street VARCHAR(255),
|
||||
city VARCHAR(100),
|
||||
state VARCHAR(2),
|
||||
postal_code VARCHAR(10),
|
||||
created_at TIMESTAMP DEFAULT NOW()
|
||||
);
|
||||
|
||||
-- Migrate existing flat data to nested structure
|
||||
INSERT INTO user_addresses (user_id, street, city, state, postal_code)
|
||||
SELECT id, street, city, state, zip
|
||||
FROM users
|
||||
WHERE street IS NOT NULL;
|
||||
|
||||
-- Add foreign key to users table
|
||||
ALTER TABLE users ADD COLUMN address_id INTEGER REFERENCES user_addresses(id);
|
||||
|
||||
UPDATE users u
|
||||
SET address_id = ua.id
|
||||
FROM user_addresses ua
|
||||
WHERE u.id = ua.user_id;
|
||||
|
||||
-- Deprecate old columns (don't drop yet for rollback safety)
|
||||
ALTER TABLE users
|
||||
ALTER COLUMN street DROP NOT NULL,
|
||||
ALTER COLUMN city DROP NOT NULL,
|
||||
ALTER COLUMN state DROP NOT NULL,
|
||||
ALTER COLUMN zip DROP NOT NULL;
|
||||
|
||||
COMMIT;
|
||||
|
||||
-- Rollback Script: 001_rollback_restructure.sql
|
||||
BEGIN;
|
||||
UPDATE users u
|
||||
SET street = ua.street,
|
||||
city = ua.city,
|
||||
state = ua.state,
|
||||
zip = ua.postal_code
|
||||
FROM user_addresses ua
|
||||
WHERE u.address_id = ua.id;
|
||||
|
||||
ALTER TABLE users DROP COLUMN address_id;
|
||||
DROP TABLE user_addresses;
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
### Example 2: GraphQL Schema Evolution
|
||||
|
||||
```graphql
|
||||
# Schema v1 (Deprecated)
|
||||
type User {
|
||||
id: ID!
|
||||
username: String! # DEPRECATED: Replaced by email
|
||||
email: String
|
||||
fullName: String
|
||||
}
|
||||
|
||||
type Query {
|
||||
user(id: ID!): User
|
||||
users: [User!]!
|
||||
}
|
||||
|
||||
# Schema v2 (Current)
|
||||
type Address {
|
||||
street: String!
|
||||
city: String!
|
||||
state: String!
|
||||
postalCode: String!
|
||||
country: String!
|
||||
}
|
||||
|
||||
type User {
|
||||
id: ID!
|
||||
email: String! # Now required, primary identifier
|
||||
username: String @deprecated(reason: "Use email instead. Removed in v3.")
|
||||
profile: UserProfile!
|
||||
address: Address
|
||||
}
|
||||
|
||||
type UserProfile {
|
||||
firstName: String!
|
||||
lastName: String!
|
||||
displayName: String!
|
||||
avatar: String
|
||||
}
|
||||
|
||||
type Query {
|
||||
user(id: ID, email: String): User # Multiple lookup options
|
||||
users(filter: UserFilter): [User!]!
|
||||
}
|
||||
|
||||
input UserFilter {
|
||||
email: String
|
||||
city: String
|
||||
state: String
|
||||
}
|
||||
|
||||
# Schema v3 (Planned)
|
||||
type User {
|
||||
id: ID!
|
||||
email: String!
|
||||
profile: UserProfile!
|
||||
addresses: [Address!]! # Now supports multiple addresses
|
||||
}
|
||||
|
||||
# Migration Resolver Implementation
|
||||
const resolvers = {
|
||||
Query: {
|
||||
user: async (_, args, context) => {
|
||||
const version = context.apiVersion;
|
||||
|
||||
if (version === 'v1') {
|
||||
// Legacy lookup by username
|
||||
const user = await db.users.findByUsername(args.id);
|
||||
return {
|
||||
...user,
|
||||
username: user.username, // Still supported in v1
|
||||
fullName: `${user.firstName} ${user.lastName}`
|
||||
};
|
||||
} else {
|
||||
// Modern lookup by email or ID
|
||||
const user = await db.users.findOne({
|
||||
where: args.email ? { email: args.email } : { id: args.id }
|
||||
});
|
||||
return user;
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
User: {
|
||||
// Compatibility field resolver for deprecated username
|
||||
username: (user, args, context) => {
|
||||
if (context.apiVersion === 'v1') {
|
||||
return user.username;
|
||||
}
|
||||
// Add deprecation warning to response headers
|
||||
context.res.set('Deprecation', 'username field is deprecated. Use email.');
|
||||
return user.username || user.email.split('@')[0];
|
||||
},
|
||||
|
||||
// Transform flat structure to nested for v2+
|
||||
profile: (user) => ({
|
||||
firstName: user.firstName || user.fullName?.split(' ')[0],
|
||||
lastName: user.lastName || user.fullName?.split(' ')[1],
|
||||
displayName: user.fullName,
|
||||
avatar: user.avatar
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
// Automated Schema Compatibility Tests
|
||||
describe('GraphQL Schema Migration Tests', () => {
|
||||
test('v1 clients can still query with username', async () => {
|
||||
const query = `query { user(id: "johndoe") { username email } }`;
|
||||
const result = await executeQuery(query, { apiVersion: 'v1' });
|
||||
expect(result.data.user.username).toBe('johndoe');
|
||||
});
|
||||
|
||||
test('v2 clients receive nested profile structure', async () => {
|
||||
const query = `query { user(email: "john@example.com") { profile { firstName lastName } } }`;
|
||||
const result = await executeQuery(query, { apiVersion: 'v2' });
|
||||
expect(result.data.user.profile.firstName).toBe('John');
|
||||
});
|
||||
|
||||
test('deprecated fields trigger warning headers', async () => {
|
||||
const query = `query { user(id: "123") { username } }`;
|
||||
const { response } = await executeQueryWithHeaders(query, { apiVersion: 'v2' });
|
||||
expect(response.headers.get('Deprecation')).toContain('username field is deprecated');
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
### Example 3: gRPC Service Versioning
|
||||
|
||||
```protobuf
|
||||
// service_v1.proto (Deprecated)
|
||||
syntax = "proto3";
|
||||
package user.v1;
|
||||
|
||||
message User {
|
||||
int32 id = 1;
|
||||
string username = 2;
|
||||
string email = 3;
|
||||
string full_name = 4;
|
||||
}
|
||||
|
||||
message GetUserRequest {
|
||||
int32 id = 1;
|
||||
}
|
||||
|
||||
message GetUserResponse {
|
||||
User user = 1;
|
||||
}
|
||||
|
||||
service UserService {
|
||||
rpc GetUser(GetUserRequest) returns (GetUserResponse);
|
||||
}
|
||||
|
||||
// service_v2.proto (Current)
|
||||
syntax = "proto3";
|
||||
package user.v2;
|
||||
|
||||
import "google/protobuf/timestamp.proto";
|
||||
|
||||
message Address {
|
||||
string street = 1;
|
||||
string city = 2;
|
||||
string state = 3;
|
||||
string postal_code = 4;
|
||||
string country = 5;
|
||||
}
|
||||
|
||||
message UserProfile {
|
||||
string first_name = 1;
|
||||
string last_name = 2;
|
||||
string display_name = 3;
|
||||
string avatar_url = 4;
|
||||
}
|
||||
|
||||
message User {
|
||||
int32 id = 1;
|
||||
string email = 2;
|
||||
UserProfile profile = 3;
|
||||
Address address = 4;
|
||||
google.protobuf.Timestamp created_at = 5;
|
||||
google.protobuf.Timestamp updated_at = 6;
|
||||
}
|
||||
|
||||
message GetUserRequest {
|
||||
oneof identifier {
|
||||
int32 id = 1;
|
||||
string email = 2;
|
||||
}
|
||||
}
|
||||
|
||||
message GetUserResponse {
|
||||
User user = 1;
|
||||
}
|
||||
|
||||
service UserService {
|
||||
rpc GetUser(GetUserRequest) returns (GetUserResponse);
|
||||
rpc ListUsers(ListUsersRequest) returns (ListUsersResponse);
|
||||
}
|
||||
|
||||
// Compatibility Bridge Service
|
||||
package user.bridge;
|
||||
|
||||
import "user/v1/service.proto";
|
||||
import "user/v2/service.proto";
|
||||
|
||||
class UserServiceBridge {
|
||||
constructor() {
|
||||
this.v2Service = new user.v2.UserServiceClient('localhost:50052');
|
||||
}
|
||||
|
||||
// Implement v1 interface while calling v2 backend
|
||||
async GetUser(call, callback) {
|
||||
const v1Request = call.request;
|
||||
|
||||
// Transform v1 request to v2 format
|
||||
const v2Request = {
|
||||
id: v1Request.id
|
||||
};
|
||||
|
||||
try {
|
||||
const v2Response = await this.v2Service.GetUser(v2Request);
|
||||
const v2User = v2Response.user;
|
||||
|
||||
// Transform v2 response back to v1 format
|
||||
const v1User = {
|
||||
id: v2User.id,
|
||||
username: v2User.email.split('@')[0], // Synthesize username
|
||||
email: v2User.email,
|
||||
full_name: v2User.profile.display_name
|
||||
};
|
||||
|
||||
callback(null, { user: v1User });
|
||||
|
||||
// Log deprecation warning
|
||||
console.warn(`[DEPRECATED] v1 API used by client ${call.getPeer()}`);
|
||||
|
||||
} catch (error) {
|
||||
callback(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Envoy gRPC Gateway Configuration for Version Routing
|
||||
static_resources:
|
||||
listeners:
|
||||
- name: user_service_listener
|
||||
address:
|
||||
socket_address:
|
||||
address: 0.0.0.0
|
||||
port_value: 50051
|
||||
filter_chains:
|
||||
- filters:
|
||||
- name: envoy.filters.network.http_connection_manager
|
||||
typed_config:
|
||||
"@type": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager
|
||||
stat_prefix: grpc_json
|
||||
codec_type: AUTO
|
||||
route_config:
|
||||
name: local_route
|
||||
virtual_hosts:
|
||||
- name: user_service
|
||||
domains: ["*"]
|
||||
routes:
|
||||
# Route v1 requests to compatibility bridge
|
||||
- match:
|
||||
prefix: "/user.v1.UserService"
|
||||
route:
|
||||
cluster: user_service_v1_bridge
|
||||
timeout: 30s
|
||||
# Route v2 requests to native service
|
||||
- match:
|
||||
prefix: "/user.v2.UserService"
|
||||
route:
|
||||
cluster: user_service_v2
|
||||
timeout: 30s
|
||||
|
||||
clusters:
|
||||
- name: user_service_v1_bridge
|
||||
connect_timeout: 1s
|
||||
type: STRICT_DNS
|
||||
lb_policy: ROUND_ROBIN
|
||||
http2_protocol_options: {}
|
||||
load_assignment:
|
||||
cluster_name: user_service_v1_bridge
|
||||
endpoints:
|
||||
- lb_endpoints:
|
||||
- endpoint:
|
||||
address:
|
||||
socket_address:
|
||||
address: user-bridge-service
|
||||
port_value: 50051
|
||||
|
||||
- name: user_service_v2
|
||||
connect_timeout: 1s
|
||||
type: STRICT_DNS
|
||||
lb_policy: ROUND_ROBIN
|
||||
http2_protocol_options: {}
|
||||
load_assignment:
|
||||
cluster_name: user_service_v2
|
||||
endpoints:
|
||||
- lb_endpoints:
|
||||
- endpoint:
|
||||
address:
|
||||
socket_address:
|
||||
address: user-service-v2
|
||||
port_value: 50052
|
||||
|
||||
// Migration Testing Framework
|
||||
describe('gRPC API Migration Tests', () => {
|
||||
const v1Client = new user.v1.UserServiceClient('localhost:50051');
|
||||
const v2Client = new user.v2.UserServiceClient('localhost:50052');
|
||||
|
||||
test('v1 clients receive compatible responses', async () => {
|
||||
const request = new user.v1.GetUserRequest({ id: 123 });
|
||||
const response = await v1Client.GetUser(request);
|
||||
|
||||
expect(response.user.username).toBeDefined();
|
||||
expect(response.user.email).toBeDefined();
|
||||
expect(response.user.full_name).toBeDefined();
|
||||
});
|
||||
|
||||
test('v2 clients receive enhanced data structures', async () => {
|
||||
const request = new user.v2.GetUserRequest({ email: 'john@example.com' });
|
||||
const response = await v2Client.GetUser(request);
|
||||
|
||||
expect(response.user.profile).toBeDefined();
|
||||
expect(response.user.profile.first_name).toBeDefined();
|
||||
expect(response.user.address).toBeDefined();
|
||||
});
|
||||
|
||||
test('data consistency between versions', async () => {
|
||||
const userId = 123;
|
||||
|
||||
const v1Response = await v1Client.GetUser({ id: userId });
|
||||
const v2Response = await v2Client.GetUser({ id: userId });
|
||||
|
||||
// Verify transformed data matches
|
||||
expect(v1Response.user.email).toBe(v2Response.user.email);
|
||||
expect(v1Response.user.full_name).toBe(v2Response.user.profile.display_name);
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Configuration Options
|
||||
|
||||
**Basic Usage:**
|
||||
```bash
|
||||
/migrate-api \
|
||||
--source=v1 \
|
||||
--target=v2 \
|
||||
--api-spec=openapi.yaml \
|
||||
--consumers=consumer-registry.json
|
||||
```
|
||||
|
||||
**Available Options:**
|
||||
|
||||
`--strategy <type>` - Migration deployment strategy
|
||||
- `canary` - Gradual traffic shifting (default, safest)
|
||||
- `blue-green` - Instant switchover with rollback capability
|
||||
- `rolling` - Progressive deployment across instances
|
||||
- `feature-flag` - Application-controlled version selection
|
||||
- `parallel-run` - Run both versions, compare results
|
||||
|
||||
`--compatibility-mode <mode>` - Backward compatibility approach
|
||||
- `adapter` - Transform requests/responses between versions (default)
|
||||
- `proxy` - Route old endpoints to new implementation
|
||||
- `shim` - Minimal compatibility layer, consumers must adapt
|
||||
- `none` - No compatibility, hard cutover (dangerous)
|
||||
|
||||
`--deprecation-period <duration>` - Support window for old version
|
||||
- `3months` - Short deprecation (minor changes)
|
||||
- `6months` - Standard deprecation (default)
|
||||
- `12months` - Extended support (major changes)
|
||||
- `custom:YYYY-MM-DD` - Specific sunset date
|
||||
|
||||
`--breaking-changes-policy <policy>` - How to handle breaking changes
|
||||
- `require-adapters` - Force compatibility layer generation
|
||||
- `warn-consumers` - Send notifications, allow migration time
|
||||
- `block-deployment` - Prevent deploy until consumers updated
|
||||
- `document-only` - Just update documentation
|
||||
|
||||
`--traffic-split <percentage>` - Initial new version traffic
|
||||
- Default: `0` (dark launch)
|
||||
- Range: 0-100
|
||||
- Example: `10` for 10% canary deployment
|
||||
|
||||
`--rollback-threshold <percentage>` - Error rate trigger for auto-rollback
- Default: `5` (5% error rate)
- Range: 1-50
- Example: `2` for strict quality requirements

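How the threshold is evaluated matters as much as its value: a single noisy sample should not trigger a rollback. A sketch of a consecutive-sample check; the window size and sample shape are assumptions:

```javascript
// Require the error rate to exceed the threshold for several consecutive
// samples before rolling back, so one spike does not cause a false rollback.
function shouldRollback(samples, thresholdPct = 5, consecutive = 3) {
  const rates = samples.map(s => (s.errors / s.requests) * 100);
  let streak = 0;
  for (const r of rates) {
    streak = r > thresholdPct ? streak + 1 : 0;
    if (streak >= consecutive) return true;
  }
  return false;
}

console.log(shouldRollback([
  { requests: 1000, errors: 80 },  // 8% - spike
  { requests: 1000, errors: 20 },  // 2% - recovered
  { requests: 1000, errors: 30 }   // 3%
])); // false: the spike did not persist
```
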
`--test-coverage-required <percentage>` - Minimum test coverage before deploy
|
||||
- Default: `80`
|
||||
- Range: 0-100
|
||||
- Blocks deployment if coverage below threshold
|
||||
|
||||
`--generate-migration-guide` - Create consumer migration documentation
|
||||
- Generates markdown guide with code examples
|
||||
- Includes breaking change summaries
|
||||
- Provides timeline and support contacts
|
||||
|
||||
`--dry-run` - Simulate migration without making changes
|
||||
- Analyze breaking changes
|
||||
- Generate compatibility report
|
||||
- Estimate migration effort
|
||||
- No actual deployment
|
||||
|
||||
## Error Handling
|
||||
|
||||
**Common Errors and Solutions:**
|
||||
|
||||
**Error: Breaking changes detected without compatibility layer**
|
||||
```
|
||||
ERROR: 15 breaking changes detected in target API version
|
||||
- Removed field: User.username (affects 12 endpoints)
|
||||
- Changed type: Order.total (string → number)
|
||||
- Renamed endpoint: /users/search → /users/find
|
||||
|
||||
Solution: Either:
|
||||
1. Add --compatibility-mode=adapter to generate transformers
|
||||
2. Create manual adapters in adapters/ directory
|
||||
3. Use --breaking-changes-policy=warn-consumers for grace period
|
||||
```
|
||||
|
||||
**Error: Consumer test failures in compatibility mode**
|
||||
```
|
||||
ERROR: 3 consumer integration tests failed with v2 adapter
|
||||
- AcmeApp: Expected username field, received null
|
||||
- BetaCorp: Response schema validation failed
|
||||
- GammaInc: Authentication token format mismatch
|
||||
|
||||
Solution:
|
||||
1. Review consumer test failures: npm run test:consumers
|
||||
2. Update adapters to handle edge cases
|
||||
3. Contact affected consumers for migration coordination
|
||||
4. Use --traffic-split=0 for dark launch until resolved
|
||||
```
|
||||
|
||||
**Error: Database migration rollback required**
|
||||
```
|
||||
ERROR: Migration script 003_add_foreign_keys.sql failed
|
||||
Constraint violation: user_addresses.user_id references missing users
|
||||
|
||||
Solution:
|
||||
1. Execute rollback: psql -f 003_rollback.sql
|
||||
2. Fix data inconsistencies: npm run data:cleanup
|
||||
3. Re-run migration with --validate-data flag
|
||||
4. Check migration logs: tail -f logs/migration.log
|
||||
```
|
||||
|
||||
**Error: Traffic spike indicating rollback needed**
|
||||
```
|
||||
WARNING: v1 traffic increased from 10% to 45% in 5 minutes
|
||||
Possible rollback from consumers due to v2 issues
|
||||
|
||||
Solution:
|
||||
1. Check v2 error rates: /metrics/api/v2/errors
|
||||
2. Review recent v2 deployment logs
|
||||
3. Pause traffic shift: kubectl patch deployment api-gateway --type=json -p='[{"op":"replace","path":"/spec/template/spec/containers/0/env/1/value","value":"10"}]'
|
||||
4. Investigate root cause before continuing rollout
|
||||
```
|
||||
|
||||
**Error: Incompatible schema versions in distributed system**
|
||||
```
|
||||
ERROR: Service A running v2 schema, Service B still on v1
|
||||
Message deserialization failed: unknown field 'profile'
|
||||
|
||||
Solution:
|
||||
1. Implement schema registry: npm install @kafkajs/schema-registry
|
||||
2. Use forward-compatible schemas with optional fields
|
||||
3. Deploy with version negotiation: --enable-version-negotiation
|
||||
4. Coordinate deployment order across services
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
**DO:**
|
||||
- Start with comprehensive API usage analysis before planning migration
|
||||
- Generate automated compatibility tests for all breaking changes
|
||||
- Implement feature flags for granular control over version activation
|
||||
- Use semantic versioning and clearly communicate breaking changes
|
||||
- Monitor error rates and latency separately for each API version
|
||||
- Maintain detailed migration documentation with timelines
|
||||
- Create rollback procedures and test them before deployment
|
||||
- Send deprecation warnings months before sunset dates
|
||||
- Provide sandbox environments for consumers to test migrations
|
||||
- Use API gateways for centralized version routing and monitoring
|
||||
|
||||
**DON'T:**
|
||||
- Deploy breaking changes without backward compatibility period
|
||||
- Remove deprecated endpoints immediately after new version launch
|
||||
- Skip comprehensive testing of compatibility layers under load
|
||||
- Assume all consumers will migrate quickly (expect stragglers)
|
||||
- Make multiple major version jumps simultaneously
|
||||
- Ignore consumer feedback during migration planning
|
||||
- Deploy migrations during peak traffic periods
|
||||
- Use hard deadlines without grace period extensions
|
||||
- Forget to version your database schema along with API
|
||||
- Mix multiple unrelated breaking changes in single version
|
||||
|
||||
**TIPS:**
- Use OpenAPI diff tools to automatically detect breaking changes
- Implement consumer registry to track who uses which endpoints
- Add X-API-Version header to all responses for debugging
- Create automated alerts for unexpected version usage patterns
- Use GraphQL deprecation directives for gradual field removal
- Maintain change logs with migration impact assessments
- Build compatibility dashboards showing adoption rates
- Provide SDK updates simultaneously with API versions
- Consider GraphQL for more flexible schema evolution
- Use contract testing to verify consumer compatibility

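The X-API-Version tip above is cheap to implement. A sketch assuming an Express-style middleware signature:

```javascript
// Tag every response with the version that served it, which makes
// per-version debugging and dashboard filtering straightforward.
function apiVersionHeader(version) {
  return (req, res, next) => {
    res.set("X-API-Version", version);
    next();
  };
}

// Usage (illustrative):
//   app.use("/api/v1", apiVersionHeader("v1"), v1Router);
//   app.use("/api/v2", apiVersionHeader("v2"), v2Router);
```
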
## Related Commands
|
||||
|
||||
- `/api-contract-generator` - Generate OpenAPI specs from code
|
||||
- `/api-versioning-manager` - Manage multiple API versions
|
||||
- `/api-documentation-generator` - Update docs for new versions
|
||||
- `/api-monitoring-dashboard` - Track version adoption metrics
|
||||
- `/api-security-scanner` - Audit security across versions
|
||||
- `/api-load-tester` - Performance test both versions
|
||||
- `/api-sdk-generator` - Create client libraries for v2
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
**Migration Performance Impact:**
|
||||
- Compatibility adapters add 5-20ms latency per request
|
||||
- Dual-write patterns during migration can double database load
|
||||
- Traffic splitting requires load balancer state management
|
||||
- Monitoring overhead increases with multiple active versions
|
||||
|
||||
**Optimization Strategies:**
- Cache adapter transformation results for identical requests
- Use asynchronous migration for non-critical data changes
- Implement read-through caches for backward compatibility lookups
- Batch database migrations during low-traffic windows
- Pre-warm caches before traffic switch to new version
- Use connection pooling to handle parallel version load
- Consider edge caching for frequently accessed compatibility transformations

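The adapter-result caching suggested above can be a thin memoization wrapper. A sketch with a bounded in-memory cache; key derivation via JSON serialization is an assumption that only suits small, deterministic payloads:

```javascript
// Memoized adapter sketch: cache transformation results for identical inputs.
function cachedTransform(transformFn, maxEntries = 1000) {
  const cache = new Map();
  return (payload) => {
    const key = JSON.stringify(payload);
    if (cache.has(key)) return cache.get(key);
    const result = transformFn(payload);
    if (cache.size >= maxEntries) {
      cache.delete(cache.keys().next().value); // evict the oldest insertion
    }
    cache.set(key, result);
    return result;
  };
}

// Usage (illustrative): wrap the adapter from Example 1.
//   const transform = cachedTransform(adapter.transform.bind(adapter));
```
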
**Capacity Planning:**
|
||||
- Expect 20-30% overhead during dual-version support period
|
||||
- Plan for 2x database capacity during migration window
|
||||
- Allocate extra API gateway resources for routing logic
|
||||
- Monitor memory usage in compatibility layer services
|
||||
- Scale horizontally rather than vertically for version isolation
|
||||
|
||||
## Security Considerations
|
||||
|
||||
**Version Transition Security:**
|
||||
- Audit authentication mechanisms for compatibility breaks
|
||||
- Verify authorization rules apply consistently across versions
|
||||
- Scan for security vulnerabilities in compatibility adapters
|
||||
- Review CORS policies for new version endpoints
|
||||
- Update API keys and tokens if authentication changes
|
||||
- Test rate limiting separately per API version
|
||||
- Ensure TLS/SSL certificates cover new version domains
|
||||
- Validate input sanitization in transformation layers
|
||||
- Check for data leakage in error messages across versions
|
||||
- Verify that audit logging captures version information
|
||||
|
||||
**Security Checklist:**
- [ ] Authentication backward compatible or migration path clear
- [ ] Authorization policies tested with both version payloads
- [ ] Sensitive data transformations don't expose information
- [ ] Rate limiting prevents abuse of compatibility layers
- [ ] API key revocation works across all versions
- [ ] Security headers consistent across versions
- [ ] OWASP Top 10 validated for new endpoints
- [ ] Penetration testing completed for v2 before public launch

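Rate limiting per version (fourth checklist item) keeps the more expensive compatibility path from being abused. A fixed-window sketch with assumed limits; a production setup would rely on the gateway's rate limiter:

```javascript
// Per-version rate limiting sketch: the legacy version gets a tighter budget
// because every v1 request also pays for the adapter transformation.
const LIMITS = { v1: 100, v2: 500 }; // requests per minute per client (assumed values)
const windows = new Map();

function allowRequest(clientId, version, now = Date.now()) {
  const minute = Math.floor(now / 60000);
  const key = `${clientId}:${version}:${minute}`;
  const count = (windows.get(key) || 0) + 1;
  windows.set(key, count);
  return count <= (LIMITS[version] || LIMITS.v2);
}

console.log(allowRequest("acme-app", "v1")); // true until the v1 budget is exhausted
```
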
## Troubleshooting Guide
|
||||
|
||||
**Issue: Consumers report intermittent failures after migration**
|
||||
- Check load balancer health checks for version endpoints
|
||||
- Verify DNS propagation completed for new version domains
|
||||
- Review session affinity settings (sticky sessions may cause issues)
|
||||
- Confirm database connection pools sized for dual version load
|
||||
- Check for race conditions in data migration scripts
|
||||
|
||||
**Issue: Adapter performance degrading over time**
|
||||
- Monitor adapter service memory for leaks
|
||||
- Check for unbounded cache growth in transformation layer
|
||||
- Review database query performance for compatibility lookups
|
||||
- Consider pre-computing common transformations
|
||||
- Profile adapter code for inefficient object mapping
|
||||
|
||||
**Issue: Version metrics not appearing in dashboard**
|
||||
- Verify X-API-Version header added to all responses
|
||||
- Check logging configuration captures version metadata
|
||||
- Confirm monitoring agents updated to track new version
|
||||
- Review metric aggregation rules in monitoring system
|
||||
- Ensure API gateway properly tags requests by version
|
||||
|
||||
**Issue: Rollback triggered unexpectedly**
|
||||
- Review error rate thresholds (may be too sensitive)
|
||||
- Check if external service outages affected v2 only
|
||||
- Verify rollback threshold uses appropriate time windows
|
||||
- Investigate false positives in health checks
|
||||
- Review deployment timing vs. traffic pattern changes
|
||||
|
||||
## Version History
|
||||
|
||||
**v1.0.0** (2024-01-15)
|
||||
- Initial release with REST API migration support
|
||||
- Basic compatibility adapter generation
|
||||
- Canary deployment strategy
|
||||
- OpenAPI 3.0 diff analysis
|
||||
|
||||
**v1.1.0** (2024-02-10)
|
||||
- Added GraphQL schema evolution support
|
||||
- Implemented automatic deprecation warning injection
|
||||
- Enhanced consumer notification system
|
||||
- Added rollback automation based on error thresholds
|
||||
|
||||
**v1.2.0** (2024-03-05)
|
||||
- gRPC service versioning support with Envoy integration
|
||||
- Blue-green deployment strategy option
|
||||
- Database schema migration with automated rollback
|
||||
- Consumer registry integration for impact analysis
|
||||
|
||||
**v1.3.0** (2024-04-20)
|
||||
- Feature flag integration for gradual rollout control
|
||||
- Enhanced compatibility testing framework
|
||||
- Performance optimization for adapter transformations
|
||||
- Multi-region migration coordination
|
||||
|
||||
**v2.0.0** (2024-06-15)
|
||||
- Complete rewrite of adapter generation engine
|
||||
- Support for complex data transformation scenarios
|
||||
- Integration with major API gateway platforms
|
||||
- Real-time migration dashboard with adoption metrics
|
||||
- Automated consumer SDK generation for new versions
|
||||
|
||||
**v2.1.0** (2024-08-30) - Current
|
||||
- AI-powered breaking change impact analysis
|
||||
- Automated migration guide generation
|
||||
- Enhanced security scanning for version transitions
|
||||
- Support for WebSocket and Server-Sent Events migration
|
||||
- Contract testing automation across API versions
|
||||
101
plugin.lock.json
Normal file
@@ -0,0 +1,101 @@
|
||||
{
|
||||
"$schema": "internal://schemas/plugin.lock.v1.json",
|
||||
"pluginId": "gh:jeremylongshore/claude-code-plugins-plus:plugins/api-development/api-migration-tool",
|
||||
"normalized": {
|
||||
"repo": null,
|
||||
"ref": "refs/tags/v20251128.0",
|
||||
"commit": "e8cbbce825062f040036c0d1d3221a79247d5cea",
|
||||
"treeHash": "aa83fcdd0b5e226bb75e3b798984d4309cfa66a7ce3f79ae4ddd9fd4a741ae2b",
|
||||
"generatedAt": "2025-11-28T10:18:07.092851Z",
|
||||
"toolVersion": "publish_plugins.py@0.2.0"
|
||||
},
|
||||
"origin": {
|
||||
"remote": "git@github.com:zhongweili/42plugin-data.git",
|
||||
"branch": "master",
|
||||
"commit": "aa1497ed0949fd50e99e70d6324a29c5b34f9390",
|
||||
"repoRoot": "/Users/zhongweili/projects/openmind/42plugin-data"
|
||||
},
|
||||
"manifest": {
|
||||
"name": "api-migration-tool",
|
||||
"description": "Migrate APIs between versions with backward compatibility",
|
||||
"version": "1.0.0"
|
||||
},
|
||||
"content": {
|
||||
"files": [
|
||||
{
|
||||
"path": "README.md",
|
||||
"sha256": "73370b0b81e52049558cebc36bd7d9f1bf2f4e1ae2652cf5def4186647c2afb6"
|
||||
},
|
||||
{
|
||||
"path": ".claude-plugin/plugin.json",
|
||||
"sha256": "f33bed1b3346974837c0c3a52c1d263c208222ffa78c41ba33d5830031405142"
|
||||
},
|
||||
{
|
||||
"path": "commands/migrate-api.md",
|
||||
"sha256": "78bacf28dc56b1cc05194fafa76bafdb7c305c77f3bc945a2bdca85270848052"
|
||||
},
|
||||
{
|
||||
"path": "skills/skill-adapter/references/examples.md",
|
||||
"sha256": "922bbc3c4ebf38b76f515b5c1998ebde6bf902233e00e2c5a0e9176f975a7572"
|
||||
},
|
||||
{
|
||||
"path": "skills/skill-adapter/references/best-practices.md",
|
||||
"sha256": "c8f32b3566252f50daacd346d7045a1060c718ef5cfb07c55a0f2dec5f1fb39e"
|
||||
},
|
||||
{
|
||||
"path": "skills/skill-adapter/references/README.md",
|
||||
"sha256": "dbee96dafe5e129d221033a44de87fd4104f364605c83f4d2d358a64abeb1eca"
|
||||
},
|
||||
{
|
||||
"path": "skills/skill-adapter/scripts/helper-template.sh",
|
||||
"sha256": "0881d5660a8a7045550d09ae0acc15642c24b70de6f08808120f47f86ccdf077"
|
||||
},
|
||||
{
|
||||
"path": "skills/skill-adapter/scripts/validation.sh",
|
||||
"sha256": "92551a29a7f512d2036e4f1fb46c2a3dc6bff0f7dde4a9f699533e446db48502"
|
||||
},
|
||||
{
|
||||
"path": "skills/skill-adapter/scripts/README.md",
|
||||
"sha256": "a4e1456919ab40663a2367b3dff2ebf0713f2f311cb1e3568f3fa28204235222"
|
||||
},
|
||||
{
|
||||
"path": "skills/skill-adapter/assets/test-data.json",
|
||||
"sha256": "ac17dca3d6e253a5f39f2a2f1b388e5146043756b05d9ce7ac53a0042eee139d"
|
||||
},
|
||||
{
|
||||
"path": "skills/skill-adapter/assets/example_api_definition_v1.json",
|
||||
"sha256": "017d4434a116e9c2d597959bd15854f7f329be8c873d5bc98ba569ecf2220b75"
|
||||
},
|
||||
{
|
||||
"path": "skills/skill-adapter/assets/example_api_definition_v2.json",
|
||||
"sha256": "7dcff4dd027fb1722c34c1eb29f4df376fa6a2d3678c67a3cf75b0c0f8f859d6"
|
||||
},
|
||||
{
|
||||
"path": "skills/skill-adapter/assets/README.md",
|
||||
"sha256": "4d7648bf3741336d8ab284c6edf1c79f8a769f9d97d28ed89a49f8f5ea01be94"
|
||||
},
|
||||
{
|
||||
"path": "skills/skill-adapter/assets/config_template.json",
|
||||
"sha256": "439fa002d300ee0be73d78b5c34a544f649ee92830280eb61347b8e073e849ea"
|
||||
},
|
||||
{
|
||||
"path": "skills/skill-adapter/assets/migration_template.py",
|
||||
"sha256": "e7140ffa020869b1820e008ddba3db7cccd98f1fe4af01261a726d344c4dedcd"
|
||||
},
|
||||
{
|
||||
"path": "skills/skill-adapter/assets/skill-schema.json",
|
||||
"sha256": "f5639ba823a24c9ac4fb21444c0717b7aefde1a4993682897f5bf544f863c2cd"
|
||||
},
|
||||
{
|
||||
"path": "skills/skill-adapter/assets/config-template.json",
|
||||
"sha256": "0c2ba33d2d3c5ccb266c0848fc43caa68a2aa6a80ff315d4b378352711f83e1c"
|
||||
}
|
||||
],
|
||||
"dirSha256": "aa83fcdd0b5e226bb75e3b798984d4309cfa66a7ce3f79ae4ddd9fd4a741ae2b"
|
||||
},
|
||||
"security": {
|
||||
"scannedAt": null,
|
||||
"scannerVersion": null,
|
||||
"flags": []
|
||||
}
|
||||
}
|
||||
8
skills/skill-adapter/assets/README.md
Normal file
@@ -0,0 +1,8 @@
|
||||
# Assets
|
||||
|
||||
Bundled resources for api-migration-tool skill
|
||||
|
||||
- [ ] migration_template.py: Template for creating API migration scripts.
|
||||
- [ ] config_template.json: Template for API migration configuration files.
|
||||
- [ ] example_api_definition_v1.json: Example API definition for version 1.
|
||||
- [ ] example_api_definition_v2.json: Example API definition for version 2.
|
||||
32
skills/skill-adapter/assets/config-template.json
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"skill": {
|
||||
"name": "skill-name",
|
||||
"version": "1.0.0",
|
||||
"enabled": true,
|
||||
"settings": {
|
||||
"verbose": false,
|
||||
"autoActivate": true,
|
||||
"toolRestrictions": true
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"keywords": [
|
||||
"example-trigger-1",
|
||||
"example-trigger-2"
|
||||
],
|
||||
"patterns": []
|
||||
},
|
||||
"tools": {
|
||||
"allowed": [
|
||||
"Read",
|
||||
"Grep",
|
||||
"Bash"
|
||||
],
|
||||
"restricted": []
|
||||
},
|
||||
"metadata": {
|
||||
"author": "Plugin Author",
|
||||
"category": "general",
|
||||
"tags": []
|
||||
}
|
||||
}
|
||||
113
skills/skill-adapter/assets/config_template.json
Normal file
@@ -0,0 +1,113 @@
|
||||
{
|
||||
"_comment": "API Migration Configuration Template",
|
||||
"source_api": {
|
||||
"_comment": "Details of the API you are migrating *from*",
|
||||
"version": "v1",
|
||||
"base_url": "https://api.example.com/v1",
|
||||
"authentication": {
|
||||
"type": "apiKey",
|
||||
"_comment": "Supported types: apiKey, oauth2, none",
|
||||
"apiKey": {
|
||||
"name": "X-API-Key",
|
||||
"in": "header"
|
||||
}
|
||||
},
|
||||
"endpoints": [
|
||||
{
|
||||
"path": "/users/{user_id}",
|
||||
"method": "GET",
|
||||
"description": "Retrieve user details by ID",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "user_id",
|
||||
"in": "path",
|
||||
"type": "integer",
|
||||
"required": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"path": "/products",
|
||||
"method": "GET",
|
||||
"description": "Retrieve a list of products",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "limit",
|
||||
"in": "query",
|
||||
"type": "integer",
|
||||
"description": "Maximum number of products to return",
|
||||
"default": 10
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"target_api": {
|
||||
"_comment": "Details of the API you are migrating *to*",
|
||||
"version": "v2",
|
||||
"base_url": "https://api.example.com/v2",
|
||||
"authentication": {
|
||||
"type": "oauth2",
|
||||
"flow": "authorizationCode",
|
||||
"authorizationUrl": "https://example.com/oauth/authorize",
|
||||
"tokenUrl": "https://example.com/oauth/token",
|
||||
"scopes": ["read", "write"]
|
||||
},
|
||||
"endpoints": [
|
||||
{
|
||||
"path": "/users/{userId}",
|
||||
"method": "GET",
|
||||
"description": "Retrieve user details by ID",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "userId",
|
||||
"in": "path",
|
||||
"type": "string",
|
||||
"required": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"path": "/products",
|
||||
"method": "GET",
|
||||
"description": "Retrieve a list of products",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "maxResults",
|
||||
"in": "query",
|
||||
"type": "integer",
|
||||
"description": "Maximum number of products to return",
|
||||
"default": 10
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"migration_strategy": {
|
||||
"_comment": "Configuration for how to handle breaking changes",
|
||||
"parameter_mapping": [
|
||||
{
|
||||
"source_parameter": "user_id",
|
||||
"target_parameter": "userId",
|
||||
"transformation": "toString"
|
||||
},
|
||||
{
|
||||
"source_parameter": "limit",
|
||||
"target_parameter": "maxResults"
|
||||
}
|
||||
],
|
||||
"data_mapping": {
|
||||
"_comment": "Example of how to map data fields (if needed). Leave empty if not required.",
|
||||
"user_name": "full_name"
|
||||
},
|
||||
"error_handling": {
|
||||
"_comment": "How to handle errors during migration",
|
||||
"strategy": "fallback",
|
||||
"_comment": "Options: fallback, retry, abort",
|
||||
"fallback_response": {
|
||||
"status_code": 500,
|
||||
"message": "Error during migration"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
240
skills/skill-adapter/assets/example_api_definition_v1.json
Normal file
@@ -0,0 +1,240 @@
|
||||
{
|
||||
"_comment": "Example API definition for version 1",
|
||||
"api_name": "User Management API",
|
||||
"version": "1.0",
|
||||
"description": "API for managing user accounts and profiles.",
|
||||
"base_path": "/api/v1",
|
||||
"endpoints": [
|
||||
{
|
||||
"path": "/users",
|
||||
"method": "GET",
|
||||
"description": "Retrieve a list of all users.",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "limit",
|
||||
"type": "integer",
|
||||
"description": "Maximum number of users to return.",
|
||||
"required": false,
|
||||
"default": 20
|
||||
},
|
||||
{
|
||||
"name": "offset",
|
||||
"type": "integer",
|
||||
"description": "Offset for pagination.",
|
||||
"required": false,
|
||||
"default": 0
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/User"
|
||||
}
|
||||
}
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal server error"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/users/{user_id}",
|
||||
"method": "GET",
|
||||
"description": "Retrieve a specific user by ID.",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "user_id",
|
||||
"type": "string",
|
||||
"description": "ID of the user to retrieve.",
|
||||
"required": true,
|
||||
"in": "path"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/User"
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "User not found"
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal server error"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/users",
|
||||
"method": "POST",
|
||||
"description": "Create a new user.",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "user",
|
||||
"type": "object",
|
||||
"description": "User object to create.",
|
||||
"required": true,
|
||||
"in": "body",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/CreateUserRequest"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "User created successfully",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/User"
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Invalid request body"
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal server error"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/users/{user_id}",
|
||||
"method": "PUT",
|
||||
"description": "Update an existing user.",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "user_id",
|
||||
"type": "string",
|
||||
"description": "ID of the user to update.",
|
||||
"required": true,
|
||||
"in": "path"
|
||||
},
|
||||
{
|
||||
"name": "user",
|
||||
"type": "object",
|
||||
"description": "User object with updated information.",
|
||||
"required": true,
|
||||
"in": "body",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/UpdateUserRequest"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "User updated successfully",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/User"
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Invalid request body"
|
||||
},
|
||||
"404": {
|
||||
"description": "User not found"
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal server error"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/users/{user_id}",
|
||||
"method": "DELETE",
|
||||
"description": "Delete a user.",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "user_id",
|
||||
"type": "string",
|
||||
"description": "ID of the user to delete.",
|
||||
"required": true,
|
||||
"in": "path"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "User deleted successfully"
|
||||
},
|
||||
"404": {
|
||||
"description": "User not found"
|
||||
},
|
||||
"500": {
|
||||
"description": "Internal server error"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"definitions": {
|
||||
"User": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"description": "Unique identifier for the user."
|
||||
},
|
||||
"username": {
|
||||
"type": "string",
|
||||
"description": "Username of the user."
|
||||
},
|
||||
"email": {
|
||||
"type": "string",
|
||||
"description": "Email address of the user."
|
||||
},
|
||||
"created_at": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"description": "Timestamp of when the user was created."
|
||||
},
|
||||
"updated_at": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"description": "Timestamp of when the user was last updated."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"username",
|
||||
"email",
|
||||
"created_at",
|
||||
"updated_at"
|
||||
]
|
||||
},
|
||||
"CreateUserRequest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"username": {
|
||||
"type": "string",
|
||||
"description": "Username of the user."
|
||||
},
|
||||
"email": {
|
||||
"type": "string",
|
||||
"description": "Email address of the user."
|
||||
},
|
||||
"password": {
|
||||
"type": "string",
|
||||
"description": "Password for the user."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"username",
|
||||
"email",
|
||||
"password"
|
||||
]
|
||||
},
|
||||
"UpdateUserRequest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"username": {
|
||||
"type": "string",
|
||||
"description": "Username of the user."
|
||||
},
|
||||
"email": {
|
||||
"type": "string",
|
||||
"description": "Email address of the user."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
138
skills/skill-adapter/assets/example_api_definition_v2.json
Normal file
@@ -0,0 +1,138 @@
|
||||
{
|
||||
"_comment": "Example API Definition - Version 2.0",
|
||||
"api_name": "MyAwesomeAPI",
|
||||
"version": "2.0",
|
||||
"description": "This is the second version of the MyAwesomeAPI, introducing enhanced features and improved performance.",
|
||||
"base_url": "https://api.example.com/v2",
|
||||
"endpoints": [
|
||||
{
|
||||
"_comment": "Endpoint for retrieving user data",
|
||||
"path": "/users/{user_id}",
|
||||
"method": "GET",
|
||||
"description": "Retrieves user information based on the provided user ID.",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "user_id",
|
||||
"in": "path",
|
||||
"description": "The unique identifier of the user.",
|
||||
"required": true,
|
||||
"type": "integer"
|
||||
},
|
||||
{
|
||||
"name": "include_details",
|
||||
"in": "query",
|
||||
"description": "Whether to include additional user details (e.g., address, phone number).",
|
||||
"required": false,
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful retrieval of user data.",
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "integer",
|
||||
"description": "The user's unique identifier."
|
||||
},
|
||||
"username": {
|
||||
"type": "string",
|
||||
"description": "The user's username."
|
||||
},
|
||||
"email": {
|
||||
"type": "string",
|
||||
"description": "The user's email address."
|
||||
},
|
||||
"first_name": {
|
||||
"type": "string",
|
||||
"description": "The user's first name."
|
||||
},
|
||||
"last_name": {
|
||||
"type": "string",
|
||||
"description": "The user's last name."
|
||||
},
|
||||
"address": {
|
||||
"type": "string",
|
||||
"description": "The user's address (optional, only included if include_details=true)"
|
||||
},
|
||||
"phone_number": {
|
||||
"type": "string",
|
||||
"description": "The user's phone number (optional, only included if include_details=true)"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "User not found."
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"_comment": "Endpoint for creating a new user",
|
||||
"path": "/users",
|
||||
"method": "POST",
|
||||
"description": "Creates a new user account.",
|
||||
"requestBody": {
|
||||
"description": "User data to create a new account.",
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"username": {
|
||||
"type": "string",
|
||||
"description": "The desired username.",
|
||||
"required": true
|
||||
},
|
||||
"password": {
|
||||
"type": "string",
|
||||
"description": "The desired password.",
|
||||
"required": true
|
||||
},
|
||||
"email": {
|
||||
"type": "string",
|
||||
"description": "The user's email address.",
|
||||
"required": true
|
||||
},
|
||||
"first_name": {
|
||||
"type": "string",
|
||||
"description": "The user's first name.",
|
||||
"required": false
|
||||
},
|
||||
"last_name": {
|
||||
"type": "string",
|
||||
"description": "The user's last name.",
|
||||
"required": false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "User account created successfully.",
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "integer",
|
||||
"description": "The newly created user's unique identifier."
|
||||
},
|
||||
"username": {
|
||||
"type": "string",
|
||||
"description": "The user's username."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Invalid request body or validation errors."
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
156
skills/skill-adapter/assets/migration_template.py
Normal file
@@ -0,0 +1,156 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
Template for creating API migration scripts.
|
||||
|
||||
This module provides a template for creating scripts to migrate APIs
|
||||
between different versions while maintaining backward compatibility.
|
||||
It includes functions for loading data, transforming data, and saving data
|
||||
in the new format, along with error handling and logging.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(
|
||||
level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
|
||||
)
|
||||
|
||||
|
||||
class MigrationError(Exception):
|
||||
"""Custom exception for migration-related errors."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def load_data(input_file):
|
||||
"""
|
||||
Loads data from a JSON file.
|
||||
|
||||
Args:
|
||||
input_file (str): Path to the input JSON file.
|
||||
|
||||
Returns:
|
||||
dict: The loaded data as a dictionary.
|
||||
|
||||
Raises:
|
||||
MigrationError: If the file cannot be opened or parsed.
|
||||
"""
|
||||
try:
|
||||
with open(input_file, "r") as f:
|
||||
data = json.load(f)
|
||||
return data
|
||||
except FileNotFoundError:
|
||||
raise MigrationError(f"Input file not found: {input_file}")
|
||||
except json.JSONDecodeError:
|
||||
raise MigrationError(f"Invalid JSON format in file: {input_file}")
|
||||
except Exception as e:
|
||||
raise MigrationError(f"Error loading data from {input_file}: {e}")
|
||||
|
||||
|
||||
def transform_data(data):
|
||||
"""
|
||||
Transforms the data from the old format to the new format.
|
||||
|
||||
This is where the core migration logic should be implemented.
|
||||
|
||||
Args:
|
||||
data (dict): The data in the old format.
|
||||
|
||||
Returns:
|
||||
dict: The data in the new format.
|
||||
|
||||
Raises:
|
||||
MigrationError: If there is an error during the transformation.
|
||||
"""
|
||||
try:
|
||||
# Example transformation: Add a version field
|
||||
transformed_data = data.copy()
|
||||
transformed_data["version"] = "2.0" # Example version
|
||||
# Add your transformation logic here
|
||||
return transformed_data
|
||||
except Exception as e:
|
||||
raise MigrationError(f"Error transforming data: {e}")
|
||||
|
||||
|
||||
def save_data(data, output_file):
|
||||
"""
|
||||
Saves the transformed data to a JSON file.
|
||||
|
||||
Args:
|
||||
data (dict): The transformed data.
|
||||
output_file (str): Path to the output JSON file.
|
||||
|
||||
Raises:
|
||||
MigrationError: If the file cannot be written to.
|
||||
"""
|
||||
try:
|
||||
with open(output_file, "w") as f:
|
||||
json.dump(data, f, indent=4)
|
||||
except Exception as e:
|
||||
raise MigrationError(f"Error saving data to {output_file}: {e}")
|
||||
|
||||
|
||||
def migrate_api(input_file, output_file):
|
||||
"""
|
||||
Migrates the API data from the old format to the new format.
|
||||
|
||||
Args:
|
||||
input_file (str): Path to the input JSON file.
|
||||
output_file (str): Path to the output JSON file.
|
||||
"""
|
||||
try:
|
||||
logging.info(f"Loading data from {input_file}...")
|
||||
data = load_data(input_file)
|
||||
|
||||
logging.info("Transforming data...")
|
||||
transformed_data = transform_data(data)
|
||||
|
||||
logging.info(f"Saving data to {output_file}...")
|
||||
save_data(transformed_data, output_file)
|
||||
|
||||
logging.info("API migration completed successfully.")
|
||||
|
||||
except MigrationError as e:
|
||||
logging.error(f"API migration failed: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def main():
|
||||
"""
|
||||
Main function to parse arguments and run the migration.
|
||||
"""
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Migrate API data between versions."
|
||||
)
|
||||
parser.add_argument(
|
||||
"input_file", help="Path to the input JSON file (old format)."
|
||||
)
|
||||
parser.add_argument(
|
||||
"output_file", help="Path to the output JSON file (new format)."
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
migrate_api(args.input_file, args.output_file)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Example usage:
|
||||
# Create a dummy input file
|
||||
if not os.path.exists("input.json"):
|
||||
with open("input.json", "w") as f:
|
||||
json.dump({"name": "Example API", "version": "1.0"}, f, indent=4)
|
||||
|
||||
# Run the migration
|
||||
try:
|
||||
main()
|
||||
except SystemExit:
|
||||
# Handle argparse exit (e.g., when -h is used)
|
||||
pass
|
||||
except Exception as e:
|
||||
logging.error(f"An unexpected error occurred: {e}")
|
||||
sys.exit(1)
|
||||
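The `transform_data` stub above only stamps a `version` field. A fuller v1-to-v2 transformation would typically rename fields and restructure nested objects; the sketch below is purely illustrative — the `user_name`, `email`, and `phone` field names are hypothetical and not taken from this plugin.

```python
# Hypothetical v1 -> v2 transformation sketch (field names are illustrative,
# not part of this plugin). It could replace the body of transform_data().
def transform_user_record(record: dict) -> dict:
    """Rename flat v1 fields into a nested v2 layout."""
    migrated = {
        "version": "2.0",
        "name": record.get("user_name", record.get("name")),
        # v2 groups contact details under a single object.
        "contact": {
            "email": record.get("email"),
            "phone": record.get("phone"),
        },
    }
    # Preserve any fields the mapping does not know about.
    known = {"user_name", "name", "email", "phone", "version"}
    migrated["extras"] = {k: v for k, v in record.items() if k not in known}
    return migrated
```

Calling something like this per record from `transform_data` keeps the load/save handling above unchanged.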
28
skills/skill-adapter/assets/skill-schema.json
Normal file
@@ -0,0 +1,28 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "Claude Skill Configuration",
  "type": "object",
  "required": ["name", "description"],
  "properties": {
    "name": {
      "type": "string",
      "pattern": "^[a-z0-9-]+$",
      "maxLength": 64,
      "description": "Skill identifier (lowercase, hyphens only)"
    },
    "description": {
      "type": "string",
      "maxLength": 1024,
      "description": "What the skill does and when to use it"
    },
    "allowed-tools": {
      "type": "string",
      "description": "Comma-separated list of allowed tools"
    },
    "version": {
      "type": "string",
      "pattern": "^\\d+\\.\\d+\\.\\d+$",
      "description": "Semantic version (x.y.z)"
    }
  }
}
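The schema above describes the SKILL.md frontmatter fields. A minimal validation sketch, assuming the third-party `jsonschema` package and a frontmatter block already parsed into a plain dict (neither assumption is part of this plugin):

```python
import json

from jsonschema import ValidationError, validate  # assumed dependency

# Load the bundled schema and a candidate skill configuration.
with open("skills/skill-adapter/assets/skill-schema.json") as f:
    schema = json.load(f)

# Hypothetical frontmatter values, shown only to exercise the schema.
candidate = {
    "name": "skill-adapter",
    "description": "Example description, kept well under 1024 characters.",
    "allowed-tools": "Read, Grep, Write",
    "version": "1.0.0",
}

try:
    validate(instance=candidate, schema=schema)
    print("✅ Skill configuration matches the schema")
except ValidationError as e:
    print(f"❌ Invalid skill configuration: {e.message}")
```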
27
skills/skill-adapter/assets/test-data.json
Normal file
@@ -0,0 +1,27 @@
{
  "testCases": [
    {
      "name": "Basic activation test",
      "input": "trigger phrase example",
      "expected": {
        "activated": true,
        "toolsUsed": ["Read", "Grep"],
        "success": true
      }
    },
    {
      "name": "Complex workflow test",
      "input": "multi-step trigger example",
      "expected": {
        "activated": true,
        "steps": 3,
        "toolsUsed": ["Read", "Write", "Bash"],
        "success": true
      }
    }
  ],
  "fixtures": {
    "sampleInput": "example data",
    "expectedOutput": "processed result"
  }
}
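The test cases above are placeholders, but their shape suggests how a small harness could consume them. A sketch under the assumption that some `run_skill` callable exists to exercise the skill and report tool usage — that callable is hypothetical, not part of this repository:

```python
import json


def run_skill(prompt: str) -> dict:
    """Stand-in for whatever actually invokes the skill under test."""
    # A real harness would send the prompt to Claude and record which
    # tools were used and whether the run succeeded.
    return {"activated": True, "toolsUsed": [], "success": True}


with open("skills/skill-adapter/assets/test-data.json") as f:
    suite = json.load(f)

for case in suite["testCases"]:
    result = run_skill(case["input"])
    expected = case["expected"]
    # Compare only the keys the test case declares expectations for.
    mismatches = {k: (expected[k], result.get(k))
                  for k in expected if result.get(k) != expected[k]}
    status = "PASS" if not mismatches else f"FAIL {mismatches}"
    print(f"{case['name']}: {status}")
```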
8
skills/skill-adapter/references/README.md
Normal file
@@ -0,0 +1,8 @@
# References

Bundled resources for api-migration-tool skill

- [ ] api_migration_best_practices.md: Detailed guide on API migration strategies, backward compatibility, and versioning.
- [ ] api_design_principles.md: Core principles for designing APIs that are easy to migrate and maintain.
- [ ] api_versioning_strategies.md: Different approaches to API versioning (e.g., semantic versioning, URL-based versioning).
- [ ] example_api_schemas.md: Examples of API schemas and data structures for different API versions.
69
skills/skill-adapter/references/best-practices.md
Normal file
@@ -0,0 +1,69 @@
# Skill Best Practices

Guidelines for optimal skill usage and development.

## For Users

### Activation Best Practices

1. **Use Clear Trigger Phrases**
   - Match phrases from skill description
   - Be specific about intent
   - Provide necessary context

2. **Provide Sufficient Context**
   - Include relevant file paths
   - Specify scope of analysis
   - Mention any constraints

3. **Understand Tool Permissions**
   - Check allowed-tools in frontmatter
   - Know what the skill can/cannot do
   - Request appropriate actions

### Workflow Optimization

- Start with simple requests
- Build up to complex workflows
- Verify each step before proceeding
- Use skill consistently for related tasks

## For Developers

### Skill Development Guidelines

1. **Clear Descriptions**
   - Include explicit trigger phrases
   - Document all capabilities
   - Specify limitations

2. **Proper Tool Permissions**
   - Use minimal necessary tools
   - Document security implications
   - Test with restricted tools

3. **Comprehensive Documentation**
   - Provide usage examples
   - Document common pitfalls
   - Include troubleshooting guide

### Maintenance

- Keep version updated
- Test after tool updates
- Monitor user feedback
- Iterate on descriptions

## Performance Tips

- Scope skills to specific domains
- Avoid overlapping trigger phrases
- Keep descriptions under 1024 chars (checked mechanically in the sketch after this list)
- Test activation reliability
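Two of these tips — the 1024-character description limit from skill-schema.json and non-overlapping trigger phrases — can be checked mechanically. A rough sketch, assuming each SKILL.md keeps its description on a single `description:` frontmatter line; the shared-word heuristic is only a crude stand-in for real overlap detection:

```python
import glob
import re

MAX_DESCRIPTION = 1024  # mirrors maxLength in skill-schema.json


def read_description(path: str) -> str:
    """Pull the description line out of a SKILL.md frontmatter block (naive)."""
    with open(path) as f:
        text = f.read()
    match = re.search(r"^description:\s*(.+)$", text, flags=re.MULTILINE)
    return match.group(1).strip() if match else ""


descriptions = {p: read_description(p) for p in glob.glob("skills/*/SKILL.md")}

# Flag over-long descriptions.
for path, desc in descriptions.items():
    if len(desc) > MAX_DESCRIPTION:
        print(f"{path}: description is {len(desc)} chars (limit {MAX_DESCRIPTION})")

# Flag skill pairs whose descriptions share many words.
paths = list(descriptions)
for i, a in enumerate(paths):
    for b in paths[i + 1:]:
        shared = set(descriptions[a].lower().split()) & set(descriptions[b].lower().split())
        if len(shared) > 10:
            print(f"Possible trigger overlap between {a} and {b}")
```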
## Security Considerations

- Never include secrets in skill files
- Validate all inputs
- Use read-only tools when possible
- Document security requirements
70
skills/skill-adapter/references/examples.md
Normal file
@@ -0,0 +1,70 @@
# Skill Usage Examples

This document provides practical examples of how to use this skill effectively.

## Basic Usage

### Example 1: Simple Activation

**User Request:**
```
[Describe trigger phrase here]
```

**Skill Response:**
1. Analyzes the request
2. Performs the required action
3. Returns results

### Example 2: Complex Workflow

**User Request:**
```
[Describe complex scenario]
```

**Workflow:**
1. Step 1: Initial analysis
2. Step 2: Data processing
3. Step 3: Result generation
4. Step 4: Validation

## Advanced Patterns

### Pattern 1: Chaining Operations

Combine this skill with other tools:
```
Step 1: Use this skill for [purpose]
Step 2: Chain with [other tool]
Step 3: Finalize with [action]
```

### Pattern 2: Error Handling

If issues occur:
- Check trigger phrase matches
- Verify context is available
- Review allowed-tools permissions

## Tips & Best Practices

- ✅ Be specific with trigger phrases
- ✅ Provide necessary context
- ✅ Check tool permissions match needs
- ❌ Avoid vague requests
- ❌ Don't mix unrelated tasks

## Common Issues

**Issue:** Skill doesn't activate
**Solution:** Use exact trigger phrases from description

**Issue:** Unexpected results
**Solution:** Check input format and context

## See Also

- Main SKILL.md for full documentation
- scripts/ for automation helpers
- assets/ for configuration examples
8
skills/skill-adapter/scripts/README.md
Normal file
@@ -0,0 +1,8 @@
# Scripts

Bundled resources for api-migration-tool skill

- [ ] api_version_compatibility_check.py: Checks compatibility between API versions using static analysis or API calls (a rough sketch follows below).
- [ ] api_migration.py: Automates the migration of API code, including updating function calls and data structures.
- [ ] api_deprecation_analyzer.py: Analyzes code for deprecated API usage and suggests replacements.
- [ ] api_documentation_generator.py: Generates API documentation from code or configuration files.
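None of the scripts listed above exist yet. As one way the compatibility check might start, the sketch below diffs two JSON Schema documents for removed or retyped properties; the `schemas/v1.json` and `schemas/v2.json` paths are hypothetical.

```python
import json


def removed_or_retyped(old: dict, new: dict) -> list[str]:
    """Report properties that the new schema drops or whose type changes."""
    issues = []
    old_props = old.get("properties", {})
    new_props = new.get("properties", {})
    for name, spec in old_props.items():
        if name not in new_props:
            issues.append(f"removed: {name}")
        elif spec.get("type") != new_props[name].get("type"):
            issues.append(
                f"retyped: {name} ({spec.get('type')} -> {new_props[name].get('type')})"
            )
    return issues


# Hypothetical file names; any two JSON Schema documents would do.
with open("schemas/v1.json") as f:
    v1 = json.load(f)
with open("schemas/v2.json") as f:
    v2 = json.load(f)

for issue in removed_or_retyped(v1, v2):
    print(f"⚠️ breaking change: {issue}")
```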
42
skills/skill-adapter/scripts/helper-template.sh
Executable file
@@ -0,0 +1,42 @@
#!/bin/bash
# Helper script template for skill automation
# Customize this for your skill's specific needs

set -e

function show_usage() {
    echo "Usage: $0 [options]"
    echo ""
    echo "Options:"
    echo "  -h, --help     Show this help message"
    echo "  -v, --verbose  Enable verbose output"
    echo ""
}

# Parse arguments
VERBOSE=false

while [[ $# -gt 0 ]]; do
    case $1 in
        -h|--help)
            show_usage
            exit 0
            ;;
        -v|--verbose)
            VERBOSE=true
            shift
            ;;
        *)
            echo "Unknown option: $1"
            show_usage
            exit 1
            ;;
    esac
done

# Your skill logic here
if [ "$VERBOSE" = true ]; then
    echo "Running skill automation..."
fi

echo "✅ Complete"
32
skills/skill-adapter/scripts/validation.sh
Executable file
@@ -0,0 +1,32 @@
#!/bin/bash
# Skill validation helper
# Validates skill activation and functionality

set -e

echo "🔍 Validating skill..."

# Check if SKILL.md exists
if [ ! -f "../SKILL.md" ]; then
    echo "❌ Error: SKILL.md not found"
    exit 1
fi

# Validate frontmatter
if ! grep -q "^---$" "../SKILL.md"; then
    echo "❌ Error: No frontmatter found"
    exit 1
fi

# Check required fields
if ! grep -q "^name:" "../SKILL.md"; then
    echo "❌ Error: Missing 'name' field"
    exit 1
fi

if ! grep -q "^description:" "../SKILL.md"; then
    echo "❌ Error: Missing 'description' field"
    exit 1
fi

echo "✅ Skill validation passed"