Initial commit

Zhongwei Li
2025-11-29 18:52:04 +08:00
commit ff70ed56a4
17 changed files with 1281 additions and 0 deletions


@@ -0,0 +1,7 @@
# Assets

Bundled resources for the api-batch-processor skill:

- [ ] job_template.json: A JSON template for defining batch processing jobs.
- [ ] progress_report_template.md: A Markdown template for generating progress reports for batch processing jobs.
- [ ] example_batch_config.json: Example configuration file for setting up a batch processing job.


@@ -0,0 +1,32 @@
{
  "skill": {
    "name": "skill-name",
    "version": "1.0.0",
    "enabled": true,
    "settings": {
      "verbose": false,
      "autoActivate": true,
      "toolRestrictions": true
    }
  },
  "triggers": {
    "keywords": [
      "example-trigger-1",
      "example-trigger-2"
    ],
    "patterns": []
  },
  "tools": {
    "allowed": [
      "Read",
      "Grep",
      "Bash"
    ],
    "restricted": []
  },
  "metadata": {
    "author": "Plugin Author",
    "category": "general",
    "tags": []
  }
}
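
Nothing in this commit shows how the configuration above is consumed; as a minimal sketch of keyword-based activation, assuming a hypothetical `skill_config.json` path and a `should_activate` helper of our own naming:

```python
import json

# Hypothetical path: the commit does not pin a location for this config.
with open("skill_config.json") as f:
    config = json.load(f)

def should_activate(user_input: str) -> bool:
    """Return True when the skill is enabled and a trigger keyword matches."""
    if not config["skill"]["enabled"]:
        return False
    text = user_input.lower()
    return any(kw.lower() in text for kw in config["triggers"]["keywords"])

print(should_activate("run example-trigger-1 on this file"))  # True
```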


@@ -0,0 +1,54 @@
{
  "_comment": "Example configuration for a batch processing job. Customize this for your specific API and data.",
  "job_name": "process_user_data_2024-10-27",
  "description": "Batch process user data updates from the latest CSV file.",
  "api_endpoint": "https://api.example.com/users",
  "api_method": "PUT",
  "api_headers": {
    "Content-Type": "application/json",
    "Authorization": "Bearer YOUR_API_KEY"
  },
  "data_source": {
    "_comment": "Specify where the data comes from. Currently supports CSV files.",
    "type": "csv",
    "file_path": "/data/user_updates_2024-10-27.csv",
    "delimiter": ",",
    "quotechar": "\"",
    "header": true,
    "fields": {
      "user_id": "user_id",
      "email": "email",
      "status": "status",
      "subscription_type": "subscription"
    }
  },
  "batch_size": 50,
  "max_retries": 3,
  "retry_delay": 5,
  "error_handling": {
    "_comment": "Defines how to handle errors during processing.",
    "on_error": "continue",
    "log_errors": true,
    "error_log_path": "/logs/user_update_errors.log"
  },
  "success_handling": {
    "_comment": "Defines how to handle successful updates.",
    "log_successes": true,
    "success_log_path": "/logs/user_update_successes.log"
  },
  "transformation": {
    "_comment": "Optional transformation to apply to each data record before sending to the API. Use a Python function name.",
    "function_name": "transform_user_data"
  },
  "reporting": {
    "_comment": "Options for reporting the job's progress.",
    "progress_interval": 60,
    "report_to_console": true,
    "report_to_file": "/reports/user_update_report.txt"
  },
  "rate_limiting": {
    "_comment": "Prevent overwhelming the API.",
    "requests_per_second": 10
  },
  "dry_run": false
}
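
The `transformation` block above names a Python function but does not define it. A minimal sketch, assuming the (undocumented) hook receives one parsed CSV row as a dict and returns the JSON payload for the `PUT` request:

```python
def transform_user_data(record: dict) -> dict:
    """Hypothetical transform hook: map one CSV row to the PUT /users payload.

    The signature is an assumption; the config only supplies the function name.
    """
    return {
        "user_id": record["user_id"],
        "email": record["email"].strip().lower(),
        "status": record["status"],
        # The "fields" mapping renames the CSV column "subscription".
        "subscription_type": record["subscription"],
    }
```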


@@ -0,0 +1,54 @@
{
  "_comment": "Template for defining a batch processing job",
  "job_name": "Example Batch Job",
  "_comment_job_name": "A descriptive name for the job",
  "description": "This is an example batch job that processes a list of user IDs.",
  "_comment_description": "A more detailed description of the job's purpose",
  "api_endpoint": "https://api.example.com/users/{user_id}",
  "_comment_api_endpoint": "The API endpoint to be called for each item in the batch. Use {item_id} placeholders for substitutions.",
  "http_method": "GET",
  "_comment_http_method": "The HTTP method to use for the API calls (GET, POST, PUT, DELETE, PATCH)",
  "headers": {
    "_comment": "Optional headers to include in the API requests",
    "Content-Type": "application/json",
    "Authorization": "Bearer YOUR_API_KEY"
  },
  "request_body_template": null,
  "_comment_request_body_template": "Optional template for the request body. Leave null for GET requests. Can use {item_id} placeholders.",
  "items": [
    "user123",
    "user456",
    "user789",
    "user101",
    "user112"
  ],
  "_comment_items": "An array of item IDs to process in the batch",
  "item_id_key": null,
  "_comment_item_id_key": "If items is a list of objects, this is the key to use for the item ID. If null, items is treated as a list of IDs.",
  "max_concurrent_requests": 5,
  "_comment_max_concurrent_requests": "The maximum number of concurrent API requests to make",
  "retry_attempts": 3,
  "_comment_retry_attempts": "The number of times to retry a failed API request",
  "retry_delay_seconds": 2,
  "_comment_retry_delay_seconds": "The delay in seconds between retry attempts",
  "success_codes": [
    200,
    201
  ],
  "_comment_success_codes": "HTTP status codes that indicate a successful API call",
  "error_handling": "continue",
  "_comment_error_handling": "How to handle errors: 'continue' to process all items, 'stop' to halt on first error",
  "callback_url": null,
  "_comment_callback_url": "Optional URL to call when the job is complete, passing job status and results, e.g. https://your-app.com/batch-callback",
  "callback_method": "POST",
  "_comment_callback_method": "The HTTP method for the callback URL (POST, PUT)",
  "callback_headers": {
    "_comment": "Optional headers for the callback request",
    "Content-Type": "application/json"
  },
  "metadata": {
    "_comment": "Optional metadata to associate with the job. Useful for tracking or filtering jobs.",
    "owner": "team-alpha",
    "priority": "medium"
  }
}
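
No runner for this template ships in the commit, but the concurrency and retry fields combine naturally with a thread pool. A stdlib-only sketch under stated assumptions (the file name, the `{user_id}` placeholder handling, and the error policy are ours):

```python
import json
import time
import urllib.request
from concurrent.futures import ThreadPoolExecutor

with open("job_template.json") as f:  # hypothetical file name
    job = json.load(f)

# Drop the inline "_comment" keys so they are not sent as HTTP headers.
headers = {k: v for k, v in job["headers"].items() if not k.startswith("_comment")}

def call_item(item_id: str) -> bool:
    """Call the endpoint for one item, retrying per the job settings."""
    url = job["api_endpoint"].replace("{user_id}", item_id)  # this template's placeholder
    for _ in range(job["retry_attempts"] + 1):
        try:
            req = urllib.request.Request(url, headers=headers, method=job["http_method"])
            with urllib.request.urlopen(req) as resp:
                if resp.status in job["success_codes"]:
                    return True
        except Exception:
            pass  # error_handling == "continue": keep going after failures
        time.sleep(job["retry_delay_seconds"])
    return False

with ThreadPoolExecutor(max_workers=job["max_concurrent_requests"]) as pool:
    results = list(pool.map(call_item, job["items"]))

print(f"{sum(results)}/{len(results)} items succeeded")
```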


@@ -0,0 +1,72 @@
# Batch Processing Job Progress Report

This report provides a summary of the progress for a batch processing job.

## Job Information

* **Job ID:** `[Insert Job ID Here - e.g., job-2024-10-26-001]`
* **Job Name:** `[Insert Job Name Here - e.g., Import Customer Data]`
* **Job Description:** `[Insert a brief description of the job - e.g., Imports customer data from CSV file into the database.]`
* **Start Time:** `[Insert Job Start Time - e.g., 2024-10-26 08:00:00 UTC]`
* **End Time:** `[Insert Job End Time - e.g., 2024-10-26 10:30:00 UTC (or "In Progress")]`
* **Status:** `[Insert Job Status - e.g., Completed, In Progress, Failed, Partially Completed]`

## Input Data

* **Source:** `[Insert Source of Input Data - e.g., CSV file: customer_data.csv, S3 Bucket: s3://my-bucket/data]`
* **Number of Records:** `[Insert Total Number of Records to Process - e.g., 10,000]`

## Processing Summary

| Metric            | Value      |
|-------------------|------------|
| Total Records     | `[Insert Total Records]` |
| Records Processed | `[Insert Records Processed]` |
| Records Succeeded | `[Insert Records Succeeded]` |
| Records Failed    | `[Insert Records Failed]` |
| Success Rate      | `[Insert Success Rate (e.g., 95%)]` |
| Failure Rate      | `[Insert Failure Rate (e.g., 5%)]` |

**Example:**

| Metric            | Value  |
|-------------------|--------|
| Total Records     | 1000   |
| Records Processed | 750    |
| Records Succeeded | 700    |
| Records Failed    | 50     |
| Success Rate      | 93.33% |
| Failure Rate      | 6.67%  |

## Detailed Results (Optional)

This section can include more detailed information about the processed records. You can tailor this section to your specific needs.

* **Successful Records:** `[Insert a summary or link to successful record details - e.g., A list of successful record IDs can be found in successful_records.log]`
* **Failed Records:** `[Insert a summary or link to failed record details - e.g., A list of failed record IDs and error messages can be found in failed_records.log]`
* **Example Error Message:** `[Insert Example Error Message - e.g., "Invalid email format for record ID: 123"]`

## Performance Metrics

* **Processing Time:** `[Insert Total Processing Time - e.g., 2 hours 30 minutes]`
* **Average Processing Time per Record:** `[Insert Average Time per Record - e.g., 0.9 seconds]`
* **Peak Memory Usage:** `[Insert Peak Memory Usage - e.g., 2GB]`

## Errors and Warnings

* `[List any errors or warnings encountered during processing. Include timestamps and specific details.]`
* **Example:** `2024-10-26 09:15:00 UTC - Warning: Rate limit exceeded for API endpoint. Retrying in 60 seconds.`
* **Example:** `2024-10-26 09:30:00 UTC - Error: Database connection lost. Attempting to reconnect.`

## Recommendations

* `[Insert any recommendations for improving the job or addressing issues. - e.g., Increase the rate limit for the API endpoint to avoid rate limiting errors. Consider adding retry logic for database connection errors.]`

## Notes

* `[Insert any additional notes or comments about the job. - e.g., This job was executed with 4 parallel workers.]`

## Generated By

* `[Insert the tool or system that generated this report. - e.g., API Batch Processor Plugin]`
* **Generation Date:** `[Insert the date the report was generated. - e.g., 2024-10-26]`
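
Filling the Processing Summary table is mechanical once the counters exist. A small sketch (the helper name is ours; rates are taken relative to records processed, matching the example table, where 700/750 ≈ 93.33%):

```python
def summary_metrics(total: int, processed: int, succeeded: int, failed: int) -> dict:
    """Compute the Processing Summary row values for the report."""
    def rate(n: int) -> str:
        return f"{100 * n / processed:.2f}%" if processed else "n/a"
    return {
        "Total Records": total,
        "Records Processed": processed,
        "Records Succeeded": succeeded,
        "Records Failed": failed,
        "Success Rate": rate(succeeded),
        "Failure Rate": rate(failed),
    }

# Reproduces the example table: 93.33% / 6.67%
print(summary_metrics(1000, 750, 700, 50))
```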


@@ -0,0 +1,28 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "title": "Claude Skill Configuration",
  "type": "object",
  "required": ["name", "description"],
  "properties": {
    "name": {
      "type": "string",
      "pattern": "^[a-z0-9-]+$",
      "maxLength": 64,
      "description": "Skill identifier (lowercase, hyphens only)"
    },
    "description": {
      "type": "string",
      "maxLength": 1024,
      "description": "What the skill does and when to use it"
    },
    "allowed-tools": {
      "type": "string",
      "description": "Comma-separated list of allowed tools"
    },
    "version": {
      "type": "string",
      "pattern": "^\\d+\\.\\d+\\.\\d+$",
      "description": "Semantic version (x.y.z)"
    }
  }
}
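
The schema can be exercised directly with the third-party `jsonschema` package; a hedged sketch (the schema file name and sample frontmatter values are assumptions):

```python
import json

from jsonschema import ValidationError, validate  # pip install jsonschema

with open("skill-config.schema.json") as f:  # hypothetical file name
    schema = json.load(f)

frontmatter = {
    "name": "api-batch-processor",
    "description": "Batch-process API calls with retries and rate limiting.",
    "version": "1.0.0",
}

try:
    validate(instance=frontmatter, schema=schema)
    print("frontmatter is valid")
except ValidationError as err:
    print(f"invalid frontmatter: {err.message}")
```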


@@ -0,0 +1,27 @@
{
  "testCases": [
    {
      "name": "Basic activation test",
      "input": "trigger phrase example",
      "expected": {
        "activated": true,
        "toolsUsed": ["Read", "Grep"],
        "success": true
      }
    },
    {
      "name": "Complex workflow test",
      "input": "multi-step trigger example",
      "expected": {
        "activated": true,
        "steps": 3,
        "toolsUsed": ["Read", "Write", "Bash"],
        "success": true
      }
    }
  ],
  "fixtures": {
    "sampleInput": "example data",
    "expectedOutput": "processed result"
  }
}
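
The commit does not include the harness that consumes these cases. One plausible shape, with `run_skill` as a hypothetical stand-in for actually invoking the skill (with this canned stub, only the first case passes, by design):

```python
import json

def run_skill(user_input: str) -> dict:
    """Hypothetical stand-in; the real invocation harness is not in this commit."""
    return {"activated": True, "toolsUsed": ["Read", "Grep"], "success": True}

with open("test_cases.json") as f:  # hypothetical file name
    suite = json.load(f)

for case in suite["testCases"]:
    actual = run_skill(case["input"])
    # Compare only the keys each case specifies in "expected".
    ok = all(actual.get(k) == v for k, v in case["expected"].items())
    print(f"{case['name']}: {'PASS' if ok else 'FAIL'}")
```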


@@ -0,0 +1,7 @@
# References

Bundled resources for the api-batch-processor skill:

- [ ] api_batch_processing_best_practices.md: Provides best practices for implementing batch API processing, including error handling and optimization techniques.
- [ ] job_queue_schema.md: Defines the schema for the job queue used in batch processing, including fields for job ID, status, and progress.
- [ ] error_handling_guide.md: A guide to handling common errors encountered during batch API processing.


@@ -0,0 +1,69 @@
# Skill Best Practices

Guidelines for optimal skill usage and development.

## For Users

### Activation Best Practices

1. **Use Clear Trigger Phrases**
   - Match phrases from skill description
   - Be specific about intent
   - Provide necessary context
2. **Provide Sufficient Context**
   - Include relevant file paths
   - Specify scope of analysis
   - Mention any constraints
3. **Understand Tool Permissions**
   - Check allowed-tools in frontmatter
   - Know what the skill can/cannot do
   - Request appropriate actions

### Workflow Optimization

- Start with simple requests
- Build up to complex workflows
- Verify each step before proceeding
- Use skill consistently for related tasks

## For Developers

### Skill Development Guidelines

1. **Clear Descriptions**
   - Include explicit trigger phrases
   - Document all capabilities
   - Specify limitations
2. **Proper Tool Permissions**
   - Use minimal necessary tools
   - Document security implications
   - Test with restricted tools
3. **Comprehensive Documentation**
   - Provide usage examples
   - Document common pitfalls
   - Include troubleshooting guide

### Maintenance

- Keep version updated
- Test after tool updates
- Monitor user feedback
- Iterate on descriptions

## Performance Tips

- Scope skills to specific domains
- Avoid overlapping trigger phrases
- Keep descriptions under 1024 chars (see the sketch after this section)
- Test activation reliability

## Security Considerations

- Never include secrets in skill files
- Validate all inputs
- Use read-only tools when possible
- Document security requirements
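
The 1024-character tip above mirrors the JSON schema's `maxLength` and is easy to enforce mechanically. A minimal sketch, assuming a SKILL.md with a single-line `description:` field in its frontmatter (the file location is an assumption):

```python
import re
from pathlib import Path

text = Path("SKILL.md").read_text()  # hypothetical location
match = re.search(r"^description:\s*(.+)$", text, flags=re.MULTILINE)
if match is None:
    print("no description field found")
elif len(match.group(1)) > 1024:
    print(f"description is {len(match.group(1))} chars; keep it under 1024")
else:
    print("description length OK")
```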


@@ -0,0 +1,70 @@
# Skill Usage Examples

This document provides practical examples of how to use this skill effectively.

## Basic Usage

### Example 1: Simple Activation

**User Request:**

```
[Describe trigger phrase here]
```

**Skill Response:**

1. Analyzes the request
2. Performs the required action
3. Returns results

### Example 2: Complex Workflow

**User Request:**

```
[Describe complex scenario]
```

**Workflow:**

1. Step 1: Initial analysis
2. Step 2: Data processing
3. Step 3: Result generation
4. Step 4: Validation

## Advanced Patterns

### Pattern 1: Chaining Operations

Combine this skill with other tools:

```
Step 1: Use this skill for [purpose]
Step 2: Chain with [other tool]
Step 3: Finalize with [action]
```

### Pattern 2: Error Handling

If issues occur:

- Check trigger phrase matches
- Verify context is available
- Review allowed-tools permissions

## Tips & Best Practices

- ✅ Be specific with trigger phrases
- ✅ Provide necessary context
- ✅ Check tool permissions match needs
- ❌ Avoid vague requests
- ❌ Don't mix unrelated tasks

## Common Issues

**Issue:** Skill doesn't activate
**Solution:** Use exact trigger phrases from description

**Issue:** Unexpected results
**Solution:** Check input format and context

## See Also

- Main SKILL.md for full documentation
- scripts/ for automation helpers
- assets/ for configuration examples


@@ -0,0 +1,7 @@
# Scripts

Bundled resources for the api-batch-processor skill:

- [ ] batch_process_init.py: Initializes a batch processing job, setting up the queue and logging (a hedged sketch follows below).
- [ ] batch_process_status.py: Checks the status of a batch processing job, providing progress updates.
- [ ] batch_process_cancel.py: Cancels a running batch processing job, cleaning up resources.
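
The scripts themselves are not in this commit. As a hedged sketch, `batch_process_init.py` might create the queue entry described in references/job_queue_schema.md; the queue directory, file layout, log naming, and config path are all assumptions:

```python
import json
import logging
import uuid
from pathlib import Path

QUEUE_DIR = Path("queue")  # hypothetical queue location

def init_batch_job(config_path: str) -> str:
    """Create a queue entry for a new batch job and set up its log file."""
    config = json.loads(Path(config_path).read_text())
    job_id = f"job-{uuid.uuid4().hex[:8]}"
    QUEUE_DIR.mkdir(exist_ok=True)
    # Fields mirror the job queue schema: job ID, status, and progress.
    (QUEUE_DIR / f"{job_id}.json").write_text(json.dumps({
        "job_id": job_id,
        "job_name": config.get("job_name", job_id),
        "status": "pending",
        "progress": 0,
    }, indent=2))
    logging.basicConfig(filename=f"{job_id}.log", level=logging.INFO)
    logging.info("initialized %s from %s", job_id, config_path)
    return job_id

if __name__ == "__main__":
    print(init_batch_job("assets/example_batch_config.json"))
```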


@@ -0,0 +1,42 @@
#!/bin/bash
# Helper script template for skill automation
# Customize this for your skill's specific needs

set -e

function show_usage() {
    echo "Usage: $0 [options]"
    echo ""
    echo "Options:"
    echo "  -h, --help     Show this help message"
    echo "  -v, --verbose  Enable verbose output"
    echo ""
}

# Parse arguments
VERBOSE=false
while [[ $# -gt 0 ]]; do
    case $1 in
        -h|--help)
            show_usage
            exit 0
            ;;
        -v|--verbose)
            VERBOSE=true
            shift
            ;;
        *)
            echo "Unknown option: $1"
            show_usage
            exit 1
            ;;
    esac
done

# Your skill logic here
if [ "$VERBOSE" = true ]; then
    echo "Running skill automation..."
fi

echo "✅ Complete"


@@ -0,0 +1,32 @@
#!/bin/bash
# Skill validation helper
# Validates skill activation and functionality

set -e

echo "🔍 Validating skill..."

# Check if SKILL.md exists
if [ ! -f "../SKILL.md" ]; then
    echo "❌ Error: SKILL.md not found"
    exit 1
fi

# Validate frontmatter
if ! grep -q "^---$" "../SKILL.md"; then
    echo "❌ Error: No frontmatter found"
    exit 1
fi

# Check required fields
if ! grep -q "^name:" "../SKILL.md"; then
    echo "❌ Error: Missing 'name' field"
    exit 1
fi

if ! grep -q "^description:" "../SKILL.md"; then
    echo "❌ Error: Missing 'description' field"
    exit 1
fi

echo "✅ Skill validation passed"