Initial commit
This commit is contained in:
82
skills/data.transform/README.md
Normal file
82
skills/data.transform/README.md
Normal file
@@ -0,0 +1,82 @@
|
||||
# data.transform
|
||||
|
||||
Transform data between different formats (JSON, YAML, XML, CSV) with validation and error handling
|
||||
|
||||
## Overview
|
||||
|
||||
**Purpose:** Transform data between different formats (JSON, YAML, XML, CSV) with validation and error handling
|
||||
|
||||
**Command:** `/data/transform`
|
||||
|
||||
## Usage
|
||||
|
||||
### Basic Usage
|
||||
|
||||
```bash
|
||||
python3 skills/data.transform/data_transform.py
|
||||
```
|
||||
|
||||
### With Arguments
|
||||
|
||||
```bash
|
||||
python3 skills/data.transform/data_transform.py \
  --input-file-path "value" \
  --source-format "value" \
  --target-format "value" \
  --schema-path-optional "value" \
  --output-format json
|
||||
```
|
||||
|
||||
## Inputs
|
||||
|
||||
- **input_file_path**
|
||||
- **source_format**
|
||||
- **target_format**
|
||||
- **schema_path (optional)**
|
||||
|
||||
## Outputs
|
||||
|
||||
- **transformed_file**
|
||||
- **transformation_report.json**
|
||||
|
||||
## Artifact Metadata
|
||||
|
||||
### Produces
|
||||
|
||||
- `transformed-data`
|
||||
- `transformation-report`
|
||||
|
||||
## Permissions
|
||||
|
||||
- `filesystem:read`
|
||||
- `filesystem:write`
|
||||
|
||||
## Implementation Notes
|
||||
|
||||
Support transformations between: - JSON ↔ YAML - JSON ↔ XML - JSON ↔ CSV - YAML ↔ XML - XML ↔ CSV Features: - Validate input against schema before transformation - Preserve data types during conversion - Handle nested structures appropriately - Report data loss warnings (e.g., CSV can't represent nesting) - Support custom transformation rules - Provide detailed error messages Output report should include: - Transformation success status - Source and target formats - Data validation results - Warnings about potential data loss - Transformation time and file sizes
|
||||
|
||||
## Integration
|
||||
|
||||
This skill can be used in agents by including it in `skills_available`:
|
||||
|
||||
```yaml
|
||||
name: my.agent
|
||||
skills_available:
|
||||
- data.transform
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
Run tests with:
|
||||
|
||||
```bash
|
||||
pytest skills/data.transform/test_data_transform.py -v
|
||||
```
|
||||
|
||||
## Created By
|
||||
|
||||
This skill was generated by **meta.skill**, the skill creator meta-agent.
|
||||
|
||||
---
|
||||
|
||||
*Part of the Betty Framework*
|
||||
1
skills/data.transform/__init__.py
Normal file
1
skills/data.transform/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Auto-generated package initializer for skills.
|
||||
120
skills/data.transform/data_transform.py
Executable file
120
skills/data.transform/data_transform.py
Executable file
@@ -0,0 +1,120 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
data.transform - Transform data between different formats (JSON, YAML, XML, CSV) with validation and error handling
|
||||
|
||||
Generated by meta.skill
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import yaml
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Any, Optional
|
||||
|
||||
|
||||
from betty.config import BASE_DIR
|
||||
from betty.logging_utils import setup_logger
|
||||
|
||||
logger = setup_logger(__name__)
|
||||
|
||||
|
||||
class DataTransform:
    """Transform data between different formats (JSON, YAML, XML, CSV)
    with validation and error handling.

    NOTE(review): the transformation logic is still a placeholder —
    ``execute`` ignores its arguments and returns a canned success payload.
    """

    def __init__(self, base_dir: str = BASE_DIR):
        """Remember the framework base directory as a ``Path``.

        Args:
            base_dir: Root directory of the Betty framework installation.
        """
        self.base_dir = Path(base_dir)

    def execute(
        self,
        input_file_path: Optional[str] = None,
        source_format: Optional[str] = None,
        target_format: Optional[str] = None,
        schema_path_optional: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Run the skill and report the outcome as a result dict.

        Args:
            input_file_path: Path of the file to transform (currently unused).
            source_format: Format of the input data (currently unused).
            target_format: Desired output format (currently unused).
            schema_path_optional: Optional validation schema path
                (currently unused).

        Returns:
            Dict with ``ok`` and ``status`` keys, plus ``message`` on
            success or ``error`` on failure.
        """
        try:
            logger.info("Executing data.transform...")

            # TODO: Implement skill logic here.
            # Planned scope (from the skill spec): JSON/YAML/XML/CSV
            # conversions with schema validation, type preservation,
            # data-loss warnings (e.g. CSV cannot represent nesting),
            # and a transformation report (status, formats, validation
            # results, warnings, timing, file sizes).

            # Placeholder implementation: always succeeds.
            outcome: Dict[str, Any] = {
                "ok": True,
                "status": "success",
                "message": "Skill executed successfully",
            }

            logger.info("Skill completed successfully")
            return outcome

        except Exception as e:
            # Boundary handler: report failure to the caller instead of raising.
            logger.error(f"Error executing skill: {e}")
            return {
                "ok": False,
                "status": "failed",
                "error": str(e),
            }
|
||||
|
||||
|
||||
def main():
    """CLI entry point: parse arguments, run the skill, print the result.

    Exits with status 0 when the skill reports ``ok``, 1 otherwise.
    """
    import argparse

    parser = argparse.ArgumentParser(
        description="Transform data between different formats (JSON, YAML, XML, CSV) with validation and error handling"
    )

    # Skill input flags — each maps straight onto an execute() parameter.
    for flag, flag_help in (
        ("--input-file-path", "input_file_path"),
        ("--source-format", "source_format"),
        ("--target-format", "target_format"),
        ("--schema-path-optional", "schema_path (optional)"),
    ):
        parser.add_argument(flag, help=flag_help)

    parser.add_argument(
        "--output-format",
        choices=["json", "yaml"],
        default="json",
        help="Output format",
    )

    args = parser.parse_args()

    # Instantiate and run the skill with the parsed inputs.
    result = DataTransform().execute(
        input_file_path=args.input_file_path,
        source_format=args.source_format,
        target_format=args.target_format,
        schema_path_optional=args.schema_path_optional,
    )

    # Serialize the result dict in the requested format.
    if args.output_format == "json":
        rendered = json.dumps(result, indent=2)
    else:
        rendered = yaml.dump(result, default_flow_style=False)
    print(rendered)

    # Propagate success/failure to the shell.
    sys.exit(0 if result.get("ok") else 1)
|
||||
|
||||
|
||||
# Allow direct execution: `python3 data_transform.py [flags]`.
if __name__ == "__main__":
    main()
|
||||
26
skills/data.transform/skill.yaml
Normal file
26
skills/data.transform/skill.yaml
Normal file
@@ -0,0 +1,26 @@
|
||||
name: data.transform
|
||||
version: 0.1.0
|
||||
description: Transform data between different formats (JSON, YAML, XML, CSV) with
|
||||
validation and error handling
|
||||
inputs:
|
||||
- input_file_path
|
||||
- source_format
|
||||
- target_format
|
||||
- schema_path (optional)
|
||||
outputs:
|
||||
- transformed_file
|
||||
- transformation_report.json
|
||||
status: active
|
||||
permissions:
|
||||
- filesystem:read
|
||||
- filesystem:write
|
||||
entrypoints:
|
||||
- command: /data/transform
|
||||
handler: data_transform.py
|
||||
runtime: python
|
||||
description: Transform data between different formats (JSON, YAML, XML, CSV) with
|
||||
validation and error handling
|
||||
artifact_metadata:
|
||||
produces:
|
||||
- type: transformed-data
|
||||
- type: transformation-report
|
||||
62
skills/data.transform/test_data_transform.py
Normal file
62
skills/data.transform/test_data_transform.py
Normal file
@@ -0,0 +1,62 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Tests for data.transform
|
||||
|
||||
Generated by meta.skill
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import sys
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
# Add parent directory to path
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")))
|
||||
|
||||
from skills.data_transform import data_transform
|
||||
|
||||
|
||||
class TestDataTransform:
    """Unit tests for the DataTransform skill class."""

    def setup_method(self):
        """Create a fresh skill instance before every test."""
        self.skill = data_transform.DataTransform()

    def test_initialization(self):
        """The skill constructs and exposes a base directory."""
        assert self.skill is not None
        assert self.skill.base_dir is not None

    def test_execute_basic(self):
        """execute() returns a dict carrying the ok/status keys."""
        outcome = self.skill.execute()

        assert outcome is not None
        assert "ok" in outcome
        assert "status" in outcome

    def test_execute_success(self):
        """The placeholder implementation always reports success."""
        outcome = self.skill.execute()

        assert outcome["ok"] is True
        assert outcome["status"] == "success"

    # TODO: Add more specific tests based on skill functionality
|
||||
|
||||
# TODO: Add more specific tests based on skill functionality
|
||||
|
||||
|
||||
def test_cli_help(capsys):
    """--help should exit with code 0 and print the skill description.

    Fix: the original assigned ``sys.argv`` and never restored it, leaking
    the fake argv into every test that runs afterwards. The assignment is
    now wrapped in try/finally so the global state is always restored.
    """
    saved_argv = sys.argv
    sys.argv = ["data_transform.py", "--help"]
    try:
        # argparse handles --help by printing usage and raising SystemExit(0).
        with pytest.raises(SystemExit) as exc_info:
            data_transform.main()
    finally:
        sys.argv = saved_argv

    assert exc_info.value.code == 0
    captured = capsys.readouterr()
    assert "Transform data between different formats (JSON, YA" in captured.out
|
||||
|
||||
|
||||
# Allow running this test module directly without invoking pytest externally.
if __name__ == "__main__":
    pytest.main([__file__, "-v"])
|
||||
Reference in New Issue
Block a user