Initial commit

Zhongwei Li committed 2025-11-29 18:26:08 +08:00
commit 8f22ddf339
295 changed files with 59710 additions and 0 deletions

View File

@@ -0,0 +1 @@
# Auto-generated package initializer for skills.

View File

@@ -0,0 +1,254 @@
#!/usr/bin/env python3
"""
artifact_define.py - Define artifact metadata for Betty Framework skills
Helps create artifact_metadata blocks that declare what artifacts a skill
produces and consumes, enabling interoperability.
"""
import os
import sys
import json
import yaml
from typing import Dict, Any, List, Optional
from pathlib import Path
from betty.config import BASE_DIR
from betty.logging_utils import setup_logger
logger = setup_logger(__name__)
# Known artifact types - loaded from registry/artifact_types.json
# To update the registry, modify registry/artifact_types.json and reload
from skills.artifact.define.registry_loader import (
KNOWN_ARTIFACT_TYPES,
load_artifact_registry,
reload_registry,
get_artifact_count,
is_registered,
get_artifact_metadata
)
# For backward compatibility, expose the registry as module-level variable
# Note: The registry is now data-driven and loaded from JSON
# Do not modify this file to add new artifact types
# Instead, update registry/artifact_types.json
def get_artifact_definition(artifact_type: str) -> Optional[Dict[str, Any]]:
"""
Get the definition for a known artifact type.
Args:
artifact_type: Artifact type identifier
Returns:
Artifact definition dictionary with schema, file_pattern, etc., or None if unknown
"""
if artifact_type in KNOWN_ARTIFACT_TYPES:
definition = {"type": artifact_type}
definition.update(KNOWN_ARTIFACT_TYPES[artifact_type])
return definition
return None
def validate_artifact_type(artifact_type: str) -> tuple[bool, Optional[str]]:
"""
Validate that an artifact type is known or suggest registering it.
Args:
artifact_type: Artifact type identifier
Returns:
Tuple of (is_valid, warning_message)
"""
if artifact_type in KNOWN_ARTIFACT_TYPES:
return True, None
warning = f"Artifact type '{artifact_type}' is not in the known registry. "
warning += "Consider documenting it in docs/ARTIFACT_STANDARDS.md and creating a schema."
return False, warning
def generate_artifact_metadata(
skill_name: str,
produces: Optional[List[str]] = None,
consumes: Optional[List[str]] = None
) -> tuple[Dict[str, Any], List[str]]:
"""
Generate artifact metadata structure.
Args:
skill_name: Name of the skill
produces: List of artifact types produced
consumes: List of artifact types consumed
    Returns:
        Tuple of (artifact metadata dictionary, list of warning messages)
"""
metadata = {}
warnings = []
# Build produces section
if produces:
produces_list = []
for artifact_type in produces:
is_known, warning = validate_artifact_type(artifact_type)
if warning:
warnings.append(warning)
artifact_def = {"type": artifact_type}
# Add known metadata if available
if artifact_type in KNOWN_ARTIFACT_TYPES:
known = KNOWN_ARTIFACT_TYPES[artifact_type]
if "schema" in known:
artifact_def["schema"] = known["schema"]
if "file_pattern" in known:
artifact_def["file_pattern"] = known["file_pattern"]
if "content_type" in known:
artifact_def["content_type"] = known["content_type"]
if "description" in known:
artifact_def["description"] = known["description"]
produces_list.append(artifact_def)
metadata["produces"] = produces_list
# Build consumes section
if consumes:
consumes_list = []
for artifact_type in consumes:
is_known, warning = validate_artifact_type(artifact_type)
if warning:
warnings.append(warning)
artifact_def = {
"type": artifact_type,
"required": True # Default to required
}
# Add description if known
if artifact_type in KNOWN_ARTIFACT_TYPES:
known = KNOWN_ARTIFACT_TYPES[artifact_type]
if "description" in known:
artifact_def["description"] = known["description"]
consumes_list.append(artifact_def)
metadata["consumes"] = consumes_list
return metadata, warnings
def format_as_yaml(metadata: Dict[str, Any]) -> str:
"""
Format artifact metadata as YAML for inclusion in skill.yaml.
Args:
metadata: Artifact metadata dictionary
Returns:
Formatted YAML string
"""
    body = yaml.dump(metadata, default_flow_style=False, indent=2, sort_keys=False)
    # Indent the dumped block two spaces so produces/consumes nest under the
    # artifact_metadata key instead of parsing as top-level keys in skill.yaml.
    indented = "\n".join("  " + line if line.strip() else line for line in body.splitlines())
    return "artifact_metadata:\n" + indented + "\n"
def main():
"""CLI entry point."""
import argparse
parser = argparse.ArgumentParser(
description="Define artifact metadata for Betty Framework skills"
)
parser.add_argument(
"skill_name",
help="Name of the skill (e.g., api.define)"
)
parser.add_argument(
"--produces",
nargs="+",
help="Artifact types this skill produces"
)
parser.add_argument(
"--consumes",
nargs="+",
help="Artifact types this skill consumes"
)
parser.add_argument(
"--output-file",
default="artifact_metadata.yaml",
help="Output file path"
)
args = parser.parse_args()
logger.info(f"Generating artifact metadata for skill: {args.skill_name}")
try:
# Generate metadata
metadata, warnings = generate_artifact_metadata(
args.skill_name,
produces=args.produces,
consumes=args.consumes
)
# Format as YAML
yaml_content = format_as_yaml(metadata)
# Save to file
output_path = args.output_file
with open(output_path, 'w') as f:
f.write(yaml_content)
logger.info(f"✅ Generated artifact metadata: {output_path}")
# Print to stdout
print("\n# Add this to your skill.yaml:\n")
print(yaml_content)
# Show warnings
if warnings:
logger.warning("\n⚠️ Warnings:")
for warning in warnings:
logger.warning(f" - {warning}")
# Print summary
logger.info("\n📋 Summary:")
if metadata.get("produces"):
logger.info(f" Produces: {', '.join(a['type'] for a in metadata['produces'])}")
if metadata.get("consumes"):
logger.info(f" Consumes: {', '.join(a['type'] for a in metadata['consumes'])}")
# Success result
result = {
"ok": True,
"status": "success",
"skill_name": args.skill_name,
"metadata": metadata,
"output_file": output_path,
"warnings": warnings
}
print("\n" + json.dumps(result, indent=2))
sys.exit(0)
except Exception as e:
logger.error(f"Failed to generate artifact metadata: {e}")
result = {
"ok": False,
"status": "failed",
"error": str(e)
}
print(json.dumps(result, indent=2))
sys.exit(1)
if __name__ == "__main__":
main()
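
A minimal usage sketch of the programmatic API above. The import path skills.artifact.define.artifact_define is an assumption inferred from the registry_loader import path, and the artifact type names are illustrative examples taken from the skill.yaml descriptions further down; this is not the definitive invocation.

# Hedged usage sketch; assumes the betty package and the JSON registry are importable.
from skills.artifact.define.artifact_define import (
    generate_artifact_metadata,
    format_as_yaml,
)

metadata, warnings = generate_artifact_metadata(
    "api.define",
    produces=["openapi-spec"],        # illustrative artifact types
    consumes=["validation-report"],
)

print(format_as_yaml(metadata))       # YAML block ready to paste into skill.yaml
for warning in warnings:              # non-empty if a type is missing from the registry
    print("warning:", warning)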

File diff suppressed because it is too large

View File

@@ -0,0 +1,116 @@
#!/usr/bin/env python3
"""
Artifact Registry Loader - Load artifact types from JSON
This module provides the single source of truth for artifact types.
The registry is loaded from registry/artifact_types.json at runtime.
"""
import json
from pathlib import Path
from typing import Dict, Any
from functools import lru_cache
from betty.config import BASE_DIR
from betty.logging_utils import setup_logger
logger = setup_logger(__name__)
@lru_cache(maxsize=1)
def load_artifact_registry() -> Dict[str, Dict[str, Any]]:
"""
Load artifact types from JSON registry file.
Returns:
Dictionary mapping artifact type names to their metadata
Raises:
FileNotFoundError: If registry file doesn't exist
json.JSONDecodeError: If registry file is invalid JSON
"""
registry_file = Path(BASE_DIR) / "registry" / "artifact_types.json"
if not registry_file.exists():
logger.error(f"Registry file not found: {registry_file}")
raise FileNotFoundError(f"Artifact registry not found at {registry_file}")
try:
with open(registry_file, 'r') as f:
data = json.load(f)
# Convert list format to dictionary format
registry = {}
for artifact in data.get('artifact_types', []):
name = artifact.get('name')
if not name:
continue
# Build metadata dictionary (exclude name since it's the key)
metadata = {}
if artifact.get('description'):
metadata['description'] = artifact['description']
if artifact.get('file_pattern'):
metadata['file_pattern'] = artifact['file_pattern']
if artifact.get('content_type'):
metadata['content_type'] = artifact['content_type']
if artifact.get('schema'):
metadata['schema'] = artifact['schema']
registry[name] = metadata
logger.info(f"Loaded {len(registry)} artifact types from registry")
return registry
except json.JSONDecodeError as e:
logger.error(f"Invalid JSON in registry file: {e}")
raise
except Exception as e:
logger.error(f"Error loading registry: {e}")
raise
# Load the registry on module import
try:
KNOWN_ARTIFACT_TYPES = load_artifact_registry()
except Exception as e:
logger.warning(f"Failed to load artifact registry: {e}")
logger.warning("Using empty registry as fallback")
KNOWN_ARTIFACT_TYPES = {}
def reload_registry():
"""
Reload the artifact registry from disk.
This clears the cache and forces a fresh load from the JSON file.
Useful for development and testing.
"""
load_artifact_registry.cache_clear()
global KNOWN_ARTIFACT_TYPES
KNOWN_ARTIFACT_TYPES = load_artifact_registry()
logger.info("Registry reloaded")
return KNOWN_ARTIFACT_TYPES
def get_artifact_count() -> int:
"""Get the number of registered artifact types"""
return len(KNOWN_ARTIFACT_TYPES)
def is_registered(artifact_type: str) -> bool:
"""Check if an artifact type is registered"""
return artifact_type in KNOWN_ARTIFACT_TYPES
def get_artifact_metadata(artifact_type: str) -> Dict[str, Any]:
"""
Get metadata for a specific artifact type.
Args:
artifact_type: The artifact type identifier
Returns:
Dictionary with artifact metadata, or empty dict if not found
"""
return KNOWN_ARTIFACT_TYPES.get(artifact_type, {})
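
A sketch of the registry layout load_artifact_registry() expects, written as the Python equivalent of registry/artifact_types.json and derived from the parsing loop above. The "openapi-spec" entry and its field values are illustrative only, not entries guaranteed to exist in the shipped registry.

# Minimal sketch of the expected registry file shape.
import json
from pathlib import Path

sample_registry = {
    "artifact_types": [
        {
            "name": "openapi-spec",                  # becomes the key in KNOWN_ARTIFACT_TYPES
            "description": "OpenAPI specification",  # optional; copied into the metadata dict
            "file_pattern": "*.openapi.yaml",        # optional
            "content_type": "application/yaml",      # optional
            "schema": "schemas/openapi-spec.json",   # optional
        }
    ]
}

# The loader reads Path(BASE_DIR) / "registry" / "artifact_types.json"; writing a
# file with this shape is enough for is_registered("openapi-spec") to return True.
Path("registry").mkdir(parents=True, exist_ok=True)
Path("registry/artifact_types.json").write_text(json.dumps(sample_registry, indent=2))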

View File

@@ -0,0 +1,93 @@
name: artifact.define
version: 0.1.0
description: >
Define artifact metadata for Betty Framework skills. Helps create artifact_metadata
blocks that declare what artifacts a skill produces and consumes, enabling
skill interoperability and autonomous agent composition.
inputs:
- name: skill_name
type: string
required: true
description: Name of the skill to define artifact metadata for
- name: produces
type: array
required: false
description: List of artifact types this skill produces (e.g., openapi-spec, validation-report)
- name: consumes
type: array
required: false
description: List of artifact types this skill consumes
- name: output_file
type: string
required: false
default: artifact_metadata.yaml
description: Where to save the generated artifact metadata
outputs:
- name: artifact_metadata
type: object
description: Generated artifact metadata block
- name: metadata_file
type: string
description: Path to saved artifact metadata file
- name: validation_result
type: object
description: Validation results for the artifact metadata
dependencies:
- context.schema
entrypoints:
- command: /skill/artifact/define
handler: artifact_define.py
runtime: python
description: >
Create artifact metadata for a skill. Validates artifact types against
known schemas, suggests file patterns, and generates properly formatted
artifact_metadata blocks for skill.yaml files.
parameters:
- name: skill_name
type: string
required: true
description: Name of the skill (e.g., api.define, workflow.validate)
- name: produces
type: array
required: false
description: Artifact types produced (e.g., ["openapi-spec", "validation-report"])
- name: consumes
type: array
required: false
description: Artifact types consumed
- name: output_file
type: string
required: false
default: artifact_metadata.yaml
description: Output file path
permissions:
- filesystem:read
- filesystem:write
status: active
tags:
- artifacts
- metadata
- scaffolding
- interoperability
- layer3
# This skill's own artifact metadata!
artifact_metadata:
produces:
- type: artifact-metadata-definition
description: Artifact metadata YAML block for skill.yaml files
file_pattern: "artifact_metadata.yaml"
content_type: application/yaml
consumes: [] # Doesn't consume artifacts, creates from user input
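
A hedged sketch of driving the entrypoint as a plain CLI and reading the generated block back. The script path below is an assumption; the positional argument and flags mirror the parameters declared in this skill.yaml and the argparse definitions in artifact_define.py.

# Hedged sketch: invoke the handler directly, then load the output with PyYAML.
import subprocess
import yaml

subprocess.run(
    [
        "python", "skills/artifact/define/artifact_define.py",  # assumed location
        "api.define",
        "--produces", "openapi-spec",
        "--consumes", "validation-report",
        "--output-file", "artifact_metadata.yaml",
    ],
    check=True,
)

with open("artifact_metadata.yaml") as f:
    block = yaml.safe_load(f)
# produces/consumes nest under the artifact_metadata key.
print([a["type"] for a in block["artifact_metadata"]["produces"]])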