Initial commit
21
skills/obsidian-vault-manager/scripts/core/fetch-youtube-transcript.sh
Executable file
@@ -0,0 +1,21 @@
#!/bin/bash
# Fetch YouTube transcript using youtube_transcript_api
# Usage: ./fetch-youtube-transcript.sh VIDEO_ID

VIDEO_ID="$1"

if [[ -z "$VIDEO_ID" ]]; then
    echo "❌ Error: VIDEO_ID required" >&2
    exit 1
fi

# Fetch transcript using uvx
TRANSCRIPT=$(uvx youtube_transcript_api "$VIDEO_ID" --format text 2>&1)

if [[ $? -ne 0 ]]; then
    echo "❌ Error fetching transcript: $TRANSCRIPT" >&2
    exit 1
fi

# Output transcript to stdout
echo "$TRANSCRIPT"
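A typical invocation, assuming uv (which provides uvx) is on PATH and the command is run from the repository checkout; the video ID and output filename are placeholders:

# Fetch a transcript and save it to a file (hypothetical ID and filename)
bash skills/obsidian-vault-manager/scripts/core/fetch-youtube-transcript.sh dQw4w9WgXcQ > transcript.txt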
138
skills/obsidian-vault-manager/scripts/core/publish.sh
Executable file
@@ -0,0 +1,138 @@
#!/bin/bash
# Publish Obsidian note to GitHub Pages (sharehub)
# Handles image copying and path conversion
# Usage: ./publish.sh NOTE_FILE

set -e # Exit on error

NOTE_FILE="$1"
VAULT_PATH="/Users/zorro/Documents/Obsidian/Claudecode"
SHAREHUB_PATH="/Users/zorro/Dev/sharehub"
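# The two paths above are hardcoded for one machine. A small variant (sketch only,
# not part of the original script) would let callers override them via environment
# variables:
#   VAULT_PATH="${VAULT_PATH:-/Users/zorro/Documents/Obsidian/Claudecode}"
#   SHAREHUB_PATH="${SHAREHUB_PATH:-/Users/zorro/Dev/sharehub}"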

# Add .md extension if not provided
if [[ ! "$NOTE_FILE" =~ \.md$ ]]; then
    NOTE_FILE="${NOTE_FILE}.md"
fi

# Check if file exists in vault
if [[ ! -f "$VAULT_PATH/$NOTE_FILE" ]]; then
    echo "❌ Error: File not found: $NOTE_FILE"
    echo "Looking in: $VAULT_PATH/"
    exit 1
fi

echo "✅ Found note: $NOTE_FILE"
echo ""

# Extract and copy referenced images
cd "$VAULT_PATH"

# Find all image references (extended regex works on both GNU and BSD/macOS grep)
IMAGE_PATHS=$(grep -oE '!\[[^]]*\]\([^)]*\.(jpg|jpeg|png|gif|svg|webp)\)' "$NOTE_FILE" | sed 's/.*(\(.*\))/\1/' || true)

if [[ -n "$IMAGE_PATHS" ]]; then
    echo "📸 Found images to copy:"
    echo "$IMAGE_PATHS"
    echo ""

    # Copy each image to sharehub
    while IFS= read -r IMG_PATH; do
        # Skip if empty or URL (http/https)
        if [[ -z "$IMG_PATH" ]] || [[ "$IMG_PATH" =~ ^https?:// ]]; then
            continue
        fi

        # Normalize path (remove leading ./)
        CLEAN_PATH="${IMG_PATH#./}"

        # Source path in vault
        SRC="$VAULT_PATH/$CLEAN_PATH"

        # Destination path in sharehub (preserve directory structure)
        DEST="$SHAREHUB_PATH/$CLEAN_PATH"
        DEST_DIR=$(dirname "$DEST")

        if [[ -f "$SRC" ]]; then
            # Create destination directory if needed
            mkdir -p "$DEST_DIR"

            # Copy image
            cp "$SRC" "$DEST"
            echo " ✅ Copied: $CLEAN_PATH"
        else
            echo " ⚠️ Not found: $SRC"
        fi
    done <<< "$IMAGE_PATHS"
    echo ""
else
    echo "ℹ️ No local images found in note"
    echo ""
fi

# Read note content
NOTE_CONTENT=$(cat "$NOTE_FILE")

# Convert image paths for GitHub Pages using Python for reliable regex
# ./images/file.jpg → /sharehub/images/file.jpg
# images/file.jpg → /sharehub/images/file.jpg
CONVERTED_CONTENT=$(echo "$NOTE_CONTENT" | python3 -c '
import sys, re

content = sys.stdin.read()

# Pattern 1: ./path/to/image.ext -> /sharehub/path/to/image.ext
content = re.sub(r"!\[([^\]]*)\]\(\./([^)]+\.(jpg|jpeg|png|gif|svg|webp))\)", r"![\1](/sharehub/\2)", content, flags=re.IGNORECASE)

# Pattern 2: path/to/image.ext (no leading ./) -> /sharehub/path/to/image.ext
# But skip URLs (http:// or https://) and already-absolute paths
content = re.sub(r"!\[([^\]]*)\]\((?!https?://|/)([^)]+\.(jpg|jpeg|png|gif|svg|webp))\)", r"![\1](/sharehub/\2)", content, flags=re.IGNORECASE)

print(content, end="")
')
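# For illustration (hypothetical filenames, not taken from any real note), the two
# substitutions above rewrite relative image references like:
#   ![Chart](./images/chart.png)  ->  ![Chart](/sharehub/images/chart.png)
#   ![Chart](images/chart.png)    ->  ![Chart](/sharehub/images/chart.png)
# while remote references such as ![Logo](https://example.com/logo.png) stay untouched.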

echo "📝 Image path conversion complete"
echo ""

# Write converted content to sharehub
DEST_NOTE="$SHAREHUB_PATH/documents/$NOTE_FILE"
echo "$CONVERTED_CONTENT" > "$DEST_NOTE"

echo "✅ Copied note to: documents/$NOTE_FILE"
echo ""

# Git operations
cd "$SHAREHUB_PATH"

echo "📋 Git status:"
git status --short
echo ""

# Add all changes (document + images)
git add "documents/$NOTE_FILE"
git add images/ 2>/dev/null || true

# Get note title from frontmatter for commit message
NOTE_TITLE=$(grep -m1 '^title:' "documents/$NOTE_FILE" | sed 's/title: *["'"'"']*//;s/["'"'"']*$//' || echo "$NOTE_FILE")

# Commit
git commit -m "Publish: $NOTE_TITLE

- Published documents/$NOTE_FILE
- Copied associated images
- Converted image paths for GitHub Pages

🤖 Generated with Claude Code
Co-Authored-By: Claude <noreply@anthropic.com>"

# Push to GitHub
echo "🚀 Pushing to GitHub..."
git push origin main

echo ""
echo "✅ Published successfully!"
echo ""
echo "📄 Document: https://zorrocheng-mc.github.io/sharehub/documents/${NOTE_FILE%.md}.html"
echo "⏱️ GitHub Pages will deploy in ~60 seconds"
echo ""

exit 0
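Publishing then comes down to a single command, run from the repository checkout (path assumed); the note name is a placeholder, and the .md extension may be omitted because the script appends it:

# Publish a vault note to sharehub (hypothetical note name)
bash skills/obsidian-vault-manager/scripts/core/publish.sh "My Study Notes"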
82
skills/obsidian-vault-manager/scripts/validation/validate-frontmatter.py
Executable file
@@ -0,0 +1,82 @@
#!/usr/bin/env python3
"""
Validate frontmatter in markdown content.
Can be run by Claude during workflow or configured as a hook in settings.json
"""
import sys
import yaml

def validate_content(content):
    """Validate frontmatter in markdown content"""
    if not content.startswith("---"):
        return False, "No frontmatter detected"

    try:
        # Extract frontmatter
        parts = content.split("---", 2)
        if len(parts) < 3:
            return False, "Malformed frontmatter (missing closing ---)"

        frontmatter = yaml.safe_load(parts[1])

        if not frontmatter:
            return False, "Empty frontmatter"

        # Required fields
        required = ["title", "tags", "date", "type"]
        missing = [f for f in required if f not in frontmatter]

        if missing:
            return False, f"Missing required fields: {', '.join(missing)}"

        # Validate tags is a list
        tags = frontmatter.get("tags", [])
        if not isinstance(tags, list):
            return False, "Tags must be a list"

        if len(tags) == 0:
            return False, "Tags list is empty"

        # Check for content type tag
        valid_types = ["video", "article", "book", "podcast", "idea", "study-guide", "repository", "reference", "project"]
        has_type = any(tag in valid_types for tag in tags)
        if not has_type:
            return False, f"No content type tag found. Expected one of: {', '.join(valid_types)}"

        # Check for inbox tag
        if "inbox" not in tags:
            return False, "Warning: 'inbox' tag recommended for new content"

        return True, "✅ Validation passed"

    except yaml.YAMLError as e:
        return False, f"Invalid YAML syntax: {e}"
    except Exception as e:
        return False, f"Validation error: {e}"

def main():
    """Main entry point for validation script"""
    if len(sys.argv) > 1:
        # Read from file if provided
        filename = sys.argv[1]
        try:
            with open(filename, 'r') as f:
                content = f.read()
        except FileNotFoundError:
            print(f"❌ Error: File not found: {filename}", file=sys.stderr)
            sys.exit(1)
    else:
        # Read from stdin
        content = sys.stdin.read()

    valid, message = validate_content(content)

    if valid:
        print(message)
        sys.exit(0)
    else:
        print(f"❌ {message}", file=sys.stderr)
        sys.exit(1)

if __name__ == "__main__":
    main()
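A quick smoke test of the validator, assuming PyYAML is installed and the command is run from the repository checkout; the frontmatter values are placeholders chosen to satisfy the required fields, a content-type tag, and the inbox tag:

cat <<'EOF' | python3 skills/obsidian-vault-manager/scripts/validation/validate-frontmatter.py
---
title: "Example note"
tags: [video, inbox]
date: 2024-01-01
type: note
---
Body text goes here.
EOF

The script also accepts a filename as its first argument instead of reading stdin.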
87
skills/obsidian-vault-manager/scripts/validation/validate-mcp-tools.sh
Executable file
@@ -0,0 +1,87 @@
#!/bin/bash
# Validation script to check for common MCP tool name mistakes

set -e

SKILL_DIR="$HOME/.claude/skills/obsidian-vault-manager"
SKILL_FILE="$SKILL_DIR/SKILL.md"
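# Suggested guard (not in the original script): fail fast if SKILL.md is missing,
# since otherwise every grep below just reports errors against a non-existent file.
if [[ ! -f "$SKILL_FILE" ]]; then
    echo "❌ ERROR: SKILL.md not found at $SKILL_FILE"
    exit 1
fi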

echo "🔍 Validating MCP tool names in SKILL.md..."
echo ""

ERRORS=0

# Check for incorrect Obsidian tool references in MCP_DOCKER
if grep -q "mcp__MCP_DOCKER__obsidian" "$SKILL_FILE"; then
    echo "❌ ERROR: Found MCP_DOCKER__obsidian references!"
    echo " MCP_DOCKER does not have Obsidian tools."
    echo " Use: mcp__obsidian-mcp-tools__* instead"
    echo ""
    grep -n "mcp__MCP_DOCKER__obsidian" "$SKILL_FILE"
    echo ""
    ERRORS=$((ERRORS + 1))
fi

# Check for incorrect gitingest underscore naming
if grep -q "mcp__MCP_DOCKER__gitingest_" "$SKILL_FILE"; then
    echo "❌ ERROR: Found gitingest with underscore!"
    echo " GitIngest tools use HYPHENS, not underscores."
    echo " Use: mcp__MCP_DOCKER__gitingest-analyze (with hyphen)"
    echo ""
    grep -n "mcp__MCP_DOCKER__gitingest_" "$SKILL_FILE"
    echo ""
    ERRORS=$((ERRORS + 1))
fi

# Check for old non-Docker tool references that should be updated
if grep -q "mcp__github__create_or_update_file" "$SKILL_FILE"; then
    echo "⚠️ WARNING: Found old mcp__github__ reference"
    echo " Consider using: mcp__MCP_DOCKER__create_or_update_file"
    echo ""
    grep -n "mcp__github__create_or_update_file" "$SKILL_FILE"
    echo ""
fi

if grep -q "mcp__fetch__fetch" "$SKILL_FILE"; then
    echo "⚠️ WARNING: Found old mcp__fetch__ reference"
    echo " Consider using: mcp__MCP_DOCKER__fetch"
    echo ""
    grep -n "mcp__fetch__fetch" "$SKILL_FILE"
    echo ""
fi

if grep -q "mcp__gitingest__gitingest-analyze" "$SKILL_FILE"; then
    echo "⚠️ WARNING: Found old mcp__gitingest__ reference"
    echo " Consider using: mcp__MCP_DOCKER__gitingest-analyze"
    echo ""
    grep -n "mcp__gitingest__gitingest-analyze" "$SKILL_FILE"
    echo ""
fi

# Verify correct allowed-tools configuration
echo "📋 Checking allowed-tools section..."
if grep -q "mcp__obsidian-mcp-tools__\*" "$SKILL_FILE"; then
    echo "✅ Obsidian tools wildcard: CORRECT"
else
    echo "❌ ERROR: Missing mcp__obsidian-mcp-tools__* wildcard!"
    ERRORS=$((ERRORS + 1))
fi

if grep -q "mcp__MCP_DOCKER__gitingest-analyze" "$SKILL_FILE"; then
    echo "✅ GitIngest tool (with hyphen): CORRECT"
else
    echo "⚠️ WARNING: GitIngest tool not found in allowed-tools"
fi

echo ""
echo "═══════════════════════════════════════"

if [ $ERRORS -eq 0 ]; then
    echo "✅ Validation PASSED! No errors found."
    exit 0
else
    echo "❌ Validation FAILED! Found $ERRORS error(s)."
    echo ""
    echo "Please review MCP_ARCHITECTURE.md for correct naming"
    exit 1
fi
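The checker takes no arguments and always validates the installed copy at ~/.claude/skills/obsidian-vault-manager/SKILL.md; running it from the repository checkout (path assumed) looks like:

bash skills/obsidian-vault-manager/scripts/validation/validate-mcp-tools.sh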