Initial commit

This commit is contained in:
Zhongwei Li
2025-11-30 08:37:58 +08:00
commit f0a4617f0c
38 changed files with 4166 additions and 0 deletions

View File

@@ -0,0 +1,13 @@
{
"name": "lmorchard-agent-skills",
"description": "Collection of miscellaneous skills built by lmorchard",
"version": "0.0.0-2025.11.28",
"author": {
"name": "Les Orchard",
"email": "me@lmorchard.com"
},
"skills": [
"./skills/go-cli-builder",
"./skills/weeknotes-blog-post-composer"
]
}

3
README.md Normal file
View File

@@ -0,0 +1,3 @@
# lmorchard-agent-skills
Collection of miscellaneous skills built by lmorchard

184
plugin.lock.json Normal file
View File

@@ -0,0 +1,184 @@
{
"$schema": "internal://schemas/plugin.lock.v1.json",
"pluginId": "gh:lmorchard/lmorchard-agent-skills:lmorchard-agent-skills",
"normalized": {
"repo": null,
"ref": "refs/tags/v20251128.0",
"commit": "6e3d0a03e78a8f42719ef7f4782db093b49e6da9",
"treeHash": "a9e0a4cd8498144314f4b93bae64934cd8ea9b9f944415c72309511bab7b1a5c",
"generatedAt": "2025-11-28T10:20:21.017448Z",
"toolVersion": "publish_plugins.py@0.2.0"
},
"origin": {
"remote": "git@github.com:zhongweili/42plugin-data.git",
"branch": "master",
"commit": "aa1497ed0949fd50e99e70d6324a29c5b34f9390",
"repoRoot": "/Users/zhongweili/projects/openmind/42plugin-data"
},
"manifest": {
"name": "lmorchard-agent-skills",
"description": "Collection of miscellaneous skills built by lmorchard"
},
"content": {
"files": [
{
"path": "README.md",
"sha256": "407b60eeba7c49bce52a7b58b2abc7dd96743177ad396470656135c9f438ec8f"
},
{
"path": ".claude-plugin/plugin.json",
"sha256": "9e4672ae872b40e29473436fed803fe6fdc34cb2515a5025dbaa825bd32d612e"
},
{
"path": "skills/weeknotes-blog-post-composer/README.md",
"sha256": "4a3366e12b8947969d3006254dc54e79a8519962329efee2c6aaf9f1563241ed"
},
{
"path": "skills/weeknotes-blog-post-composer/.gitignore",
"sha256": "13a44b4fd25e4bc8e32bf1b8a656300d3b30d0c744455b395a40744d5737cc95"
},
{
"path": "skills/weeknotes-blog-post-composer/SKILL.md",
"sha256": "bd26128f9c588e7672dda5d10f0979a6eaeab967ab24d4d993e3afb3eb3c80a5"
},
{
"path": "skills/weeknotes-blog-post-composer/config/.gitkeep",
"sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
},
{
"path": "skills/weeknotes-blog-post-composer/scripts/setup.sh",
"sha256": "27d4edeaea43a14217251e92051c6de70a5481945fb16e1e20e74debc7393887"
},
{
"path": "skills/weeknotes-blog-post-composer/scripts/prepare-sources.py",
"sha256": "87d0b811086023c78019c04ca19e11c59c588918a0f8e21da9b781b8e6c3211b"
},
{
"path": "skills/weeknotes-blog-post-composer/scripts/calculate-week.py",
"sha256": "33b998d9bdcd6981d9b00e8526f498768e833ad2a2c3e3bd49fec29c7d27c5cd"
},
{
"path": "skills/weeknotes-blog-post-composer/scripts/download-binaries.sh",
"sha256": "3f90bc5f7c01f66b30c66e72287d9e8070afcf890c93d89ba0da192d1a181a61"
},
{
"path": "skills/weeknotes-blog-post-composer/scripts/fetch-sources.sh",
"sha256": "76d3628a65f089c3b6f0e28f0d8f69e3701a12a642369ba97d9192a16700163c"
},
{
"path": "skills/weeknotes-blog-post-composer/data/.gitkeep",
"sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
},
{
"path": "skills/go-cli-builder/SKILL.md",
"sha256": "f061342f9e9605a9eb80df609c089ea05fe0d11678dd50516fbc7a9d4912a09d"
},
{
"path": "skills/go-cli-builder/LICENSE.txt",
"sha256": "937358f72515cd09b0121939ef09d4efa4219caa94bda9ba3462d38ee4ca254d"
},
{
"path": "skills/go-cli-builder/references/internal-organization.md",
"sha256": "556a8677b07d2376a74757a597a438e13bbc98ad622d7bbd229ffd1b8511265f"
},
{
"path": "skills/go-cli-builder/references/template-patterns.md",
"sha256": "1a54e6a2fcc3210ff39dd30fb786ced716e0248ee9ace2239d99d067e34da34e"
},
{
"path": "skills/go-cli-builder/references/cobra-viper-integration.md",
"sha256": "af9f0637228be636e06395004feccac264981822de71ebcf7dc9facd29184ae1"
},
{
"path": "skills/go-cli-builder/scripts/scaffold_project.py",
"sha256": "a4bc549db26220f9a5163921cddeb5290bef1def36961a03530825a348b8ecb2"
},
{
"path": "skills/go-cli-builder/scripts/add_command.py",
"sha256": "1dc693ad5585ef56d36310e50d13ce1b6dc2d04a362756e8d3c0da047cc7833b"
},
{
"path": "skills/go-cli-builder/assets/templates/config.yaml.example",
"sha256": "f2b46f6c933b68b23f89b6ab721cf41bf01119a1070bf9b322bc7e9ff39817a9"
},
{
"path": "skills/go-cli-builder/assets/templates/ci.yml.template",
"sha256": "14cd2640e4db644f9c179d1788717270d1863872049831c05ff8478e69cae8c1"
},
{
"path": "skills/go-cli-builder/assets/templates/default.md.template",
"sha256": "9ca570e3b5d4f70ca0188061f4af38dcda8dd52e5159e1ac5fcaee571e6b9eec"
},
{
"path": "skills/go-cli-builder/assets/templates/go.mod.template",
"sha256": "84ca89406c806ae810ea2e4e8c8e89fd86c26cf52967a339aa78d219b23ced30"
},
{
"path": "skills/go-cli-builder/assets/templates/Makefile.template",
"sha256": "53347ee4231da1242fb7abc0ffdf6619008559dab33939ae1961f5c5a35e84e9"
},
{
"path": "skills/go-cli-builder/assets/templates/schema.sql.template",
"sha256": "624576312915a59c51a3f389ae994276a5b4a88a34a8a2e1e767214c6366dcfb"
},
{
"path": "skills/go-cli-builder/assets/templates/config.go.template",
"sha256": "d36d201cb9ec8f56d3bde606bc9512051746f9d6f20ad964a0bab62fa6b5a995"
},
{
"path": "skills/go-cli-builder/assets/templates/constants.go.template",
"sha256": "7b86999b9e1e75195ee88e1607cde18e679831ae8a12488563fb447788c0d94b"
},
{
"path": "skills/go-cli-builder/assets/templates/gitignore.template",
"sha256": "b72b2196674e6bb473cbcb5b82072d0e793ea3236ee92b95aec572ec34e8a205"
},
{
"path": "skills/go-cli-builder/assets/templates/database.go.template",
"sha256": "a338acec75b539445b1b96a279abb1e15c8ae02d40ceb7f241a59051f651a932"
},
{
"path": "skills/go-cli-builder/assets/templates/migrations.go.template",
"sha256": "b6652ce52524887de447b1d6c5e66a4e2e65081ab42aa25ac274ed175cbc962a"
},
{
"path": "skills/go-cli-builder/assets/templates/release.yml.template",
"sha256": "b097c216edb6953444f759dd29089b4c8631f968cfbb09f65783b6660885dc94"
},
{
"path": "skills/go-cli-builder/assets/templates/init.go.template",
"sha256": "e06cdb7dcfb4a92d0a296ed038d311bd0189a2e43cb508f77ce8d43526ca1422"
},
{
"path": "skills/go-cli-builder/assets/templates/version.go.template",
"sha256": "82c6edf682d26fd0f9cf2c48cba4b4e5cfe6387ebb6453a4237c7c3bc8ed83c8"
},
{
"path": "skills/go-cli-builder/assets/templates/root.go.template",
"sha256": "0633b8076216872f0dca91082bc20f36031e59bf0a320a0c85988ec7996dbea2"
},
{
"path": "skills/go-cli-builder/assets/templates/templates.go.template",
"sha256": "4d86b2fd86aa4e75e8aeea8f12f7e08a4d792e7a007df7eabfc8dea324427190"
},
{
"path": "skills/go-cli-builder/assets/templates/command.go.template",
"sha256": "3e056822e69526ec047906b8de0e3e317fed67c84e03a7d9d406853e64d5abb0"
},
{
"path": "skills/go-cli-builder/assets/templates/rolling-release.yml.template",
"sha256": "5f0042679cc0ba27a65662fe788c90f7f0a3dd6192921ce2bdd968299fdc7b74"
},
{
"path": "skills/go-cli-builder/assets/templates/main.go",
"sha256": "f1dab0b7821efce75ace34908072b80e8b27783315992450ece53e86ce15feff"
}
],
"dirSha256": "a9e0a4cd8498144314f4b93bae64934cd8ea9b9f944415c72309511bab7b1a5c"
},
"security": {
"scannedAt": null,
"scannerVersion": null,
"flags": []
}
}

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2025 Les Orchard
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,407 @@
---
name: go-cli-builder
description: Build Go-based command-line tools following established patterns with Cobra CLI framework, Viper configuration, SQLite database, and automated GitHub Actions workflows for releases. Use when creating new Go CLI projects or adding features to existing ones that follow the Cobra/Viper/SQLite stack.
---
# Go CLI Builder
## Overview
This skill provides templates, scripts, and patterns for building production-ready Go command-line tools. It follows established patterns from projects like feedspool-go, feed-to-mastodon, and linkding-to-opml.
The skill generates projects with:
- **Cobra** for CLI framework
- **Viper** for configuration management (YAML files with CLI overrides)
- **SQLite** with a naive migration system
- **Logrus** for structured logging
- **Makefile** for common tasks (lint, format, test, build)
- **GitHub Actions** workflows for CI, tagged releases, and rolling releases
- Strict code formatting with gofumpt and linting with golangci-lint
## When to Use This Skill
Use this skill when:
- Creating a new Go CLI tool from scratch
- Adding commands to an existing Go CLI project that follows these patterns
- Needing reference material about Cobra/Viper integration
- Setting up GitHub Actions workflows for multi-platform Go releases
Example user requests:
- "Create a new Go CLI tool called feed-analyzer"
- "Scaffold a Go project for processing log files"
- "Add a new 'export' command to my Go CLI project"
- "Help me set up GitHub Actions for releasing my Go tool"
## Quick Start
### Creating a New Project
To scaffold a complete new project:
```bash
# With database support (default)
python scripts/scaffold_project.py my-cli-tool
# Without database support
python scripts/scaffold_project.py my-cli-tool --no-database
# With template support for generating output
python scripts/scaffold_project.py my-cli-tool --templates
# Combining options
python scripts/scaffold_project.py my-cli-tool --no-database --templates
```
**Project Options:**
- **Database Support** (default: included)
- Includes SQLite with migrations system
- Use `--no-database` to exclude if you don't need persistent storage
- Examples: CLI tools that only fetch/transform data, API clients
- **Template Support** (default: excluded)
- Includes embedded template system with init command
- Use `--templates` to include for tools that generate formatted output
- Examples: Markdown generators, OPML exporters, report generators
**What gets created:**
Base structure (always):
- Entry point (`main.go`)
- Root command with Cobra/Viper integration (`cmd/root.go`)
- Version command (`cmd/version.go`)
- Configuration system (`internal/config/`)
- Makefile with standard targets
- GitHub Actions workflows (CI, release, rolling-release)
Optional additions:
- Database layer with migrations (`internal/database/`) - if database enabled
- Template system (`internal/templates/`, `cmd/init.go`) - if templates enabled
**Next steps after scaffolding:**
1. Update `go.mod` with the actual module name
2. Customize the example config file
3. If using database: Define initial schema in `internal/database/schema.sql`
4. Run `make setup` to install development tools
5. Run `go mod tidy` to download dependencies
### Adding Commands to Existing Projects
To add a new command to an existing project:
```bash
python scripts/add_command.py fetch
```
This creates `cmd/fetch.go` with:
- Command boilerplate
- Access to logger and config
- Flag binding examples
- TODO comments for implementation
## Project Structure
Generated projects follow this structure:
```
my-cli-tool/
├── main.go # Entry point
├── go.mod # Dependencies
├── Makefile # Build automation
├── my-cli-tool.yaml.example # Example configuration
├── cmd/ # Command definitions
│ ├── root.go # Root command + Cobra/Viper setup
│ ├── version.go # Version command
│ ├── constants.go # Application constants
│ └── [command].go # Individual commands
├── internal/
│ ├── config/
│ │ └── config.go # Configuration struct
│ ├── database/
│ │ ├── database.go # Connection + initialization
│ │ ├── migrations.go # Migration system
│ │ └── schema.sql # Initial schema (embedded)
│ └── templates/ # Optional: For tools that generate output
│ ├── templates.go # Embedded template loader
│ └── default.md # Default template (embedded)
└── .github/workflows/
├── ci.yml # PR linting and testing
├── release.yml # Tagged releases
└── rolling-release.yml # Main branch rolling releases
```
## Configuration System
Projects use a three-tier configuration hierarchy:
1. **Config file** (`my-tool.yaml`): Base configuration in YAML
2. **Environment variables**: Automatic via Viper
3. **CLI flags**: Override everything
See `references/cobra-viper-integration.md` for detailed patterns on:
- Binding flags to Viper keys
- Adding new configuration options
- Command-specific vs. global configuration
- Environment variable mapping
## Database Layer
The generated database layer includes:
1. **Initial schema** (`internal/database/schema.sql`): Embedded SQL for first-time setup
2. **Migration tracking**: `schema_migrations` table tracks applied versions
3. **Migration execution**: Automatic on database initialization
4. **Idempotent operations**: Safe to run multiple times
**To add a new migration:**
1. Edit `internal/database/migrations.go`
2. Add to the `getMigrations()` map with the next version number:
```go
func getMigrations() map[int]string {
return map[int]string{
2: `CREATE TABLE IF NOT EXISTS settings (
key TEXT PRIMARY KEY,
value TEXT NOT NULL
);`,
}
}
```
3. Migrations run automatically on next database initialization
## Init Command Pattern
For tools that generate output files (markdown, OPML, etc.), the `init` command pattern provides a great user experience by generating both configuration and customizable templates.
### When to Use Init Command
Use the init command when your CLI tool:
- Generates formatted output (markdown, HTML, XML, etc.)
- Benefits from user-customizable templates
- Has configuration that users need to set up before first use
### Init Command Components
**Available templates:**
- `init.go.template` - Complete init command implementation
- `templates.go.template` - Template loader with embedded default
- `default.md.template` - Example embedded markdown template
**The init command:**
1. Creates a YAML configuration file with all options documented
2. Creates a customizable template file (using embedded default)
3. Supports `--force` flag to overwrite existing files
4. Supports `--template-file` flag to specify custom template filename
5. Provides helpful next steps after initialization
### Embedded Templates
Go's `//go:embed` directive allows embedding template files directly in the binary:
```go
package templates
import (
_ "embed"
)
//go:embed default.md
var defaultTemplate string
func GetDefaultTemplate() (string, error) {
return defaultTemplate, nil
}
```
**Benefits:**
- Single binary distribution (no external template files needed)
- Users can still customize by running `init` to get a copy
- Template always available as fallback
### Integration with Other Commands
Commands that generate output should support both:
1. **Built-in template** (default) - uses embedded template
2. **Custom template** (via `--template` flag or config) - loads from file
Example pattern:
```go
templatePath := viper.GetString("command.template")
var generator *Generator
if templatePath != "" {
generator, err = NewGeneratorFromFile(templatePath)
} else {
generator, err = NewGenerator() // uses embedded default
}
```
### Example Projects Using This Pattern
- `linkding-to-markdown` - Fetches bookmarks and generates markdown
- `mastodon-to-markdown` - Exports Mastodon posts to markdown
## Makefile Targets
All generated projects include these targets:
- `make setup`: Install development tools (gofumpt, golangci-lint)
- `make build`: Build the binary with version information
- `make run`: Build and run the application
- `make lint`: Run golangci-lint
- `make format`: Format code with go fmt and gofumpt
- `make test`: Run tests with race detection
- `make clean`: Remove build artifacts
## GitHub Actions Workflows
Three workflows are included:
### 1. CI (`ci.yml`)
- **Triggers**: Pull requests to main, manual workflow calls
- **Actions**: Lint with golangci-lint, test with race detection
- **Skip**: Commits starting with `[noci]`
### 2. Release (`release.yml`)
- **Triggers**: Tags matching `v*` (e.g., `v1.0.0`)
- **Platforms**: Linux (amd64, arm64), macOS (amd64, arm64), Windows (amd64)
- **Outputs**: Compressed binaries, checksums, GitHub release
- **Docker**: Optional (commented out by default)
### 3. Rolling Release (`rolling-release.yml`)
- **Triggers**: Pushes to main branch
- **Actions**: Same as Release but creates a "latest" prerelease
- **Purpose**: Testing builds from the latest commit
**To customize:**
- Update Docker Hub username in workflows if using Docker
- Adjust Go version if needed (the CI workflow template pins 1.23; the go.mod template declares 1.21)
- Modify build matrix to add/remove platforms
## Typical Workflow
### Starting a New Project
1. Use this skill to scaffold the project
2. Customize the initial schema in `internal/database/schema.sql`
3. Update configuration struct in `internal/config/config.go`
4. Add domain-specific packages in `internal/` (see `references/internal-organization.md`)
5. Add commands using the add_command script
6. Implement command logic, calling into `internal/` packages
### Adding a Feature
1. Determine if it needs a new command or extends existing one
2. If new command: use `add_command.py` script
3. Add any required configuration to config struct and root flags
4. Implement logic in `internal/` packages
5. Update command to call the internal logic
6. Add tests
7. Run `make format && make lint && make test`
## Reference Documentation
For detailed patterns and guidelines, refer to:
- **`references/cobra-viper-integration.md`**: Complete guide to configuration system
- Flag binding patterns
- Adding new configuration options
- Environment variable mapping
- Best practices
- **`references/internal-organization.md`**: Internal package structure
- Package organization principles
- Dependency rules
- Common patterns (Option pattern, error wrapping)
- When to create new packages
- **`references/template-patterns.md`**: Template-based output generation
- When and how to use embedded templates
- Init command implementation
- Generator/renderer patterns
- Template functions and testing
- User workflow and best practices
## Templates Available
All templates are in `assets/templates/`:
**Core Files:**
- `main.go`: Minimal entry point
- `go.mod.template`: Pre-configured dependencies
- `Makefile.template`: Standard build targets
- `gitignore.template`: Go-specific ignores
- `config.yaml.example`: Example configuration
**Commands:**
- `root.go.template`: Cobra/Viper integration
- `version.go.template`: Version command
- `constants.go.template`: Application constants
- `command.go.template`: New command template
- `init.go.template`: Init command for config/template generation
**Internal Packages:**
- `config.go.template`: Configuration struct
- `database.go.template`: Database layer
- `migrations.go.template`: Migration system
- `schema.sql.template`: Initial schema
- `templates.go.template`: Embedded template loader
- `default.md.template`: Example embedded template
**CI/CD:**
- `ci.yml.template`: CI workflow
- `release.yml.template`: Release workflow
- `rolling-release.yml.template`: Rolling release workflow
## Best Practices
1. **Keep commands thin**: Business logic belongs in `internal/` packages
2. **Use the config struct**: Access configuration through `GetConfig()` rather than calling Viper directly
3. **Wrap errors**: Always add context with `fmt.Errorf("context: %w", err)`
4. **Format before committing**: Run `make format && make lint`
5. **Test with race detection**: `go test -race ./...`
6. **Version your releases**: Use semantic versioning tags (v1.0.0, v1.1.0, etc.)
7. **Document in .yaml.example**: Keep example config updated
8. **Handle errors explicitly**: Use `_ = ` for intentionally ignored errors (e.g., `_ = viper.BindPFlag(...)`)
9. **Defer cleanup safely**: Use `defer func() { _ = tx.Rollback() }()` instead of `defer tx.Rollback()` to avoid linter warnings
## Common Customizations
After scaffolding, projects typically need:
1. **Module name update**: Change `github.com/yourusername/project` in `go.mod` to actual path
2. **Additional dependencies**: Add with `go get` and run `go mod tidy`
3. **Custom schema**: Define tables in `internal/database/schema.sql`
4. **Domain packages**: Create packages in `internal/` for business logic
5. **Command implementations**: Fill in the TODOs in command files
6. **Docker configuration**: Uncomment Docker sections in workflows if needed
## Recent Improvements
### Linter Compliance (2025-11-11)
- **Fixed viper.BindPFlag warnings**: All `viper.BindPFlag()` calls now use `_ = ` prefix to explicitly ignore errors, satisfying the `errcheck` linter
- **Fixed defer Rollback warnings**: Database transaction cleanup now uses `defer func() { _ = tx.Rollback() }()` pattern
- **Removed static linking**: Removed `-linkmode external -extldflags "-static"` flags from Makefile to eliminate getaddrinfo warnings when using CGO with SQLite
- **Updated templates**: All templates now generate linter-clean code out of the box
These changes ensure that projects scaffolded with this skill pass `golangci-lint` without warnings.
## Troubleshooting
**"gofumpt not found" or "golangci-lint not found"**
- Run `make setup` to install development tools
**"Failed to initialize schema"**
- Check database file path and permissions
- Ensure directory exists or is creatable
**"Missing migration for version N"**
- Migrations must be sequential; add any missing versions
**"getaddrinfo warning during build"**
- This warning has been resolved in recent versions by removing static linking flags
- If you see this in an older project, remove the static linking lines from your Makefile (see Recent Improvements section)
**GitHub Actions failing on cross-compilation**
- Ensure CGO is enabled for SQLite
- Linux ARM64 builds require cross-compilation tools (handled in workflow)

View File

@@ -0,0 +1,63 @@
# Makefile template for scaffolded Go CLI projects.
# "{{PROJECT_NAME}}" is replaced by the scaffolder at generation time.
# Fixed: recipe lines restored to tab indentation (required by make),
# "all" added to .PHONY, and "test" runs with -race to match the skill
# documentation and the CI workflow.
.PHONY: all setup build run clean lint format test

# Build variables injected into the binary via -ldflags.
VERSION ?= $(shell git describe --tags --always --dirty 2>/dev/null || echo "dev")
COMMIT := $(shell git rev-parse --short HEAD 2>/dev/null || echo "unknown")
BUILD_DATE := $(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
LDFLAGS := -X main.version=$(VERSION) -X main.commit=$(COMMIT) -X main.date=$(BUILD_DATE)

# Note: Static linking is not used because SQLite requires CGO, which links
# dynamically to system libraries. Attempting static linking causes
# getaddrinfo warnings and potential runtime compatibility issues.

# Default target
all: build

# Install development tools
# NOTE(review): golangci-lint v2 moved to the module path
# github.com/golangci/golangci-lint/v2/cmd/golangci-lint, so "@latest" on the
# v1 path below may stop resolving — confirm the intended major version.
setup:
	@echo "Installing development tools..."
	@go install mvdan.cc/gofumpt@latest
	@go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest
	@echo "✅ Development tools installed"

# Build the application (CGO is required by the go-sqlite3 driver)
build:
	@echo "Building {{PROJECT_NAME}}..."
	@CGO_ENABLED=1 go build -ldflags "$(LDFLAGS)" -o {{PROJECT_NAME}} .
	@echo "✅ Built: {{PROJECT_NAME}}"

# Run the application
run: build
	./{{PROJECT_NAME}}

# Clean build artifacts
clean:
	@rm -f {{PROJECT_NAME}}
	@rm -f *.db
	@echo "✅ Cleaned"

# Lint code (fails fast with a hint if the tool is not installed)
lint:
	@test -f $(HOME)/go/bin/golangci-lint || { \
		echo "❌ golangci-lint not found. Install with: make setup"; \
		exit 1; \
	}
	@echo "Running linters..."
	@$(HOME)/go/bin/golangci-lint run --timeout 5m
	@echo "✅ Lint complete"

# Format code
format:
	@go fmt ./...
	@test -f $(HOME)/go/bin/gofumpt || { \
		echo "❌ gofumpt not found. Install with: make setup"; \
		exit 1; \
	}
	@$(HOME)/go/bin/gofumpt -l -w .
	@echo "✅ Format complete"

# Run tests with race detection (matches SKILL.md and the CI workflow)
test:
	@echo "Running tests..."
	@go test -race ./...
	@echo "✅ Tests complete"

View File

@@ -0,0 +1,45 @@
# CI workflow template: lints and tests on pull requests (and via
# workflow_call from other workflows). Indentation restored — the
# original capture had been flattened, which is invalid YAML.
name: CI

on:
  pull_request:
    branches: [ main ]
  workflow_call:

jobs:
  lint-test:
    name: CI (Lint, Test)
    runs-on: ubuntu-latest
    steps:
      # NOTE(review): github.event.head_commit is only populated on push
      # events; on pull_request events this expands to an empty string and
      # the [noci] skip never fires — confirm intended behavior.
      - name: Check for [noci] in commit message
        id: check_commit
        run: |
          if [[ "${{ github.event.head_commit.message }}" == "[noci]"* ]]; then
            echo "skip=true" >> $GITHUB_OUTPUT
          else
            echo "skip=false" >> $GITHUB_OUTPUT
          fi
      - name: Checkout code
        if: steps.check_commit.outputs.skip != 'true'
        uses: actions/checkout@v4
      # NOTE(review): this pins Go 1.23 while the go.mod template declares
      # "go 1.21" — confirm the versions are meant to differ.
      - name: Set up Go
        if: steps.check_commit.outputs.skip != 'true'
        uses: actions/setup-go@v5
        with:
          go-version: '1.23'
      - name: Download dependencies
        if: steps.check_commit.outputs.skip != 'true'
        run: go mod download
      - name: Run golangci-lint
        if: steps.check_commit.outputs.skip != 'true'
        uses: golangci/golangci-lint-action@v6
        with:
          version: latest
          args: --timeout=5m
      - name: Run tests
        if: steps.check_commit.outputs.skip != 'true'
        run: go test -race ./...

View File

@@ -0,0 +1,36 @@
package cmd
import (
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
// {{COMMAND_NAME}}Cmd represents the {{COMMAND_NAME}} command
var {{COMMAND_NAME}}Cmd = &cobra.Command{
Use: "{{COMMAND_NAME}}",
Short: "A brief description of the {{COMMAND_NAME}} command",
Long: `A longer description of the {{COMMAND_NAME}} command that explains
what it does and how to use it.
Example usage:
{{PROJECT_NAME}} {{COMMAND_NAME}} [flags]`,
RunE: func(cmd *cobra.Command, args []string) error {
log := GetLogger()
cfg := GetConfig()
log.Info("Running {{COMMAND_NAME}} command")
// TODO: Implement command logic here
return nil
},
}
func init() {
rootCmd.AddCommand({{COMMAND_NAME}}Cmd)
// Add command-specific flags here
// Example:
// {{COMMAND_NAME}}Cmd.Flags().StringP("option", "o", "", "An option for this command")
// _ = viper.BindPFlag("{{COMMAND_NAME}}.option", {{COMMAND_NAME}}Cmd.Flags().Lookup("option"))
}

View File

@@ -0,0 +1,17 @@
// Package config defines the application's configuration struct.
// NOTE(review): presumably populated from Viper in cmd/root.go — confirm.
package config
// Config holds application configuration
type Config struct {
// Core settings
Database string // database file path (presumably the SQLite file — confirm against cmd/root.go)
Verbose bool // enable verbose logging
Debug bool // enable debug logging
LogJSON bool // emit structured logs as JSON
// Add command-specific configuration fields here as needed
// Example:
// Fetch struct {
// Concurrency int
// Timeout time.Duration
// }
}

View File

@@ -0,0 +1,21 @@
# Configuration file for {{PROJECT_NAME}}
# Copy this to {{PROJECT_NAME}}.yaml and customize as needed
# Database configuration
database: "{{PROJECT_NAME}}.db"
# Logging configuration
verbose: false
debug: false
log_json: false
# Example command-specific configuration
# Uncomment and customize as needed for your commands
#
# fetch:
# concurrency: 10
# timeout: 30s
#
# serve:
# port: 8080
# host: "localhost"

View File

@@ -0,0 +1,12 @@
package cmd
// Application constants and defaults.
// "{{PROJECT_NAME}}" is replaced by the scaffolder at generation time.
const (
// DefaultDatabasePath is the default database file path
DefaultDatabasePath = "{{PROJECT_NAME}}.db"
// DefaultConcurrency is the default number of concurrent operations
DefaultConcurrency = 10
// Add other application constants here
)

View File

@@ -0,0 +1,127 @@
package database
import (
"database/sql"
_ "embed"
"fmt"
"os"
"path/filepath"
_ "github.com/mattn/go-sqlite3"
)
//go:embed schema.sql
var schemaSQL string
// DB wraps a SQLite database connection
type DB struct {
conn *sql.DB
}
// New creates and initializes a new database connection
func New(dbPath string) (*DB, error) {
// Ensure directory exists
dir := filepath.Dir(dbPath)
if dir != "." && dir != "/" {
if err := os.MkdirAll(dir, 0755); err != nil {
return nil, fmt.Errorf("failed to create database directory: %w", err)
}
}
// Open database connection
conn, err := sql.Open("sqlite3", fmt.Sprintf("%s?_foreign_keys=ON&_journal_mode=WAL", dbPath))
if err != nil {
return nil, fmt.Errorf("failed to open database: %w", err)
}
// Set connection pool limits (SQLite works best with limited concurrency)
conn.SetMaxOpenConns(1)
conn.SetMaxIdleConns(1)
db := &DB{conn: conn}
// Initialize schema and run migrations
if err := db.InitSchema(); err != nil {
conn.Close()
return nil, fmt.Errorf("failed to initialize schema: %w", err)
}
if err := db.RunMigrations(); err != nil {
conn.Close()
return nil, fmt.Errorf("failed to run migrations: %w", err)
}
return db, nil
}
// Close closes the database connection
func (db *DB) Close() error {
if db.conn != nil {
return db.conn.Close()
}
return nil
}
// InitSchema creates the initial database schema
func (db *DB) InitSchema() error {
_, err := db.conn.Exec(schemaSQL)
if err != nil {
return fmt.Errorf("failed to execute schema: %w", err)
}
return nil
}
// IsInitialized checks if the database has been initialized
func (db *DB) IsInitialized() (bool, error) {
// Check if schema_migrations table exists
var count int
err := db.conn.QueryRow(`
SELECT COUNT(*)
FROM sqlite_master
WHERE type='table' AND name='schema_migrations'
`).Scan(&count)
if err != nil {
return false, fmt.Errorf("failed to check initialization: %w", err)
}
return count > 0, nil
}
// GetMigrationVersion returns the current migration version
func (db *DB) GetMigrationVersion() (int, error) {
var version int
err := db.conn.QueryRow("SELECT COALESCE(MAX(version), 0) FROM schema_migrations").Scan(&version)
if err != nil {
return 0, fmt.Errorf("failed to get migration version: %w", err)
}
return version, nil
}
// ApplyMigration applies a specific migration
func (db *DB) ApplyMigration(version int, sql string) error {
tx, err := db.conn.Begin()
if err != nil {
return fmt.Errorf("failed to begin transaction: %w", err)
}
defer func() { _ = tx.Rollback() }()
// Execute migration SQL
if _, err := tx.Exec(sql); err != nil {
return fmt.Errorf("failed to execute migration %d: %w", version, err)
}
// Record migration
if _, err := tx.Exec(
"INSERT INTO schema_migrations (version) VALUES (?)",
version,
); err != nil {
return fmt.Errorf("failed to record migration %d: %w", version, err)
}
if err := tx.Commit(); err != nil {
return fmt.Errorf("failed to commit migration %d: %w", version, err)
}
return nil
}

View File

@@ -0,0 +1,14 @@
# {{"{{"}} .Title {{"}}"}}
_Generated: {{"{{"}} .Generated {{"}}"}}_
---
{{"{{"}} range .Items -{{"}}"}}
## {{"{{"}} .Name {{"}}"}}
{{"{{"}} .Description {{"}}"}}
---
{{"{{"}} end -{{"}}"}}

View File

@@ -0,0 +1,43 @@
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
{{PROJECT_NAME}}
# Test binary, built with `go test -c`
*.test
# Output of the go coverage tool
*.out
# Dependency directories
vendor/
# Go workspace file
go.work
# IDEs
.idea/
.vscode/
*.swp
*.swo
*~
# OS files
.DS_Store
Thumbs.db
# Database files
*.db
*.sqlite
*.sqlite3
# Config files (keep examples)
*.yaml
!*.yaml.example
# Build artifacts
build/
dist/

View File

@@ -0,0 +1,10 @@
module {{MODULE_NAME}}
go 1.21
require (
github.com/mattn/go-sqlite3 v1.14.32
github.com/sirupsen/logrus v1.9.3
github.com/spf13/cobra v1.9.1
github.com/spf13/viper v1.20.1
)

View File

@@ -0,0 +1,113 @@
package cmd
import (
"fmt"
"os"
"{{.ModuleName}}/internal/templates"
"github.com/spf13/cobra"
)
const defaultConfigContent = `# Configuration file for {{.ProjectName}}
# Copy this to {{.ProjectName}}.yaml and customize as needed
# Database configuration
database: "{{.ProjectName}}.db"
# Logging configuration
verbose: false
debug: false
log_json: false
# Add your application-specific configuration here
# Example:
# myapp:
# api_url: "https://api.example.com"
# api_token: "your-token-here"
# timeout: 30s
`
// initCmd represents the init command
var initCmd = &cobra.Command{
Use: "init",
Short: "Initialize configuration and template files",
Long: ` + "`" + `Create default configuration file and custom template file for customization.
This command generates:
- {{.ProjectName}}.yaml (configuration file)
- {{.ProjectName}}.md (customizable template, or use --template-file to specify)
Use --force to overwrite existing files.
Example:
{{.ProjectName}} init
{{.ProjectName}} init --template-file my-template.md
{{.ProjectName}} init --force` + "`" + `,
RunE: func(cmd *cobra.Command, args []string) error {
log := GetLogger()
force, _ := cmd.Flags().GetBool("force")
templateFile, _ := cmd.Flags().GetString("template-file")
configFile := "{{.ProjectName}}.yaml"
// Check if config file exists
configExists := fileExists(configFile)
if configExists && !force {
return fmt.Errorf("config file %s already exists (use --force to overwrite)", configFile)
}
// Check if template file exists
templateExists := fileExists(templateFile)
if templateExists && !force {
return fmt.Errorf("template file %s already exists (use --force to overwrite)", templateFile)
}
// Create config file
if err := os.WriteFile(configFile, []byte(defaultConfigContent), 0o644); err != nil {
return fmt.Errorf("failed to create config file: %w", err)
}
if configExists {
log.Infof("Overwrote %s", configFile)
} else {
log.Infof("Created %s", configFile)
}
// Get default template content
templateContent, err := templates.GetDefaultTemplate()
if err != nil {
return fmt.Errorf("failed to get default template: %w", err)
}
// Create template file
if err := os.WriteFile(templateFile, []byte(templateContent), 0o644); err != nil {
return fmt.Errorf("failed to create template file: %w", err)
}
if templateExists {
log.Infof("Overwrote %s", templateFile)
} else {
log.Infof("Created %s", templateFile)
}
fmt.Printf("\n✅ Initialization complete!\n\n")
fmt.Printf("Next steps:\n")
fmt.Printf(" 1. Edit %s and add your configuration\n", configFile)
fmt.Printf(" 2. (Optional) Customize %s for your preferred output format\n", templateFile)
fmt.Printf(" 3. Run: {{.ProjectName}} <command> --help for usage information\n\n")
return nil
},
}
func init() {
rootCmd.AddCommand(initCmd)
initCmd.Flags().Bool("force", false, "Overwrite existing files")
initCmd.Flags().String("template-file", "{{.ProjectName}}.md", "Name of custom template file to create")
}
// fileExists checks if a file exists
func fileExists(path string) bool {
_, err := os.Stat(path)
return err == nil
}

View File

@@ -0,0 +1,7 @@
package main
import "{{MODULE_NAME}}/cmd"
func main() {
	// Hand control to the Cobra root command; cmd.Execute prints any
	// command error and exits the process with a non-zero status.
	cmd.Execute()
}

View File

@@ -0,0 +1,73 @@
package database
import (
"fmt"
)
// getMigrations returns all available migrations keyed by schema version.
// Add new migrations here with incrementing version numbers; version 1 is
// the initial schema applied from schema.sql, so new entries start at 2.
func getMigrations() map[int]string {
	return map[int]string{
		// Example migration:
		// 2: `
		// CREATE TABLE IF NOT EXISTS settings (
		//     key TEXT PRIMARY KEY,
		//     value TEXT NOT NULL,
		//     updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
		// );
		// `,
		// Add your migrations here starting from version 2
		// (version 1 is the initial schema in schema.sql)
	}
}
// RunMigrations executes all pending migrations in version order.
//
// The database must already be initialized (schema_migrations table created
// by InitSchema); otherwise an error is returned. Migrations are applied one
// version at a time starting just past the current recorded version, and a
// gap in the migration map is reported as an error.
func (db *DB) RunMigrations() error {
	initialized, err := db.IsInitialized()
	if err != nil {
		return fmt.Errorf("failed to check initialization: %w", err)
	}
	if !initialized {
		return fmt.Errorf("database not initialized")
	}

	current, err := db.GetMigrationVersion()
	if err != nil {
		return fmt.Errorf("failed to get current version: %w", err)
	}

	migrations := getMigrations()

	// Highest version we need to reach.
	target := current
	for v := range migrations {
		if v > target {
			target = v
		}
	}

	// Apply each pending version strictly in order.
	applied := 0
	for v := current + 1; v <= target; v++ {
		stmt, ok := migrations[v]
		if !ok {
			return fmt.Errorf("missing migration for version %d", v)
		}
		if err := db.ApplyMigration(v, stmt); err != nil {
			return fmt.Errorf("failed to apply migration %d: %w", v, err)
		}
		applied++
	}

	if applied > 0 {
		fmt.Printf("Applied %d migration(s), current version: %d\n", applied, target)
	}
	return nil
}

View File

@@ -0,0 +1,126 @@
name: Release
on:
push:
tags:
- 'v*'
jobs:
ci:
uses: ./.github/workflows/ci.yml
build:
needs: ci
strategy:
matrix:
include:
- os: ubuntu-latest
goos: linux
goarch: amd64
- os: ubuntu-latest
goos: linux
goarch: arm64
- os: macos-latest
goos: darwin
goarch: amd64
- os: macos-latest
goos: darwin
goarch: arm64
- os: windows-latest
goos: windows
goarch: amd64
runs-on: ${{ matrix.os }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version: '1.23'
- name: Build
shell: bash
env:
GOOS: ${{ matrix.goos }}
GOARCH: ${{ matrix.goarch }}
run: |
VERSION="${GITHUB_REF#refs/tags/}"
COMMIT="${GITHUB_SHA:0:7}"
BUILD_DATE="$(date -u +%Y-%m-%dT%H:%M:%SZ)"
# NOTE(review): version/commit/date are declared in package cmd (cmd/version.go),
# so "-X main.version" will not set them; the -X paths likely need to be
# "<module>/cmd.version" etc. — confirm against the generated module path.
LDFLAGS="-X main.version=$VERSION -X main.commit=$COMMIT -X main.date=$BUILD_DATE"
go build -ldflags "$LDFLAGS" -o {{PROJECT_NAME}}${{ matrix.goos == 'windows' && '.exe' || '' }}
- name: Package (Unix)
if: matrix.goos != 'windows'
run: |
tar czf {{PROJECT_NAME}}-${{ matrix.goos }}-${{ matrix.goarch }}.tar.gz {{PROJECT_NAME}}
- name: Package (Windows)
if: matrix.goos == 'windows'
run: |
7z a {{PROJECT_NAME}}-${{ matrix.goos }}-${{ matrix.goarch }}.zip {{PROJECT_NAME}}.exe
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: {{PROJECT_NAME}}-${{ matrix.goos }}-${{ matrix.goarch }}
path: |
{{PROJECT_NAME}}-*.tar.gz
{{PROJECT_NAME}}-*.zip
release:
needs: build
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Download all artifacts
uses: actions/download-artifact@v4
- name: Generate checksums
run: |
find . -name "{{PROJECT_NAME}}-*" -type f \( -name "*.tar.gz" -o -name "*.zip" \) -exec sha256sum {} \; > checksums.txt
- name: Create Release
uses: softprops/action-gh-release@v2
with:
files: |
{{PROJECT_NAME}}-*/*.tar.gz
{{PROJECT_NAME}}-*/*.zip
checksums.txt
generate_release_notes: true
# Uncomment to build and push Docker image
# docker:
# needs: build
# runs-on: ubuntu-latest
# steps:
# - name: Checkout code
# uses: actions/checkout@v4
#
# - name: Set up Docker Buildx
# uses: docker/setup-buildx-action@v3
#
# - name: Login to Docker Hub
# uses: docker/login-action@v3
# with:
# username: ${{ secrets.DOCKER_USERNAME }}
# password: ${{ secrets.DOCKER_PASSWORD }}
#
# - name: Extract version
# id: version
# run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
#
# - name: Build and push
# uses: docker/build-push-action@v5
# with:
# context: .
# push: true
# tags: |
# yourusername/{{PROJECT_NAME}}:${{ steps.version.outputs.VERSION }}
# yourusername/{{PROJECT_NAME}}:latest

View File

@@ -0,0 +1,135 @@
name: Rolling Release
on:
push:
branches: [ main ]
jobs:
ci:
uses: ./.github/workflows/ci.yml
build:
needs: ci
strategy:
matrix:
include:
- os: ubuntu-latest
goos: linux
goarch: amd64
- os: ubuntu-latest
goos: linux
goarch: arm64
- os: macos-latest
goos: darwin
goarch: amd64
- os: macos-latest
goos: darwin
goarch: arm64
- os: windows-latest
goos: windows
goarch: amd64
runs-on: ${{ matrix.os }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Go
uses: actions/setup-go@v5
with:
go-version: '1.23'
- name: Build
shell: bash
env:
GOOS: ${{ matrix.goos }}
GOARCH: ${{ matrix.goarch }}
run: |
VERSION="rolling-${GITHUB_SHA:0:7}"
COMMIT="${GITHUB_SHA:0:7}"
BUILD_DATE="$(date -u +%Y-%m-%dT%H:%M:%SZ)"
# NOTE(review): version/commit/date are declared in package cmd (cmd/version.go),
# so "-X main.version" will not set them; the -X paths likely need to be
# "<module>/cmd.version" etc. — confirm against the generated module path.
LDFLAGS="-X main.version=$VERSION -X main.commit=$COMMIT -X main.date=$BUILD_DATE"
go build -ldflags "$LDFLAGS" -o {{PROJECT_NAME}}${{ matrix.goos == 'windows' && '.exe' || '' }}
- name: Package (Unix)
if: matrix.goos != 'windows'
run: |
tar czf {{PROJECT_NAME}}-${{ matrix.goos }}-${{ matrix.goarch }}.tar.gz {{PROJECT_NAME}}
- name: Package (Windows)
if: matrix.goos == 'windows'
run: |
7z a {{PROJECT_NAME}}-${{ matrix.goos }}-${{ matrix.goarch }}.zip {{PROJECT_NAME}}.exe
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: {{PROJECT_NAME}}-${{ matrix.goos }}-${{ matrix.goarch }}
path: |
{{PROJECT_NAME}}-*.tar.gz
{{PROJECT_NAME}}-*.zip
release:
needs: build
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Download all artifacts
uses: actions/download-artifact@v4
- name: Generate checksums
run: |
find . -name "{{PROJECT_NAME}}-*" -type f \( -name "*.tar.gz" -o -name "*.zip" \) -exec sha256sum {} \; > checksums.txt
- name: Delete existing rolling release
env:
GH_TOKEN: ${{ github.token }}
run: |
gh release delete latest --yes || true
git push origin :refs/tags/latest || true
- name: Create Rolling Release
uses: softprops/action-gh-release@v2
with:
tag_name: latest
name: Rolling Release
body: |
**Automated rolling release built from the latest commit on the main branch.**
⚠️ This release may be unstable and is intended for testing purposes only.
**Commit:** ${{ github.sha }}
**Built:** ${{ github.event.head_commit.timestamp }}
prerelease: true
files: |
{{PROJECT_NAME}}-*/*.tar.gz
{{PROJECT_NAME}}-*/*.zip
checksums.txt
# Uncomment to build and push Docker image
# docker:
# needs: build
# runs-on: ubuntu-latest
# steps:
# - name: Checkout code
# uses: actions/checkout@v4
#
# - name: Set up Docker Buildx
# uses: docker/setup-buildx-action@v3
#
# - name: Login to Docker Hub
# uses: docker/login-action@v3
# with:
# username: ${{ secrets.DOCKER_USERNAME }}
# password: ${{ secrets.DOCKER_PASSWORD }}
#
# - name: Build and push
# uses: docker/build-push-action@v5
# with:
# context: .
# push: true
# tags: yourusername/{{PROJECT_NAME}}:latest

View File

@@ -0,0 +1,126 @@
package cmd
import (
"fmt"
"os"
"{{MODULE_NAME}}/internal/config"
"github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
var (
cfgFile string
log = logrus.New()
cfg *config.Config
)
// rootCmd represents the base command when called without any subcommands
var rootCmd = &cobra.Command{
Use: "{{PROJECT_NAME}}",
Short: "A brief description of your application",
Long: `A longer description of what your application does and how it works.
This can be multiple lines and should provide helpful context about the
purpose and usage of your CLI tool.`,
PersistentPreRun: func(cmd *cobra.Command, args []string) {
initConfig()
setupLogging()
},
}
// Execute adds all child commands to the root command and sets appropriate flags.
// It is called once from main(); any command error is printed to stderr and
// the process exits with status 1.
func Execute() {
	if err := rootCmd.Execute(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
// init declares the persistent (global) flags and binds them to viper keys,
// so each value can come from a flag, an environment variable, the config
// file, or the defaults set in initConfig — in that order of precedence.
func init() {
	// Configuration file flag
	rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is ./{{PROJECT_NAME}}.yaml)")
	// Logging flags
	rootCmd.PersistentFlags().BoolP("verbose", "v", false, "verbose output")
	rootCmd.PersistentFlags().Bool("debug", false, "debug output")
	rootCmd.PersistentFlags().Bool("log-json", false, "output logs in JSON format")
	// Database flag
	rootCmd.PersistentFlags().String("database", "{{PROJECT_NAME}}.db", "database file path")
	// Bind flags to viper. Errors are ignored deliberately: the lookups
	// reference flags declared just above, so they cannot be nil.
	_ = viper.BindPFlag("verbose", rootCmd.PersistentFlags().Lookup("verbose"))
	_ = viper.BindPFlag("debug", rootCmd.PersistentFlags().Lookup("debug"))
	_ = viper.BindPFlag("log_json", rootCmd.PersistentFlags().Lookup("log-json"))
	_ = viper.BindPFlag("database", rootCmd.PersistentFlags().Lookup("database"))
}
// initConfig reads in config file and ENV variables if set.
//
// Precedence (handled by viper): explicit --config flag, then
// ./{{PROJECT_NAME}}.yaml found in the current directory, then environment
// variables, then the defaults set here. A missing default config file is
// fine; any other read error is fatal.
func initConfig() {
	if cfgFile != "" {
		// Use config file from the flag
		viper.SetConfigFile(cfgFile)
	} else {
		// Search for config in current directory
		viper.AddConfigPath(".")
		viper.SetConfigType("yaml")
		viper.SetConfigName("{{PROJECT_NAME}}")
	}
	// Set defaults
	viper.SetDefault("database", "{{PROJECT_NAME}}.db")
	viper.SetDefault("verbose", false)
	viper.SetDefault("debug", false)
	viper.SetDefault("log_json", false)
	// Read in environment variables that match
	viper.AutomaticEnv()
	// Read the config file if present. Only a ConfigFileNotFoundError during
	// the default search is tolerated; an explicitly requested file that is
	// missing/unreadable, or a config file that exists but fails to parse,
	// is reported instead of being silently ignored.
	if err := viper.ReadInConfig(); err != nil {
		_, notFound := err.(viper.ConfigFileNotFoundError)
		if cfgFile != "" || !notFound {
			fmt.Fprintf(os.Stderr, "Error reading config file: %v\n", err)
			os.Exit(1)
		}
	}
}
// setupLogging configures the logger based on configuration
func setupLogging() {
if viper.GetBool("log_json") {
log.SetFormatter(&logrus.JSONFormatter{})
} else {
log.SetFormatter(&logrus.TextFormatter{
FullTimestamp: true,
})
}
if viper.GetBool("debug") {
log.SetLevel(logrus.DebugLevel)
} else if viper.GetBool("verbose") {
log.SetLevel(logrus.InfoLevel)
} else {
log.SetLevel(logrus.WarnLevel)
}
}
// GetConfig returns the application configuration, building it from the
// resolved viper values on first use and returning the cached value after.
func GetConfig() *config.Config {
	if cfg != nil {
		return cfg
	}
	cfg = &config.Config{
		Database: viper.GetString("database"),
		Verbose:  viper.GetBool("verbose"),
		Debug:    viper.GetBool("debug"),
		LogJSON:  viper.GetBool("log_json"),
	}
	return cfg
}
// GetLogger returns the shared logrus logger (configured by setupLogging
// during PersistentPreRun).
func GetLogger() *logrus.Logger {
	return log
}

View File

@@ -0,0 +1,24 @@
-- Initial database schema for {{PROJECT_NAME}}
-- This is version 1 of the schema
-- Migration tracking table
CREATE TABLE IF NOT EXISTS schema_migrations (
version INTEGER PRIMARY KEY,
applied_at DATETIME DEFAULT CURRENT_TIMESTAMP
);
-- Insert initial version
INSERT OR IGNORE INTO schema_migrations (version) VALUES (1);
-- Example table - customize for your application
-- CREATE TABLE IF NOT EXISTS items (
-- id INTEGER PRIMARY KEY AUTOINCREMENT,
-- name TEXT NOT NULL,
-- description TEXT,
-- created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
-- updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
-- );
--
-- CREATE INDEX IF NOT EXISTS idx_items_name ON items(name);
-- Add your initial schema tables here

View File

@@ -0,0 +1,13 @@
package templates
import (
_ "embed"
)
// defaultTemplate holds the contents of default.md, baked into the binary
// at compile time by the embed directive below.
//
//go:embed default.md
var defaultTemplate string

// GetDefaultTemplate returns the embedded default template content.
// The error return is always nil today; it is kept so callers need not
// change if template loading ever becomes fallible.
func GetDefaultTemplate() (string, error) {
	return defaultTemplate, nil
}

View File

@@ -0,0 +1,27 @@
package cmd
import (
"fmt"
"github.com/spf13/cobra"
)
// Build metadata, intended to be overridden at link time via
// -ldflags "-X ...". NOTE(review): the bundled release workflows pass
// "-X main.version=..." but these variables live in package cmd, so those
// flags would need the full path (e.g. -X <module>/cmd.version=...) to take
// effect — verify against the workflow templates.
var (
	version = "dev"
	commit  = "unknown"
	date    = "unknown"
)

// versionCmd represents the version command.
var versionCmd = &cobra.Command{
	Use:   "version",
	Short: "Print version information",
	Long:  `Print the version, commit hash, and build date of this application.`,
	Run: func(cmd *cobra.Command, args []string) {
		fmt.Printf("{{PROJECT_NAME}} %s (commit: %s, built: %s)\n", version, commit, date)
	},
}

// init registers the version command on the root command.
func init() {
	rootCmd.AddCommand(versionCmd)
}

View File

@@ -0,0 +1,129 @@
# Cobra + Viper Integration Pattern
This document explains how Cobra (CLI framework) and Viper (configuration management) are integrated in the generated Go CLI projects.
## Architecture Overview
The integration follows these principles:
1. **Configuration Priority** (highest to lowest):
- Command-line flags
- Environment variables
- Config file values
- Default values
2. **Lazy Loading**: Configuration is loaded once in `PersistentPreRun`, before any command executes
3. **Centralized Access**: The `GetConfig()` and `GetLogger()` functions in `cmd/root.go` provide access to configuration and logging
## Key Components
### Root Command (`cmd/root.go`)
The root command sets up the entire configuration system:
```go
var rootCmd = &cobra.Command{
PersistentPreRun: func(cmd *cobra.Command, args []string) {
initConfig()
setupLogging()
},
}
```
### Configuration Initialization (`initConfig()`)
This function:
1. Determines config file location (from flag or default)
2. Sets default values
3. Enables environment variable reading
4. Reads the config file (if it exists)
### Flag Binding
Flags are bound to Viper keys using `viper.BindPFlag()`:
```go
rootCmd.PersistentFlags().BoolP("verbose", "v", false, "verbose output")
viper.BindPFlag("verbose", rootCmd.PersistentFlags().Lookup("verbose"))
```
This creates the hierarchy: CLI flag → Viper key → Config struct
## Adding New Configuration
To add a new configuration option:
1. **Add to config struct** (`internal/config/config.go`):
```go
type Config struct {
MyNewOption string
}
```
2. **Add flag** (`cmd/root.go` or command-specific file):
```go
rootCmd.PersistentFlags().String("my-option", "default", "description")
viper.BindPFlag("my_option", rootCmd.PersistentFlags().Lookup("my-option"))
```
3. **Set default** (`cmd/root.go` in `initConfig()`):
```go
viper.SetDefault("my_option", "default_value")
```
4. **Add to config example** (`.yaml.example`):
```yaml
my_option: "default_value"
```
5. **Access in commands**:
```go
cfg := GetConfig()
value := cfg.MyNewOption
// or directly from viper:
value := viper.GetString("my_option")
```
## Command-Specific Configuration
For configuration specific to a single command:
1. Add the flag to the command's `init()` function, not the root command
2. Use a nested structure in the config struct:
```go
type Config struct {
Fetch struct {
Concurrency int
Timeout time.Duration
}
}
```
3. Bind with a namespaced key:
```go
viper.BindPFlag("fetch.concurrency", fetchCmd.Flags().Lookup("concurrency"))
```
## Environment Variables
Viper automatically maps environment variables when you call `viper.AutomaticEnv()`.
By default, environment variables are matched by converting the key to uppercase and replacing `.` with `_`:
- Config key: `fetch.concurrency`
- Environment variable: `FETCH_CONCURRENCY`
## Best Practices
1. **Use PersistentFlags for global options**: Options that apply to all commands should be on `rootCmd.PersistentFlags()`
2. **Use command-specific Flags for local options**: Options specific to one command should be on that command's `Flags()`
3. **Provide sensible defaults**: Always set defaults in `initConfig()` so the tool works without a config file
4. **Document in .yaml.example**: Keep the example config file up to date
5. **Keep flag names kebab-case**: Use hyphens in CLI flags (`--my-option`) and underscores in Viper keys (`my_option`)
6. **Use GetConfig() for structured access**: Prefer accessing configuration through the typed Config struct rather than calling viper.Get* directly in commands

View File

@@ -0,0 +1,231 @@
# Internal Package Organization
This document explains how to organize code in the `internal/` directory of Go CLI projects.
## The `internal/` Directory
The `internal/` directory is a special Go convention. Packages inside `internal/` can only be imported by code in the parent tree. This enforces encapsulation and prevents external projects from depending on internal implementation details.
## Standard Package Structure
A typical Go CLI project has this structure:
```
project/
├── cmd/ # Command definitions (public API of the CLI)
├── internal/ # Private implementation
│ ├── config/ # Configuration structures
│ ├── database/ # Database access layer
│ └── [domain packages] # Business logic packages
├── main.go # Entry point
└── go.mod # Dependencies
```
## Package Guidelines
### `cmd/` Package
**Purpose**: Define the CLI commands and their flags
**Contents**:
- `root.go`: Root command and configuration initialization
- `version.go`: Version command
- `constants.go`: CLI-level constants
- One file per command (e.g., `fetch.go`, `export.go`)
**Responsibilities**:
- Parse and validate user input
- Set up configuration and logging
- Call into `internal/` packages to do the work
- Format and display output
**Anti-patterns**:
- Heavy business logic in command handlers
- Direct database access
- Complex algorithms
### `internal/config/` Package
**Purpose**: Define configuration structures
**Contents**:
- `config.go`: Config struct definitions
**Example**:
```go
package config
type Config struct {
Database string
Verbose bool
Fetch struct {
Concurrency int
Timeout time.Duration
}
}
```
### `internal/database/` Package
**Purpose**: Encapsulate all database operations
**Contents**:
- `database.go`: Connection management, initialization
- `migrations.go`: Migration system
- `schema.sql`: Initial schema (embedded)
- Optional: `queries.go` for complex queries
**Responsibilities**:
- Database connection lifecycle
- Schema initialization and migrations
- Data access methods
- Transaction management
**Anti-patterns**:
- Business logic in database layer
- Exposing `*sql.DB` directly
- SQL in command files
### Domain-Specific Packages
Create additional packages in `internal/` for each major domain or feature:
```
internal/
├── feeds/ # Feed parsing and processing
├── fetcher/ # HTTP fetching logic
├── renderer/ # Output rendering
└── exporter/ # Export functionality
```
**Guidelines**:
- One package per cohesive responsibility
- Packages should be importable by `cmd/` and by each other
- Keep packages focused and single-purpose
- Use clear, descriptive names
## Layering and Dependencies
Follow these dependency rules:
```
main.go
└─> cmd/
└─> internal/config/
└─> internal/database/
└─> internal/[domain]/
└─> internal/[other domains]/
```
**Rules**:
1. `cmd/` can import any `internal/` package
2. `internal/` packages can import each other as needed
3. Avoid circular dependencies between `internal/` packages
4. Keep `cmd/` thin - it orchestrates but doesn't implement
## Example: Adding a New Feature
Let's say you want to add feed fetching functionality:
1. **Create the package**:
```
internal/fetcher/
├── fetcher.go # Main fetching logic
└── fetcher_test.go # Tests
```
2. **Define the API**:
```go
package fetcher
type Fetcher struct {
client *http.Client
// ...
}
func New(opts ...Option) *Fetcher { ... }
func (f *Fetcher) Fetch(url string) ([]byte, error) { ... }
```
3. **Use in command**:
```go
// cmd/fetch.go
package cmd
import "yourproject/internal/fetcher"
var fetchCmd = &cobra.Command{
RunE: func(cmd *cobra.Command, args []string) error {
f := fetcher.New()
data, err := f.Fetch(url)
// ...
},
}
```
## Common Patterns
### Option Pattern for Configuration
```go
type Fetcher struct {
timeout time.Duration
}
type Option func(*Fetcher)
func WithTimeout(d time.Duration) Option {
return func(f *Fetcher) {
f.timeout = d
}
}
func New(opts ...Option) *Fetcher {
f := &Fetcher{timeout: 30 * time.Second}
for _, opt := range opts {
opt(f)
}
return f
}
```
### Embedding Resources
For SQL, templates, or other resources:
```go
import _ "embed"
//go:embed schema.sql
var schemaSQL string
```
### Error Wrapping
Always wrap errors with context:
```go
if err != nil {
return fmt.Errorf("failed to fetch feed %s: %w", url, err)
}
```
## Testing
- Put tests in `_test.go` files alongside the code
- Use table-driven tests for multiple cases
- Consider using `internal/database/database_test.go` with in-memory SQLite for database tests
## When to Create a New Package
Create a new `internal/` package when:
- You have a cohesive set of related functionality
- The code would make commands cleaner and more focused
- You want to unit test logic separately from CLI interaction
- Multiple commands need to share the same functionality
Don't create a package when:
- It would only have one small function
- It's tightly coupled to a single command
- It would create circular dependencies

View File

@@ -0,0 +1,410 @@
# Template Patterns for Go CLI Tools
This guide covers patterns for implementing customizable output templates in Go CLI tools, based on successful patterns from `linkding-to-markdown` and `mastodon-to-markdown`.
## Overview
CLI tools that generate formatted output (Markdown, HTML, XML, etc.) benefit from:
1. **Embedded default templates** - Work out of the box, single binary
2. **User customization** - Users can modify templates for their needs
3. **Init command** - Easy way to get started with configuration and templates
## Architecture
### Directory Structure
```
my-cli-tool/
├── cmd/
│ ├── init.go # Init command to bootstrap config/templates
│ └── fetch.go # Command that uses templates
├── internal/
│ ├── templates/
│ │ ├── templates.go # Template loader with embedded defaults
│ │ └── default.md # Default template (embedded via //go:embed)
│ └── generator/ # Or markdown/, formatter/, etc.
│ └── generator.go # Template renderer and data structures
```
## Implementation Steps
### 1. Create Template Package
**File: `internal/templates/templates.go`**
```go
package templates
import (
_ "embed"
)
//go:embed default.md
var defaultTemplate string
// GetDefaultTemplate returns the embedded default template content
func GetDefaultTemplate() (string, error) {
return defaultTemplate, nil
}
```
**File: `internal/templates/default.md`**
Create your default template using Go's `text/template` syntax:
```markdown
# {{ .Title }}
_Generated: {{ .Generated }}_
---
{{ range .Items -}}
## {{ .Name }}
{{ .Description }}
{{ if .Tags -}}
Tags: {{ join .Tags ", " }}
{{ end -}}
---
{{ end -}}
```
### 2. Create Generator/Renderer
**File: `internal/generator/generator.go`**
```go
package generator
import (
"fmt"
"io"
"os"
"strings"
"text/template"
"time"
"yourproject/internal/templates"
)
type Generator struct {
template *template.Template
}
// NewGenerator creates a generator with the default embedded template
func NewGenerator() (*Generator, error) {
defaultTmpl, err := templates.GetDefaultTemplate()
if err != nil {
return nil, fmt.Errorf("failed to get default template: %w", err)
}
return NewGeneratorWithTemplate(defaultTmpl)
}
// NewGeneratorFromFile creates a generator from a template file
func NewGeneratorFromFile(templatePath string) (*Generator, error) {
content, err := os.ReadFile(templatePath)
if err != nil {
return nil, fmt.Errorf("failed to read template file: %w", err)
}
return NewGeneratorWithTemplate(string(content))
}
// NewGeneratorWithTemplate creates a generator with a custom template string
func NewGeneratorWithTemplate(tmplStr string) (*Generator, error) {
// Define template functions
funcMap := template.FuncMap{
"formatDate": func(t time.Time, format string) string {
return t.Format(format)
},
"join": strings.Join,
"hasContent": func(s string) bool {
return strings.TrimSpace(s) != ""
},
}
tmpl, err := template.New("output").Funcs(funcMap).Parse(tmplStr)
if err != nil {
return nil, fmt.Errorf("failed to parse template: %w", err)
}
return &Generator{template: tmpl}, nil
}
// TemplateData holds data passed to templates
type TemplateData struct {
Title string
Generated string
Items []Item
// Add your domain-specific fields here
}
// Generate executes the template with data and writes to writer
func (g *Generator) Generate(w io.Writer, data TemplateData) error {
if err := g.template.Execute(w, data); err != nil {
return fmt.Errorf("failed to execute template: %w", err)
}
return nil
}
```
### 3. Create Init Command
Use the `init.go.template` from the skill, customizing the `defaultConfigContent` for your project's needs.
Key features:
- Creates config file with documented options
- Creates customizable template file from embedded default
- Supports `--force` to overwrite
- Supports `--template-file` to specify custom filename
- Provides helpful next steps
### 4. Integrate with Commands
**In your command that generates output:**
```go
func runFetch(cmd *cobra.Command, args []string) error {
logger := GetLogger()
// ... fetch your data ...
// Create generator with custom template or default
templatePath := viper.GetString("fetch.template")
// Use a variable name that does not shadow the imported "generator"
// package, or the package references below will not compile.
var gen *generator.Generator
var err error
if templatePath != "" {
logger.Infof("Using custom template: %s", templatePath)
gen, err = generator.NewGeneratorFromFile(templatePath)
if err != nil {
return fmt.Errorf("failed to load custom template: %w", err)
}
} else {
gen, err = generator.NewGenerator()
if err != nil {
return fmt.Errorf("failed to create generator: %w", err)
}
}
// Prepare template data
data := generator.TemplateData{
Title: viper.GetString("fetch.title"),
Generated: time.Now().Format(time.RFC3339),
Items: fetchedItems,
}
// Determine output destination
outputPath := viper.GetString("fetch.output")
var output *os.File
if outputPath != "" {
output, err = os.Create(outputPath)
if err != nil {
return fmt.Errorf("failed to create output file: %w", err)
}
defer output.Close()
logger.Infof("Writing output to %s", outputPath)
} else {
output = os.Stdout
}
// Generate output
if err := gen.Generate(output, data); err != nil {
return fmt.Errorf("failed to generate output: %w", err)
}
return nil
}
```
### 5. Add Configuration Support
**In `internal/config/config.go`:**
```go
type Config struct {
// ... other config ...
Fetch struct {
Output string
Title string
Template string // Path to custom template file
}
}
```
**In your command's flags:**
```go
fetchCmd.Flags().String("template", "", "Custom template file (default: built-in template)")
_ = viper.BindPFlag("fetch.template", fetchCmd.Flags().Lookup("template"))
```
**In config YAML:**
```yaml
fetch:
output: "output.md"
title: "My Output"
template: "my-custom-template.md" # Optional
```
## Template Functions
Provide helpful template functions for common operations:
```go
funcMap := template.FuncMap{
// Date formatting
"formatDate": func(t time.Time, format string) string {
return t.Format(format)
},
// String operations
"join": strings.Join,
"hasContent": func(s string) bool {
return strings.TrimSpace(s) != ""
},
"truncate": func(s string, length int) string {
if len(s) <= length {
return s
}
return s[:length] + "..."
},
// Conditional helpers
"default": func(defaultVal, val interface{}) interface{} {
if val == nil || val == "" {
return defaultVal
}
return val
},
}
```
## User Workflow
### First-Time Setup
```bash
# User initializes config and template
$ my-tool init
✅ Initialization complete!
Next steps:
1. Edit my-tool.yaml and add your configuration
2. (Optional) Customize my-tool.md for your preferred output format
3. Run: my-tool fetch --help for usage information
```
### Using Default Template
```bash
# Just works with embedded default
$ my-tool fetch --output result.md
```
### Using Custom Template
```bash
# After editing my-tool.md
$ my-tool fetch --template my-tool.md --output result.md
# Or via config file
$ cat my-tool.yaml
fetch:
template: "my-tool.md"
$ my-tool fetch --output result.md
```
## Best Practices
1. **Always provide a sensible default template** - Tool should work without customization
2. **Document template variables** - In README and/or generated template comments
3. **Validate templates early** - Parse template when creating generator, not during execution
4. **Provide helpful error messages** - Template parse errors should show line numbers
5. **Include examples** - Show template snippets in documentation
6. **Support both stdout and file output** - Enables piping and integration
7. **Make template optional** - Config file should work without template field set
## Template Documentation
In your README, document:
### Available Variables
```markdown
### Template Variables
- `.Title` - Document title (string)
- `.Generated` - Generation timestamp (string)
- `.Items` - Array of items to include
### Item Fields
Each item has:
- `.Name` - Item name (string)
- `.Description` - Item description (string)
- `.Tags` - Array of tags ([]string)
```
### Available Functions
```markdown
### Template Functions
- `formatDate <time> <format>` - Format time.Time with Go time format
- `join <slice> <separator>` - Join string slice
- `hasContent <string>` - Check if string is non-empty
```
### Example Template
Include a complete working example users can copy/paste.
## Testing Templates
```go
func TestTemplateExecution(t *testing.T) {
tmpl := `{{ .Title }}
{{ range .Items }}{{ .Name }}{{ end }}`
gen, err := NewGeneratorWithTemplate(tmpl)
if err != nil {
t.Fatalf("failed to create generator: %v", err)
}
data := TemplateData{
Title: "Test",
Items: []Item{{Name: "Item1"}, {Name: "Item2"}},
}
var buf bytes.Buffer
if err := gen.Generate(&buf, data); err != nil {
t.Fatalf("failed to generate: %v", err)
}
expected := "Test\nItem1Item2"
if buf.String() != expected {
t.Errorf("expected %q, got %q", expected, buf.String())
}
}
```
## Common Pitfalls
1. **Not using `//go:embed`** - Requires users to distribute template files separately
2. **No template validation** - Errors appear late during execution
3. **Poor error messages** - Template errors can be cryptic, add context
4. **Forgetting `defer file.Close()`** - When writing to files
5. **Not supporting stdout** - Reduces composability with other tools
6. **Hardcoded paths** - Use relative paths or make configurable
## Examples in the Wild
- **linkding-to-markdown** - Bookmarks to Markdown with grouping options
- **mastodon-to-markdown** - Posts to Markdown with media handling
- **feedspool-go** - RSS/Atom feed processing with custom templates

View File

@@ -0,0 +1,78 @@
#!/usr/bin/env python3
"""
Add a new command to an existing Go CLI project.
Usage:
python add_command.py <command-name> [--path <project-dir>]
"""
import argparse
import os
import sys
from pathlib import Path
from datetime import datetime
def add_command(command_name, project_dir="."):
    """Add a new command file to the cmd/ directory of a Go CLI project.

    Renders assets/templates/command.go.template (shipped next to this
    script) with the command name substituted, then writes it to
    cmd/<command_name>.go.

    Args:
        command_name: Name of the new command (e.g. "fetch").
        project_dir: Path to the Go project root (default: current directory).

    Exits with status 1 if cmd/ is missing, the command file already
    exists, or the template cannot be found.
    """
    project_path = Path(project_dir)

    # Verify we're in a Go project: require cmd/ so we don't scatter
    # files into arbitrary folders.
    cmd_dir = project_path / "cmd"
    if not cmd_dir.exists():
        print(f"❌ cmd/ directory not found in {project_path}")
        print("   Make sure you're running this from a Go CLI project root")
        sys.exit(1)

    # Refuse to clobber an existing command implementation.
    command_file = cmd_dir / f"{command_name}.go"
    if command_file.exists():
        print(f"❌ Command already exists: {command_file}")
        sys.exit(1)

    # The template lives alongside this script under ../assets/templates.
    script_dir = Path(__file__).parent
    template_path = script_dir.parent / "assets" / "templates" / "command.go.template"
    if not template_path.exists():
        # Show the full path searched so the user can diagnose the layout.
        print(f"❌ Template not found: {template_path}")
        sys.exit(1)

    with open(template_path, 'r') as f:
        template_content = f.read()

    # NOTE: str.capitalize() lowercases everything after the first letter
    # ("myCmd" -> "Mycmd"); command names are expected to be lowercase words.
    command_name_capitalized = command_name.capitalize()
    replacements = {
        "COMMAND_NAME": command_name,
        "COMMAND_NAME_CAPITALIZED": command_name_capitalized,
    }

    # Templates use {{KEY}} placeholders; the f-string below builds the
    # literal "{{KEY}}" text around each replacement key.
    content = template_content
    for key, value in replacements.items():
        content = content.replace(f"{{{{{key}}}}}", value)

    # Write the new command file
    with open(command_file, 'w') as f:
        f.write(content)

    print(f"✅ Created command: cmd/{command_name}.go")
    print("\nNext steps:")
    print(f"1. Edit cmd/{command_name}.go to implement your command logic")
    print("2. Update the Short and Long descriptions")
    print("3. Add any flags or configuration specific to this command")
    print("4. Run: make format && make lint")
def main():
    """Parse command-line arguments and delegate to add_command()."""
    arg_parser = argparse.ArgumentParser(
        description="Add a new command to a Go CLI project"
    )
    arg_parser.add_argument(
        "command_name",
        help="Name of the command (e.g., 'fetch', 'export')",
    )
    arg_parser.add_argument(
        "--path",
        default=".",
        help="Project directory (default: current directory)",
    )
    parsed = arg_parser.parse_args()
    add_command(parsed.command_name, parsed.path)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,209 @@
#!/usr/bin/env python3
"""
Scaffold a new Go CLI project with the standard structure.
Usage:
python scaffold_project.py <project-name> [--path <output-dir>]
"""
import argparse
import os
import sys
from pathlib import Path
import shutil
from datetime import datetime
def create_directory_structure(project_path, include_database=True, include_templates=False):
    """Create the standard directory structure for a Go CLI project.

    Always creates cmd/, internal/config/, .github/workflows/ and
    docs/dev-sessions/; optionally adds internal/database/ and
    internal/templates/ when the corresponding flags are set.

    Args:
        project_path: pathlib.Path of the project root.
        include_database: also create internal/database/ when True.
        include_templates: also create internal/templates/ when True.
    """
    wanted = ["cmd", "internal/config", ".github/workflows", "docs/dev-sessions"]
    wanted += ["internal/database"] if include_database else []
    wanted += ["internal/templates"] if include_templates else []

    for rel_dir in wanted:
        (project_path / rel_dir).mkdir(parents=True, exist_ok=True)
        print(f"✅ Created directory: {rel_dir}/")
def copy_template(template_name, dest_path, replacements=None, modify_content=None):
    """Copy a template file to dest_path, applying variable substitutions.

    Args:
        template_name: File name under assets/templates/ next to this script.
        dest_path: pathlib.Path destination for the rendered file.
        replacements: optional dict mapping KEY -> value; each "{{KEY}}"
            placeholder in the template is replaced with the value.
        modify_content: optional callable(str) -> str applied after the
            replacements, for conditional content rewriting.

    Returns:
        True if the file was written, False if the template was not found.
    """
    script_dir = Path(__file__).parent
    template_path = script_dir.parent / "assets" / "templates" / template_name
    if not template_path.exists():
        print(f"❌ Template not found: {template_name}")
        return False

    # Read template content
    with open(template_path, 'r') as f:
        content = f.read()

    # Perform replacements if provided ({{KEY}} placeholders)
    if replacements:
        for key, value in replacements.items():
            content = content.replace(f"{{{{{key}}}}}", value)

    # Allow custom content modification
    if modify_content:
        content = modify_content(content)

    # Write to destination
    dest_path.parent.mkdir(parents=True, exist_ok=True)
    with open(dest_path, 'w') as f:
        f.write(content)

    # Show a path relative to the project directory when possible. The
    # previous parents[len(parents) - 2] arithmetic raised IndexError for
    # single-component paths (negative indexing of .parents also requires
    # Python >= 3.10), so fall back to printing the path as given.
    try:
        shown = dest_path.relative_to(dest_path.parents[len(dest_path.parents) - 2])
    except (IndexError, ValueError):
        shown = dest_path
    print(f"✅ Created: {shown}")
    return True
def scaffold_project(project_name, output_dir=".", include_database=True, include_templates=False):
    """Scaffold a complete Go CLI project.

    Creates the directory layout, then renders the bundled templates into
    it. Database (SQLite) and output-template support are optional; when
    disabled, related content is stripped from rendered files by the
    nested filter functions below.

    Args:
        project_name: Name of the new project (used as the directory name).
        output_dir: Parent directory in which to create the project.
        include_database: Include SQLite database scaffolding when True.
        include_templates: Include output-template scaffolding when True.

    Exits with status 1 if the target directory already exists.
    """
    # Create project directory; refuse to overwrite an existing one.
    project_path = Path(output_dir) / project_name
    if project_path.exists():
        print(f"❌ Directory already exists: {project_path}")
        sys.exit(1)
    project_path.mkdir(parents=True, exist_ok=True)
    print(f"🚀 Scaffolding Go CLI project: {project_name}")
    print(f"   Location: {project_path.absolute()}")
    print(f"   Database support: {'Yes' if include_database else 'No'}")
    print(f"   Template support: {'Yes' if include_templates else 'No'}\n")
    # Prepare replacements applied to {{KEY}} placeholders in every template.
    replacements = {
        "PROJECT_NAME": project_name,
        "MODULE_NAME": f"github.com/yourusername/{project_name}",  # User should update this
        "YEAR": str(datetime.now().year),
    }
    # Create directory structure
    create_directory_structure(project_path, include_database, include_templates)
    # Helper functions for conditional content modification; each takes the
    # rendered template text and returns a filtered version.
    def remove_sqlite_from_gomod(content):
        """Remove SQLite dependency from go.mod if database not needed."""
        lines = content.split('\n')
        filtered = [line for line in lines if 'go-sqlite3' not in line]
        return '\n'.join(filtered)
    def remove_database_from_root(content):
        """Remove database-related code from root.go."""
        lines = content.split('\n')
        # Remove database flag and its binding
        filtered = []
        skip_next = False  # NOTE(review): assigned but never used — leftover?
        for line in lines:
            # NOTE(review): operator precedence makes this read as
            # ('Database flag' in line) or ('database' in line and
            # 'rootCmd.PersistentFlags()' in line) — confirm that grouping
            # is intended.
            if 'Database flag' in line or 'database' in line and 'rootCmd.PersistentFlags()' in line:
                continue
            if '"database"' in line and 'BindPFlag' in line:
                continue
            if 'viper.SetDefault("database"' in line:
                continue
            if 'Database: viper.GetString("database")' in line:
                continue
            filtered.append(line)
        return '\n'.join(filtered)
    def remove_database_from_config(content):
        """Remove Database field from config struct."""
        lines = content.split('\n')
        filtered = [line for line in lines if 'Database string' not in line]
        return '\n'.join(filtered)
    def remove_cgo_from_makefile(content):
        """Remove CGO_ENABLED from Makefile."""
        content = content.replace('CGO_ENABLED=1 ', '')
        # Remove SQLite-related comments
        lines = content.split('\n')
        filtered = []
        in_sqlite_comment = False
        for line in lines:
            # NOTE(review): precedence — 'SQLite requires CGO' OR
            # ('static linking' AND 'sqlite'); confirm intended grouping.
            if 'SQLite requires CGO' in line or 'static linking' in line.lower() and 'sqlite' in line.lower():
                in_sqlite_comment = True
                continue
            # A blank or non-comment line ends the skipped comment run.
            if in_sqlite_comment and (line.strip() == '' or not line.startswith('#')):
                in_sqlite_comment = False
            if not in_sqlite_comment:
                filtered.append(line)
        return '\n'.join(filtered)
    # Copy core template files
    copy_template("main.go", project_path / "main.go", replacements)
    # go.mod - conditionally include SQLite
    copy_template("go.mod.template", project_path / "go.mod", replacements,
                  modify_content=None if include_database else remove_sqlite_from_gomod)
    # Makefile - conditionally include CGO
    copy_template("Makefile.template", project_path / "Makefile", replacements,
                  modify_content=None if include_database else remove_cgo_from_makefile)
    copy_template("gitignore.template", project_path / ".gitignore", replacements)
    copy_template("config.yaml.example", project_path / f"{project_name}.yaml.example", replacements)
    # root.go - conditionally include database flags
    copy_template("root.go.template", project_path / "cmd" / "root.go", replacements,
                  modify_content=None if include_database else remove_database_from_root)
    copy_template("version.go.template", project_path / "cmd" / "version.go", replacements)
    copy_template("constants.go.template", project_path / "cmd" / "constants.go", replacements)
    # config.go - conditionally include Database field
    copy_template("config.go.template", project_path / "internal" / "config" / "config.go", replacements,
                  modify_content=None if include_database else remove_database_from_config)
    # GitHub Actions workflows (CI, tagged release, rolling release).
    copy_template("ci.yml.template", project_path / ".github" / "workflows" / "ci.yml", replacements)
    copy_template("release.yml.template", project_path / ".github" / "workflows" / "release.yml", replacements)
    copy_template("rolling-release.yml.template", project_path / ".github" / "workflows" / "rolling-release.yml", replacements)
    # Add database templates if requested
    if include_database:
        copy_template("database.go.template", project_path / "internal" / "database" / "database.go", replacements)
        copy_template("migrations.go.template", project_path / "internal" / "database" / "migrations.go", replacements)
        copy_template("schema.sql.template", project_path / "internal" / "database" / "schema.sql", replacements)
    # Add template support files if requested
    if include_templates:
        copy_template("init.go.template", project_path / "cmd" / "init.go", replacements)
        copy_template("templates.go.template", project_path / "internal" / "templates" / "templates.go", replacements)
        copy_template("default.md.template", project_path / "internal" / "templates" / "default.md", replacements)
    print(f"\n✅ Project '{project_name}' scaffolded successfully!\n")
    print("Next steps:")
    print(f"1. cd {project_name}")
    print("2. Update go.mod with your actual module name")
    print("3. Update GitHub Actions workflows with your Docker Hub username (if needed)")
    print("4. Run: make setup")
    print("5. Run: go mod tidy")
    print("6. Start adding commands in cmd/")
def main():
    """Parse command-line options and run the scaffolder."""
    cli = argparse.ArgumentParser(description="Scaffold a new Go CLI project")
    cli.add_argument("project_name", help="Name of the project")
    cli.add_argument(
        "--path",
        default=".",
        help="Output directory (default: current directory)",
    )
    cli.add_argument(
        "--no-database",
        action="store_true",
        help="Exclude database support (SQLite)",
    )
    cli.add_argument(
        "--templates",
        action="store_true",
        help="Include template support (for generating formatted output)",
    )
    options = cli.parse_args()
    scaffold_project(
        options.project_name,
        options.path,
        include_database=not options.no_database,
        include_templates=options.templates,
    )
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,9 @@
# Configuration containing API tokens
config/config.json
# Fetched data
data/
# Temporary files
*.tmp
*.bak

View File

@@ -0,0 +1,186 @@
# Weeknotes Blog Post Composer
A Claude Code skill for composing conversational weeknotes blog posts from multiple data sources.
## Overview
This skill automatically fetches content from Mastodon and Linkding, then composes it into a well-formatted Jekyll-style blog post with proper voice, tone, and narrative structure. No more copy-paste dumps—get readable, conversational weeknotes that sound like you.
## Features
- **Multi-source data fetching**: Mastodon posts and Linkding bookmarks
- **Conversational composition**: Claude reads your content and composes readable prose
- **Style matching**: Optionally reference your past weeknotes to maintain consistent voice
- **Smart tagging**: Automatically generates 3-7 contextually appropriate tags
- **Jekyll-ready output**: YAML frontmatter with proper filename conventions
- **Cross-platform**: Supports macOS (ARM64/Intel) and Linux (AMD64)
## Quick Start
### Installation
Install this skill as a Claude Code marketplace:
```bash
# Add to your Claude config
# ~/.claude/config/settings.json
{
"plugins": [
"/path/to/lmorchard-agent-skills-private"
]
}
```
Restart Claude Code to load the skill.
### First-Time Setup
The first time you use the skill, Claude will guide you through configuration:
```
User: Draft weeknotes for this week
Claude: I need to configure the skill first. I'll need:
- Your Mastodon server URL and access token
- Your Linkding instance URL and API token
- (Optional) URL to your past weeknotes for style reference
```
**Getting API credentials:**
- **Mastodon**: Settings → Development → New Application (read permissions)
- **Linkding**: Settings → Integrations → Create Token
### Basic Usage
```
# Default: last 7 days (rolling 7-day window)
Draft weeknotes for this week
# Specific date range
Create weeknotes from November 4-10
```
Claude will:
1. Fetch your Mastodon posts and Linkding bookmarks
2. Analyze the content for themes and topics
3. Compose conversational prose that sounds like you
4. Generate contextually appropriate tags
5. Save to your blog directory (if detected) or offer to save elsewhere
## Configuration
After initial setup, your config lives in `config/config.json`:
```json
{
"mastodon": {
"server": "https://your-instance.social",
"token": "your-access-token"
},
"linkding": {
"url": "https://your-linkding.com",
"token": "your-api-token"
},
"weeknotes_archive": "https://yourblog.com/tag/weeknotes/"
}
```
To reconfigure:
```bash
./scripts/setup.sh
```
## Project Structure
```
weeknotes-blog-post-composer/
├── SKILL.md # Detailed documentation for Claude
├── README.md # This file
├── bin/ # Platform-specific Go CLI binaries
│ ├── darwin-arm64/
│ ├── darwin-amd64/
│ └── linux-amd64/
├── scripts/
│ ├── setup.sh # First-time configuration
│ ├── fetch-sources.sh # Fetch data from sources
│ ├── prepare-sources.py # Verify fetched data
│ └── download-binaries.sh # Update CLI binaries
├── config/
│ └── config.json # API credentials (gitignored)
└── data/ # Fetched markdown files (gitignored)
```
## Data Sources
Currently supported:
- **Mastodon**: Posts from specified date range
- **Linkding**: Bookmarks from specified date range
The architecture supports adding additional data sources in the future.
## Output Format
Generated blog posts include:
- **Jekyll YAML frontmatter** with title, date, tags, and layout
- **Conversational prose** composed from your content
- **Short contextual links** (3-5 words) for readability
- **Inline images** from Mastodon posts
- **3-7 tags** including "weeknotes" plus contextual tags
- **Proper structure** with TL;DR, main sections, Miscellanea, and conclusion
## Filename Convention
When run from your blog directory, posts are saved to:
```
content/posts/{YYYY}/{YYYY-MM-DD-wWW}.md
```
Where:
- `{YYYY}` = 4-digit year of the publication date (today's date)
- `{YYYY-MM-DD}` = Publication date (today's date, as computed by `scripts/calculate-week.py`)
- `{wWW}` = ISO week number for that date (e.g., w16, w45)
Example: `content/posts/2025/2025-11-07-w45.md`
## Manual Commands
You can run individual components if needed:
```bash
# Update binaries to latest releases
./scripts/download-binaries.sh
# Fetch data for specific date range
./scripts/fetch-sources.sh --start 2025-11-01 --end 2025-11-07
# Verify fetched data
./scripts/prepare-sources.py
```
## Security
- API credentials stored in `config/config.json` with 600 permissions
- Config file is gitignored
- Temporary config files cleaned up after use
- Fetched data is gitignored
## Documentation
For detailed documentation on how the skill works and how Claude uses it, see [SKILL.md](SKILL.md).
## Requirements
- **Claude Code**: Latest version
- **Bash**: For shell scripts
- **Python 3**: For Python scripts
- **curl**: For API testing (during setup)
- **jq** (optional): For better config parsing
Binaries for Mastodon and Linkding fetching are included for all supported platforms.
## License
This is a personal skill for use with Claude Code. Binaries for `mastodon-to-markdown` and `linkding-to-markdown` are subject to their respective licenses.

View File

@@ -0,0 +1,603 @@
---
name: weeknotes-blog-post-composer
description: Compose weeknotes blog posts in Jekyll-style Markdown from multiple data sources including Mastodon and Linkding. Use this skill when the user requests to create, draft, or generate weeknotes content for a blog post.
---
# Weeknotes Blog Post Composer
## Overview
This skill enables composing weeknotes blog posts by automatically fetching content from multiple sources (Mastodon posts and Linkding bookmarks) and combining them into a well-formatted Jekyll-style Markdown document with YAML frontmatter. The skill handles data collection, formatting, and composition into a ready-to-publish blog post. Optionally, the skill can reference past weeknotes to match the user's personal writing style and voice.
## Quick Start
When a user first requests to create weeknotes, check if the skill is configured:
```bash
cd /path/to/weeknotes-blog-post-composer
# Check if config exists
if [ ! -f "./config/config.json" ]; then
echo "First-time setup required."
./scripts/setup.sh
fi
```
If configuration doesn't exist:
1. Inform the user that first-time setup is needed
2. Ask for their Mastodon server URL and access token
3. Ask for their Linkding instance URL and API token
4. Optionally ask for their weeknotes archive URL for style reference
5. Run `scripts/setup.sh` with their inputs
### Getting API Credentials
**Mastodon Access Token:**
1. Log into the Mastodon instance
2. Go to Settings → Development → New Application
3. Give it a name (e.g., "Weeknotes Composer")
4. Grant "read" permissions
5. Copy the access token
**Linkding API Token:**
1. Log into the Linkding instance
2. Go to Settings → Integrations
3. Click "Create Token"
4. Copy the generated token
## Composing Weeknotes
The primary workflow for composing weeknotes follows these steps:
### Step 1: Determine Date Range
By default, use the last 7 days (from 7 days ago to today). If the user specifies a different timeframe, parse their request and extract start/end dates.
Examples of user requests:
- "Draft weeknotes for this week" → 7 days ago to today
- "Create weeknotes for last week" → 14 days ago to 7 days ago
- "Generate weeknotes from November 4-10" → 2025-11-04 to 2025-11-10
**Default date calculation:**
```python
from datetime import datetime, timedelta
today = datetime.now()
end_date = today.strftime("%Y-%m-%d")
start_date = (today - timedelta(days=7)).strftime("%Y-%m-%d")
```
So if today is Thursday November 14, 2025:
- Start date: Thursday November 7, 2025
- End date: Thursday November 14, 2025
### Step 2: Fetch Source Data
Run the fetch script to collect data from all configured sources:
```bash
cd /path/to/weeknotes-blog-post-composer
# For current week (automatic date calculation)
./scripts/fetch-sources.sh
# For specific date range
./scripts/fetch-sources.sh --start YYYY-MM-DD --end YYYY-MM-DD
# For custom output directory
./scripts/fetch-sources.sh --start YYYY-MM-DD --end YYYY-MM-DD --output-dir ./data/custom
```
This fetches:
- Mastodon posts from the specified date range
- Linkding bookmarks from the specified date range
Output files are saved to `data/latest/` (or specified directory):
- `mastodon.md` - Formatted Mastodon posts
- `linkding.md` - Formatted bookmarks
### Step 3: Read and Analyze Source Data
Verify the fetched data is ready and understand what content is available:
```bash
cd /path/to/weeknotes-blog-post-composer
./scripts/prepare-sources.py
```
This shows which source files are available and their sizes.
Then read the fetched markdown files to understand the content:
```bash
# Read Mastodon posts
cat data/latest/mastodon.md
# Read Linkding bookmarks
cat data/latest/linkding.md
```
### Step 3.5: Review Past Weeknotes for Style Reference (Optional)
**Check for configured style reference:**
```bash
# Check if weeknotes_archive URL is configured
cd /path/to/weeknotes-blog-post-composer
cat config/config.json
```
If the config contains a `weeknotes_archive` URL, fetch and review 1-2 of the user's past weeknotes to understand their writing style and voice. Use the WebFetch tool to analyze the archive page and individual posts.
If no `weeknotes_archive` is configured, skip this step and compose in a conversational blog post style.
**Key style elements to look for in past weeknotes:**
1. **Voice & Tone:**
- Conversational and self-deprecating
- Frequent parenthetical asides and tangents
- Playful language (e.g., "Ope", casual interjections)
- Self-aware meta-commentary about the writing process itself
2. **Structure:**
- Starts with an opening paragraph containing inline "TL;DR: ..." summary
- Followed by `<!--more-->` on its own line (marks intro for Jekyll excerpt)
- 2-3 deeper dives into specific projects or topics (main body)
- **"Miscellanea" section near the end** (just before conclusion) for brief observations and items that didn't fit elsewhere
- **CRITICAL:** Use bullet points for each item in Miscellanea
- **CRITICAL:** Include ALL bookmarks/links here as bullet points, not in a separate section
- **CRITICAL:** Wrap the Miscellanea bullet points in `<div class="weeknote-miscellanea">` tags
- Miscellanea is a catch-all grab bag for everything else: short observations, bookmarks, reading, random thoughts
- Concluding reflection on the week
3. **Content Balance:**
- Equal weighting of technical depth and personal reflection
- Mixed technical projects, personal observations, and humor
- Philosophy embedded in technical writing
- Comfortable with digression and associative thinking
4. **Transitions:**
- Uses bullet points and whitespace rather than formal prose bridges
- Ideas progress through thematic gravity or personal relevance
- Stream-of-consciousness feel ("notes accumulated throughout the week")
5. **Distinctive Elements:**
- Metaphorical thinking (uses analogies to explain technical challenges)
- Acknowledges when feeling scattered or self-doubting
- References to ongoing projects and past posts
- Comfortable admitting uncertainty or work-in-progress status
When composing, aim to match this voice rather than writing in a generic blog style.
### Step 4: Compose Conversational Weeknotes
**Important:** Do not use template substitution. Instead, read the source markdown and compose it into readable prose.
**Style guidance:** Match the user's voice from past weeknotes (see Step 3.5) - conversational, self-deprecating, with parenthetical asides and comfortable with tangents. Start with an opening paragraph containing an inline "TL;DR: ..." summary (not a header), followed by `<!--more-->` on its own line. Use a "Miscellanea" section near the end (just before the conclusion) as a grab-bag for brief observations and items that didn't fit under other thematic sections. **CRITICAL:** Format ALL Miscellanea items as bullet points, including bookmarks and links - do NOT create a separate "Bookmarks and Reading" section.
Analyze the fetched content and compose a conversational weeknotes post that:
1. **Summarizes Mastodon activity** - Don't just list every post. Instead:
- Identify themes and topics from the week
- Highlight interesting conversations or thoughts
- Group related posts together
- Write in a natural, conversational tone
- Include specific details that are interesting or noteworthy
- **Link to actual Mastodon posts** using the URLs from the source (e.g., `[posted about X](https://masto.hackers.town/@user/12345)`)
- **CRITICAL - AVOID PLAGIARISM:** Only use the user's own words from "My Posts" sections directly in prose. Content from "Posts I Boosted" or "Posts I Favorited" should ONLY be:
- Referenced/cited with attribution (e.g., "Someone on Mastodon pointed out that...")
- Summarized in your own words, not quoted verbatim as if the user wrote them
- Alternatively, include blocks of text using blockquotes where it seems interesting
- Linked to without incorporating their text into the narrative
- This is extremely important to avoid unintentional plagiarism
- **IMPORTANT: Embed images inline** when they add value (e.g., `![Alt text](image-url)`)
- **Look for posts with Media entries** in the mastodon.md file - these contain images that should be included
- Images are especially important for: cats, interesting screenshots, funny visuals, project photos, etc.
2. **Integrates bookmarks meaningfully** - Don't just list links. Instead:
- **CRITICAL: ALL bookmarks MUST go in the Miscellanea section as bullet points**
- Do NOT create a separate "Bookmarks and Reading" section
- Group related bookmarks together within Miscellanea bullets when possible
- Explain why things were interesting or relevant in the bullet text
- Connect bookmarks to larger thoughts or projects
- **Include actual bookmark URLs** with descriptive link text (e.g., `[Article title](https://example.com)`)
- Format as bullet points with links in the Miscellanea section
3. **Creates a cohesive narrative** - The post should read like a blog post, not a data dump:
- Write in first person
- Use conversational language
- Connect different activities together
- Add context and reflection
- Include section headings that make sense for the content
4. **Uses proper formatting**:
- Jekyll-style YAML frontmatter with title, date, tags ("weeknotes" should always be used, along with 3-7 additional tags relevant to the content), and layout
- **Opening paragraph** with inline "TL;DR: ..." summary (NOT a header)
- **`<!--more-->`** comment on its own line immediately after the opening paragraph (marks excerpt boundary)
- **Table of contents nav** on its own line after `<!--more-->` if there are multiple sections (2+ headings): `<nav role="navigation" class="table-of-contents"></nav>`
- Markdown headings (##, ###) for structure in the main body
- Links to interesting posts or bookmarks
- Inline images from Mastodon posts where relevant
- Code blocks or quotes where appropriate
**Example opening structure:**
```markdown
TL;DR: Our 15-year-old solar inverter died this week, which kicked off a lot of thinking about technology longevity and IoT device lifecycles. Also spent time tinkering with Claude Code skills and bookmarking way too many articles about AI coding tools.
<!--more-->
<nav role="navigation" class="table-of-contents"></nav>
## Technology Longevity
...
```
**Critical: Always include the actual URLs!**
When referencing content:
- **Mastodon posts**: Link to the post URL with **short link text (3-5 words)** for aesthetics (e.g., `This week I [posted](https://masto.hackers.town/@user/12345) about solar inverters...`)
- **Bookmarks**: Include the bookmark URL with descriptive text (e.g., `I found [this article about AI coding](https://example.com/article) particularly interesting...`)
- **Images**: Embed Mastodon images inline using `![Description](image-url)` when they're interesting or funny
- **For multiple consecutive images** (3+), wrap them in `<image-gallery>` tags with newlines before/after the opening and closing tags:
```markdown
<image-gallery>
![First image](url1)
![Second image](url2)
![Third image](url3)
</image-gallery>
```
**Example composition approach:**
Instead of listing every post, write something like:
> This week I [spent a lot](https://masto.hackers.town/@user/12345) of time thinking about technology longevity. Our 15-year-old solar inverter died, which [kicked off](https://masto.hackers.town/@user/12346) a whole thread about IoT devices and how frustrating it is when tech doesn't have a 15-20 year plan.
**CRITICAL - Only use the user's own posts this way!** If you want to reference a boosted/favorited post or bookmark:
> There's been this interesting [article making the rounds](https://example.com/article) about BBS-era communication patterns - explaining how those carefully drafted essay-like responses created a distinctive writing style. But nope, it's just how we learned to write when bandwidth was scarce.
Then for bookmarks in Miscellanea, reference them naturally wrapped in the `weeknote-miscellanea` div:
```markdown
## Miscellanea
<div class="weeknote-miscellanea">
* [*Thinking About Thinking With LLMs*](https://example.com/article) - explores how new tools make it easier to code with shallower understanding
* [Another piece](https://example.com/article2) argues that the best programmers still dig deep to understand what's happening underneath
</div>
```
**IMPORTANT: Always scan the mastodon.md for images!**
The mastodon.md file includes `Media:` entries with image URLs and descriptions. Look for these and include them in your weeknotes. Example from the source:
```
Media: [image](https://cdn.masto.host/.../image.jpg) - Description of the image
```
When you find these, embed them in the weeknotes like this:
**Single image:**
> Miss Biscuits [discovered a new perch](https://masto.hackers.town/@user/12347):
>
> ![Description of the image](https://cdn.masto.host/.../image.jpg)
**Multiple images (3+) - use image gallery:**
> I [shared some photos](https://masto.hackers.town/@user/12348) of my 3D printing projects:
>
> <image-gallery>
>
> ![3D printed dragon](https://cdn.example.com/image1.jpg)
>
> ![Flexible octopus](https://cdn.example.com/image2.jpg)
>
> ![Cat playing with prints](https://cdn.example.com/image3.jpg)
>
> </image-gallery>
### Step 5: Review and Revise the Draft
Before finalizing, review the composed weeknotes and make light revisions:
1. **Structure check:**
- Ensure Miscellanea section is at the end (just before the conclusion)
- Move any straggling bookmark bullets that didn't fit into main sections into Miscellanea
- Verify all sections flow logically
2. **Prose polish:**
- Tighten up verbose sentences
- Remove unnecessary repetition
- Ensure transitions between sections make sense
- Check that the voice remains conversational and natural
3. **Content verification:**
- All Mastodon post links are present (3-5 word link text)
- All bookmark URLs are included
- Images are properly embedded (single images inline, 3+ images in `<image-gallery>`)
- Opening has inline "TL;DR: ..." followed by `<!--more-->`
- Table of contents nav is present if there are multiple sections
4. **Final touches:**
- Verify 3-7 tags (including "weeknotes")
- Check that conclusion ties things together
- Ensure Miscellanea items are formatted as bullet points
### Step 6: Write the Final Blog Post
Create the Jekyll blog post file with:
1. **YAML frontmatter:**
```yaml
---
title: "[Date Range]"
date: YYYY-MM-DD
tags:
- weeknotes
- [contextual-tag-1]
- [contextual-tag-2]
- [contextual-tag-3]
layout: post
---
```
**Important - Title Format:** Use the date range format without the word "Weeknotes" (e.g., "2025 Week 48" or "November 22-26, 2025"). The "weeknotes" tag already categorizes the post, so the title should be concise.
**Important - Tags:** Always include "weeknotes" as the first tag, then add 2-6 additional contextually appropriate tags based on the content (3-7 tags total). Tags should reflect major themes, technologies, topics, or projects discussed in the post. Examples:
- Technical topics: `ai`, `javascript`, `golang`, `docker`, `apis`
- Project types: `side-projects`, `open-source`, `blogging`
- Activities: `learning`, `refactoring`, `debugging`
- Themes: `productivity`, `tools`, `workflows`
Analyze the composed content and choose tags that genuinely reflect what the post is about.
2. **Composed content** - The conversational weeknotes you composed in Step 4 and revised in Step 5
**CRITICAL:** Do NOT include "Generated with Claude Code" or similar AI attribution footer in weeknotes posts. These are personal blog posts that should maintain the author's authentic voice throughout.
3. **Save** to the appropriate location and filename:
**Detecting the blog directory:**
Check if the current working directory contains `content/posts/` - if so, you're in the blog directory.
```bash
if [ -d "content/posts" ]; then
echo "In blog directory - using blog naming convention"
fi
```
**If running from the user's blog directory**, use this naming convention:
```
content/posts/{YYYY}/{YYYY-MM-DD-wWW}.md
```
Where:
- `{YYYY}` = 4-digit year (of today's date)
- `{YYYY-MM-DD}` = Today's date (the publication date)
- `{wWW}` = ISO week number for today (e.g., w16, w17, w42)
Examples:
- `content/posts/2025/2025-04-18-w16.md` (Week 16, published April 18, 2025)
- `content/posts/2025/2025-11-13-w46.md` (Week 46, published November 13, 2025)
**To calculate the week number and filename**, use the helper script:
```bash
cd /path/to/weeknotes-blog-post-composer
./scripts/calculate-week.py
# Or for a specific date:
./scripts/calculate-week.py --date 2025-11-13
# Or get JSON output:
./scripts/calculate-week.py --json
```
This script uses **today's date** (not the start date) and calculates the ISO week number, generating the correct filename format: `content/posts/{year}/{date}-w{week}.md`
**Important:** Ensure the year directory exists before saving:
```python
import os
from datetime import date
year_dir = f"content/posts/{date.today().year}"
os.makedirs(year_dir, exist_ok=True)
```
**If not in the blog directory**, save to a temporary location (e.g., `/tmp/weeknotes-YYYY-MM-DD.md`) and ask the user where they'd like to move it
### Step 7: Select Cover Image Thumbnail
Review the images already embedded in the post and select one to use as the cover thumbnail:
1. **Analyze embedded images:**
- Review all images included in the post (from Mastodon posts)
- Consider their alt text/descriptions
- Evaluate which image best represents the overall themes of the weeknotes
2. **Selection criteria:**
- **Thematic relevance**: Image should represent main topics/themes, not just incidental content
- **Visual interest**: Choose images that are visually distinct and engaging
- **Quality**: Avoid low-quality screenshots or purely text-based images
- **Context**: Consider the image's role in the narrative - is it central to a main section or just a side note?
3. **Priority order:**
- Images related to primary themes/topics in the post
- Project photos, interesting technical subjects
- Noteworthy screenshots or visual examples
- Cat photos (only if cats are a significant theme of the week)
- Last resort: use the first image in the post
4. **Add to frontmatter:**
- Update the YAML frontmatter to include the `thumbnail:` property
- Use the full URL of the selected image
```yaml
---
title: "Weeknotes: [Date Range]"
date: YYYY-MM-DD
thumbnail: "https://cdn.masto.host/.../selected-image.jpg"
tags:
- weeknotes
- [other-tags]
layout: post
---
```
5. **If no suitable images exist in the post:**
- Omit the `thumbnail:` property for now
- The blog software will use the first image as a fallback
- Note: Future enhancement will add public domain image search
### Step 8: User Feedback and Final Refinement
1. Present the composed weeknotes to the user
2. Ask if they want any adjustments:
- Different tone or style
- More/less detail in certain areas
- Additional context or reflection
- Restructuring of content
3. Make requested edits
4. Offer to add a final reflection section if desired
## Additional Operations
### Updating Binaries
To update the Go CLI binaries to the latest releases:
```bash
cd /path/to/weeknotes-blog-post-composer
./scripts/download-binaries.sh
```
This downloads the latest versions of:
- `mastodon-to-markdown`
- `linkding-to-markdown`
For all supported platforms (darwin-arm64, darwin-amd64, linux-amd64).
### Reconfiguring
To update API credentials, change data source settings, or add/update style reference URL:
```bash
cd /path/to/weeknotes-blog-post-composer
./scripts/setup.sh
```
The setup script will detect existing configuration and ask for confirmation before reconfiguring. This includes:
- Mastodon server URL and access token
- Linkding URL and API token
- Weeknotes archive URL for style reference (optional)
### Customizing the Output Style
The composition process is flexible and can be customized based on user preferences:
1. **Tone and Style:**
- More formal or casual
- Technical vs. personal
- Detailed vs. high-level summaries
2. **Structure:**
- Different section organization
- Thematic groupings vs. chronological
- Depth of technical detail
3. **Content Selection:**
- Which topics to emphasize
- What to skip or summarize briefly
- Which links/posts deserve more attention
Ask the user about their preferences for these aspects when composing weeknotes.
### Adding New Data Sources
To extend the skill with additional data sources:
1. Add the new Go CLI binary to `bin/{platform}-{arch}/`
2. Update `scripts/fetch-sources.sh` to fetch from the new source
3. Update the SKILL.md Step 3 to instruct Claude to read the new source files
4. Update Step 4 composition guidance to explain how to integrate the new content
## Platform Detection
All scripts automatically detect the current platform and use the appropriate binary:
- **macOS ARM64**: `bin/darwin-arm64/`
- **macOS Intel**: `bin/darwin-amd64/`
- **Linux AMD64**: `bin/linux-amd64/`
Platform detection is handled automatically via `uname` commands. No manual configuration needed.
## Resources
### scripts/
- `setup.sh` - First-time configuration for API credentials
- `fetch-sources.sh` - Fetch data from all configured sources
- `prepare-sources.py` - Verify fetched data and prepare for composition
- `calculate-week.py` - Calculate ISO week number and generate filename for weeknotes
- `download-binaries.sh` - Update Go CLI binaries to latest releases
### bin/
Pre-compiled Go CLI binaries organized by platform:
- `mastodon-to-markdown` - Fetch Mastodon posts as markdown
- `linkding-to-markdown` - Fetch Linkding bookmarks as markdown
Binaries are platform-specific and automatically selected at runtime.
### config/
- `config.json` - User configuration with API credentials and optional settings (created by setup.sh)
- Contains Mastodon server URL and access token
- Contains Linkding URL and API token
- Optionally contains weeknotes_archive URL for style reference
- This file contains sensitive tokens and is secured with 600 permissions
### data/
- `latest/` - Most recently fetched source data
- Other directories for historical or custom fetches
- Contains `mastodon.md` and `linkding.md` after fetching
## Troubleshooting
### Configuration Issues
If setup fails:
- Verify API credentials are correct
- Check that server URLs are accessible
- Ensure tokens have appropriate permissions
### Binary Not Found
If platform detection fails:
```bash
# Check current platform
uname -s # Should show: Darwin or Linux
uname -m # Should show: arm64, x86_64, etc.
# Verify binary exists
ls -la bin/darwin-arm64/ # Or appropriate platform directory
```
### Empty Content
If fetched data is empty:
- Verify the date range includes actual activity
- Check that API credentials have read permissions
- Run fetch scripts with `--verbose` flag for debugging
### Template Errors
If composition fails with template errors:
- Verify `assets/weeknotes-template.md` exists and is readable
- Check that all required placeholders are present
- Ensure no syntax errors in template YAML frontmatter

View File

@@ -0,0 +1,56 @@
#!/usr/bin/env python3
"""
Calculate the current ISO week number and generate the weeknotes filename.
Usage:
./scripts/calculate-week.py [--date YYYY-MM-DD]
If no date is provided, uses today's date.
"""
import argparse
import os
from datetime import datetime
def calculate_week_info(date=None):
    """Calculate week information for the given date (or today).

    Args:
        date: A datetime, a 'YYYY-MM-DD' string, or None for today.

    Returns:
        dict with keys:
            date     -- 'YYYY-MM-DD' string for the given date
            year     -- calendar year (used for the content/posts/{year}/ dir)
            iso_year -- ISO-8601 week-numbering year; may differ from the
                        calendar year near New Year (e.g. 2024-12-30 falls
                        in ISO week 1 of 2025)
            week     -- ISO-8601 week number (1-53)
            filename -- blog path 'content/posts/{year}/{date}-w{week}.md'
            title    -- human-readable title using the ISO year/week pair
    """
    if date is None:
        date = datetime.now()
    elif isinstance(date, str):
        date = datetime.strptime(date, '%Y-%m-%d')
    # isocalendar() yields (iso_year, iso_week, iso_weekday). Pair the week
    # number with the ISO year so titles stay correct across year boundaries
    # (previously the calendar year was used, producing e.g.
    # "Weeknotes: 2024 Week 1" for 2024-12-30).
    iso_year, week_number, _ = date.isocalendar()
    # The filename/directory intentionally keeps the calendar year of the
    # publication date, per the documented content/posts/{YYYY}/ convention.
    year = date.year
    date_str = date.strftime('%Y-%m-%d')
    return {
        'date': date_str,
        'year': year,
        'iso_year': iso_year,
        'week': week_number,
        'filename': f"content/posts/{year}/{date_str}-w{week_number:02d}.md",
        'title': f"Weeknotes: {iso_year} Week {week_number}"
    }
def main():
    """Parse CLI arguments and print the computed weeknotes info."""
    parser = argparse.ArgumentParser(description='Calculate weeknotes week number and filename')
    parser.add_argument('--date', type=str, help='Date in YYYY-MM-DD format (default: today)')
    parser.add_argument('--json', action='store_true', help='Output as JSON')
    args = parser.parse_args()

    info = calculate_week_info(args.date)

    if args.json:
        import json
        print(json.dumps(info, indent=2))
    else:
        # Human-readable output: one "Label: value" line per field.
        for label, key in (('Date', 'date'),
                           ('ISO Week', 'week'),
                           ('Title', 'title'),
                           ('Filename', 'filename')):
            print(f"{label}: {info[key]}")


if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,89 @@
#!/bin/bash
set -e

# Resolve the skill root relative to this script's own location so the
# downloader works regardless of the caller's working directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
SKILL_DIR="$(dirname "$SCRIPT_DIR")"
BIN_DIR="${SKILL_DIR}/bin"

echo "╔════════════════════════════════════════╗"
echo "║ Weeknotes Binary Downloader ║"
echo "╚════════════════════════════════════════╝"
echo ""

# Ensure a bin/ subdirectory exists for every supported platform.
for platform_dir in darwin-arm64 darwin-amd64 linux-amd64; do
    mkdir -p "${BIN_DIR}/${platform_dir}"
done
# Function to download and extract a GitHub release.
#
# Usage: download_tool <repo> <tool_name> <platform> <arch>
#   repo      - GitHub "owner/name" repository to download from
#   tool_name - binary/archive base name (e.g. mastodon-to-markdown)
#   platform  - target OS string (darwin, linux)
#   arch      - target architecture string (arm64, amd64)
#
# Downloads <tool_name>-<platform>-<arch>.tar.gz, installs the extracted
# binary into bin/<platform>-<arch>/, marks it executable, and returns 1
# on download/extract failure.
download_tool() {
    local repo=$1
    local tool_name=$2
    local platform=$3
    local arch=$4
    echo "📦 Downloading ${tool_name} for ${platform}-${arch}..."
    # Construct the asset name based on the naming convention
    local archive_name="${tool_name}-${platform}-${arch}.tar.gz"
    # NOTE(review): this URL shape requires a release literally tagged
    # "latest"; GitHub's canonical latest-release asset URL is
    # .../releases/latest/download/<asset>. Confirm these repos publish a
    # rolling "latest" tag, otherwise every download here will 404.
    local asset_url="https://github.com/${repo}/releases/download/latest/${archive_name}"
    local temp_archive="/tmp/${archive_name}"
    local target_dir="${BIN_DIR}/${platform}-${arch}"
    local target_binary="${target_dir}/${tool_name}"
    # Download the archive
    echo " Downloading from ${asset_url}..."
    if curl -L -f -o "${temp_archive}" "${asset_url}"; then
        echo " ✅ Downloaded archive"
        # Extract the binary from the archive
        echo " Extracting binary..."
        tar -xzf "${temp_archive}" -C "${target_dir}" "${tool_name}" 2>/dev/null || {
            # If extraction with specific file fails, extract all and find the binary
            # NOTE(review): `find /tmp -name ...` may also match stale files
            # from earlier runs; a dedicated mktemp -d directory would be safer.
            tar -xzf "${temp_archive}" -C /tmp/
            find /tmp -name "${tool_name}" -type f -exec mv {} "${target_binary}" \;
        }
        # Make binary executable
        chmod +x "${target_binary}"
        # Cleanup
        rm -f "${temp_archive}"
        # Verify the binary exists
        if [ -f "${target_binary}" ]; then
            echo " ✅ Installed to ${target_binary}"
        else
            echo " ❌ Failed to extract binary"
            return 1
        fi
    else
        echo " ❌ Failed to download from ${asset_url}"
        return 1
    fi
    echo ""
}
# Download mastodon-to-markdown for every supported platform.
# NOTE(review): with `set -e`, the first failed download_tool call aborts
# the whole script rather than continuing with the remaining platforms.
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Mastodon to Markdown"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
download_tool "lmorchard/mastodon-to-markdown" "mastodon-to-markdown" "darwin" "arm64"
download_tool "lmorchard/mastodon-to-markdown" "mastodon-to-markdown" "darwin" "amd64"
download_tool "lmorchard/mastodon-to-markdown" "mastodon-to-markdown" "linux" "amd64"
# Download linkding-to-markdown for every supported platform.
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Linkding to Markdown"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
download_tool "lmorchard/linkding-to-markdown" "linkding-to-markdown" "darwin" "arm64"
download_tool "lmorchard/linkding-to-markdown" "linkding-to-markdown" "darwin" "amd64"
download_tool "lmorchard/linkding-to-markdown" "linkding-to-markdown" "linux" "amd64"
echo "╔════════════════════════════════════════╗"
echo "║ Download Complete! ║"
echo "╚════════════════════════════════════════╝"
echo ""
echo "Binary locations:"
# Prefer `tree` for a nicer listing; fall back to `ls -R` when not installed.
tree "${BIN_DIR}" || ls -R "${BIN_DIR}"

View File

@@ -0,0 +1,178 @@
#!/bin/bash
set -e

# Resolve paths relative to this script so it can be invoked from anywhere.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
SKILL_DIR="$(dirname "$SCRIPT_DIR")"
CONFIG_FILE="${SKILL_DIR}/config/config.json"
DATA_DIR="${SKILL_DIR}/data"

# Default to last 7 days (from 7 days ago through today)
# Note: The APIs treat end date as exclusive, so we use tomorrow's date
# Sets the START_DATE / END_DATE globals, using BSD `date` flags (-v) on
# macOS and GNU `date` flags (-d) elsewhere.
get_week_dates() {
    if [[ "$OSTYPE" == "darwin"* ]]; then
        # macOS date command
        START_DATE=$(date -v-7d +%Y-%m-%d)
        END_DATE=$(date -v+1d +%Y-%m-%d)
    else
        # Linux date command
        START_DATE=$(date -d "7 days ago" +%Y-%m-%d)
        END_DATE=$(date -d "tomorrow" +%Y-%m-%d)
    fi
}
# Parse command line arguments.
# Supported flags: --start DATE, --end DATE, --output-dir DIR, -h/--help.
START_DATE=""
END_DATE=""
OUTPUT_DIR="${DATA_DIR}/latest"
while [[ $# -gt 0 ]]; do
    case $1 in
        --start)
            START_DATE="$2"
            shift 2
            ;;
        --end)
            END_DATE="$2"
            shift 2
            ;;
        --output-dir)
            OUTPUT_DIR="$2"
            shift 2
            ;;
        -h|--help)
            # Help text matches the actual defaults set by get_week_dates:
            # a trailing 7-day window, with an exclusive end date.
            echo "Usage: fetch-sources.sh [options]"
            echo ""
            echo "Options:"
            echo "  --start DATE      Start date (YYYY-MM-DD), defaults to 7 days ago"
            echo "  --end DATE        End date (YYYY-MM-DD, exclusive), defaults to tomorrow"
            echo "  --output-dir DIR  Output directory (default: data/latest)"
            echo "  -h, --help        Show this help message"
            echo ""
            echo "Examples:"
            echo "  fetch-sources.sh                                  # Fetch the last 7 days"
            echo "  fetch-sources.sh --start 2025-11-01 --end 2025-11-07"
            exit 0
            ;;
        *)
            echo "Unknown option: $1"
            echo "Use --help for usage information"
            exit 1
            ;;
    esac
done

# If either date was not provided, fall back to the last-7-days default.
if [ -z "$START_DATE" ] || [ -z "$END_DATE" ]; then
    get_week_dates
fi
echo "╔════════════════════════════════════════╗"
echo "║ Weeknotes Source Fetcher ║"
echo "╚════════════════════════════════════════╝"
echo ""
echo "Fetching data from ${START_DATE} to ${END_DATE}"
echo ""

# Check if configured; run the interactive setup on first use.
if [ ! -f "${CONFIG_FILE}" ]; then
    echo "❌ Not configured yet. Running setup..."
    echo ""
    "${SCRIPT_DIR}/setup.sh"
    echo ""
fi

# Detect platform and normalize arch names to the bin/ directory convention
# (x86_64 -> amd64, aarch64 -> arm64).
OS=$(uname -s | tr '[:upper:]' '[:lower:]')
ARCH=$(uname -m)
case $ARCH in
    x86_64) ARCH="amd64" ;;
    aarch64|arm64) ARCH="arm64" ;;
esac
BIN_DIR="${SKILL_DIR}/bin/${OS}-${ARCH}"

# Check if binaries exist
if [ ! -f "${BIN_DIR}/mastodon-to-markdown" ] || [ ! -f "${BIN_DIR}/linkding-to-markdown" ]; then
    echo "❌ Binaries not found for platform: ${OS}-${ARCH}"
    echo " Please run scripts/download-binaries.sh first"
    exit 1
fi

# Load config using jq (if not available, use basic parsing)
if command -v jq &> /dev/null; then
    MASTODON_SERVER=$(jq -r .mastodon.server "${CONFIG_FILE}")
    MASTODON_TOKEN=$(jq -r .mastodon.token "${CONFIG_FILE}")
    LINKDING_URL=$(jq -r .linkding.url "${CONFIG_FILE}")
    LINKDING_TOKEN=$(jq -r .linkding.token "${CONFIG_FILE}")
else
    echo "⚠️ Warning: jq not found. Using basic config parsing."
    echo " Install jq for better config handling: brew install jq"
    # Basic parsing fallback (not recommended for production)
    # NOTE(review): this grep fallback assumes "token" appears exactly twice
    # (Mastodon first, Linkding last) and that no value contains escaped
    # quotes; it will silently mis-parse otherwise. Verify against the
    # config layout written by setup.sh.
    MASTODON_SERVER=$(grep -o '"server"[[:space:]]*:[[:space:]]*"[^"]*"' "${CONFIG_FILE}" | cut -d'"' -f4 | head -1)
    MASTODON_TOKEN=$(grep -o '"token"[[:space:]]*:[[:space:]]*"[^"]*"' "${CONFIG_FILE}" | cut -d'"' -f4 | head -1)
    LINKDING_URL=$(grep -o '"url"[[:space:]]*:[[:space:]]*"[^"]*"' "${CONFIG_FILE}" | cut -d'"' -f4 | tail -1)
    LINKDING_TOKEN=$(grep -o '"token"[[:space:]]*:[[:space:]]*"[^"]*"' "${CONFIG_FILE}" | cut -d'"' -f4 | tail -1)
fi
# Create output directory
mkdir -p "${OUTPUT_DIR}"

# Create per-tool config files. These contain API secrets, so install an
# EXIT trap to remove them even if one of the fetches below fails — with
# `set -e` the script would otherwise abort before the cleanup at the end,
# leaving tokens on disk.
MASTODON_CONFIG="${OUTPUT_DIR}/mastodon-config.yaml"
LINKDING_CONFIG="${OUTPUT_DIR}/linkding-config.yaml"
trap 'rm -f "${MASTODON_CONFIG}" "${LINKDING_CONFIG}"' EXIT
cat > "${MASTODON_CONFIG}" <<EOF
mastodon:
  server: "${MASTODON_SERVER}"
  access_token: "${MASTODON_TOKEN}"
EOF
cat > "${LINKDING_CONFIG}" <<EOF
linkding:
  url: "${LINKDING_URL}"
  token: "${LINKDING_TOKEN}"
EOF

# Fetch from Mastodon
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "📱 Fetching Mastodon posts..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
"${BIN_DIR}/mastodon-to-markdown" fetch \
    --config "${MASTODON_CONFIG}" \
    --start "${START_DATE}" \
    --end "${END_DATE}" \
    --output "${OUTPUT_DIR}/mastodon.md" \
    --verbose
echo "✅ Mastodon posts saved to: ${OUTPUT_DIR}/mastodon.md"
echo ""

# Fetch from Linkding
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "🔖 Fetching Linkding bookmarks..."
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
"${BIN_DIR}/linkding-to-markdown" fetch \
    --config "${LINKDING_CONFIG}" \
    --since "${START_DATE}" \
    --until "${END_DATE}" \
    --output "${OUTPUT_DIR}/linkding.md" \
    --verbose
echo "✅ Linkding bookmarks saved to: ${OUTPUT_DIR}/linkding.md"
echo ""

# Cleanup config files (they contain secrets) — redundant with the EXIT
# trap above, but harmless and makes the intent explicit.
rm -f "${MASTODON_CONFIG}" "${LINKDING_CONFIG}"

echo "╔════════════════════════════════════════╗"
echo "║ Fetch Complete! ║"
echo "╚════════════════════════════════════════╝"
echo ""
echo "Output directory: ${OUTPUT_DIR}"
echo "Files:"
echo " - mastodon.md"
echo " - linkding.md"
echo ""

View File

@@ -0,0 +1,102 @@
#!/usr/bin/env python3
"""
Prepare fetched source data for composition.
This script reads the fetched markdown files and displays them for Claude
to read and compose into a cohesive weeknotes blog post.
"""
import argparse
import sys
from datetime import datetime, timedelta
from pathlib import Path
def get_current_week_dates():
    """Return (start, end) date strings for the trailing week.

    start is seven days before now and end is today, both formatted
    as YYYY-MM-DD.
    """
    now = datetime.now()
    fmt = "%Y-%m-%d"
    return (now - timedelta(days=7)).strftime(fmt), now.strftime(fmt)
def main():
    """Verify fetched source data and print composition guidance.

    Parses CLI flags for the input directory and date range, checks which
    source files (mastodon.md / linkding.md) are present, and prints a
    summary plus next steps. Exits with status 1 when the input directory
    is missing or when no source files exist.
    """
    parser = argparse.ArgumentParser(
        description="Prepare fetched source data for weeknotes composition"
    )
    parser.add_argument(
        "--input-dir",
        type=Path,
        help="Input directory with fetched data (default: data/latest)",
    )
    parser.add_argument("--start", help="Start date (YYYY-MM-DD)")
    parser.add_argument("--end", help="End date (YYYY-MM-DD)")
    args = parser.parse_args()

    # Default input directory is data/latest relative to the skill root.
    if not args.input_dir:
        args.input_dir = Path(__file__).parent.parent / "data" / "latest"

    print("╔════════════════════════════════════════╗")
    print("║ Weeknotes Source Preparation ║")
    print("╚════════════════════════════════════════╝")
    print()

    # Check if input directory exists
    if not args.input_dir.exists():
        print(f"❌ Input directory not found: {args.input_dir}")
        print(" Please run fetch-sources.sh first")
        sys.exit(1)

    # Fall back to the trailing-week window when either date is missing.
    if not args.start or not args.end:
        args.start, args.end = get_current_week_dates()
    week_range = f"{args.start} to {args.end}"
    print(f"📅 Date range: {week_range}")
    print()

    # Check for source files
    mastodon_file = args.input_dir / "mastodon.md"
    linkding_file = args.input_dir / "linkding.md"
    has_mastodon = mastodon_file.exists()
    has_linkding = linkding_file.exists()
    if not has_mastodon and not has_linkding:
        print("❌ No source data found!")
        print(f" Expected files in: {args.input_dir}")
        sys.exit(1)

    print("📂 Available source data:")
    if has_mastodon:
        size = mastodon_file.stat().st_size
        print(f" ✅ Mastodon posts: {mastodon_file} ({size:,} bytes)")
    else:
        print(f" ⚠️ No Mastodon data: {mastodon_file}")
    if has_linkding:
        size = linkding_file.stat().st_size
        print(f" ✅ Linkding bookmarks: {linkding_file} ({size:,} bytes)")
    else:
        print(f" ⚠️ No Linkding data: {linkding_file}")
    print()

    print("╔════════════════════════════════════════╗")
    print("║ Ready for Composition ║")
    print("╚════════════════════════════════════════╝")
    print()
    print("Source files are ready to be read and composed into a weeknotes post.")
    print()
    print("Next steps:")
    # List only the files that actually exist and number the steps
    # dynamically. (Previously mastodon.md was always listed even when it
    # was missing, and the later step numbers were hard-coded.)
    step = 1
    if has_mastodon:
        print(f"{step}. Read: {mastodon_file}")
        step += 1
    if has_linkding:
        print(f"{step}. Read: {linkding_file}")
        step += 1
    print(f"{step}. Compose conversational weeknotes for {week_range}")
    print(f"{step + 1}. Write the composed post with Jekyll frontmatter")
    print()


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,226 @@
#!/bin/bash
set -e

# Resolve paths relative to this script so setup works from any directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
SKILL_DIR="$(dirname "$SCRIPT_DIR")"
CONFIG_FILE="${SKILL_DIR}/config/config.json"
DATA_DIR="${SKILL_DIR}/data"

echo "╔════════════════════════════════════════╗"
echo "║ Weeknotes Composer Setup ║"
echo "╚════════════════════════════════════════╝"
echo ""

# Check if config already exists; confirm before overwriting it.
if [ -f "${CONFIG_FILE}" ]; then
    echo "⚠️ Configuration already exists."
    read -p "Do you want to reconfigure? (y/N): " RECONFIGURE
    # Anything other than y/Y aborts and leaves the existing config intact.
    if [[ ! "$RECONFIGURE" =~ ^[Yy]$ ]]; then
        echo "Setup cancelled."
        exit 0
    fi
    echo ""
fi

echo "This setup will configure connections to your data sources."
echo ""
# ============================================================================
# Mastodon Configuration
# ============================================================================
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "📱 Mastodon Configuration"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
echo "Enter your Mastodon instance details."
echo "Example server: https://mastodon.social"
echo ""
read -p "Mastodon server URL: " MASTODON_SERVER
# Strip a single trailing slash so API paths can be appended cleanly.
MASTODON_SERVER="${MASTODON_SERVER%/}"
# Validate URL format
if [[ ! "$MASTODON_SERVER" =~ ^https?:// ]]; then
    echo "❌ Error: URL must start with http:// or https://"
    exit 1
fi
echo ""
echo "To get your Mastodon access token:"
echo "1. Log into your Mastodon instance"
echo "2. Go to Settings → Development → New Application"
echo "3. Give it a name (e.g., 'Weeknotes Composer')"
echo "4. Grant 'read' permissions"
echo "5. Copy the access token"
echo ""
# -s keeps the secret token out of the terminal echo.
read -sp "Mastodon access token: " MASTODON_TOKEN
echo ""
# Validate token is not empty
if [ -z "$MASTODON_TOKEN" ]; then
    echo "❌ Error: Access token cannot be empty"
    exit 1
fi
# Test the Mastodon connection via the credential-verification endpoint;
# anything other than HTTP 200 is treated as a bad server URL or token.
echo ""
echo "🔍 Testing Mastodon connection..."
HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" \
    "${MASTODON_SERVER}/api/v1/accounts/verify_credentials" \
    -H "Authorization: Bearer ${MASTODON_TOKEN}")
if [ "$HTTP_CODE" -eq 200 ]; then
    echo "✅ Mastodon connection successful!"
else
    echo "❌ Mastodon connection failed (HTTP ${HTTP_CODE})"
    echo " Please check your server URL and token."
    exit 1
fi
echo ""
# ============================================================================
# Linkding Configuration
# ============================================================================
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "🔖 Linkding Configuration"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
echo "Enter your Linkding instance details."
echo "Example: https://linkding.example.com"
echo ""
read -p "Linkding URL: " LINKDING_URL
# Strip a single trailing slash so API paths can be appended cleanly.
LINKDING_URL="${LINKDING_URL%/}"
# Validate URL format
if [[ ! "$LINKDING_URL" =~ ^https?:// ]]; then
    echo "❌ Error: URL must start with http:// or https://"
    exit 1
fi
echo ""
echo "To get your Linkding API token:"
echo "1. Log into your Linkding instance"
echo "2. Go to Settings → Integrations"
echo "3. Click 'Create Token'"
echo "4. Copy the generated token"
echo ""
# -s keeps the secret token out of the terminal echo.
read -sp "Linkding API token: " LINKDING_TOKEN
echo ""
# Validate token is not empty
if [ -z "$LINKDING_TOKEN" ]; then
    echo "❌ Error: API token cannot be empty"
    exit 1
fi
# Test the Linkding connection with a minimal one-bookmark list request;
# anything other than HTTP 200 is treated as a bad URL or token.
echo ""
echo "🔍 Testing Linkding connection..."
HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" \
    "${LINKDING_URL}/api/bookmarks/?limit=1" \
    -H "Authorization: Token ${LINKDING_TOKEN}")
if [ "$HTTP_CODE" -eq 200 ]; then
    echo "✅ Linkding connection successful!"
else
    echo "❌ Linkding connection failed (HTTP ${HTTP_CODE})"
    echo " Please check your URL and token."
    exit 1
fi
echo ""
# ============================================================================
# Style Reference Configuration (Optional)
# ============================================================================
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "🎨 Style Reference (Optional)"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
echo "Enter a URL to your past weeknotes archive for style reference."
echo "This helps maintain consistent voice and tone in composed posts."
echo "Example: https://blog.example.com/tag/weeknotes/"
echo ""
echo "Leave blank to skip style reference."
echo ""
read -p "Weeknotes archive URL (optional): " WEEKNOTES_ARCHIVE_URL
# Remove trailing slash if present
WEEKNOTES_ARCHIVE_URL="${WEEKNOTES_ARCHIVE_URL%/}"
# Validate URL format if provided. Unlike the Mastodon/Linkding checks,
# a malformed URL is non-fatal here because this setting is optional.
if [ -n "$WEEKNOTES_ARCHIVE_URL" ] && [[ ! "$WEEKNOTES_ARCHIVE_URL" =~ ^https?:// ]]; then
    echo "⚠️ Warning: URL should start with http:// or https://"
    echo " Proceeding anyway..."
fi
if [ -n "$WEEKNOTES_ARCHIVE_URL" ]; then
    echo "✅ Style reference URL configured"
else
    echo "⏭️ Skipping style reference"
fi
echo ""
# ============================================================================
# Save Configuration
# ============================================================================
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "💾 Saving Configuration"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
# Create config directory if it doesn't exist
mkdir -p "$(dirname "${CONFIG_FILE}")"
# Create data directory if it doesn't exist
mkdir -p "${DATA_DIR}"

# Create the file with restrictive permissions BEFORE any secrets are
# written, so tokens are never readable by other local users even briefly.
# (Previously the file was written with the default umask and only chmod'd
# to 600 afterwards.)
touch "${CONFIG_FILE}"
chmod 600 "${CONFIG_FILE}"

# NOTE(review): values are interpolated into the JSON without escaping, so
# a token or URL containing a double quote or backslash would produce
# invalid JSON. Mastodon/Linkding tokens are normally URL-safe, but
# consider generating this file with jq or python if that ever changes.

# Write config file with conditional weeknotes_archive
if [ -n "$WEEKNOTES_ARCHIVE_URL" ]; then
    cat > "${CONFIG_FILE}" <<EOF
{
  "mastodon": {
    "server": "${MASTODON_SERVER}",
    "token": "${MASTODON_TOKEN}"
  },
  "linkding": {
    "url": "${LINKDING_URL}",
    "token": "${LINKDING_TOKEN}"
  },
  "weeknotes_archive": "${WEEKNOTES_ARCHIVE_URL}",
  "created_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
}
EOF
else
    cat > "${CONFIG_FILE}" <<EOF
{
  "mastodon": {
    "server": "${MASTODON_SERVER}",
    "token": "${MASTODON_TOKEN}"
  },
  "linkding": {
    "url": "${LINKDING_URL}",
    "token": "${LINKDING_TOKEN}"
  },
  "created_at": "$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
}
EOF
fi

# Re-assert restrictive permissions (harmless if already 600).
chmod 600 "${CONFIG_FILE}"

echo "✅ Configuration saved to: ${CONFIG_FILE}"
echo ""
echo "╔════════════════════════════════════════╗"
echo "║ Setup Complete! ║"
echo "╚════════════════════════════════════════╝"
echo ""
echo "You can now use the weeknotes composer."
echo ""