Initial commit

12 .claude-plugin/plugin.json Normal file
@@ -0,0 +1,12 @@
{
  "name": "llm-router",
  "description": "This skill should be used when users want to route LLM requests to different AI providers (OpenAI, Grok/xAI, Groq, DeepSeek, OpenRouter) using SwiftOpenAI-CLI. Use this skill when users ask to 'use grok', 'ask grok', 'use groq', 'ask deepseek', or any similar request to query a specific LLM provider in agent mode.",
  "version": "0.0.0-2025.11.28",
  "author": {
    "name": "James Rochabrun",
    "email": "jamesrochabrun@gmail.com"
  },
  "skills": [
    "./skills/llm-router"
  ]
}
3 README.md Normal file
@@ -0,0 +1,3 @@
# llm-router

This skill should be used when users want to route LLM requests to different AI providers (OpenAI, Grok/xAI, Groq, DeepSeek, OpenRouter) using SwiftOpenAI-CLI. Use this skill when users ask to 'use grok', 'ask grok', 'use groq', 'ask deepseek', or any similar request to query a specific LLM provider in agent mode.
56 plugin.lock.json Normal file
@@ -0,0 +1,56 @@
{
  "$schema": "internal://schemas/plugin.lock.v1.json",
  "pluginId": "gh:jamesrochabrun/skills:llm-router",
  "normalized": {
    "repo": null,
    "ref": "refs/tags/v20251128.0",
    "commit": "d49cc6b910db4c71410aac4b9e27da9870fb1aad",
    "treeHash": "c8ac09fa1acdc5a7fb1c025c310bb8f0a198ad96a7e95b038bc6e0b44d295865",
    "generatedAt": "2025-11-28T10:17:51.316633Z",
    "toolVersion": "publish_plugins.py@0.2.0"
  },
  "origin": {
    "remote": "git@github.com:zhongweili/42plugin-data.git",
    "branch": "master",
    "commit": "aa1497ed0949fd50e99e70d6324a29c5b34f9390",
    "repoRoot": "/Users/zhongweili/projects/openmind/42plugin-data"
  },
  "manifest": {
    "name": "llm-router",
    "description": "This skill should be used when users want to route LLM requests to different AI providers (OpenAI, Grok/xAI, Groq, DeepSeek, OpenRouter) using SwiftOpenAI-CLI. Use this skill when users ask to 'use grok', 'ask grok', 'use groq', 'ask deepseek', or any similar request to query a specific LLM provider in agent mode."
  },
  "content": {
    "files": [
      {
        "path": "README.md",
        "sha256": "3271ac1536548d7437e7175025910f9c16a9d311fa7d8d7ed267b8f70ab9b430"
      },
      {
        "path": ".claude-plugin/plugin.json",
        "sha256": "71043340b41f0ca0ab3e6cd6e4e8f761529134795595894661e01659ff1d6696"
      },
      {
        "path": "skills/llm-router/SKILL.md",
        "sha256": "ef23573396e6779a0e61be3a6676dd0302fe20125463af50b0e192ccbacf0c6d"
      },
      {
        "path": "skills/llm-router/references/providers.md",
        "sha256": "48ea2f40e79a1f7d17e6627d1fd8269229abc1ab76184e911d0110dfd0bea70c"
      },
      {
        "path": "skills/llm-router/scripts/configure_provider.sh",
        "sha256": "fdee80a0dbabb722736580c9dcf91813bbd7cc8cfce601acb3b981e6bc5e6619"
      },
      {
        "path": "skills/llm-router/scripts/check_install_cli.sh",
        "sha256": "b9229171ec933d6e6b37758d866dc80b29ef06ecd513ce7340f72225b3343374"
      }
    ],
    "dirSha256": "c8ac09fa1acdc5a7fb1c025c310bb8f0a198ad96a7e95b038bc6e0b44d295865"
  },
  "security": {
    "scannedAt": null,
    "scannerVersion": null,
    "flags": []
  }
}
501 skills/llm-router/SKILL.md Normal file
@@ -0,0 +1,501 @@
---
name: llm-router
description: This skill should be used when users want to route LLM requests to different AI providers (OpenAI, Grok/xAI, Groq, DeepSeek, OpenRouter) using SwiftOpenAI-CLI. Use this skill when users ask to "use grok", "ask grok", "use groq", "ask deepseek", or any similar request to query a specific LLM provider in agent mode.
---

# LLM Router

## Overview

Route AI requests to different LLM providers using SwiftOpenAI-CLI's agent mode. This skill automatically configures the CLI to use the requested provider (OpenAI, Grok, Groq, DeepSeek, or OpenRouter), ensures the tool is installed and up-to-date, and executes one-shot agentic tasks.

## Core Workflow

When a user requests to use a specific LLM provider (e.g., "use grok to explain quantum computing"), follow this workflow:

### Step 1: Ensure SwiftOpenAI-CLI is Ready

Check if SwiftOpenAI-CLI is installed and up-to-date:

```bash
scripts/check_install_cli.sh
```

This script will:
- Check if `swiftopenai` is installed
- Verify the version (minimum 1.4.4)
- Install or update if necessary
- Report the current installation status
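
For orientation, a successful run where the CLI is already current might print output along these lines (illustrative only; the exact wording and version numbers depend on the installed release):

```bash
scripts/check_install_cli.sh
# 🔍 Checking SwiftOpenAI-CLI installation...
# 📌 Current version: 1.4.4
# 🔍 Checking for updates...
# ✅ SwiftOpenAI-CLI is up to date (v1.4.4)
# 🔧 SwiftOpenAI-CLI is ready to use!
```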

### Step 2: Configure the Provider

Based on the user's request, identify the target provider and configure SwiftOpenAI-CLI:

```bash
scripts/configure_provider.sh <provider> [model]
```

**Supported providers:**
- `openai` - OpenAI (GPT-4, GPT-5, etc.)
- `grok` - xAI Grok models
- `groq` - Groq (Llama, Mixtral, etc.)
- `deepseek` - DeepSeek models
- `openrouter` - OpenRouter (300+ models)

**Examples:**

```bash
# Configure for Grok
scripts/configure_provider.sh grok grok-4-0709

# Configure for Groq with Llama
scripts/configure_provider.sh groq llama-3.3-70b-versatile

# Configure for DeepSeek Reasoner
scripts/configure_provider.sh deepseek deepseek-reasoner

# Configure for OpenAI GPT-5
scripts/configure_provider.sh openai gpt-5
```

The script automatically:
- Sets the provider configuration
- Sets the appropriate base URL
- Sets the default model
- Provides guidance on API key configuration
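
Under the hood, these steps are plain `swiftopenai config set` calls. As a rough sketch, configuring Grok by hand (equivalent to `scripts/configure_provider.sh grok grok-4-0709`, per `references/providers.md`) would look like:

```bash
swiftopenai config set provider xai
swiftopenai config set base-url https://api.x.ai/v1
swiftopenai config set default-model grok-4-0709
```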

### Step 3: Verify API Key

The configuration script automatically checks if an API key is set and will **stop with clear instructions** if no API key is found.

**If API key is missing:**

The script exits with error code 1 and displays:
- ⚠️ Warning that API key is not set
- Instructions for setting via environment variable
- Instructions for setting via config (persistent)

**Do not proceed to Step 4 if the configuration script fails due to missing API key.**

Instead, inform the user they need to set their API key first:

**Option 1 - Environment variable (session only):**
```bash
export XAI_API_KEY=xai-...          # for Grok
export GROQ_API_KEY=gsk_...         # for Groq
export DEEPSEEK_API_KEY=sk-...      # for DeepSeek
export OPENROUTER_API_KEY=sk-or-... # for OpenRouter
export OPENAI_API_KEY=sk-...        # for OpenAI
```

**Option 2 - Config file (persistent):**
```bash
swiftopenai config set api-key <api-key-value>
```

After the user sets their API key, re-run the configuration script to verify.

### Step 4: Execute the Agentic Task

Run the user's request using agent mode:

```bash
swiftopenai agent "<user's question or task>"
```

**Agent mode features:**
- One-shot task execution
- Built-in tool calling
- MCP (Model Context Protocol) integration support
- Conversation memory with session IDs
- Multiple output formats

**Examples:**

```bash
# Simple question
swiftopenai agent "What is quantum entanglement?"

# With specific model override
swiftopenai agent "Write a Python function" --model grok-3

# With session for conversation continuity
swiftopenai agent "Remember my name is Alice" --session-id chat-123
swiftopenai agent "What's my name?" --session-id chat-123

# With MCP tools (filesystem example)
swiftopenai agent "Read the README.md file" \
  --mcp-servers filesystem \
  --allowed-tools "mcp__filesystem__*"
```

## Usage Patterns

### Pattern 1: Simple Provider Routing

**User Request:** "Use grok to explain quantum computing"

**Execution:**

```bash
# 1. Check CLI installation
scripts/check_install_cli.sh

# 2. Configure for Grok
scripts/configure_provider.sh grok grok-4-0709

# 3. Execute the task
swiftopenai agent "Explain quantum computing"
```

### Pattern 2: Specific Model Selection

**User Request:** "Ask DeepSeek Reasoner to solve this math problem step by step"

**Execution:**

```bash
# 1. Check CLI installation
scripts/check_install_cli.sh

# 2. Configure for DeepSeek with Reasoner model
scripts/configure_provider.sh deepseek deepseek-reasoner

# 3. Execute with explicit model
swiftopenai agent "Solve x^2 + 5x + 6 = 0 step by step" --model deepseek-reasoner
```

### Pattern 3: Fast Inference with Groq

**User Request:** "Use groq to generate code quickly"

**Execution:**

```bash
# 1. Check CLI installation
scripts/check_install_cli.sh

# 2. Configure for Groq (known for fast inference)
scripts/configure_provider.sh groq llama-3.3-70b-versatile

# 3. Execute the task
swiftopenai agent "Write a function to calculate fibonacci numbers"
```

### Pattern 4: Access Multiple Models via OpenRouter

**User Request:** "Use OpenRouter to access Claude"

**Execution:**

```bash
# 1. Check CLI installation
scripts/check_install_cli.sh

# 2. Configure for OpenRouter
scripts/configure_provider.sh openrouter anthropic/claude-3.5-sonnet

# 3. Execute with Claude via OpenRouter
swiftopenai agent "Explain the benefits of functional programming"
```

## Provider-Specific Considerations

### OpenAI (GPT-5 Models)

GPT-5 models support advanced parameters:

```bash
# Minimal reasoning for fast coding tasks
swiftopenai agent "Write a sort function" \
  --model gpt-5 \
  --reasoning minimal \
  --verbose low

# High reasoning for complex problems
swiftopenai agent "Explain quantum mechanics" \
  --model gpt-5 \
  --reasoning high \
  --verbose high
```

**Verbosity levels:** `low`, `medium`, `high`
**Reasoning effort:** `minimal`, `low`, `medium`, `high`

### Grok (xAI)

Grok models are optimized for real-time information and coding:

- `grok-4-0709` - Latest with enhanced reasoning
- `grok-3` - General purpose
- `grok-code-fast-1` - Optimized for code generation

### Groq

Known for ultra-fast inference with open-source models:

- `llama-3.3-70b-versatile` - Best general purpose
- `mixtral-8x7b-32768` - Mixture of experts

### DeepSeek

Specialized in reasoning and coding:

- `deepseek-reasoner` - Advanced step-by-step reasoning
- `deepseek-coder` - Coding specialist
- `deepseek-chat` - General chat

### OpenRouter

Provides access to 300+ models:

- Anthropic Claude models
- OpenAI models
- Google Gemini models
- Meta Llama models
- And many more

## API Key Management

### Recommended: Use Environment Variables for Multiple Providers

The **best practice** for using multiple providers is to set all API keys as environment variables. This allows seamless switching between providers without reconfiguring keys.

**Add to your shell profile** (`~/.zshrc` or `~/.bashrc`):

```bash
# API Keys for LLM Providers
export OPENAI_API_KEY=sk-...
export XAI_API_KEY=xai-...
export GROQ_API_KEY=gsk_...
export DEEPSEEK_API_KEY=sk-...
export OPENROUTER_API_KEY=sk-or-v1-...
```

After adding these, reload your shell:

```bash
source ~/.zshrc   # or source ~/.bashrc
```

**How it works:**
- SwiftOpenAI-CLI automatically uses the **correct provider-specific key** based on the configured provider
- When you switch to Grok, it uses `XAI_API_KEY`
- When you switch to OpenAI, it uses `OPENAI_API_KEY`
- No need to reconfigure keys each time
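
For example (mirroring the flow in `references/providers.md`), switching providers only changes the CLI configuration; the matching key is picked up from the environment:

```bash
# Route to Grok; the CLI reads XAI_API_KEY from the environment
scripts/configure_provider.sh grok grok-4-0709
swiftopenai agent "Hello, world!"

# Route back to OpenAI; the CLI reads OPENAI_API_KEY instead
scripts/configure_provider.sh openai gpt-5-mini
swiftopenai agent "Hello, world!"
```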

### Alternative: Single API Key via Config (Not Recommended for Multiple Providers)

If you only use **one provider**, you can store the key in the config file:

```bash
swiftopenai config set api-key <your-key>
```

**Limitation:** The config file only stores ONE api-key. If you switch providers, you'd need to reconfigure the key each time.

### Checking Current API Key

```bash
# View current configuration (API key is masked)
swiftopenai config list

# Get specific API key setting
swiftopenai config get api-key
```

**Priority:** Provider-specific environment variables take precedence over config file settings.

## Advanced Features

### Interactive Configuration

For complex setups, use the interactive wizard:

```bash
swiftopenai config setup
```

This launches a guided setup that walks through:
- Provider selection
- API key entry
- Model selection
- Debug mode configuration
- Base URL setup (if needed)

### Session Management

Maintain conversation context across multiple requests:

```bash
# Start a session
swiftopenai agent "My project is a React app" --session-id project-123

# Continue the session
swiftopenai agent "What framework did I mention?" --session-id project-123
```

### MCP Tool Integration

Connect to external services via Model Context Protocol:

```bash
# With GitHub MCP
swiftopenai agent "List my repos" \
  --mcp-servers github \
  --allowed-tools "mcp__github__*"

# With filesystem MCP
swiftopenai agent "Read package.json and explain dependencies" \
  --mcp-servers filesystem \
  --allowed-tools "mcp__filesystem__*"

# Multiple MCP servers
swiftopenai agent "Complex task" \
  --mcp-servers github,filesystem,postgres \
  --allowed-tools "mcp__*"
```

### Output Formats

Control how results are presented:

```bash
# Plain text (default)
swiftopenai agent "Calculate 5 + 3" --output-format plain

# Structured JSON
swiftopenai agent "List 3 colors" --output-format json

# Streaming JSON events (Claude SDK style)
swiftopenai agent "Analyze data" --output-format stream-json
```

## Troubleshooting

### Common Issues

**Issue: "swiftopenai: command not found"**

Solution: Run the `check_install_cli.sh` script, which will install the CLI automatically.

**Issue: Authentication errors**

Solution: Verify the correct API key is set for the provider:

```bash
# Check current config
swiftopenai config list

# Set the appropriate API key
swiftopenai config set api-key <your-key>

# Or use environment variable
export XAI_API_KEY=xai-...   # for Grok
```

**Issue: Model not available**

Solution: Verify the model name matches the provider's available models. Check `references/providers.md` for correct model names or run:

```bash
swiftopenai models
```

**Issue: Rate limiting or quota errors**

Solution: These are provider-specific limits. Consider:
- Using a different model tier
- Switching to a different provider temporarily
- Checking your API usage dashboard
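
As a temporary workaround, the bundled configuration script makes it easy to re-route a request to another provider you already have a key for, for example:

```bash
# Fall back to Groq while the original provider is rate limited
scripts/configure_provider.sh groq llama-3.3-70b-versatile
swiftopenai agent "<user's question or task>"
```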

### Debug Mode

Enable debug mode to see detailed HTTP information:

```bash
swiftopenai config set debug true
```

This shows:
- HTTP status codes and headers
- API request details
- Response metadata

## Resources

This skill includes bundled resources to support LLM routing:

### scripts/

- **check_install_cli.sh** - Ensures SwiftOpenAI-CLI is installed and up-to-date
- **configure_provider.sh** - Configures the CLI for a specific provider

### references/

- **providers.md** - Comprehensive reference on all supported providers, models, configurations, and capabilities

## Best Practices

1. **Always check installation first** - Run `check_install_cli.sh` before routing requests
2. **Configure provider explicitly** - Use `configure_provider.sh` to ensure correct setup
3. **Verify API keys** - Check that the appropriate API key is set for the target provider
4. **Choose the right model** - Match the model to the task (coding, reasoning, general chat)
5. **Use sessions for continuity** - Leverage `--session-id` for multi-turn conversations
6. **Enable debug mode for troubleshooting** - When issues arise, debug mode provides valuable insights
7. **Reference provider documentation** - Consult `references/providers.md` for detailed provider information

## Examples

### Example 1: Routing to Grok for Real-Time Information

```bash
# User: "Use grok to tell me about recent AI developments"

scripts/check_install_cli.sh
scripts/configure_provider.sh grok grok-4-0709
swiftopenai agent "Tell me about recent AI developments"
```

### Example 2: Using DeepSeek for Step-by-Step Reasoning

```bash
# User: "Ask deepseek to explain how to solve this algorithm problem"

scripts/check_install_cli.sh
scripts/configure_provider.sh deepseek deepseek-reasoner
swiftopenai agent "Explain step by step how to implement quicksort"
```

### Example 3: Fast Code Generation with Groq

```bash
# User: "Use groq to quickly generate a REST API"

scripts/check_install_cli.sh
scripts/configure_provider.sh groq llama-3.3-70b-versatile
swiftopenai agent "Generate a REST API with authentication in Python"
```

### Example 4: Accessing Claude via OpenRouter

```bash
# User: "Use openrouter to access claude and write documentation"

scripts/check_install_cli.sh
scripts/configure_provider.sh openrouter anthropic/claude-3.5-sonnet
swiftopenai agent "Write comprehensive documentation for a todo app API"
```

### Example 5: GPT-5 with Custom Parameters

```bash
# User: "Use gpt-5 with high reasoning to solve this complex problem"

scripts/check_install_cli.sh
scripts/configure_provider.sh openai gpt-5
swiftopenai agent "Design a distributed caching system" \
  --model gpt-5 \
  --reasoning high \
  --verbose high
```
309 skills/llm-router/references/providers.md Normal file
@@ -0,0 +1,309 @@
# LLM Provider Reference

This document provides detailed information about supported LLM providers, their configurations, models, and capabilities.

## Supported Providers

### OpenAI (Default)

**Provider ID:** `openai` (or empty string)
**Base URL:** Default OpenAI endpoint
**Environment Variable:** `OPENAI_API_KEY`

#### Popular Models

- **GPT-5** (`gpt-5`, `gpt-5-mini`, `gpt-5-nano`) - Latest generation with advanced reasoning
  - `gpt-5`: Complex reasoning, broad world knowledge, code-heavy tasks
  - `gpt-5-mini`: Cost-optimized reasoning and chat
  - `gpt-5-nano`: High-throughput, simple instruction-following
- **GPT-4** (`gpt-4o`, `gpt-4o-mini`) - Multimodal, balanced performance
- **GPT-3.5** (`gpt-3.5-turbo`) - Fast and economical

#### Configuration Example

```bash
swiftopenai config set provider ""
swiftopenai config set base-url ""
swiftopenai config set api-key sk-...
swiftopenai config set default-model gpt-5-mini
```

#### Agent Mode

Supports full agent mode with MCP tools, session management, and auto-compaction.

---

### xAI (Grok)

**Provider ID:** `xai`
**Base URL:** `https://api.x.ai/v1`
**Environment Variable:** `XAI_API_KEY`

#### Popular Models

- **Grok-4** (`grok-4-0709`) - Latest Grok model with enhanced reasoning
- **Grok-3** (`grok-3`) - General purpose language model
- **Grok Code Fast** (`grok-code-fast-1`) - Optimized for code generation

#### Configuration Example

```bash
swiftopenai config set provider xai
swiftopenai config set base-url https://api.x.ai/v1
swiftopenai config set api-key xai-...
swiftopenai config set default-model grok-4-0709
```

#### Agent Mode

Fully supported with agent mode capabilities.

---

### Groq

**Provider ID:** `groq`
**Base URL:** `https://api.groq.com/openai/v1`
**Environment Variable:** `GROQ_API_KEY`

#### Popular Models

- **Llama 3.3** (`llama-3.3-70b-versatile`) - Versatile open-source model
- **Mixtral** (`mixtral-8x7b-32768`) - Mixture of experts architecture
- Various other open-source models with ultra-fast inference

#### Configuration Example

```bash
swiftopenai config set provider groq
swiftopenai config set base-url https://api.groq.com/openai/v1
swiftopenai config set api-key gsk_...
swiftopenai config set default-model llama-3.3-70b-versatile
```

#### Agent Mode

Supported with agent mode capabilities.

---

### DeepSeek

**Provider ID:** `deepseek`
**Base URL:** `https://api.deepseek.com`
**Environment Variable:** `DEEPSEEK_API_KEY`

#### Popular Models

- **DeepSeek Chat** (`deepseek-chat`) - General purpose conversational model
- **DeepSeek Coder** (`deepseek-coder`) - Specialized for coding tasks
- **DeepSeek Reasoner** (`deepseek-reasoner`) - Advanced reasoning capabilities

#### Configuration Example

```bash
swiftopenai config set provider deepseek
swiftopenai config set base-url https://api.deepseek.com
swiftopenai config set api-key sk-...
swiftopenai config set default-model deepseek-reasoner
```

#### Agent Mode

Supported with agent mode capabilities.

---

### OpenRouter

**Provider ID:** `openrouter`
**Base URL:** `https://openrouter.ai/api/v1`
**Environment Variable:** `OPENROUTER_API_KEY`

#### Popular Models

OpenRouter provides access to 300+ models from various providers:

- **Anthropic:** `anthropic/claude-3.5-sonnet`, `anthropic/claude-3.5-haiku`
- **OpenAI:** `openai/gpt-4`, `openai/gpt-4-turbo`
- **Google:** `google/gemini-pro`, `google/gemini-pro-vision`
- **Meta:** `meta-llama/llama-3.1-405b`
- And many more...

#### Configuration Example

```bash
swiftopenai config set provider openrouter
swiftopenai config set base-url https://openrouter.ai/api/v1
swiftopenai config set api-key sk-or-v1-...
swiftopenai config set default-model anthropic/claude-3.5-sonnet
```

#### Agent Mode

Supported with agent mode capabilities.

---

## Agent Mode Features

SwiftOpenAI-CLI's agent mode provides:

- **MCP Integration** - Connect to external tools via Model Context Protocol
- **Conversation Memory** - Maintains context within sessions
- **Auto-Compaction** - Automatically summarizes long conversations
- **Session Management** - Continue conversations with `--session-id`
- **Tool Calling** - Execute built-in and MCP tools
- **Multiple Output Formats** - plain, json, stream-json

### Agent Mode Usage

```bash
# Simple agent command
swiftopenai agent "Calculate 25 * 37 and tell me today's date"

# With specific model
swiftopenai agent "Explain quantum computing" --model gpt-5

# Interactive mode
swiftopenai agent --interactive

# With MCP tools
swiftopenai agent "Read config.json" \
  --mcp-servers filesystem \
  --allowed-tools "mcp__filesystem__*"
```

## Common Configuration Commands

### View Current Configuration

```bash
swiftopenai config list
```

### Get Specific Setting

```bash
swiftopenai config get default-model
swiftopenai config get provider
```

### Set Configuration Values

```bash
swiftopenai config set <key> <value>
```

### Interactive Configuration Setup

```bash
swiftopenai config setup
```

## Temperature and Parameters

Control response randomness and behavior:

```bash
# Set default temperature (0.0-2.0)
swiftopenai config set temperature 0.7

# Override per command
swiftopenai chat "Write a poem" --temperature 1.5

# Set max tokens
swiftopenai config set max-tokens 2000
```

## Managing Multiple Provider API Keys

### Best Practice: Use Provider-Specific Environment Variables

To seamlessly use multiple providers without constantly reconfiguring API keys, set all provider keys as environment variables in your shell profile.

**Add to `~/.zshrc` or `~/.bashrc`:**

```bash
# API Keys for LLM Providers
export OPENAI_API_KEY=sk-...
export XAI_API_KEY=xai-...
export GROQ_API_KEY=gsk_...
export DEEPSEEK_API_KEY=sk-...
export OPENROUTER_API_KEY=sk-or-v1-...
```

**Benefits:**
- ✅ All keys available at once
- ✅ Automatic key selection based on provider
- ✅ No need to reconfigure when switching providers
- ✅ Keys persist across terminal sessions
- ✅ More secure than storing in config file

**After adding, reload your shell:**
```bash
source ~/.zshrc   # or source ~/.bashrc
```

### How SwiftOpenAI-CLI Selects API Keys

When you configure a provider, SwiftOpenAI-CLI uses this priority:

1. **Provider-specific environment variable** (e.g., `XAI_API_KEY` for Grok)
2. Config file `api-key` setting (if set)
3. Generic `OPENAI_API_KEY` fallback (for OpenAI provider only)

**Example Flow:**
```bash
# Configure for Grok
swiftopenai config set provider xai

# CLI automatically uses XAI_API_KEY environment variable
swiftopenai agent "Hello, world!"

# Switch to OpenAI
swiftopenai config set provider ""

# CLI automatically uses OPENAI_API_KEY environment variable
swiftopenai agent "Hello, world!"
```

### Config File Limitation

The config file (`~/.swiftopenai/config.json`) only stores **ONE** api-key value. If you use `swiftopenai config set api-key <key>`, it will be used for **all providers** (unless overridden by a provider-specific env var).

This is why environment variables are recommended for multiple providers.

## Debug Mode

Enable debug mode to see HTTP details:

```bash
swiftopenai config set debug true
```

Shows:
- HTTP status codes and headers
- Full API requests (when built in debug mode)
- Response details

## Troubleshooting

### Authentication Errors

- Verify API key is set correctly
- Check environment variable is exported
- Ensure provider configuration matches your API key

### Model Not Available

- Verify model name is correct for the provider
- Use `swiftopenai models` to list available models
- Check provider documentation for model availability

### Connection Issues

- Verify base URL is correct
- Check network connectivity
- Ensure API service is operational
61 skills/llm-router/scripts/check_install_cli.sh Executable file
@@ -0,0 +1,61 @@
#!/bin/bash
# Script to check if swiftopenai-cli is installed and install/update if needed

set -e

REQUIRED_VERSION="1.4.4"
PACKAGE_NAME="swiftopenai-cli"

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

echo "🔍 Checking SwiftOpenAI-CLI installation..."

# Check if swiftopenai command exists
if ! command -v swiftopenai &> /dev/null; then
    echo -e "${YELLOW}SwiftOpenAI-CLI is not installed.${NC}"
    echo "📦 Installing swiftopenai-cli via npm..."
    npm install -g swiftopenai-cli
    echo -e "${GREEN}✅ SwiftOpenAI-CLI installed successfully!${NC}"
    swiftopenai --version
    exit 0
fi

# Get current version
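# Note: this assumes `swiftopenai --version` prints a bare version string (e.g. "1.4.4");
# any extra text in the output would skew the comparisons below.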
CURRENT_VERSION=$(swiftopenai --version 2>&1 | tr -d '\n')

echo "📌 Current version: $CURRENT_VERSION"
echo "📌 Minimum required version: $REQUIRED_VERSION"

# Function to compare versions
version_ge() {
    # Returns 0 if $1 >= $2
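    # sort -V -C succeeds only when its input is already in ascending version order,
    # so printing "$2" before "$1" checks that $1 is greater than or equal to $2.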
    printf '%s\n%s\n' "$2" "$1" | sort -V -C
}

# Check if update is available
echo "🔍 Checking for updates..."
LATEST_VERSION=$(npm view swiftopenai-cli version 2>/dev/null || echo "$REQUIRED_VERSION")

if [ "$CURRENT_VERSION" = "$LATEST_VERSION" ]; then
    echo -e "${GREEN}✅ SwiftOpenAI-CLI is up to date (v$CURRENT_VERSION)${NC}"
elif version_ge "$CURRENT_VERSION" "$REQUIRED_VERSION"; then
    echo -e "${GREEN}✅ SwiftOpenAI-CLI version is acceptable (v$CURRENT_VERSION >= v$REQUIRED_VERSION)${NC}"
    echo -e "${YELLOW}⚠️ A newer version (v$LATEST_VERSION) is available. Consider updating with:${NC}"
    echo "  npm update -g swiftopenai-cli"
else
    echo -e "${YELLOW}⚠️ SwiftOpenAI-CLI is outdated (v$CURRENT_VERSION < v$REQUIRED_VERSION)${NC}"
    echo "🔄 Updating to latest version..."
    npm update -g swiftopenai-cli
    NEW_VERSION=$(swiftopenai --version 2>&1 | tr -d '\n')
    echo -e "${GREEN}✅ Updated to version $NEW_VERSION${NC}"
fi

# Verify installation
echo ""
echo "🔧 SwiftOpenAI-CLI is ready to use!"
echo "  Location: $(which swiftopenai)"
echo "  Version: $(swiftopenai --version 2>&1)"
176 skills/llm-router/scripts/configure_provider.sh Executable file
@@ -0,0 +1,176 @@
#!/usr/bin/env bash
# Script to configure swiftopenai-cli to use a specific LLM provider

set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

PROVIDER_NAME="$1"
MODEL="$2"

# Function to get provider configuration
get_provider_config() {
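    # Echoes "provider_id|base_url|comma-separated default models" for a known provider;
    # empty provider_id/base_url fields (the openai case) mean the CLI's built-in OpenAI
    # defaults. Prints nothing for unsupported providers.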
    case "$1" in
        openai)
            echo "||gpt-4o,gpt-5,gpt-5-mini"
            ;;
        grok)
            echo "xai|https://api.x.ai/v1|grok-4-0709,grok-3,grok-code-fast-1"
            ;;
        groq)
            echo "groq|https://api.groq.com/openai/v1|llama-3.3-70b-versatile,mixtral-8x7b-32768"
            ;;
        deepseek)
            echo "deepseek|https://api.deepseek.com|deepseek-chat,deepseek-coder,deepseek-reasoner"
            ;;
        openrouter)
            echo "openrouter|https://openrouter.ai/api/v1|anthropic/claude-3.5-sonnet,openai/gpt-4"
            ;;
        *)
            echo ""
            ;;
    esac
}

usage() {
    echo -e "${BLUE}Usage: $0 <provider> [model]${NC}"
    echo ""
    echo "Supported providers:"
    echo "  openai     - OpenAI (GPT-4, GPT-5, etc.)"
    echo "  grok       - xAI Grok models"
    echo "  groq       - Groq (Llama, Mixtral, etc.)"
    echo "  deepseek   - DeepSeek models"
    echo "  openrouter - OpenRouter (300+ models)"
    echo ""
    echo "Examples:"
    echo "  $0 grok grok-4-0709"
    echo "  $0 groq llama-3.3-70b-versatile"
    echo "  $0 deepseek deepseek-reasoner"
    echo "  $0 openai gpt-5"
    exit 1
}

if [ -z "$PROVIDER_NAME" ]; then
    usage
fi

# Normalize provider name to lowercase
PROVIDER_NAME=$(echo "$PROVIDER_NAME" | tr '[:upper:]' '[:lower:]')

# Get provider configuration
PROVIDER_CONFIG=$(get_provider_config "$PROVIDER_NAME")

# Check if provider is supported
if [ -z "$PROVIDER_CONFIG" ]; then
    echo -e "${RED}❌ Unsupported provider: $PROVIDER_NAME${NC}"
    usage
fi

# Parse provider configuration
IFS='|' read -r PROVIDER_ID BASE_URL DEFAULT_MODELS <<< "$PROVIDER_CONFIG"

echo -e "${BLUE}🔧 Configuring SwiftOpenAI-CLI for $PROVIDER_NAME${NC}"
echo ""

# Configure provider
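# For OpenAI, clear provider and base-url so the CLI falls back to its built-in defaults;
# errors are ignored in case the values are already empty.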
if [ "$PROVIDER_NAME" = "openai" ]; then
    echo "📝 Setting provider to OpenAI (default)"
    swiftopenai config set provider "" 2>/dev/null || true
    swiftopenai config set base-url "" 2>/dev/null || true
else
    echo "📝 Setting provider to $PROVIDER_ID"
    swiftopenai config set provider "$PROVIDER_ID"

    if [ -n "$BASE_URL" ]; then
        echo "📝 Setting base URL to $BASE_URL"
        swiftopenai config set base-url "$BASE_URL"
    fi
fi

# Set model if provided
if [ -n "$MODEL" ]; then
    echo "📝 Setting default model to $MODEL"
    swiftopenai config set default-model "$MODEL"
elif [ -n "$DEFAULT_MODELS" ]; then
    # Use first default model
    DEFAULT_MODEL=$(echo "$DEFAULT_MODELS" | cut -d',' -f1)
    echo "📝 Setting default model to $DEFAULT_MODEL"
    swiftopenai config set default-model "$DEFAULT_MODEL"
fi

echo ""
echo -e "${GREEN}✅ Configuration complete!${NC}"
echo ""
echo "Current configuration:"
swiftopenai config list

echo ""

# Check if API key is set
API_KEY_SET=false
CONFIG_API_KEY=$(swiftopenai config get api-key 2>/dev/null || echo "")
API_KEY_SOURCE=""

# Check provider-specific environment variable
case "$PROVIDER_NAME" in
    openai)
        ENV_VAR="OPENAI_API_KEY"
        ;;
    grok)
        ENV_VAR="XAI_API_KEY"
        ;;
    groq)
        ENV_VAR="GROQ_API_KEY"
        ;;
    deepseek)
        ENV_VAR="DEEPSEEK_API_KEY"
        ;;
    openrouter)
        ENV_VAR="OPENROUTER_API_KEY"
        ;;
esac

# Check if API key is available (either in config or provider-specific env var)
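# "${!ENV_VAR}" below is bash indirect expansion: it reads the value of the variable
# whose name is stored in ENV_VAR (e.g. XAI_API_KEY when the provider is grok).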
if [ -n "$CONFIG_API_KEY" ] && [ "$CONFIG_API_KEY" != "not set" ] && [ "$CONFIG_API_KEY" != "****" ]; then
    API_KEY_SET=true
    API_KEY_SOURCE="config file"
elif [ -n "${!ENV_VAR}" ]; then
    API_KEY_SET=true
    API_KEY_SOURCE="environment variable $ENV_VAR"
fi

if [ "$API_KEY_SET" = true ]; then
    echo -e "${GREEN}✅ API key is configured ($API_KEY_SOURCE)${NC}"
    echo ""
    echo -e "${GREEN}🚀 Ready to use! Test with:${NC}"
    echo "  swiftopenai agent \"Hello, world!\""
else
    echo -e "${RED}⚠️ API KEY NOT SET${NC}"
    echo ""
    echo "You need to set an API key for $PROVIDER_NAME before using the CLI."
    echo ""
    echo -e "${GREEN}Recommended - Set via environment variable (best for multiple providers):${NC}"
    echo "  export $ENV_VAR=<your-api-key>"
    echo ""
    echo "  To persist across sessions, add to your shell profile:"
    echo "  echo 'export $ENV_VAR=<your-api-key>' >> ~/.zshrc"
    echo "  source ~/.zshrc"
    echo ""
    echo -e "${YELLOW}Alternative - Set via config (only if using one provider):${NC}"
    echo "  swiftopenai config set api-key <your-api-key>"
    echo ""
    echo -e "${BLUE}💡 Tip: Set ALL provider keys as environment variables for easy switching:${NC}"
    echo "  export OPENAI_API_KEY=sk-..."
    echo "  export XAI_API_KEY=xai-..."
    echo "  export GROQ_API_KEY=gsk_..."
    echo "  export DEEPSEEK_API_KEY=sk-..."
    echo "  export OPENROUTER_API_KEY=sk-or-..."
    echo ""
    echo -e "${YELLOW}After setting the API key, run the configuration again to verify.${NC}"
    exit 1
fi