Initial commit

This commit is contained in:
Zhongwei Li
2025-11-30 08:25:15 +08:00
commit 0c577730d5
20 changed files with 5085 additions and 0 deletions

View File

@@ -0,0 +1,87 @@
/**
* Basic Assistant Example
*
* Demonstrates the fundamental workflow:
* 1. Create an assistant
* 2. Create a thread
* 3. Add a message
* 4. Create a run
* 5. Poll for completion
* 6. Retrieve the response
*/
import OpenAI from 'openai';
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
});
/**
 * End-to-end demo: create an assistant, start a thread, add a message,
 * run the assistant, poll until the run reaches a terminal state, then
 * print the reply and token usage.
 */
async function main() {
  console.log('🤖 Creating Math Tutor Assistant...\n');

  // 1. Create an assistant
  const assistant = await openai.beta.assistants.create({
    name: "Math Tutor",
    instructions: "You are a personal math tutor. When asked a question, write and run Python code to answer the question.",
    tools: [{ type: "code_interpreter" }],
    model: "gpt-4o",
  });
  console.log(`✅ Assistant created: ${assistant.id}\n`);

  // 2. Create a thread
  const thread = await openai.beta.threads.create();
  console.log(`✅ Thread created: ${thread.id}\n`);

  // 3. Add a message to the thread
  await openai.beta.threads.messages.create(thread.id, {
    role: "user",
    content: "I need to solve the equation `3x + 11 = 14`. Can you help me?",
  });
  console.log('✅ Message added to thread\n');

  // 4. Create a run
  console.log('🏃 Creating run...\n');
  const run = await openai.beta.threads.runs.create(thread.id, {
    assistant_id: assistant.id,
  });

  // 5. Poll until the run reaches a terminal state.
  // 'expired' and 'requires_action' must be treated as exits too: the
  // original loop only bailed on 'failed'/'cancelled' and would spin
  // forever on either of those statuses (this assistant defines no
  // functions, but guard anyway).
  console.log('⏳ Waiting for completion...\n');
  let runStatus = await openai.beta.threads.runs.retrieve(thread.id, run.id);
  while (runStatus.status !== 'completed') {
    if (['failed', 'cancelled', 'expired', 'requires_action'].includes(runStatus.status)) {
      console.error(`❌ Run ${runStatus.status}:`, runStatus.last_error);
      process.exit(1);
    }
    await new Promise(resolve => setTimeout(resolve, 1000));
    runStatus = await openai.beta.threads.runs.retrieve(thread.id, run.id);
    console.log(` Status: ${runStatus.status}`);
  }
  console.log('\n✅ Run completed!\n');

  // 6. Retrieve messages — list() returns newest first, so data[0] is the reply
  const messages = await openai.beta.threads.messages.list(thread.id);
  console.log('💬 Response:\n');
  const response = messages.data[0].content[0];
  if (response.type === 'text') {
    console.log(response.text.value);
  }

  // Usage stats (populated once the run is terminal)
  console.log('\n📊 Usage:');
  console.log(` Prompt tokens: ${runStatus.usage?.prompt_tokens}`);
  console.log(` Completion tokens: ${runStatus.usage?.completion_tokens}`);
  console.log(` Total tokens: ${runStatus.usage?.total_tokens}`);

  // Cleanup (optional)
  // await openai.beta.assistants.del(assistant.id);
  // await openai.beta.threads.del(thread.id);
}

main().catch(console.error);

View File

@@ -0,0 +1,136 @@
/**
* Code Interpreter Assistant
*
* Demonstrates:
* - Python code execution
* - File uploads for data analysis
* - Retrieving generated files (charts, CSVs)
* - Data visualization
*/
import OpenAI from 'openai';
import fs from 'fs';
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
});
/**
 * Data-analysis demo: upload a CSV, have the assistant analyze it with
 * code_interpreter, download any generated charts, and dump the Python
 * the run executed.
 *
 * The local temp CSV is removed in a finally block; the original deleted
 * it only on the success path (and `process.exit` would have skipped a
 * finally anyway, so failures set `process.exitCode` instead).
 */
async function main() {
  console.log('📊 Creating Data Analyst Assistant...\n');

  // 1. Create assistant with code interpreter
  const assistant = await openai.beta.assistants.create({
    name: "Data Analyst",
    instructions: "You are a data analyst. Analyze data and create visualizations. Always explain your approach and findings.",
    tools: [{ type: "code_interpreter" }],
    model: "gpt-4o",
  });
  console.log(`✅ Assistant created: ${assistant.id}\n`);

  // 2. Write a small sample CSV to disk so we have something to upload.
  // (The template literal body stays unindented — its text IS the file.)
  const csvData = `date,revenue,expenses
2025-01-01,10000,4000
2025-01-02,12000,4500
2025-01-03,9500,4200
2025-01-04,15000,5000
2025-01-05,13500,4800`;
  fs.writeFileSync('sample_data.csv', csvData);

  try {
    const file = await openai.files.create({
      file: fs.createReadStream('sample_data.csv'),
      purpose: 'assistants',
    });
    console.log(`✅ File uploaded: ${file.id}\n`);

    // 3. Create thread with the file attached for code_interpreter
    const thread = await openai.beta.threads.create({
      messages: [{
        role: "user",
        content: "Analyze this revenue data. Calculate total revenue, average daily revenue, and create a visualization showing revenue and expenses over time.",
        attachments: [{
          file_id: file.id,
          tools: [{ type: "code_interpreter" }],
        }],
      }],
    });
    console.log(`✅ Thread created: ${thread.id}\n`);

    // 4. Run the assistant
    console.log('🏃 Running analysis...\n');
    const run = await openai.beta.threads.runs.create(thread.id, {
      assistant_id: assistant.id,
    });

    // 5. Poll until a terminal state — 'expired' is terminal too;
    // omitting it (as the original did) risks an infinite loop.
    let runStatus = await openai.beta.threads.runs.retrieve(thread.id, run.id);
    while (!['completed', 'failed', 'cancelled', 'expired'].includes(runStatus.status)) {
      await new Promise(resolve => setTimeout(resolve, 1000));
      runStatus = await openai.beta.threads.runs.retrieve(thread.id, run.id);
      console.log(` Status: ${runStatus.status}`);
    }
    if (runStatus.status !== 'completed') {
      console.error(`❌ Run ${runStatus.status}:`, runStatus.last_error);
      process.exitCode = 1; // let the finally block clean up, then exit non-zero
      return;
    }
    console.log('\n✅ Analysis completed!\n');

    // 6. Retrieve the response (list() returns newest message first)
    const messages = await openai.beta.threads.messages.list(thread.id);
    const responseMessage = messages.data[0];
    console.log('💬 Analysis Results:\n');
    for (const content of responseMessage.content) {
      if (content.type === 'text') {
        console.log(content.text.value);
        console.log('\n---\n');
      }
      // Download generated image files (charts)
      if (content.type === 'image_file') {
        const imageFileId = content.image_file.file_id;
        console.log(`📈 Chart generated: ${imageFileId}`);
        const imageData = await openai.files.content(imageFileId);
        const imageBuffer = Buffer.from(await imageData.arrayBuffer());
        fs.writeFileSync(`chart_${imageFileId}.png`, imageBuffer);
        console.log(` Saved as: chart_${imageFileId}.png\n`);
      }
    }

    // 7. Inspect run steps to see the code that was executed
    const runSteps = await openai.beta.threads.runs.steps.list(thread.id, run.id);
    console.log('🔍 Execution Steps:\n');
    for (const step of runSteps.data) {
      if (step.step_details.type === 'tool_calls') {
        for (const toolCall of step.step_details.tool_calls) {
          if (toolCall.type === 'code_interpreter') {
            console.log('Python code executed:');
            console.log(toolCall.code_interpreter.input);
            console.log('\nOutput:');
            console.log(toolCall.code_interpreter.outputs);
            console.log('\n---\n');
          }
        }
      }
    }

    console.log('\n📊 Usage:');
    console.log(` Total tokens: ${runStatus.usage?.total_tokens}`);
  } finally {
    // Remove the local temp file even when the run fails or exits early
    fs.unlinkSync('sample_data.csv');
  }
}

main().catch(console.error);

View File

@@ -0,0 +1,213 @@
/**
* File Search Assistant (RAG)
*
* Demonstrates:
* - Creating a vector store
* - Uploading documents
* - Semantic search with file_search tool
* - Retrieving answers with citations
*/
import OpenAI from 'openai';
import fs from 'fs';
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
});
/**
 * Poll a run once a second until it reaches a terminal state.
 * Treats 'cancelled' and 'expired' as terminal (the original check
 * covered only 'completed'/'failed' and could loop forever) and throws
 * unless the run completed, so callers never print stale results.
 */
async function waitForRunCompletion(threadId: string, runId: string) {
  let run = await openai.beta.threads.runs.retrieve(threadId, runId);
  while (!['completed', 'failed', 'cancelled', 'expired'].includes(run.status)) {
    await new Promise(resolve => setTimeout(resolve, 1000));
    run = await openai.beta.threads.runs.retrieve(threadId, runId);
  }
  if (run.status !== 'completed') {
    throw new Error(`Run ${run.status}: ${run.last_error?.message}`);
  }
  return run;
}

/**
 * RAG demo: create a vector store, index two markdown docs, attach the
 * store to a file_search assistant, then answer two questions — the
 * first with citation details. Local .md sources are removed in a
 * finally block so they do not leak when a run fails.
 */
async function main() {
  console.log('📚 Creating Knowledge Base Assistant...\n');

  // 1. Create a vector store
  const vectorStore = await openai.beta.vectorStores.create({
    name: "Product Documentation",
    expires_after: {
      anchor: "last_active_at",
      days: 7, // Auto-delete after 7 days of inactivity
    },
  });
  console.log(`✅ Vector store created: ${vectorStore.id}\n`);

  // 2. Write two sample documents to disk so we can upload them.
  // (Template literal bodies stay unindented — their text IS the file.)
  const doc1 = `# Product Installation Guide
To install our product:
1. Download the installer from our website
2. Run the installer with administrator privileges
3. Follow the on-screen instructions
4. Restart your computer after installation
System Requirements:
- Windows 10 or later / macOS 11 or later
- 4GB RAM minimum, 8GB recommended
- 500MB free disk space`;
  const doc2 = `# Troubleshooting Guide
Common Issues:
1. Installation Fails
- Ensure you have administrator privileges
- Disable antivirus temporarily
- Check disk space
2. Application Won't Start
- Update graphics drivers
- Run compatibility troubleshooter
- Reinstall the application
3. Performance Issues
- Close other applications
- Increase virtual memory
- Check for updates`;
  fs.writeFileSync('install_guide.md', doc1);
  fs.writeFileSync('troubleshooting.md', doc2);

  try {
    // Upload files
    const file1 = await openai.files.create({
      file: fs.createReadStream('install_guide.md'),
      purpose: 'assistants',
    });
    const file2 = await openai.files.create({
      file: fs.createReadStream('troubleshooting.md'),
      purpose: 'assistants',
    });
    console.log(`✅ Files uploaded: ${file1.id}, ${file2.id}\n`);

    // Add both files to the vector store in one batch
    const fileBatch = await openai.beta.vectorStores.fileBatches.create(
      vectorStore.id,
      {
        file_ids: [file1.id, file2.id],
      }
    );
    console.log('⏳ Indexing files...\n');

    // Poll until indexing finishes
    let batch = await openai.beta.vectorStores.fileBatches.retrieve(
      vectorStore.id,
      fileBatch.id
    );
    while (batch.status === 'in_progress') {
      await new Promise(resolve => setTimeout(resolve, 1000));
      batch = await openai.beta.vectorStores.fileBatches.retrieve(
        vectorStore.id,
        fileBatch.id
      );
    }
    console.log(`✅ Indexing complete! Status: ${batch.status}\n`);

    // 3. Create assistant wired to the vector store via file_search
    const assistant = await openai.beta.assistants.create({
      name: "Product Support Assistant",
      instructions: "You are a helpful product support assistant. Use the file search tool to answer questions about installation, troubleshooting, and product usage. Always cite your sources.",
      tools: [{ type: "file_search" }],
      tool_resources: {
        file_search: {
          vector_store_ids: [vectorStore.id],
        },
      },
      model: "gpt-4o",
    });
    console.log(`✅ Assistant created: ${assistant.id}\n`);

    // 4. Create thread and ask the first question
    const thread = await openai.beta.threads.create();
    await openai.beta.threads.messages.create(thread.id, {
      role: "user",
      content: "How do I install the product?",
    });
    console.log('❓ Question: How do I install the product?\n');

    // 5. Run and wait (throws if the run does not complete)
    const run = await openai.beta.threads.runs.create(thread.id, {
      assistant_id: assistant.id,
    });
    await waitForRunCompletion(thread.id, run.id);

    // 6. Print the answer with any file citations
    const messages = await openai.beta.threads.messages.list(thread.id);
    const response = messages.data[0];
    console.log('💬 Answer:\n');
    for (const content of response.content) {
      if (content.type === 'text') {
        console.log(content.text.value);
        if (content.text.annotations && content.text.annotations.length > 0) {
          console.log('\n📎 Citations:');
          for (const annotation of content.text.annotations) {
            if (annotation.type === 'file_citation') {
              console.log(` File: ${annotation.file_citation.file_id}`);
              console.log(` Quote: ${annotation.file_citation.quote}`);
            }
          }
        }
      }
    }
    console.log('\n---\n');

    // Ask a second question on the same thread
    await openai.beta.threads.messages.create(thread.id, {
      role: "user",
      content: "What should I do if the application won't start?",
    });
    console.log('❓ Question: What should I do if the application won\'t start?\n');
    const run2 = await openai.beta.threads.runs.create(thread.id, {
      assistant_id: assistant.id,
    });
    // The original read messages here without checking the run outcome;
    // this throws on failure instead of printing a stale answer.
    await waitForRunCompletion(thread.id, run2.id);

    const messages2 = await openai.beta.threads.messages.list(thread.id);
    const response2 = messages2.data[0];
    console.log('💬 Answer:\n');
    for (const content of response2.content) {
      if (content.type === 'text') {
        console.log(content.text.value);
      }
    }

    // 7. Vector store stats
    const storeInfo = await openai.beta.vectorStores.retrieve(vectorStore.id);
    console.log('\n📊 Vector Store Stats:');
    console.log(` Files: ${storeInfo.file_counts.completed}`);
    console.log(` Size: ${storeInfo.usage_bytes} bytes`);
  } finally {
    // Remove the local source files even when a run fails
    fs.unlinkSync('install_guide.md');
    fs.unlinkSync('troubleshooting.md');
  }

  console.log('\n💡 Note: Vector store will auto-delete after 7 days of inactivity');
  console.log(` Or manually delete with: await openai.beta.vectorStores.del("${vectorStore.id}")`);
}

main().catch(console.error);

View File

@@ -0,0 +1,247 @@
/**
* Function Calling Assistant
*
* Demonstrates:
* - Defining custom functions
* - Handling requires_action state
* - Submitting tool outputs
* - Multiple function calls
*/
import OpenAI from 'openai';
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
});
/**
 * Stub weather lookup used by the function-calling demo.
 * Always reports partly-cloudy conditions at 22 °C / 72 °F regardless of
 * location. In production, call a real weather API here.
 */
async function getWeather(location: string, unit: 'celsius' | 'fahrenheit' = 'celsius') {
  const temperature = unit === 'fahrenheit' ? 72 : 22;
  return {
    location,
    temperature,
    unit,
    conditions: "Partly cloudy",
    humidity: 65,
  };
}
// Mock stock price API
/**
 * Stub stock-price lookup used by the function-calling demo.
 * Symbols are matched case-insensitively (the model may emit 'aapl');
 * unknown symbols fall back to 100.00. In production, call a real API.
 */
async function getStockPrice(symbol: string) {
  const prices: Record<string, number> = {
    AAPL: 185.50,
    GOOGL: 142.75,
    MSFT: 420.30,
  };
  const ticker = symbol.toUpperCase();
  return {
    symbol: ticker,
    // ?? (not ||) so a legitimate price of 0 would not be masked by the fallback
    price: prices[ticker] ?? 100.00,
    currency: 'USD',
    timestamp: new Date().toISOString(),
  };
}
/**
 * Function-calling demo: the assistant requests get_weather /
 * get_stock_price calls, we execute the mocks and submit tool outputs
 * until each run completes, for two conversational turns.
 */
async function main() {
  console.log('🛠️ Creating Function Calling Assistant...\n');

  // 1. Create assistant with two function tools
  const assistant = await openai.beta.assistants.create({
    name: "Multi-Tool Assistant",
    instructions: "You are a helpful assistant that can check weather and stock prices. Use the available functions to answer user questions.",
    tools: [
      {
        type: "function",
        function: {
          name: "get_weather",
          description: "Get the current weather for a location",
          parameters: {
            type: "object",
            properties: {
              location: {
                type: "string",
                description: "The city name, e.g., 'San Francisco'",
              },
              unit: {
                type: "string",
                enum: ["celsius", "fahrenheit"],
                description: "Temperature unit",
              },
            },
            required: ["location"],
          },
        },
      },
      {
        type: "function",
        function: {
          name: "get_stock_price",
          description: "Get the current stock price for a symbol",
          parameters: {
            type: "object",
            properties: {
              symbol: {
                type: "string",
                description: "Stock symbol, e.g., 'AAPL' for Apple",
              },
            },
            required: ["symbol"],
          },
        },
      },
    ],
    model: "gpt-4o",
  });
  console.log(`✅ Assistant created: ${assistant.id}\n`);

  // 2. Create thread with the first user turn
  const thread = await openai.beta.threads.create({
    messages: [{
      role: "user",
      content: "What's the weather in London and what's Apple's stock price?",
    }],
  });
  console.log(`✅ Thread created: ${thread.id}\n`);
  console.log('❓ User: What\'s the weather in London and what\'s Apple\'s stock price?\n');

  // 3. Create run
  let run = await openai.beta.threads.runs.create(thread.id, {
    assistant_id: assistant.id,
  });
  console.log('🏃 Running assistant...\n');

  // 4. Poll, executing function calls whenever the run requires action
  while (true) {
    run = await openai.beta.threads.runs.retrieve(thread.id, run.id);
    console.log(` Status: ${run.status}`);
    if (run.status === 'completed') {
      break;
    }
    if (run.status === 'failed' || run.status === 'cancelled' || run.status === 'expired') {
      console.error(`❌ Run ${run.status}:`, run.last_error);
      process.exit(1);
    }
    if (run.status === 'requires_action') {
      console.log('\n🔧 Function calls required:\n');
      const toolCalls = run.required_action!.submit_tool_outputs.tool_calls;
      const toolOutputs = [];
      for (const toolCall of toolCalls) {
        console.log(` Function: ${toolCall.function.name}`);
        console.log(` Arguments: ${toolCall.function.arguments}`);
        const args = JSON.parse(toolCall.function.arguments);
        let output;
        // Execute the function
        if (toolCall.function.name === 'get_weather') {
          output = await getWeather(args.location, args.unit);
          console.log(` Result: ${output.temperature}°${output.unit === 'celsius' ? 'C' : 'F'}, ${output.conditions}`);
        } else if (toolCall.function.name === 'get_stock_price') {
          output = await getStockPrice(args.symbol);
          console.log(` Result: $${output.price}`);
        }
        toolOutputs.push({
          tool_call_id: toolCall.id,
          output: JSON.stringify(output),
        });
        console.log('');
      }
      // Submit tool outputs — every requested call must get an entry
      console.log('📤 Submitting tool outputs...\n');
      run = await openai.beta.threads.runs.submitToolOutputs(
        thread.id,
        run.id,
        { tool_outputs: toolOutputs }
      );
    }
    await new Promise(resolve => setTimeout(resolve, 1000));
  }
  console.log('\n✅ Run completed!\n');

  // 5. Retrieve final response (list() returns newest message first)
  const messages = await openai.beta.threads.messages.list(thread.id);
  const response = messages.data[0];
  console.log('💬 Assistant Response:\n');
  for (const content of response.content) {
    if (content.type === 'text') {
      console.log(content.text.value);
    }
  }

  // 6. Ask a follow-up question on the same thread
  console.log('\n---\n');
  await openai.beta.threads.messages.create(thread.id, {
    role: "user",
    content: "How about Microsoft's stock?",
  });
  console.log('❓ User: How about Microsoft\'s stock?\n');
  run = await openai.beta.threads.runs.create(thread.id, {
    assistant_id: assistant.id,
  });

  // Handle function calls again.
  // FIX: the original loop checked only 'completed'/'requires_action',
  // so a failed/cancelled/expired run spun forever; it also ignored
  // get_weather calls, which would submit an incomplete tool_outputs
  // array and fail the run.
  while (true) {
    run = await openai.beta.threads.runs.retrieve(thread.id, run.id);
    if (run.status === 'completed') {
      break;
    }
    if (run.status === 'failed' || run.status === 'cancelled' || run.status === 'expired') {
      console.error(`❌ Run ${run.status}:`, run.last_error);
      process.exit(1);
    }
    if (run.status === 'requires_action') {
      const toolCalls = run.required_action!.submit_tool_outputs.tool_calls;
      const toolOutputs = [];
      for (const toolCall of toolCalls) {
        const args = JSON.parse(toolCall.function.arguments);
        let output;
        if (toolCall.function.name === 'get_stock_price') {
          output = await getStockPrice(args.symbol);
          console.log(` 🔧 Called get_stock_price(${args.symbol}): $${output.price}`);
        } else if (toolCall.function.name === 'get_weather') {
          output = await getWeather(args.location, args.unit);
          console.log(` 🔧 Called get_weather(${args.location}): ${output.temperature}°`);
        }
        toolOutputs.push({
          tool_call_id: toolCall.id,
          output: JSON.stringify(output),
        });
      }
      run = await openai.beta.threads.runs.submitToolOutputs(
        thread.id,
        run.id,
        { tool_outputs: toolOutputs }
      );
    }
    await new Promise(resolve => setTimeout(resolve, 1000));
  }

  const messages2 = await openai.beta.threads.messages.list(thread.id);
  const response2 = messages2.data[0];
  console.log('\n💬 Assistant Response:\n');
  for (const content of response2.content) {
    if (content.type === 'text') {
      console.log(content.text.value);
    }
  }
}

main().catch(console.error);

23
templates/package.json Normal file
View File

@@ -0,0 +1,23 @@
{
"name": "openai-assistants-templates",
"version": "1.0.0",
"description": "OpenAI Assistants API v2 templates",
"type": "module",
"scripts": {
"basic": "tsx templates/basic-assistant.ts",
"code-interpreter": "tsx templates/code-interpreter-assistant.ts",
"file-search": "tsx templates/file-search-assistant.ts",
"function-calling": "tsx templates/function-calling-assistant.ts",
"streaming": "tsx templates/streaming-assistant.ts",
"thread-management": "tsx templates/thread-management.ts",
"vector-store": "tsx templates/vector-store-setup.ts"
},
"dependencies": {
"openai": "^6.7.0"
},
"devDependencies": {
"@types/node": "^20.10.0",
"tsx": "^4.7.0",
"typescript": "^5.3.3"
}
}

View File

@@ -0,0 +1,165 @@
/**
* Streaming Assistant
*
* Demonstrates:
* - Real-time streaming with Server-Sent Events (SSE)
* - Handling different event types
* - Streaming message deltas
* - Tool call progress updates
*/
import OpenAI from 'openai';
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
});
/**
 * Streaming demo: run an assistant with SSE streaming, printing message
 * deltas as they arrive and surfacing code_interpreter progress, then
 * stream a follow-up turn on the same thread.
 */
async function main() {
  console.log('🌊 Creating Streaming Assistant...\n');

  // 1. Create assistant
  const assistant = await openai.beta.assistants.create({
    name: "Streaming Tutor",
    instructions: "You are a helpful tutor. Explain concepts clearly and use code interpreter when helpful.",
    tools: [{ type: "code_interpreter" }],
    model: "gpt-4o",
  });
  console.log(`✅ Assistant created: ${assistant.id}\n`);

  // 2. Create thread with the first user turn
  const thread = await openai.beta.threads.create({
    messages: [{
      role: "user",
      content: "Explain the Fibonacci sequence and calculate the first 10 numbers.",
    }],
  });
  console.log(`✅ Thread created: ${thread.id}\n`);
  console.log('💬 User: Explain the Fibonacci sequence and calculate the first 10 numbers.\n');
  console.log('🤖 Assistant: ');

  // 3. Create streaming run
  const stream = await openai.beta.threads.runs.stream(thread.id, {
    assistant_id: assistant.id,
  });

  // 4. Handle stream events.
  // Case bodies that declare variables are wrapped in braces: a bare
  // `const` inside a `case` is scoped to the whole switch
  // (ESLint no-case-declarations). The original also tracked
  // currentMessageId / fullResponse but never read them — removed.
  for await (const event of stream) {
    switch (event.event) {
      case 'thread.run.created':
        console.log('[Run started]\n');
        break;
      case 'thread.run.queued':
        console.log('[Run queued...]\n');
        break;
      case 'thread.run.in_progress':
        console.log('[Processing...]\n');
        break;
      case 'thread.message.delta': {
        // Stream text content in real-time
        const delta = event.data.delta.content?.[0];
        if (delta?.type === 'text' && delta.text?.value) {
          process.stdout.write(delta.text.value);
        }
        break;
      }
      case 'thread.message.completed':
        console.log('\n\n[Message completed]\n');
        break;
      case 'thread.run.step.created': {
        const step = event.data;
        if (step.type === 'tool_calls') {
          console.log('\n[Tool call initiated...]\n');
        }
        break;
      }
      case 'thread.run.step.delta': {
        // Show code interpreter progress
        const stepDelta = event.data.delta.step_details;
        if (stepDelta?.type === 'tool_calls') {
          const toolCall = stepDelta.tool_calls?.[0];
          if (toolCall?.type === 'code_interpreter') {
            if (toolCall.code_interpreter?.input) {
              console.log('\n🔧 Executing Python code:\n');
              console.log(toolCall.code_interpreter.input);
              console.log('\n');
            }
            if (toolCall.code_interpreter?.outputs) {
              for (const output of toolCall.code_interpreter.outputs) {
                if (output.type === 'logs') {
                  console.log('📋 Output:', output.logs);
                }
              }
            }
          }
        }
        break;
      }
      case 'thread.run.step.completed':
        console.log('[Step completed]\n');
        break;
      case 'thread.run.completed':
        console.log('\n✅ Run completed!\n');
        break;
      case 'thread.run.failed':
        console.error('\n❌ Run failed:', event.data.last_error);
        break;
      case 'thread.run.requires_action':
        console.log('\n⚠ Requires action (function calling needed)');
        break;
      case 'error':
        console.error('\n❌ Stream error:', event.data);
        break;
    }
  }
  console.log('---\n');

  // 5. Ask a follow-up question on the same thread
  await openai.beta.threads.messages.create(thread.id, {
    role: "user",
    content: "Can you explain how recursion works in that sequence?",
  });
  console.log('💬 User: Can you explain how recursion works in that sequence?\n');
  console.log('🤖 Assistant: ');
  const stream2 = await openai.beta.threads.runs.stream(thread.id, {
    assistant_id: assistant.id,
  });
  for await (const event of stream2) {
    if (event.event === 'thread.message.delta') {
      const delta = event.data.delta.content?.[0];
      if (delta?.type === 'text' && delta.text?.value) {
        process.stdout.write(delta.text.value);
      }
    }
    if (event.event === 'thread.run.completed') {
      console.log('\n\n✅ Streaming complete!\n');
    }
  }
}

main().catch(console.error);

View File

@@ -0,0 +1,237 @@
/**
* Thread Lifecycle Management
*
* Demonstrates:
* - Creating and reusing threads
* - Checking for active runs
* - Thread cleanup patterns
* - Error handling for common issues
*/
import OpenAI from 'openai';
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
});
// Simulate database storage
const userThreads = new Map<string, string>();
/**
 * Look up the cached thread for a user, creating (and caching) a new one
 * when none exists yet. Thread ownership is recorded in thread metadata
 * so it can be audited server-side later.
 */
async function getOrCreateUserThread(userId: string): Promise<string> {
  console.log(`🔍 Checking thread for user: ${userId}`);

  // Cache hit: reuse the existing thread for conversation continuity.
  const existing = userThreads.get(userId);
  if (existing) {
    console.log(` ✅ Existing thread found: ${existing}`);
    return existing;
  }

  // Cache miss: create a fresh thread tagged with the owning user.
  console.log(' No existing thread found. Creating new thread...');
  const thread = await openai.beta.threads.create({
    metadata: {
      user_id: userId,
      created_at: new Date().toISOString(),
    },
  });
  userThreads.set(userId, thread.id);
  console.log(` ✅ Thread created: ${thread.id}`);
  return thread.id;
}
/**
 * True when the thread's most recent run is still active
 * (queued, in_progress, or cancelling).
 *
 * Returns a real boolean: the original `latestRun && ...` expression
 * evaluated to `undefined` for a thread with no runs, violating the
 * declared Promise<boolean> return type.
 */
async function hasActiveRun(threadId: string): Promise<boolean> {
  const runs = await openai.beta.threads.runs.list(threadId, {
    limit: 1,
    order: 'desc', // newest run first
  });
  const latestRun = runs.data[0];
  return latestRun !== undefined
    && ['queued', 'in_progress', 'cancelling'].includes(latestRun.status);
}
/**
 * Ensure the thread has no active run before new work is queued.
 *
 * If the latest run is active: cancel it and wait for it to settle when
 * cancelIfActive is true, otherwise throw so the caller can retry later.
 *
 * FIX: the cancel-wait loop now exits on ANY terminal status. The
 * original looped `while (status !== 'cancelled')`, so a run that raced
 * to 'completed' (or 'failed'/'expired') before the cancellation landed
 * would spin forever.
 */
async function ensureNoActiveRun(threadId: string, cancelIfActive = false) {
  const runs = await openai.beta.threads.runs.list(threadId, {
    limit: 1,
    order: 'desc',
  });
  const latestRun = runs.data[0];
  if (latestRun && ['queued', 'in_progress', 'cancelling'].includes(latestRun.status)) {
    if (cancelIfActive) {
      console.log(` ⚠️ Active run detected: ${latestRun.id}. Cancelling...`);
      await openai.beta.threads.runs.cancel(threadId, latestRun.id);
      // Wait until the run reaches any terminal state
      let run = latestRun;
      while (!['cancelled', 'completed', 'failed', 'expired'].includes(run.status)) {
        await new Promise(resolve => setTimeout(resolve, 500));
        run = await openai.beta.threads.runs.retrieve(threadId, run.id);
      }
      console.log(' ✅ Run cancelled');
    } else {
      throw new Error(
        `Thread ${threadId} has an active run (${latestRun.id}). ` +
        `Wait for completion or set cancelIfActive=true`
      );
    }
  }
}
/**
 * Add a user message to a thread, run the assistant on it, and return the
 * assistant's text reply ('' when the reply carries no text content).
 * Any active run on the thread is cancelled first — a thread accepts only
 * one run at a time.
 */
async function sendMessage(
  threadId: string,
  assistantId: string,
  message: string
): Promise<string> {
  console.log(`\n💬 Sending message to thread ${threadId}...`);

  // Cancel any in-flight run before touching the thread.
  await ensureNoActiveRun(threadId, true);

  await openai.beta.threads.messages.create(threadId, {
    role: 'user',
    content: message,
  });
  console.log(' ✅ Message added');

  console.log(' 🏃 Creating run...');
  const run = await openai.beta.threads.runs.create(threadId, {
    assistant_id: assistantId,
  });

  // Poll once a second until the run settles.
  const terminalStates = ['completed', 'failed', 'cancelled'];
  let current = await openai.beta.threads.runs.retrieve(threadId, run.id);
  while (!terminalStates.includes(current.status)) {
    await new Promise(resolve => setTimeout(resolve, 1000));
    current = await openai.beta.threads.runs.retrieve(threadId, run.id);
  }
  if (current.status !== 'completed') {
    throw new Error(`Run ${current.status}: ${current.last_error?.message}`);
  }
  console.log(' ✅ Run completed');

  // Newest message first — that is the assistant's reply.
  const replies = await openai.beta.threads.messages.list(threadId, {
    limit: 1,
    order: 'desc',
  });
  const reply = replies.data[0].content[0];
  return reply.type === 'text' ? reply.text.value : '';
}
/**
 * Delete every cached thread older than maxAgeHours and drop it from the
 * user → thread map. Failures are logged and do not stop the sweep.
 */
async function cleanupOldThreads(maxAgeHours: number = 24) {
  console.log(`\n🧹 Cleaning up threads older than ${maxAgeHours} hours...`);
  let deletedCount = 0;
  for (const [userId, threadId] of userThreads.entries()) {
    try {
      const thread = await openai.beta.threads.retrieve(threadId);
      // created_at is a unix timestamp in seconds
      const createdAt = new Date(thread.created_at * 1000);
      const ageHours = (Date.now() - createdAt.getTime()) / (1000 * 60 * 60);
      if (ageHours <= maxAgeHours) continue; // young enough — keep it
      await openai.beta.threads.del(threadId);
      userThreads.delete(userId);
      deletedCount += 1;
      console.log(` ✅ Deleted thread for user ${userId} (age: ${ageHours.toFixed(1)}h)`);
    } catch (error) {
      console.error(` ❌ Error deleting thread ${threadId}:`, error);
    }
  }
  console.log(`\n Total threads deleted: ${deletedCount}`);
}
/**
* Main demo
*/
async function main() {
console.log('🧵 Thread Lifecycle Management Demo\n');
// Create an assistant
const assistant = await openai.beta.assistants.create({
name: "Demo Assistant",
instructions: "You are a helpful assistant.",
model: "gpt-4o",
});
console.log(`✅ Assistant created: ${assistant.id}\n`);
// Simulate multiple users
const user1 = 'user_alice';
const user2 = 'user_bob';
// User 1: First message
let thread1 = await getOrCreateUserThread(user1);
let response1 = await sendMessage(thread1, assistant.id, "Hello! What's 2+2?");
console.log(`\n🤖 Response: ${response1}\n`);
// User 2: First message
let thread2 = await getOrCreateUserThread(user2);
let response2 = await sendMessage(thread2, assistant.id, "What's the capital of France?");
console.log(`\n🤖 Response: ${response2}\n`);
// User 1: Second message (reuses thread)
thread1 = await getOrCreateUserThread(user1);
response1 = await sendMessage(thread1, assistant.id, "Can you multiply that by 3?");
console.log(`\n🤖 Response: ${response1}\n`);
// Check for active runs
console.log('\n📊 Thread Status:');
const hasActive1 = await hasActiveRun(thread1);
const hasActive2 = await hasActiveRun(thread2);
console.log(` User 1 thread active: ${hasActive1}`);
console.log(` User 2 thread active: ${hasActive2}`);
// List messages in thread 1
console.log(`\n📜 Conversation history for user 1:`);
const messages = await openai.beta.threads.messages.list(thread1);
for (const message of messages.data.reverse()) {
const content = message.content[0];
if (content.type === 'text') {
console.log(` ${message.role}: ${content.text.value}`);
}
}
// Cleanup demo (set to 0 hours to delete all)
// await cleanupOldThreads(0);
console.log('\n✅ Demo complete!');
console.log(`\n💡 Tips:`);
console.log(' - Always check for active runs before creating new ones');
console.log(' - Reuse threads for conversation continuity');
console.log(' - Clean up old threads to manage costs');
console.log(' - Use metadata to track thread ownership');
}
main().catch(console.error);

View File

@@ -0,0 +1,241 @@
/**
* Vector Store Setup
*
* Demonstrates:
* - Creating vector stores
* - Batch file uploads
* - Monitoring indexing progress
* - Vector store management
* - Cost optimization
*/
import OpenAI from 'openai';
import fs from 'fs';
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
});
/**
 * Walkthrough of the vector-store lifecycle: create → upload → batch
 * index → monitor progress → inspect stats and estimated cost → manage →
 * cleanup notes. Prints the store id at the end for reuse.
 */
async function main() {
  console.log('📦 Vector Store Setup Guide\n');

  // 1. Create a vector store
  console.log('Step 1: Creating vector store...\n');
  const vectorStore = await openai.beta.vectorStores.create({
    name: "Company Knowledge Base",
    metadata: {
      department: "engineering",
      version: "1.0",
    },
    expires_after: {
      anchor: "last_active_at",
      days: 30, // Auto-delete after 30 days of inactivity
    },
  });
  console.log(`✅ Vector store created: ${vectorStore.id}`);
  console.log(` Name: ${vectorStore.name}`);
  console.log(` Status: ${vectorStore.status}`);
  console.log(` Auto-expires: ${vectorStore.expires_after?.days} days after last use\n`);

  // 2. Prepare sample documents.
  // (Template literal bodies stay unindented — their text IS the file.)
  console.log('Step 2: Preparing documents...\n');
  const documents = [
    {
      filename: 'api_docs.md',
      content: `# API Documentation
## Authentication
All API requests require an API key in the Authorization header.
## Rate Limits
- Free tier: 100 requests/hour
- Pro tier: 1000 requests/hour
## Endpoints
- GET /api/users - List users
- POST /api/users - Create user
- GET /api/users/:id - Get user details`,
    },
    {
      filename: 'deployment_guide.md',
      content: `# Deployment Guide
## Prerequisites
- Docker installed
- Kubernetes cluster running
- kubectl configured
## Steps
1. Build Docker image: docker build -t app:latest .
2. Push to registry: docker push registry/app:latest
3. Deploy: kubectl apply -f deployment.yaml
4. Verify: kubectl get pods`,
    },
    {
      filename: 'security_policy.md',
      content: `# Security Policy
## Password Requirements
- Minimum 12 characters
- Must include uppercase, lowercase, numbers, symbols
- Cannot reuse last 5 passwords
## Access Control
- Use SSO for authentication
- Enable 2FA for all accounts
- Review access logs monthly
## Incident Response
- Report security issues to security@company.com
- Critical incidents escalated within 1 hour`,
    },
  ];

  // Write files to disk
  const fileIds: string[] = [];
  for (const doc of documents) {
    fs.writeFileSync(doc.filename, doc.content);
    console.log(` 📄 Created: ${doc.filename}`);
  }

  // 3. Upload each file, removing the local copy once uploaded
  console.log('\nStep 3: Uploading files to OpenAI...\n');
  for (const doc of documents) {
    const file = await openai.files.create({
      file: fs.createReadStream(doc.filename),
      purpose: 'assistants',
    });
    fileIds.push(file.id);
    console.log(` ✅ Uploaded: ${doc.filename} (${file.id})`);
    // Clean up local file
    fs.unlinkSync(doc.filename);
  }

  // 4. Add all uploaded files to the vector store in one batch
  console.log('\nStep 4: Adding files to vector store...\n');
  const fileBatch = await openai.beta.vectorStores.fileBatches.create(
    vectorStore.id,
    {
      file_ids: fileIds,
    }
  );
  console.log(` 📦 Batch created: ${fileBatch.id}`);
  console.log(` Files in batch: ${fileBatch.file_counts.total}`);

  // 5. Monitor indexing progress, logging only when the status line
  // changes. Sleep happens before each re-fetch, so we no longer wait an
  // extra second after the batch has already finished (as the original did).
  console.log('\nStep 5: Monitoring indexing progress...\n');
  let batch = fileBatch;
  let lastStatus = '';
  while (batch.status === 'in_progress') {
    await new Promise(resolve => setTimeout(resolve, 1000));
    batch = await openai.beta.vectorStores.fileBatches.retrieve(
      vectorStore.id,
      fileBatch.id
    );
    const statusMsg = ` Status: ${batch.status} | ` +
      `Completed: ${batch.file_counts.completed}/${batch.file_counts.total} | ` +
      `Failed: ${batch.file_counts.failed}`;
    if (statusMsg !== lastStatus) {
      console.log(statusMsg);
      lastStatus = statusMsg;
    }
  }
  console.log(`\n ✅ Indexing ${batch.status}!`);

  if (batch.file_counts.failed > 0) {
    console.log(` ⚠️ ${batch.file_counts.failed} files failed to index`);
    // List failed files with their errors
    const files = await openai.beta.vectorStores.files.list(vectorStore.id);
    for (const file of files.data) {
      if (file.status === 'failed') {
        console.log(` - File ${file.id}: ${file.last_error?.message}`);
      }
    }
  }

  // 6. Get vector store details
  console.log('\nStep 6: Vector store statistics...\n');
  const updatedStore = await openai.beta.vectorStores.retrieve(vectorStore.id);
  console.log(` 📊 Statistics:`);
  console.log(` Total files: ${updatedStore.file_counts.completed}`);
  console.log(` Storage used: ${updatedStore.usage_bytes} bytes (${(updatedStore.usage_bytes / 1024 / 1024).toFixed(2)} MB)`);
  console.log(` Status: ${updatedStore.status}`);

  // 7. Cost estimation: first 1GB is free, then $0.10/GB/day
  const storageMB = updatedStore.usage_bytes / 1024 / 1024;
  const storageGB = storageMB / 1024;
  const costPerDay = Math.max(0, (storageGB - 1) * 0.10);
  const costPerMonth = costPerDay * 30;
  console.log(`\n 💰 Cost Estimation:`);
  console.log(` Storage: ${storageGB.toFixed(4)} GB`);
  console.log(` Cost per day: $${costPerDay.toFixed(4)} (first 1GB free)`);
  console.log(` Cost per month: $${costPerMonth.toFixed(2)}`);

  // 8. List all files in the vector store
  console.log('\nStep 7: Files in vector store...\n');
  const filesInStore = await openai.beta.vectorStores.files.list(vectorStore.id);
  for (const file of filesInStore.data) {
    console.log(` 📄 ${file.id}`);
    console.log(` Status: ${file.status}`);
    console.log(` Created: ${new Date(file.created_at * 1000).toISOString()}`);
  }

  // 9. Management operations
  console.log('\nStep 8: Management operations...\n');
  // Update vector store metadata. The returned store object was bound to
  // an unused local in the original — just await the call.
  await openai.beta.vectorStores.update(vectorStore.id, {
    metadata: {
      department: "engineering",
      version: "1.0",
      last_updated: new Date().toISOString(),
    },
  });
  console.log(' ✅ Metadata updated');

  // List all vector stores in the account (first page only)
  const allStores = await openai.beta.vectorStores.list({ limit: 5 });
  console.log(`\n 📚 Total vector stores in account: ${allStores.data.length}`);
  for (const store of allStores.data) {
    console.log(` - ${store.name} (${store.id}): ${store.file_counts.completed} files`);
  }

  // 10. Cleanup instructions
  console.log('\n💡 Cleanup Instructions:\n');
  console.log(' To delete individual files:');
  console.log(` await openai.beta.vectorStores.files.del("${vectorStore.id}", "file_id");`);
  console.log('');
  console.log(' To delete entire vector store:');
  console.log(` await openai.beta.vectorStores.del("${vectorStore.id}");`);
  console.log('');
  console.log(' Note: Vector store will auto-delete after 30 days of inactivity (configured above)');
  console.log('\n✅ Vector store setup complete!');
  console.log(`\n🔑 Save this ID to use with assistants:`);
  console.log(` ${vectorStore.id}`);
}

main().catch(console.error);