// Streaming Chat Completion (Fetch API - Cloudflare Workers)
// Server-Sent Events (SSE) parsing for edge runtimes

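// The OPENAI_API_KEY binding below is expected to come from a Workers secret
// (for example `wrangler secret put OPENAI_API_KEY`); the variable name just
// has to match the Env interface declared next.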
interface Env {
  OPENAI_API_KEY: string;
}

export default {
  async fetch(request: Request, env: Env): Promise<Response> {
    if (request.method !== 'POST') {
      return new Response('Method not allowed', { status: 405 });
    }

    const { message } = await request.json() as { message: string };

    // Call OpenAI with streaming enabled
    const response = await fetch('https://api.openai.com/v1/chat/completions', {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${env.OPENAI_API_KEY}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        model: 'gpt-5',
        messages: [
          { role: 'user', content: message }
        ],
        stream: true,
      }),
    });

    // Surface upstream failures instead of trying to stream a missing body
    if (!response.ok || !response.body) {
      return new Response(`Upstream error: ${response.status}`, { status: 502 });
    }

    // Create a transformed stream for the client
    const { readable, writable } = new TransformStream();
    const writer = writable.getWriter();
    const encoder = new TextEncoder();

    // Process the upstream SSE stream and forward content deltas to the client
    (async () => {
      const reader = response.body!.getReader(); // body presence checked above
      const decoder = new TextDecoder();
      let buffer = ''; // holds any partial SSE line left over between chunks

      try {
        while (true) {
          const { done, value } = await reader.read();
          if (done) break;

          // stream: true keeps multi-byte characters split across chunks intact
          buffer += decoder.decode(value, { stream: true });

          // SSE events can also be split across chunks, so keep the last
          // (possibly incomplete) line in the buffer for the next read
          const lines = buffer.split('\n');
          buffer = lines.pop() ?? '';

          for (const line of lines) {
            if (!line.startsWith('data: ')) continue;

            const data = line.slice(6);

            if (data === '[DONE]') {
              await writer.write(encoder.encode('data: [DONE]\n\n'));
              return; // the finally block below still closes the writer
            }

            try {
              const json = JSON.parse(data);
              const content = json.choices[0]?.delta?.content || '';

              if (content) {
                // Forward just the text delta to the client
                await writer.write(encoder.encode(`data: ${JSON.stringify({ content })}\n\n`));
              }
            } catch (e) {
              // Skip keep-alive comments and malformed JSON
            }
          }
        }
      } catch (err) {
        // Prevent an unhandled rejection if the upstream read fails mid-stream
        console.error('SSE relay error:', err);
      } finally {
        // close() can reject if the client has already disconnected
        await writer.close().catch(() => {});
      }
    })();

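    // Return the readable half right away so the client starts receiving
    // tokens while the async pump above is still reading from OpenAI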
    return new Response(readable, {
      headers: {
        'Content-Type': 'text/event-stream',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
      },
    });
  },
};
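
/*
  Example client-side consumption (browser code, not part of the Worker).
  A minimal sketch: the '/chat' path and the '#output' element are
  illustrative assumptions; the payload ({ message }) and the per-line
  { content } / [DONE] framing match what the Worker above emits.

  async function streamChat(message: string): Promise<void> {
    const res = await fetch('/chat', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ message }),
    });

    const reader = res.body!.getReader();
    const decoder = new TextDecoder();
    let buffer = '';

    while (true) {
      const { done, value } = await reader.read();
      if (done) break;

      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split('\n');
      buffer = lines.pop() ?? '';

      for (const line of lines) {
        if (!line.startsWith('data: ')) continue;
        const data = line.slice(6);
        if (data === '[DONE]') return;
        const { content } = JSON.parse(data) as { content: string };
        document.querySelector('#output')!.append(content);
      }
    }
  }

  A quick smoke test from the command line (-N disables curl's output
  buffering; the URL is a placeholder):

    curl -N -X POST https://<your-worker>.workers.dev \
      -H 'Content-Type: application/json' \
      -d '{"message":"Hello"}'
*/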