Initial commit

This commit is contained in:
Zhongwei Li
2025-11-30 08:25:37 +08:00
commit 13df4850f7
29 changed files with 6729 additions and 0 deletions

View File

@@ -0,0 +1,191 @@
/**
* Cloudflare Workers + Vite Frontend Setup
*
* File: src/App.tsx
*
* Frontend configuration for Vite + React app deployed with Cloudflare Workers.
* Uses relative paths since Worker and frontend run on same origin.
*
* Key Differences from standalone Vite:
* - API URLs are relative (not absolute)
* - No CORS issues (same origin)
* - Worker handles routing, serves static assets
*/
import "@crayonai/react-ui/styles/index.css";
import { ThemeProvider, C1Component } from "@thesysai/genui-sdk";
import { useState } from "react";
import "./App.css";
/**
 * Chat UI for the Cloudflare Worker backend.
 *
 * Posts the user's question to the same-origin `/api/chat` endpoint and
 * renders the returned C1 payload with the TheSys GenUI SDK. API paths are
 * deliberately relative: the Worker serves this frontend, so there is no
 * cross-origin hop and no CORS preflight.
 */
export default function App() {
  const [isLoading, setIsLoading] = useState(false);
  const [c1Response, setC1Response] = useState("");
  const [question, setQuestion] = useState("");
  const [error, setError] = useState<string | null>(null);

  /**
   * Send a prompt to the backend and store the resulting C1 response.
   *
   * @param query - user prompt; whitespace-only input is ignored
   * @param previousResponse - prior C1 payload for follow-up actions;
   *   falls back to the current `c1Response` state when omitted
   */
  const makeApiCall = async (query: string, previousResponse?: string) => {
    if (!query.trim()) return;
    setIsLoading(true);
    setError(null);
    try {
      // Relative path — the Worker handles /api/* on the same origin.
      const response = await fetch("/api/chat", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
          prompt: query,
          previousC1Response: previousResponse || c1Response,
        }),
      });
      if (!response.ok) {
        // The error body may not be JSON (e.g. an HTML error page from the
        // platform). Parse defensively so a JSON parse failure cannot mask
        // the real HTTP status.
        let message = `HTTP ${response.status}`;
        try {
          const errorData: unknown = await response.json();
          if (
            typeof errorData === "object" &&
            errorData !== null &&
            typeof (errorData as { error?: unknown }).error === "string"
          ) {
            message = (errorData as { error: string }).error;
          }
        } catch {
          // Non-JSON body: keep the status-based message.
        }
        throw new Error(message);
      }
      // Only `response` is read here; type it instead of leaking `any`.
      const data: { response?: string } = await response.json();
      setC1Response(data.response ?? "");
      setQuestion("");
    } catch (err) {
      console.error("API Error:", err);
      setError(err instanceof Error ? err.message : "Failed to get response");
    } finally {
      setIsLoading(false);
    }
  };

  // Submit handler: prevent the browser's form navigation, then fire the call.
  const handleSubmit = (e: React.FormEvent) => {
    e.preventDefault();
    makeApiCall(question);
  };

  return (
    <div className="app-container">
      <header className="app-header">
        <h1>Cloudflare AI Assistant</h1>
        <p>Powered by Workers + TheSys C1</p>
      </header>
      <form onSubmit={handleSubmit} className="input-form">
        <input
          type="text"
          value={question}
          onChange={(e) => setQuestion(e.target.value)}
          placeholder="Ask me anything..."
          disabled={isLoading}
          className="question-input"
          autoFocus
        />
        <button
          type="submit"
          disabled={isLoading || !question.trim()}
          className="submit-button"
        >
          {isLoading ? "Processing..." : "Send"}
        </button>
      </form>
      {error && (
        <div className="error-message">
          <strong>Error:</strong> {error}
        </div>
      )}
      {c1Response && (
        <div className="response-container">
          <ThemeProvider>
            <C1Component
              c1Response={c1Response}
              isStreaming={isLoading}
              updateMessage={(message) => setC1Response(message)}
              onAction={({ llmFriendlyMessage }) => {
                // Interactive UI actions re-enter the chat loop with the
                // current response as context; ignore while a call is live.
                if (!isLoading) {
                  makeApiCall(llmFriendlyMessage, c1Response);
                }
              }}
            />
          </ThemeProvider>
        </div>
      )}
    </div>
  );
}
/**
* vite.config.ts Configuration
*
* IMPORTANT: When using @cloudflare/vite-plugin, the Worker runs
* alongside Vite on the same port, so use relative API paths.
*
* import { defineConfig } from "vite";
* import react from "@vitejs/plugin-react";
* import { cloudflare } from "@cloudflare/vite-plugin";
*
* export default defineConfig({
* plugins: [
* react(),
* cloudflare({
* configPath: "./wrangler.jsonc",
* }),
* ],
* build: {
* outDir: "dist",
* },
* });
*/
/**
* Alternative: Streaming Setup
*
* For streaming responses, modify the API call:
*
* const makeStreamingApiCall = async (query: string) => {
* setIsLoading(true);
* setC1Response("");
*
* const response = await fetch("/api/chat/stream", {
* method: "POST",
* headers: { "Content-Type": "application/json" },
* body: JSON.stringify({ prompt: query }),
* });
*
* if (!response.ok) {
* throw new Error("Stream failed");
* }
*
* const reader = response.body?.getReader();
* if (!reader) return;
*
* const decoder = new TextDecoder();
* let accumulated = "";
*
* while (true) {
* const { done, value } = await reader.read();
* if (done) break;
*
* const chunk = decoder.decode(value);
* accumulated += chunk;
* setC1Response(accumulated);
* }
*
* setIsLoading(false);
* };
*/
/**
* Deployment Steps:
*
* 1. Build frontend:
* npm run build
*
* 2. Deploy to Cloudflare:
* npx wrangler deploy
*
* 3. Set secrets:
* npx wrangler secret put THESYS_API_KEY
*
* 4. Test:
* Visit your-worker.workers.dev
*/

View File

@@ -0,0 +1,247 @@
/**
* Cloudflare Worker Backend with Hono + TheSys C1
*
* File: backend/src/index.ts
*
* Features:
* - Hono routing
* - TheSys C1 API proxy
* - Streaming support
* - Static assets serving
* - CORS handling
*/
import { Hono } from "hono";
import { cors } from "hono/cors";
import { serveStatic } from "hono/cloudflare-workers";
// Worker bindings declared in wrangler.jsonc:
// - THESYS_API_KEY: secret, set via `npx wrangler secret put THESYS_API_KEY`
// - ASSETS: static-assets binding (Vite build output)
type Bindings = {
  THESYS_API_KEY: string;
  ASSETS: Fetcher;
};

const app = new Hono<{ Bindings: Bindings }>();

// CORS middleware — registered before any route so it applies to all of them.
// NOTE(review): with the frontend served from this same Worker, `origin: "*"`
// is wider than strictly required; presumably kept for external API
// consumers — confirm before tightening.
app.use("/*", cors({
  origin: "*",
  allowMethods: ["GET", "POST", "OPTIONS"],
  allowHeaders: ["Content-Type", "Authorization"],
}));
// ============================================================================
// Chat API Endpoint
// ============================================================================
app.post("/api/chat", async (c) => {
try {
const { prompt, previousC1Response } = await c.req.json();
if (!prompt || typeof prompt !== "string") {
return c.json({ error: "Invalid prompt" }, 400);
}
// Check API key binding
if (!c.env.THESYS_API_KEY) {
console.error("THESYS_API_KEY binding not found");
return c.json({ error: "Server configuration error" }, 500);
}
// Build messages
const messages = [
{
role: "system",
content: "You are a helpful AI assistant that generates interactive UI.",
},
{
role: "user",
content: prompt,
},
];
if (previousC1Response) {
messages.splice(1, 0, {
role: "assistant",
content: previousC1Response,
});
}
// Call TheSys C1 API
const response = await fetch(
"https://api.thesys.dev/v1/embed/chat/completions",
{
method: "POST",
headers: {
"Authorization": `Bearer ${c.env.THESYS_API_KEY}`,
"Content-Type": "application/json",
},
body: JSON.stringify({
model: "c1/openai/gpt-5/v-20250930",
messages,
stream: false, // Or handle streaming
temperature: 0.7,
max_tokens: 2000,
}),
}
);
if (!response.ok) {
const error = await response.text();
console.error("TheSys API Error:", error);
return c.json(
{ error: "Failed to get AI response" },
response.status
);
}
const data = await response.json();
return c.json({
response: data.choices[0]?.message?.content || "",
usage: data.usage,
});
} catch (error) {
console.error("Chat endpoint error:", error);
return c.json(
{ error: error instanceof Error ? error.message : "Internal error" },
500
);
}
});
// ============================================================================
// Streaming Chat Endpoint
// ============================================================================
app.post("/api/chat/stream", async (c) => {
try {
const { prompt } = await c.req.json();
const response = await fetch(
"https://api.thesys.dev/v1/embed/chat/completions",
{
method: "POST",
headers: {
"Authorization": `Bearer ${c.env.THESYS_API_KEY}`,
"Content-Type": "application/json",
},
body: JSON.stringify({
model: "c1/openai/gpt-5/v-20250930",
messages: [
{ role: "system", content: "You are a helpful assistant." },
{ role: "user", content: prompt },
],
stream: true,
}),
}
);
if (!response.ok) {
return c.json({ error: "Stream failed" }, response.status);
}
// Return the stream directly
return new Response(response.body, {
headers: {
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache",
"Connection": "keep-alive",
},
});
} catch (error) {
console.error("Stream error:", error);
return c.json({ error: "Stream failed" }, 500);
}
});
// ============================================================================
// Health Check
// ============================================================================
// Liveness probe: confirms the Worker is serving and reports the current time.
app.get("/api/health", (c) =>
  c.json({ status: "ok", timestamp: new Date().toISOString() })
);
// ============================================================================
// Serve Static Assets (Vite build output)
// ============================================================================
// Catch-all: delegate any non-API path to the Workers Assets binding
// (`ASSETS`, declared in wrangler.jsonc and in `Bindings` above), which
// serves the Vite build output with SPA fallback. `serveStatic` from
// hono/cloudflare-workers targets the legacy Workers Sites setup (it needs
// a site manifest, which this project doesn't have), and its `mimes: {}`
// override would strip content types.
app.get("/*", (c) => c.env.ASSETS.fetch(c.req.raw));

export default app;
/**
* Alternative: Using Workers AI directly (cheaper for some models)
*
* type Bindings = {
* AI: any; // Cloudflare AI binding
* };
*
* app.post("/api/chat", async (c) => {
* const { prompt } = await c.req.json();
*
* const aiResponse = await c.env.AI.run('@cf/meta/llama-3-8b-instruct', {
* messages: [
* { role: "system", content: "You are a helpful assistant." },
* { role: "user", content: prompt },
* ],
* });
*
* // Then optionally send to TheSys C1 for UI generation
* const c1Response = await fetch("https://api.thesys.dev/v1/embed/chat/completions", {
* method: "POST",
* headers: {
* "Authorization": `Bearer ${c.env.THESYS_API_KEY}`,
* "Content-Type": "application/json",
* },
* body: JSON.stringify({
* model: "c1/openai/gpt-5/v-20250930",
* messages: [
* {
* role: "system",
* content: "Generate a UI for this content: " + aiResponse.response,
* },
* ],
* }),
* });
*
* // ... return c1Response
* });
*/
/**
* Alternative: With D1 Database for message persistence
*
* type Bindings = {
* THESYS_API_KEY: string;
* DB: D1Database; // D1 binding
* };
*
* app.post("/api/chat", async (c) => {
* const { userId, threadId, prompt } = await c.req.json();
*
* // Save user message
* await c.env.DB.prepare(
* "INSERT INTO messages (thread_id, user_id, role, content) VALUES (?, ?, ?, ?)"
* )
* .bind(threadId, userId, "user", prompt)
* .run();
*
* // Get conversation history
* const { results } = await c.env.DB.prepare(
* "SELECT role, content FROM messages WHERE thread_id = ? ORDER BY created_at"
* )
* .bind(threadId)
* .all();
*
* const messages = [
* { role: "system", content: "You are a helpful assistant." },
* ...results,
* ];
*
* // Call TheSys API with full history...
* });
*/

View File

@@ -0,0 +1,106 @@
{
  // Cloudflare Worker Configuration with Static Assets
  //
  // This configures a Worker that serves a Vite+React frontend
  // and handles API routes for TheSys C1 integration.
  //
  // Prerequisites:
  // 1. Set THESYS_API_KEY secret: npx wrangler secret put THESYS_API_KEY
  // 2. Build frontend: npm run build
  // 3. Deploy: npx wrangler deploy
  "name": "thesys-chat-worker",
  "compatibility_date": "2025-10-26",
  // Node.js compatibility for packages like the OpenAI SDK.
  // NOTE: this replaces the legacy top-level "node_compat" key — wrangler
  // rejects configs that set both.
  "compatibility_flags": ["nodejs_compat"],
  // Main worker file (Hono backend)
  "main": "backend/src/index.ts",
  // Static assets configuration (Vite build output)
  "assets": {
    "directory": "dist",
    "binding": "ASSETS",
    "html_handling": "auto-trailing-slash",
    "not_found_handling": "single-page-application"
  },
  // Environment variables (non-sensitive)
  "vars": {
    "ENVIRONMENT": "production",
    "LOG_LEVEL": "info"
  },
  // Secrets (set via CLI, not in this file!)
  // npx wrangler secret put THESYS_API_KEY
  // npx wrangler secret put TAVILY_API_KEY (optional, for tool calling)
  // Optional: D1 Database binding for message persistence
  // "d1_databases": [
  //   {
  //     "binding": "DB",
  //     "database_name": "thesys-chat-db",
  //     "database_id": "your-database-id"
  //   }
  // ],
  // Optional: KV namespace for caching
  // "kv_namespaces": [
  //   {
  //     "binding": "KV",
  //     "id": "your-kv-id"
  //   }
  // ],
  // Optional: Workers AI binding (for hybrid approach)
  // "ai": {
  //   "binding": "AI"
  // },
  // Optional: Durable Objects for real-time features
  // "durable_objects": {
  //   "bindings": [
  //     {
  //       "name": "CHAT_SESSION",
  //       "class_name": "ChatSession",
  //       "script_name": "thesys-chat-worker"
  //     }
  //   ]
  // },
  // Build configuration
  "build": {
    "command": "npm run build"
  },
  // Development settings
  "dev": {
    "port": 8787,
    "local_protocol": "http"
  },
  // Observability
  "observability": {
    "enabled": true
  },
  // Routes (optional - for custom domains)
  // "routes": [
  //   {
  //     "pattern": "chat.yourdomain.com/*",
  //     "zone_name": "yourdomain.com"
  //   }
  // ],
  // Workers Limits
  "limits": {
    "cpu_ms": 50000
  }
  // Placement (optional - for closer to users)
  // "placement": {
  //   "mode": "smart"
  // }
}