/**
 * Custom C1Component Integration with Advanced State Management
 *
 * Shows how to use C1Component with full control over:
 * - Message history
 * - Conversation state
 * - Custom UI layout
 * - Error boundaries
 *
 * Use this when you need more control than C1Chat provides.
 */
import "@crayonai/react-ui/styles/index.css";
import { ThemeProvider, C1Component } from "@thesysai/genui-sdk";
import { useState, useRef, useEffect } from "react";
import { ErrorBoundary } from "react-error-boundary";
import "./App.css";

interface Message {
  id: string;
  role: "user" | "assistant";
  content: string;
  timestamp: Date;
}

function ErrorFallback({
  error,
  resetErrorBoundary,
}: {
  error: Error;
  resetErrorBoundary: () => void;
}) {
  return (
    <div className="error-fallback" role="alert">
      <h2>Something went wrong</h2>
      <pre>{error.message}</pre>
      <button onClick={resetErrorBoundary}>Try again</button>
    </div>
  );
}

export default function App() {
  const [messages, setMessages] = useState<Message[]>([]);
  const [currentResponse, setCurrentResponse] = useState("");
  const [isStreaming, setIsStreaming] = useState(false);
  const [inputValue, setInputValue] = useState("");
  const messagesEndRef = useRef<HTMLDivElement>(null);

  // Auto-scroll to bottom when new messages arrive
  useEffect(() => {
    messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
  }, [messages, currentResponse]);

  const sendMessage = async (userMessage: string) => {
    if (!userMessage.trim() || isStreaming) return;

    // Add user message
    const userMsg: Message = {
      id: crypto.randomUUID(),
      role: "user",
      content: userMessage,
      timestamp: new Date(),
    };
    setMessages((prev) => [...prev, userMsg]);
    setInputValue("");
    setIsStreaming(true);
    setCurrentResponse("");

    try {
      const response = await fetch("/api/chat", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
          messages: [...messages, userMsg].map((m) => ({
            role: m.role,
            content: m.content,
          })),
        }),
      });

      if (!response.ok) {
        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
      }

      const data = await response.json();

      // Add assistant response
      const assistantMsg: Message = {
        id: crypto.randomUUID(),
        role: "assistant",
        content: data.response,
        timestamp: new Date(),
      };
      setCurrentResponse(data.response);
      setMessages((prev) => [...prev, assistantMsg]);
    } catch (error) {
      console.error("Error sending message:", error);

      // Add error message so the failure is visible in the conversation
      const errorMsg: Message = {
        id: crypto.randomUUID(),
        role: "assistant",
        content: `Error: ${
          error instanceof Error ? error.message : "Failed to get response"
        }`,
        timestamp: new Date(),
      };
      setMessages((prev) => [...prev, errorMsg]);
    } finally {
      setIsStreaming(false);
    }
  };

  const handleSubmit = (e: React.FormEvent) => {
    e.preventDefault();
    sendMessage(inputValue);
  };

  const clearConversation = () => {
    setMessages([]);
    setCurrentResponse("");
  };

  return (
    <ThemeProvider>
      <div className="app">
        <header className="app-header">
          <h1>AI Assistant</h1>
          <button
            type="button"
            onClick={clearConversation}
            disabled={isStreaming}
            className="clear-button"
          >
            Clear
          </button>
        </header>

        <div className="messages">
          {messages.map((message) => (
            <div key={message.id} className={`message ${message.role}`}>
              <div className="message-meta">
                <span>{message.role === "user" ? "You" : "AI"}</span>{" "}
                <span>{message.timestamp.toLocaleTimeString()}</span>
              </div>
              {message.role === "assistant" ? (
                <ErrorBoundary FallbackComponent={ErrorFallback}>
                  <C1Component
                    c1Response={message.content}
                    updateMessage={(updatedContent) => {
                      setCurrentResponse(updatedContent);
                      setMessages((prev) =>
                        prev.map((m) =>
                          m.id === message.id
                            ? { ...m, content: updatedContent }
                            : m
                        )
                      );
                    }}
                    onAction={({ llmFriendlyMessage }) => {
                      sendMessage(llmFriendlyMessage);
                    }}
                  />
                </ErrorBoundary>
              ) : (
                <p>{message.content}</p>
              )}
            </div>
          ))}

          {isStreaming && !currentResponse && (
            <div className="loading-indicator">AI is thinking...</div>
          )}
          <div ref={messagesEndRef} />
        </div>

        <form onSubmit={handleSubmit} className="message-form">
          <input
            type="text"
            value={inputValue}
            onChange={(e) => setInputValue(e.target.value)}
            placeholder="Type your message..."
            disabled={isStreaming}
            className="message-input"
            autoFocus
          />
          <button type="submit" disabled={isStreaming || !inputValue.trim()}>
            Send
          </button>
        </form>
      </div>
    </ThemeProvider>
  );
}
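
/*
 * The component above assumes a backend route at /api/chat. That server code is
 * not part of this file; the types below are only an illustrative sketch of the
 * request/response contract implied by the fetch call in sendMessage (an array
 * of { role, content } messages in, a single C1-renderable `response` string
 * out). Adjust them to match your actual API.
 */
export type ChatRequestBody = {
  messages: { role: "user" | "assistant"; content: string }[];
};

export type ChatResponseBody = {
  // The string handed to <C1Component c1Response={...}> for rendering.
  response: string;
};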