Initial commit

This commit is contained in:
Zhongwei Li
2025-11-30 09:01:09 +08:00
commit 9621ada71c
9 changed files with 4298 additions and 0 deletions

View File

@@ -0,0 +1,689 @@
# Advanced Features
Advanced Workers capabilities for complex applications and enterprise use cases.
## Workers for Platforms
Deploy and manage customer-provided Workers on your infrastructure.
**Use cases:**
- SaaS platforms where customers write custom code
- Low-code/no-code platforms
- Plugin systems
- Custom business logic hosting
### Setup
**wrangler.toml:**
```toml
[[dispatch_namespaces]]
binding = "DISPATCHER"
namespace = "my-platform"
# Optional outbound Worker
outbound = { service = "my-outbound-worker" }
```
### Dynamic Dispatch
Route requests to customer Workers dynamically.
**Dispatch Worker:**
```typescript
export default {
async fetch(request: Request, env: Env): Promise<Response> {
const url = new URL(request.url);
const customerId = url.hostname.split(".")[0];
// Get customer's Worker
const userWorker = env.DISPATCHER.get(customerId);
// Forward request to customer's Worker
return userWorker.fetch(request);
},
};
```
### Upload User Workers
**Via API:**
```typescript
async function uploadUserWorker(
customerId: string,
code: string,
env: Env
): Promise<void> {
const response = await fetch(
`https://api.cloudflare.com/client/v4/accounts/${env.ACCOUNT_ID}/workers/dispatch/namespaces/${env.NAMESPACE}/scripts/${customerId}`,
{
method: "PUT",
headers: {
"Authorization": `Bearer ${env.API_TOKEN}`,
"Content-Type": "application/javascript",
},
body: code,
}
);
if (!response.ok) {
throw new Error(`Failed to upload: ${await response.text()}`);
}
}
```
**Via Wrangler:**
```bash
wrangler dispatch-namespace put my-platform customer-123 ./customer-worker.js
```
### Outbound Workers
Control what customer Workers can access.
```typescript
export default {
async fetch(request: Request, env: Env): Promise<Response> {
// Validate request from customer Worker
const url = new URL(request.url);
// Block certain domains
const blockedDomains = ["internal.example.com"];
if (blockedDomains.some((d) => url.hostname.includes(d))) {
return new Response("Forbidden", { status: 403 });
}
// Add authentication (incoming request headers are immutable — clone first)
const outbound = new Request(request);
outbound.headers.set("X-Platform-Auth", env.PLATFORM_SECRET);
// Forward to destination
return fetch(outbound);
},
};
```
### Limits and Quotas
Set limits for customer Workers.
```toml
[[dispatch_namespaces]]
binding = "DISPATCHER"
namespace = "my-platform"
[dispatch_namespaces.outbound]
service = "outbound-worker"
parameters = { cpu_ms = 50, requests = 1000 }
```
## Smart Placement
Automatically place Workers near data sources to reduce latency.
### Enable Smart Placement
**wrangler.toml:**
```toml
[placement]
mode = "smart"
```
**How it works:**
- Workers monitors where your Worker makes subrequests
- Automatically places future executions near those data sources
- Reduces round-trip time for database/API calls
- No code changes required
**Best for:**
- Database-heavy Workers
- Workers making many external API calls
- Geographically distributed data sources
**Limitations:**
- Not compatible with Durable Objects
- Requires Workers Standard plan
## WebSockets
Build real-time applications with WebSockets.
### Basic WebSocket Server
```typescript
export default {
async fetch(request: Request): Promise<Response> {
const upgradeHeader = request.headers.get("Upgrade");
if (upgradeHeader !== "websocket") {
return new Response("Expected WebSocket", { status: 426 });
}
const [client, server] = Object.values(new WebSocketPair());
// Handle WebSocket messages
server.accept();
server.addEventListener("message", (event) => {
console.log("Received:", event.data);
// Echo message back
server.send(`Echo: ${event.data}`);
});
server.addEventListener("close", (event) => {
console.log("WebSocket closed:", event.code, event.reason);
});
server.addEventListener("error", (event) => {
console.error("WebSocket error:", event);
});
return new Response(null, {
status: 101,
webSocket: client,
});
},
};
```
### WebSocket with Durable Objects
Use Durable Objects for coordinated WebSocket servers.
**Durable Object:**
```typescript
export class ChatRoom {
state: DurableObjectState;
sessions: Set<WebSocket>;
constructor(state: DurableObjectState) {
this.state = state;
this.sessions = new Set();
}
async fetch(request: Request): Promise<Response> {
const [client, server] = Object.values(new WebSocketPair());
server.accept();
this.sessions.add(server);
server.addEventListener("message", (event) => {
// Broadcast to all connected clients
this.broadcast(event.data as string);
});
server.addEventListener("close", () => {
this.sessions.delete(server);
});
return new Response(null, {
status: 101,
webSocket: client,
});
}
broadcast(message: string) {
for (const session of this.sessions) {
try {
session.send(message);
} catch (error) {
// Remove failed sessions
this.sessions.delete(session);
}
}
}
}
```
**Worker:**
```typescript
export default {
async fetch(request: Request, env: Env): Promise<Response> {
const url = new URL(request.url);
const roomId = url.pathname.substring(1) || "default";
// Get Durable Object for this room
const id = env.CHAT_ROOM.idFromName(roomId);
const room = env.CHAT_ROOM.get(id);
return room.fetch(request);
},
};
```
### WebSocket Hibernation
Reduce costs by hibernating idle WebSocket connections.
```typescript
export class HibernatingChatRoom {
state: DurableObjectState;
constructor(state: DurableObjectState) {
this.state = state;
// Enable hibernation
state.setWebSocketAutoResponse(
new WebSocketRequestResponsePair("ping", "pong")
);
}
async fetch(request: Request): Promise<Response> {
const [client, server] = Object.values(new WebSocketPair());
// Accept with hibernation
this.state.acceptWebSocket(server);
return new Response(null, {
status: 101,
webSocket: client,
});
}
async webSocketMessage(ws: WebSocket, message: string) {
// Called when message received (Worker woken up)
const data = JSON.parse(message);
// Broadcast to all
this.state.getWebSockets().forEach((socket) => {
socket.send(message);
});
}
async webSocketClose(ws: WebSocket, code: number, reason: string) {
// Cleanup on close
ws.close(code, reason);
}
}
```
## Streaming
Stream responses for large datasets.
### Streaming JSON
```typescript
export default {
async fetch(request: Request, env: Env): Promise<Response> {
const { readable, writable } = new TransformStream();
const writer = writable.getWriter();
const encoder = new TextEncoder();
// Stream in background
(async () => {
try {
await writer.write(encoder.encode("["));
const results = await env.DB.prepare(
"SELECT * FROM large_table"
).all();
for (let i = 0; i < results.results.length; i++) {
const item = results.results[i];
await writer.write(encoder.encode(JSON.stringify(item)));
if (i < results.results.length - 1) {
await writer.write(encoder.encode(","));
}
}
await writer.write(encoder.encode("]"));
await writer.close();
} catch (error) {
await writer.abort(error);
}
})();
return new Response(readable, {
headers: { "Content-Type": "application/json" },
});
},
};
```
### Server-Sent Events (SSE)
```typescript
export default {
async fetch(request: Request): Promise<Response> {
const { readable, writable } = new TransformStream();
const writer = writable.getWriter();
const encoder = new TextEncoder();
// Send events in background
(async () => {
try {
for (let i = 0; i < 10; i++) {
await writer.write(
encoder.encode(`data: ${JSON.stringify({ count: i })}\n\n`)
);
await new Promise((resolve) => setTimeout(resolve, 1000));
}
await writer.close();
} catch (error) {
await writer.abort(error);
}
})();
return new Response(readable, {
headers: {
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache",
"Connection": "keep-alive",
},
});
},
};
```
## Custom Domains
Configure custom domains for Workers.
### Via Wrangler
```toml
routes = [
{ pattern = "api.example.com/*", zone_name = "example.com" }
]
```
### Via Dashboard
1. Navigate to Workers & Pages
2. Select your Worker
3. Go to Settings > Triggers
4. Add Custom Domain
### Multiple Domains
```toml
routes = [
{ pattern = "api.example.com/*", zone_name = "example.com" },
{ pattern = "api.other-domain.com/*", zone_name = "other-domain.com" }
]
```
## Static Assets
Serve static files with Workers.
### Configuration
**wrangler.toml:**
```toml
[assets]
directory = "./public"
binding = "ASSETS"
# HTML handling
html_handling = "auto-trailing-slash"
# 404 handling
not_found_handling = "single-page-application"
```
### Custom Asset Handling
```typescript
export default {
async fetch(request: Request, env: Env): Promise<Response> {
const url = new URL(request.url);
// API routes
if (url.pathname.startsWith("/api/")) {
return handleAPI(request, env);
}
// Static assets
try {
const asset = await env.ASSETS.fetch(request);
// Add custom headers
const response = new Response(asset.body, asset);
response.headers.set("X-Custom-Header", "value");
return response;
} catch {
return new Response("Not found", { status: 404 });
}
},
};
```
### Framework Integration
**Next.js:**
```bash
npm create cloudflare@latest my-next-app -- --framework=next
```
**Remix:**
```bash
npm create cloudflare@latest my-remix-app -- --framework=remix
```
**Astro:**
```bash
npm create cloudflare@latest my-astro-app -- --framework=astro
```
## TCP Sockets
Connect to external services via TCP.
```typescript
import { connect } from "cloudflare:sockets";
export default {
async fetch(request: Request): Promise<Response> {
const socket = connect({
hostname: "example.com",
port: 6379, // Redis
});
const writer = socket.writable.getWriter();
const encoder = new TextEncoder();
// Send Redis command
await writer.write(encoder.encode("PING\r\n"));
// Read response
const reader = socket.readable.getReader();
const { value } = await reader.read();
const response = new TextDecoder().decode(value);
return Response.json({ response });
},
};
```
## HTML Rewriter
Transform HTML on the fly.
```typescript
class LinkRewriter {
element(element: Element) {
const href = element.getAttribute("href");
if (href && href.startsWith("/")) {
// Make absolute
element.setAttribute("href", `https://example.com${href}`);
}
// Add tracking
element.setAttribute("data-tracked", "true");
}
}
export default {
async fetch(request: Request): Promise<Response> {
const response = await fetch(request);
return new HTMLRewriter()
.on("a", new LinkRewriter())
.on("img", {
element(element) {
// Lazy load images
element.setAttribute("loading", "lazy");
},
})
.transform(response);
},
};
```
## Scheduled Events (Cron)
Run Workers on a schedule.
**wrangler.toml:**
```toml
[triggers]
crons = [
"0 0 * * *", # Daily at midnight
"*/15 * * * *", # Every 15 minutes
"0 9 * * 1-5" # Weekdays at 9 AM
]
```
**Handler:**
```typescript
export default {
async scheduled(event: ScheduledEvent, env: Env, ctx: ExecutionContext) {
console.log("Cron trigger:", event.cron);
// Cleanup old data
await env.DB.prepare("DELETE FROM logs WHERE created_at < ?")
.bind(Date.now() - 30 * 24 * 60 * 60 * 1000)
.run();
// Send daily report
ctx.waitUntil(sendDailyReport(env));
},
};
```
## Email Handler
Process incoming emails.
**wrangler.toml:**
```toml
[email]
name = "support@example.com"
```
**Handler:**
```typescript
export default {
async email(message: ForwardableEmailMessage, env: Env) {
console.log("From:", message.from);
console.log("Subject:", message.headers.get("subject"));
// Forward to support team
await message.forward("support-team@example.com");
// Or process email
const rawEmail = await new Response(message.raw).text();
await env.EMAILS.put(message.headers.get("message-id"), rawEmail);
},
};
```
## Tail Workers
Monitor and log other Workers.
**wrangler.toml:**
```toml
tail_consumers = [{ service = "my-logging-worker" }]
```
**Tail Worker:**
```typescript
export default {
async tail(events: TraceItem[], env: Env) {
for (const event of events) {
if (event.outcome === "exception") {
// Log errors to external service
await fetch("https://logs.example.com", {
method: "POST",
body: JSON.stringify({
scriptName: event.scriptName,
error: event.exceptions,
timestamp: event.eventTimestamp,
}),
});
}
}
},
};
```
## Multi-Region Deployments
Deploy Workers to specific regions (Enterprise only).
```toml
[placement]
mode = "regional"
regions = ["us-east", "eu-west"]
```
## Workers Analytics Engine
Write custom metrics.
```toml
[[analytics_engine_datasets]]
binding = "ANALYTICS"
```
```typescript
export default {
async fetch(request: Request, env: Env): Promise<Response> {
const start = Date.now();
const response = await handleRequest(request, env);
const duration = Date.now() - start;
// Write custom metrics
env.ANALYTICS.writeDataPoint({
blobs: [request.url, request.method, String(response.status)],
doubles: [duration],
indexes: [request.headers.get("user-agent") || "unknown"],
});
return response;
},
};
```
## Additional Resources
- **Workers for Platforms**: https://developers.cloudflare.com/cloudflare-for-platforms/workers-for-platforms/
- **Smart Placement**: https://developers.cloudflare.com/workers/configuration/smart-placement/
- **WebSockets**: https://developers.cloudflare.com/workers/runtime-apis/websockets/
- **Static Assets**: https://developers.cloudflare.com/workers/static-assets/
- **Scheduled Events**: https://developers.cloudflare.com/workers/configuration/cron-triggers/

View File

@@ -0,0 +1,665 @@
# Complete Bindings Guide
Bindings are how Workers connect to Cloudflare resources and external services. They provide zero-latency access to storage, databases, queues, and other services.
## Storage Bindings
### KV (Key-Value Storage)
Global, low-latency, eventually consistent key-value storage.
**Best for:**
- Configuration data
- User sessions
- Cache
- Small objects (<25 MB)
**Configuration:**
```toml
[[kv_namespaces]]
binding = "MY_KV"
id = "your-namespace-id"
preview_id = "preview-namespace-id" # For local dev
```
**API:**
```typescript
// Write
await env.MY_KV.put("key", "value");
await env.MY_KV.put("key", "value", {
expirationTtl: 3600, // Expire in 1 hour
metadata: { user: "123" }
});
// Read
const value = await env.MY_KV.get("key");
const json = await env.MY_KV.get("key", "json");
const buffer = await env.MY_KV.get("key", "arrayBuffer");
// With metadata (renamed binding: `value` is already declared above)
const { value: valueWithMeta, metadata } = await env.MY_KV.getWithMetadata("key");
// Delete
await env.MY_KV.delete("key");
// List keys
const keys = await env.MY_KV.list({ prefix: "user:" });
```
**Limits:**
- Key size: 512 bytes
- Value size: 25 MB
- Write rate: 1 write/second per key (eventually consistent)
- Read rate: unlimited
### D1 (SQL Database)
Serverless SQL database built on SQLite.
**Best for:**
- Structured data
- Relational data
- Complex queries
- ACID transactions
**Configuration:**
```toml
[[d1_databases]]
binding = "DB"
database_name = "my-database"
database_id = "your-database-id"
```
**API:**
```typescript
// Query with bind parameters
const result = await env.DB.prepare(
"SELECT * FROM users WHERE id = ?"
).bind(userId).all();
// Insert
await env.DB.prepare(
"INSERT INTO users (name, email) VALUES (?, ?)"
).bind(name, email).run();
// Update
await env.DB.prepare(
"UPDATE users SET last_login = ? WHERE id = ?"
).bind(new Date().toISOString(), userId).run();
// Transaction
const results = await env.DB.batch([
env.DB.prepare("INSERT INTO users (name) VALUES (?)").bind("Alice"),
env.DB.prepare("INSERT INTO users (name) VALUES (?)").bind("Bob"),
]);
// First row only
const user = await env.DB.prepare(
"SELECT * FROM users WHERE id = ?"
).bind(userId).first();
```
**Features:**
- Read replication (low latency reads globally)
- Time Travel (restore to any point in last 30 days)
- Backups
- Migrations via Wrangler
**Limits:**
- Database size: 10 GB (Paid), 500 MB (Free)
- Rows read: 25M/day (Paid), 5M/day (Free)
- Rows written: 50M/day (Paid), 100K/day (Free)
### R2 (Object Storage)
S3-compatible object storage with zero egress fees.
**Best for:**
- Large files
- Media storage
- Static assets
- Backups
**Configuration:**
```toml
[[r2_buckets]]
binding = "MY_BUCKET"
bucket_name = "my-bucket"
jurisdiction = "eu" # Optional: eu or fedramp
```
**API:**
```typescript
// Put object
await env.MY_BUCKET.put("file.txt", "contents", {
httpMetadata: {
contentType: "text/plain",
cacheControl: "max-age=3600",
},
customMetadata: {
user: "123",
},
});
// Put from stream
await env.MY_BUCKET.put("large-file.bin", request.body);
// Get object
const object = await env.MY_BUCKET.get("file.txt");
if (object) {
const text = await object.text();
const buffer = await object.arrayBuffer();
const stream = object.body; // ReadableStream
// Metadata
console.log(object.httpMetadata);
console.log(object.customMetadata);
}
// Get with range
const partial = await env.MY_BUCKET.get("file.txt", {
range: { offset: 0, length: 1024 }
});
// Head (metadata only)
const head = await env.MY_BUCKET.head("file.txt");
// Delete
await env.MY_BUCKET.delete("file.txt");
// List objects
const objects = await env.MY_BUCKET.list({
prefix: "images/",
limit: 1000,
});
// Multipart upload (for large files)
const upload = await env.MY_BUCKET.createMultipartUpload("large.bin");
const part = await upload.uploadPart(1, data);
await upload.complete([part]);
```
**Features:**
- Automatic multipart uploads
- Object versioning
- Event notifications
- Public buckets
- Custom domains
**Limits:**
- Max object size: 5 TB
- Storage: unlimited
- Operations: unlimited
## Compute Bindings
### Durable Objects
Strongly consistent, coordinated stateful objects with SQLite storage.
**Best for:**
- Real-time collaboration
- WebSocket servers
- Coordination
- Strong consistency
- Per-user/per-room state
**Configuration:**
```toml
[[durable_objects.bindings]]
name = "COUNTER"
class_name = "Counter"
script_name = "my-worker" # Optional: if in different Worker
[[migrations]]
tag = "v1"
new_classes = ["Counter"]
```
**Durable Object Class:**
```typescript
export class Counter {
state: DurableObjectState;
constructor(state: DurableObjectState, env: Env) {
this.state = state;
}
async fetch(request: Request): Promise<Response> {
// Get from storage
let count = (await this.state.storage.get("count")) || 0;
// Increment
count++;
// Put to storage
await this.state.storage.put("count", count);
return Response.json({ count });
}
// Alarms (scheduled actions)
async alarm() {
await this.state.storage.delete("count");
}
}
```
**Worker Usage:**
```typescript
export default {
async fetch(request: Request, env: Env): Promise<Response> {
// Get Durable Object ID
const id = env.COUNTER.idFromName("global-counter");
// Get stub (reference)
const stub = env.COUNTER.get(id);
// Call via fetch
return stub.fetch(request);
},
};
```
**Storage API:**
```typescript
// Single operations
await this.state.storage.put("key", "value");
const value = await this.state.storage.get("key");
await this.state.storage.delete("key");
// Batch operations
await this.state.storage.put({
key1: "value1",
key2: "value2",
});
const values = await this.state.storage.get(["key1", "key2"]);
// List
const entries = await this.state.storage.list();
// SQL (new)
const result = await this.state.storage.sql.exec(
"SELECT * FROM users WHERE id = ?", userId
);
```
**Features:**
- SQLite-backed storage
- Automatic persistence
- Alarms (scheduled actions)
- WebSocket Hibernation
- Point-in-time recovery
### Queues
Message queuing for async processing with guaranteed delivery.
**Best for:**
- Background jobs
- Async processing
- Decoupling services
- Retry logic
**Configuration:**
```toml
[[queues.producers]]
binding = "MY_QUEUE"
queue = "my-queue"
[[queues.consumers]]
queue = "my-queue"
max_batch_size = 100
max_batch_timeout = 30
```
**Producer (send messages):**
```typescript
export default {
async fetch(request: Request, env: Env): Promise<Response> {
// Send single message
await env.MY_QUEUE.send({ userId: 123, action: "process" });
// Send batch
await env.MY_QUEUE.sendBatch([
{ body: { userId: 123 } },
{ body: { userId: 456 } },
]);
return Response.json({ queued: true });
},
};
```
**Consumer (receive messages):**
```typescript
export default {
async queue(batch: MessageBatch<any>, env: Env): Promise<void> {
for (const message of batch.messages) {
try {
await processMessage(message.body);
message.ack(); // Mark as processed
} catch (error) {
message.retry(); // Retry later
}
}
},
};
```
**Features:**
- Guaranteed delivery
- Automatic retries
- Dead letter queues
- Batch processing
- Pull consumers (API-based)
## AI & ML Bindings
### Workers AI
Run AI models directly from Workers.
**Best for:**
- Text generation (LLMs)
- Embeddings
- Image generation
- Speech recognition
- Translation
**Configuration:**
```toml
[ai]
binding = "AI"
```
**API:**
```typescript
// Text generation
const response = await env.AI.run("@cf/meta/llama-3-8b-instruct", {
messages: [
{ role: "system", content: "You are a helpful assistant" },
{ role: "user", content: "What is Cloudflare?" }
],
});
// Embeddings
const embeddings = await env.AI.run("@cf/baai/bge-base-en-v1.5", {
text: "The quick brown fox jumps over the lazy dog",
});
// Image generation
const image = await env.AI.run("@cf/stabilityai/stable-diffusion-xl-base-1.0", {
prompt: "A sunset over the ocean",
});
// Speech to text
const result = await env.AI.run("@cf/openai/whisper", {
audio: audioData,
});
// Streaming
const stream = await env.AI.run("@cf/meta/llama-3-8b-instruct", {
messages: [{ role: "user", content: "Tell me a story" }],
stream: true,
});
```
### Vectorize
Vector database for similarity search.
**Best for:**
- Semantic search
- RAG (Retrieval Augmented Generation)
- Recommendations
- Embeddings storage
**Configuration:**
```toml
[[vectorize]]
binding = "VECTORIZE"
index_name = "my-index"
```
**API:**
```typescript
// Insert vectors
await env.VECTORIZE.insert([
{ id: "1", values: [0.1, 0.2, ...], metadata: { text: "..." } },
{ id: "2", values: [0.3, 0.4, ...], metadata: { text: "..." } },
]);
// Query (similarity search)
const results = await env.VECTORIZE.query(
[0.15, 0.25, ...], // Query vector
{
topK: 5,
returnMetadata: true,
}
);
// With metadata filtering
const results = await env.VECTORIZE.query(vector, {
topK: 5,
filter: { category: "technology" },
});
```
## Database Bindings
### Hyperdrive
Accelerate access to existing databases via connection pooling and caching.
**Best for:**
- Connecting to existing Postgres/MySQL
- Reducing latency to traditional databases
- Connection pooling
**Configuration:**
```toml
[[hyperdrive]]
binding = "HYPERDRIVE"
id = "your-hyperdrive-id"
```
**Usage with postgres:**
```typescript
import { Client } from "pg";
const client = new Client({
connectionString: env.HYPERDRIVE.connectionString,
});
await client.connect();
const result = await client.query("SELECT * FROM users");
await client.end();
```
**Features:**
- Connection pooling
- Query caching
- Read replicas support
## Service Bindings
Call other Workers via RPC or HTTP.
**Configuration:**
```toml
[[services]]
binding = "AUTH_SERVICE"
service = "auth-worker"
environment = "production"
```
**HTTP-based:**
```typescript
const response = await env.AUTH_SERVICE.fetch(new Request("http://auth/verify"));
```
**RPC-based (recommended):**
```typescript
// In auth-worker
export class AuthService extends WorkerEntrypoint {
async verifyToken(token: string): Promise<boolean> {
// Verify logic
return true;
}
}
// In calling worker
const isValid = await env.AUTH_SERVICE.verifyToken(token);
```
## Additional Bindings
### Analytics Engine
Write custom analytics and metrics.
```toml
[[analytics_engine_datasets]]
binding = "ANALYTICS"
```
```typescript
env.ANALYTICS.writeDataPoint({
blobs: ["user-123", "click"],
doubles: [1.5],
indexes: ["button-1"],
});
```
### Browser Rendering
Control headless browsers.
```toml
browser = { binding = "BROWSER" }
```
```typescript
const browser = await puppeteer.launch(env.BROWSER);
const page = await browser.newPage();
await page.goto("https://example.com");
const screenshot = await page.screenshot();
```
### Rate Limiting
Built-in rate limiting.
```toml
[[unsafe.bindings]]
name = "RATE_LIMITER"
type = "ratelimit"
namespace_id = "your-namespace-id"
simple = { limit = 100, period = 60 }
```
```typescript
const { success } = await env.RATE_LIMITER.limit({ key: userId });
if (!success) {
return new Response("Rate limited", { status: 429 });
}
```
### mTLS
Present client certificates.
```toml
[[mtls_certificates]]
binding = "CERT"
certificate_id = "your-cert-id"
```
```typescript
const response = await fetch("https://api.example.com", {
certificate: env.CERT,
});
```
## Best Practices
### Binding Selection
- **KV**: Configuration, sessions, cache
- **D1**: Structured data, complex queries
- **R2**: Large files, media, backups
- **Durable Objects**: Real-time, strong consistency, coordination
- **Queues**: Background jobs, async processing
- **Workers AI**: AI/ML inference
- **Vectorize**: Similarity search, RAG
### Performance
- Use `ctx.waitUntil()` for non-critical writes
- Batch operations when possible
- Use appropriate consistency models
- Cache frequently accessed data
### Error Handling
Always handle errors from bindings:
```typescript
try {
const value = await env.MY_KV.get("key");
} catch (error) {
// Handle error
console.error("KV error:", error);
return new Response("Service unavailable", { status: 503 });
}
```
### Local Development
Use Wrangler for local testing with bindings:
```bash
# KV
wrangler kv:namespace create MY_KV --preview
# D1
wrangler d1 create my-database
# Local dev with bindings
wrangler dev
```
## Additional Resources
- **Bindings Reference**: https://developers.cloudflare.com/workers/runtime-apis/bindings/
- **KV**: https://developers.cloudflare.com/kv/
- **D1**: https://developers.cloudflare.com/d1/
- **R2**: https://developers.cloudflare.com/r2/
- **Durable Objects**: https://developers.cloudflare.com/durable-objects/
- **Queues**: https://developers.cloudflare.com/queues/
- **Workers AI**: https://developers.cloudflare.com/workers-ai/
- **Vectorize**: https://developers.cloudflare.com/vectorize/

View File

@@ -0,0 +1,805 @@
# Development Best Practices
Patterns and best practices for building robust, maintainable Workers applications.
## Testing
### Vitest Integration
Workers has first-class Vitest integration for unit and integration testing.
**Setup:**
```bash
npm install -D vitest @cloudflare/vitest-pool-workers
```
**vitest.config.ts:**
```typescript
import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config";
export default defineWorkersConfig({
test: {
poolOptions: {
workers: {
wrangler: { configPath: "./wrangler.toml" },
},
},
},
});
```
**Basic Test:**
```typescript
import { env, createExecutionContext, waitOnExecutionContext } from "cloudflare:test";
import { describe, it, expect } from "vitest";
import worker from "./index";
describe("Worker", () => {
it("responds with JSON", async () => {
const request = new Request("http://example.com/api/users");
const ctx = createExecutionContext();
const response = await worker.fetch(request, env, ctx);
await waitOnExecutionContext(ctx);
expect(response.status).toBe(200);
const data = await response.json();
expect(data).toHaveProperty("users");
});
it("handles errors gracefully", async () => {
const request = new Request("http://example.com/api/error");
const ctx = createExecutionContext();
const response = await worker.fetch(request, env, ctx);
expect(response.status).toBe(500);
});
});
```
**Testing with Bindings:**
```typescript
import { env } from "cloudflare:test";
describe("KV operations", () => {
it("reads and writes to KV", async () => {
// env provides access to bindings configured in wrangler.toml
await env.MY_KV.put("test-key", "test-value");
const value = await env.MY_KV.get("test-key");
expect(value).toBe("test-value");
});
});
```
**Testing Durable Objects:**
```typescript
import { env, runInDurableObject } from "cloudflare:test";
describe("Counter Durable Object", () => {
it("increments counter", async () => {
await runInDurableObject(env.COUNTER, async (instance, state) => {
const request = new Request("http://do/increment");
const response = await instance.fetch(request);
const data = await response.json();
expect(data.count).toBe(1);
});
});
});
```
**Run Tests:**
```bash
npm test
# or
npx vitest
```
### Integration Testing
Test full request/response cycles with external services.
```typescript
describe("External API integration", () => {
it("fetches data from external API", async () => {
const request = new Request("http://example.com/api/external");
const ctx = createExecutionContext();
const response = await worker.fetch(request, env, ctx);
expect(response.status).toBe(200);
// Verify external API was called correctly
const data = await response.json();
expect(data).toHaveProperty("externalData");
});
});
```
### Mocking
Mock external dependencies for isolated tests.
```typescript
import { vi } from "vitest";
describe("Mocked fetch", () => {
it("handles fetch errors", async () => {
// Mock global fetch
globalThis.fetch = vi.fn().mockRejectedValue(new Error("Network error"));
const request = new Request("http://example.com/api/data");
const ctx = createExecutionContext();
const response = await worker.fetch(request, env, ctx);
expect(response.status).toBe(503);
});
});
```
## Error Handling
### Global Error Handling
Catch all errors and return appropriate responses.
```typescript
export default {
async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
try {
return await handleRequest(request, env, ctx);
} catch (error) {
console.error("Uncaught error:", error);
return Response.json(
{
error: "Internal server error",
message: error instanceof Error ? error.message : "Unknown error",
},
{ status: 500 }
);
}
},
};
async function handleRequest(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
// Your handler logic
}
```
### Custom Error Classes
Define custom error types for better error handling.
```typescript
class NotFoundError extends Error {
constructor(message: string) {
super(message);
this.name = "NotFoundError";
}
}
class UnauthorizedError extends Error {
constructor(message: string) {
super(message);
this.name = "UnauthorizedError";
}
}
class ValidationError extends Error {
constructor(public fields: Record<string, string>) {
super("Validation failed");
this.name = "ValidationError";
}
}
async function handleRequest(request: Request, env: Env): Promise<Response> {
try {
// Your logic
const user = await getUser(userId);
if (!user) {
throw new NotFoundError("User not found");
}
return Response.json(user);
} catch (error) {
if (error instanceof NotFoundError) {
return Response.json({ error: error.message }, { status: 404 });
}
if (error instanceof UnauthorizedError) {
return Response.json({ error: error.message }, { status: 401 });
}
if (error instanceof ValidationError) {
return Response.json(
{ error: "Validation failed", fields: error.fields },
{ status: 400 }
);
}
// Unknown error
console.error("Unexpected error:", error);
return Response.json({ error: "Internal server error" }, { status: 500 });
}
}
```
### Retry Logic
Implement retry logic for transient failures.
```typescript
async function fetchWithRetry(
url: string,
options: RequestInit = {},
maxRetries = 3
): Promise<Response> {
let lastError: Error;
for (let i = 0; i < maxRetries; i++) {
try {
const response = await fetch(url, options);
if (response.ok) {
return response;
}
// Don't retry on client errors (4xx)
if (response.status >= 400 && response.status < 500) {
return response;
}
lastError = new Error(`HTTP ${response.status}`);
} catch (error) {
lastError = error as Error;
}
// Exponential backoff
if (i < maxRetries - 1) {
await new Promise((resolve) => setTimeout(resolve, Math.pow(2, i) * 1000));
}
}
throw lastError!;
}
```
## Performance Optimization
### Caching Strategies
Use the Cache API effectively.
```typescript
async function handleCachedRequest(request: Request, ctx: ExecutionContext): Promise<Response> {
const cache = caches.default;
const cacheKey = new Request(request.url, request);
// Try to get from cache
let response = await cache.match(cacheKey);
if (!response) {
// Cache miss - fetch from origin
response = await fetchFromOrigin(request);
// Cache successful responses
if (response.ok) {
// Note: spreading a Response ({...response}) does NOT copy status or
// headers — they are prototype getters, not own properties. Pass the
// Response itself as the init, then mutate the (now-mutable) headers.
response = new Response(response.body, response);
response.headers.set("Cache-Control", "public, max-age=3600");
// Don't await - cache in background
ctx.waitUntil(cache.put(cacheKey, response.clone()));
}
}
return response;
}
```
```
**Cache with custom keys:**
```typescript
/**
 * Build a cache key for a request, optionally scoped to a single user.
 *
 * When a userId is supplied it is appended as a query parameter so that
 * personalized content is cached per user instead of shared.
 */
function getCacheKey(request: Request, userId?: string): Request {
  const keyUrl = new URL(request.url);
  // Personalize the key only when a user is known; anonymous traffic shares one entry.
  if (userId) {
    keyUrl.searchParams.set("userId", userId);
  }
  return new Request(keyUrl.href, request);
}
```
### Response Streaming
Stream responses for large data.
```typescript
export default {
async fetch(request: Request): Promise<Response> {
const { readable, writable } = new TransformStream();
const writer = writable.getWriter();
// Stream data in background
(async () => {
try {
const data = await fetchLargeDataset();
for (const item of data) {
await writer.write(new TextEncoder().encode(JSON.stringify(item) + "\n"));
}
await writer.close();
} catch (error) {
await writer.abort(error);
}
})();
return new Response(readable, {
headers: {
"Content-Type": "application/x-ndjson",
"Transfer-Encoding": "chunked",
},
});
},
};
```
### Batching Operations
Batch multiple operations for better performance.
```typescript
// Bad: Sequential operations
for (const userId of userIds) {
await env.KV.get(`user:${userId}`);
}
// Good: Batch operations
const users = await Promise.all(
userIds.map((id) => env.KV.get(`user:${id}`))
);
// Even better: Use batch APIs when available
const results = await env.DB.batch([
env.DB.prepare("SELECT * FROM users WHERE id = ?").bind(1),
env.DB.prepare("SELECT * FROM users WHERE id = ?").bind(2),
env.DB.prepare("SELECT * FROM users WHERE id = ?").bind(3),
]);
```
### Background Tasks
Use `ctx.waitUntil()` for non-critical work.
```typescript
export default {
async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
// Process request
const response = await handleRequest(request, env);
// Log analytics in background (don't block response)
ctx.waitUntil(
env.ANALYTICS.writeDataPoint({
blobs: [request.url, request.method],
doubles: [performance.now()],
})
);
// Update cache in background
ctx.waitUntil(updateCache(request, response, env));
return response;
},
};
```
## Debugging
### Console Logging
Use console methods for debugging.
```typescript
console.log("Info:", { userId, action });
console.error("Error occurred:", error);
console.warn("Deprecated API used");
console.debug("Debug info:", data);
// Structured logging
console.log(JSON.stringify({
level: "info",
timestamp: Date.now(),
userId,
action,
}));
```
### Wrangler Tail
View real-time logs during development.
```bash
# Tail logs
wrangler tail
# Filter by status
wrangler tail --status error
# Filter by method
wrangler tail --method POST
# Pretty format
wrangler tail --format pretty
```
### Source Maps
Enable source maps for better error traces.
**tsconfig.json:**
```json
{
"compilerOptions": {
"sourceMap": true
}
}
```
**wrangler.toml:**
```toml
upload_source_maps = true
```
### Local Debugging
Use DevTools for debugging.
```bash
# Start with inspector
wrangler dev --inspector
```
Then open `chrome://inspect` in Chrome and connect to the worker.
### Breakpoints
Set breakpoints in your code.
```typescript
export default {
async fetch(request: Request): Promise<Response> {
debugger; // Execution will pause here
const data = await fetchData();
return Response.json(data);
},
};
```
## Code Organization
### Router Pattern
Organize routes cleanly.
```typescript
/** A route: HTTP method + URL pattern + handler. */
interface Route {
  /** HTTP method this route responds to (e.g. "GET", "POST"). */
  method: string;
  pattern: URLPattern;
  handler: (request: Request, env: Env, params: URLPatternResult) => Promise<Response>;
}

// Routes are matched in order; method AND pattern must both match.
// (The original table had two method-less entries for "/api/users",
// which made the create-user route unreachable.)
const routes: Route[] = [
  {
    method: "GET",
    pattern: new URLPattern({ pathname: "/api/users" }),
    handler: handleGetUsers,
  },
  {
    method: "GET",
    pattern: new URLPattern({ pathname: "/api/users/:id" }),
    handler: handleGetUser,
  },
  {
    method: "POST",
    pattern: new URLPattern({ pathname: "/api/users" }),
    handler: handleCreateUser,
  },
];

export default {
  async fetch(request: Request, env: Env): Promise<Response> {
    for (const route of routes) {
      // Skip routes for other HTTP methods before paying for pattern matching.
      if (route.method !== request.method) {
        continue;
      }
      const match = route.pattern.exec(request.url);
      if (match) {
        return route.handler(request, env, match);
      }
    }
    return Response.json({ error: "Not found" }, { status: 404 });
  },
};

/** GET /api/users — list all users from D1. */
async function handleGetUsers(request: Request, env: Env): Promise<Response> {
  const users = await env.DB.prepare("SELECT * FROM users").all();
  return Response.json(users.results);
}
```
### Middleware Pattern
Chain middleware for cross-cutting concerns.
```typescript
type Middleware = (
request: Request,
env: Env,
next: () => Promise<Response>
) => Promise<Response>;
const corsMiddleware: Middleware = async (request, env, next) => {
if (request.method === "OPTIONS") {
return new Response(null, {
headers: {
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Methods": "GET, POST, PUT, DELETE",
},
});
}
const response = await next();
response.headers.set("Access-Control-Allow-Origin", "*");
return response;
};
const authMiddleware: Middleware = async (request, env, next) => {
const token = request.headers.get("Authorization");
if (!token) {
return Response.json({ error: "Unauthorized" }, { status: 401 });
}
// Validate token
const isValid = await validateToken(token, env);
if (!isValid) {
return Response.json({ error: "Invalid token" }, { status: 401 });
}
return next();
};
const loggingMiddleware: Middleware = async (request, env, next) => {
const start = Date.now();
const response = await next();
const duration = Date.now() - start;
console.log({
method: request.method,
url: request.url,
status: response.status,
duration,
});
return response;
};
/**
 * Compose a handler with a middleware chain (onion model).
 *
 * Middlewares run in array order; each receives a `next` callback that
 * invokes the following middleware (or, past the end, the handler itself).
 * Awaiting `next()` twice from the same middleware is a programming error
 * and throws.
 */
function applyMiddleware(
  handler: (request: Request, env: Env) => Promise<Response>,
  middlewares: Middleware[]
): (request: Request, env: Env) => Promise<Response> {
  return (request: Request, env: Env) => {
    let lastCalled = -1;

    const run = async (position: number): Promise<Response> => {
      // Guard against a middleware calling next() more than once.
      if (position <= lastCalled) {
        throw new Error("next() called multiple times");
      }
      lastCalled = position;

      // Past the final middleware, fall through to the real handler.
      if (position === middlewares.length) {
        return handler(request, env);
      }

      const current = middlewares[position];
      return current(request, env, () => run(position + 1));
    };

    return run(0);
  };
}
// Usage
const handler = applyMiddleware(
async (request, env) => {
return Response.json({ message: "Hello!" });
},
[loggingMiddleware, corsMiddleware, authMiddleware]
);
export default { fetch: handler };
```
### Dependency Injection
Use environment for dependencies.
```typescript
interface Env {
DB: D1Database;
CACHE: KVNamespace;
}
class UserService {
constructor(private env: Env) {}
async getUser(id: string) {
// Try cache first
const cached = await this.env.CACHE.get(`user:${id}`);
if (cached) return JSON.parse(cached);
// Fetch from database
const user = await this.env.DB.prepare(
"SELECT * FROM users WHERE id = ?"
).bind(id).first();
// Update cache
if (user) {
await this.env.CACHE.put(`user:${id}`, JSON.stringify(user), {
expirationTtl: 3600,
});
}
return user;
}
}
export default {
async fetch(request: Request, env: Env): Promise<Response> {
const userService = new UserService(env);
const user = await userService.getUser("123");
return Response.json(user);
},
};
```
## Security Best Practices
### Input Validation
Always validate and sanitize user input.
```typescript
import { z } from "zod";
const userSchema = z.object({
name: z.string().min(1).max(100),
email: z.string().email(),
age: z.number().int().positive().max(150),
});
export default {
async fetch(request: Request, env: Env): Promise<Response> {
try {
const body = await request.json();
const validated = userSchema.parse(body);
// Use validated data
await createUser(validated, env);
return Response.json({ success: true });
} catch (error) {
if (error instanceof z.ZodError) {
return Response.json(
{ error: "Validation failed", issues: error.errors },
{ status: 400 }
);
}
throw error;
}
},
};
```
### Rate Limiting
Implement rate limiting to prevent abuse.
```typescript
/**
 * Fixed-window rate limiter backed by Workers KV.
 *
 * Allows up to `limit` requests per `window` seconds for a given identifier
 * (e.g. client IP). Returns true when the request is allowed.
 *
 * NOTE(review): KV reads are eventually consistent and the get/put pair is
 * not atomic, so concurrent requests can exceed the limit slightly — this is
 * best-effort. For a strict limit use a Durable Object.
 *
 * @param identifier Key to rate-limit on (IP, user id, API key, ...).
 * @param env        Worker environment (uses env.CACHE).
 * @param limit      Max requests per window (default 100).
 * @param window     Window length in seconds (default 60).
 */
async function checkRateLimit(
  identifier: string,
  env: Env,
  limit = 100,
  window = 60
): Promise<boolean> {
  const key = `ratelimit:${identifier}`;
  const current = await env.CACHE.get(key);

  if (!current) {
    // First request in this window: start the counter with a TTL.
    await env.CACHE.put(key, "1", { expirationTtl: window });
    return true;
  }

  // Explicit radix, and treat a corrupted counter as a fresh window instead
  // of storing "NaN" (the original's bare parseInt would do exactly that).
  const count = Number.parseInt(current, 10);
  if (Number.isNaN(count)) {
    await env.CACHE.put(key, "1", { expirationTtl: window });
    return true;
  }

  if (count >= limit) {
    return false;
  }

  await env.CACHE.put(key, String(count + 1), { expirationTtl: window });
  return true;
}
export default {
async fetch(request: Request, env: Env): Promise<Response> {
const ip = request.headers.get("CF-Connecting-IP") || "unknown";
const allowed = await checkRateLimit(ip, env);
if (!allowed) {
return Response.json({ error: "Rate limit exceeded" }, { status: 429 });
}
return handleRequest(request, env);
},
};
```
### CSRF Protection
Protect against cross-site request forgery.
```typescript
/**
 * Generate a random CSRF token: 32 cryptographically-random bytes,
 * base64-encoded (a 44-character string).
 */
function generateCSRFToken(): string {
  const bytes = crypto.getRandomValues(new Uint8Array(32));
  // Build the binary string byte-by-byte, then base64-encode it.
  let binary = "";
  for (const b of bytes) {
    binary += String.fromCharCode(b);
  }
  return btoa(binary);
}
/**
 * Check a submitted CSRF token against the one stored for the session.
 * Returns false when nothing is stored for the session.
 *
 * NOTE(review): plain string equality is not constant-time — usually fine
 * for CSRF checks, but confirm if timing attacks are in your threat model.
 */
async function validateCSRFToken(
  token: string,
  sessionId: string,
  env: Env
): Promise<boolean> {
  const expected = await env.SESSIONS.get(`csrf:${sessionId}`);
  if (expected === null) {
    return false;
  }
  return expected === token;
}
export default {
async fetch(request: Request, env: Env): Promise<Response> {
if (request.method === "POST") {
const sessionId = request.headers.get("X-Session-ID");
const csrfToken = request.headers.get("X-CSRF-Token");
if (!sessionId || !csrfToken) {
return Response.json({ error: "Missing tokens" }, { status: 403 });
}
const isValid = await validateCSRFToken(csrfToken, sessionId, env);
if (!isValid) {
return Response.json({ error: "Invalid CSRF token" }, { status: 403 });
}
}
return handleRequest(request, env);
},
};
```
## Additional Resources
- **Testing**: https://developers.cloudflare.com/workers/testing/
- **Observability**: https://developers.cloudflare.com/workers/observability/
- **Best Practices**: https://developers.cloudflare.com/workers/best-practices/
- **Examples**: https://developers.cloudflare.com/workers/examples/

View File

@@ -0,0 +1,831 @@
# Observability
Monitoring, logging, and debugging Workers in production.
## Logging
### Real-time Logs (Wrangler Tail)
View logs in real-time during development and production.
```bash
# Tail all logs
wrangler tail
# Tail specific environment
wrangler tail --env production
# Filter by status
wrangler tail --status error
wrangler tail --status ok
# Filter by HTTP method
wrangler tail --method POST
# Filter by header
wrangler tail --header "User-Agent: Chrome"
# Filter by IP
wrangler tail --ip 203.0.113.1
# Search in logs
wrangler tail --search "database error"
# Sample rate (% of requests)
wrangler tail --sampling-rate 0.1 # 10%
# Pretty format
wrangler tail --format pretty
```
### Console Logging
Use console methods in your Worker.
```typescript
export default {
async fetch(request: Request, env: Env): Promise<Response> {
// Log levels
console.log("Info message", { userId: 123 });
console.info("Info message");
console.warn("Warning message");
console.error("Error message", error);
console.debug("Debug message", { data });
// Structured logging
console.log(JSON.stringify({
level: "info",
timestamp: Date.now(),
message: "Request processed",
userId: 123,
duration: 45,
}));
return new Response("OK");
},
};
```
### Workers Logs
Persistent logs stored in Cloudflare (availability and retention limits depend on your plan).
**Enable via Dashboard:**
1. Workers & Pages → Logs
2. Enable Workers Logs
3. Set retention period (1-30 days)
**Query logs:**
```bash
# Via GraphQL API
curl -X POST https://api.cloudflare.com/client/v4/graphql \
-H "Authorization: Bearer $TOKEN" \
-d '{
"query": "query { viewer { accounts(filter: {accountTag: \"$ACCOUNT_ID\"}) { workersLogsData(filter: {datetime_gt: \"2025-01-01T00:00:00Z\"}) { logs { timestamp message } } } } }"
}'
```
**Filter logs:**
```typescript
// Add custom fields for filtering
console.log(JSON.stringify({
level: "error",
service: "api",
endpoint: "/users",
userId: "123",
error: error.message,
}));
```
### Logpush Integration
Stream logs to external services.
**Supported destinations:**
- Amazon S3
- Google Cloud Storage
- Azure Blob Storage
- Datadog
- Splunk
- New Relic
- HTTP endpoint
**Setup via Dashboard:**
1. Logs → Workers Logs → Create Job
2. Select destination
3. Configure filters
4. Set fields to include
**Fields available:**
- `timestamp` - Request timestamp
- `level` - Log level (log, error, warn, info, debug)
- `message` - Log message
- `scriptName` - Worker name
- `outcome` - Request outcome (ok, exception, exceededCpu, etc.)
- `logs` - Array of console.log() messages
### Custom Logging Service
Send logs to your own service.
```typescript
interface LogEntry {
level: "info" | "warn" | "error";
message: string;
timestamp: number;
metadata?: Record<string, any>;
}
/**
 * Minimal remote logger: mirrors each entry to the console and ships it to
 * an external ingest endpoint without blocking the caller.
 */
class Logger {
  constructor(private env: Env) {}

  // Ship one entry to the logging service; callers intentionally don't await.
  private async send(entry: LogEntry) {
    // Send to logging service
    await fetch("https://logs.example.com/ingest", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${this.env.LOG_TOKEN}`,
      },
      body: JSON.stringify(entry),
    });
  }

  // Log an informational message; metadata is passed through verbatim.
  info(message: string, metadata?: Record<string, any>) {
    const entry: LogEntry = {
      level: "info",
      message,
      timestamp: Date.now(),
      metadata,
    };
    console.log(JSON.stringify(entry));
    // Fire-and-forget: a failed upload is logged but never breaks the request.
    this.send(entry).catch(console.error);
  }

  // Log an error, folding its message and stack into the entry metadata.
  error(message: string, error: Error, metadata?: Record<string, any>) {
    const entry: LogEntry = {
      level: "error",
      message,
      timestamp: Date.now(),
      metadata: {
        ...metadata,
        error: error.message,
        stack: error.stack,
      },
    };
    console.error(JSON.stringify(entry));
    this.send(entry).catch(console.error);
  }
}
export default {
async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
const logger = new Logger(env);
try {
logger.info("Request received", {
url: request.url,
method: request.method,
});
const response = await handleRequest(request, env);
logger.info("Request completed", {
status: response.status,
});
return response;
} catch (error) {
logger.error("Request failed", error as Error, {
url: request.url,
});
throw error;
}
},
};
```
## Metrics and Analytics
### Workers Analytics
View request metrics in the dashboard.
**Available metrics:**
- Requests (count, rate)
- Errors (count, rate)
- Success rate
- CPU time (p50, p99)
- Duration (p50, p99)
- Subrequests
**Filter by:**
- Time range
- Status code
- Path
- User agent
- Country
### GraphQL Analytics API
Query analytics programmatically.
```typescript
const query = `
query {
viewer {
accounts(filter: {accountTag: "${accountId}"}) {
workersInvocationsAdaptive(
filter: {
datetime_gt: "2025-01-01T00:00:00Z"
datetime_lt: "2025-01-02T00:00:00Z"
scriptName: "my-worker"
}
limit: 100
) {
sum {
requests
errors
subrequests
}
quantiles {
cpuTimeP50
cpuTimeP99
durationP50
durationP99
}
}
}
}
}
`;
const response = await fetch("https://api.cloudflare.com/client/v4/graphql", {
method: "POST",
headers: {
"Authorization": `Bearer ${token}`,
"Content-Type": "application/json",
},
body: JSON.stringify({ query }),
});
const data = await response.json();
```
### Custom Metrics (Analytics Engine)
Write custom metrics to Analytics Engine.
**Configuration:**
```toml
[[analytics_engine_datasets]]
binding = "ANALYTICS"
```
**Write data points:**
```typescript
export default {
async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
const start = Date.now();
try {
const response = await handleRequest(request, env);
const duration = Date.now() - start;
// Write metrics
ctx.waitUntil(
env.ANALYTICS.writeDataPoint({
// String fields (up to 20)
blobs: [
request.url,
request.method,
String(response.status),
request.headers.get("user-agent") || "unknown",
],
// Numeric fields (up to 20)
doubles: [
duration,
response.headers.get("content-length")
? parseInt(response.headers.get("content-length")!)
: 0,
],
// Indexed fields (up to 20) - for filtering
indexes: [
request.cf?.country as string || "unknown",
request.cf?.colo as string || "unknown",
],
})
);
return response;
} catch (error) {
const duration = Date.now() - start;
ctx.waitUntil(
env.ANALYTICS.writeDataPoint({
blobs: [request.url, request.method, "error"],
doubles: [duration],
indexes: ["error"],
})
);
throw error;
}
},
};
```
**Query metrics:**
```sql
SELECT
blob1 AS url,
blob2 AS method,
blob3 AS status,
COUNT() AS requests,
AVG(double1) AS avg_duration,
MAX(double1) AS max_duration
FROM ANALYTICS_DATASET
WHERE
timestamp >= NOW() - INTERVAL '1' DAY
AND index1 = 'US'
GROUP BY blob1, blob2, blob3
ORDER BY requests DESC
LIMIT 100
```
## Traces (OpenTelemetry)
Export traces to observability platforms.
**Supported platforms:**
- Datadog
- New Relic
- Honeycomb
- Grafana Cloud
- Sentry
### Export to Honeycomb
```typescript
import { trace } from "@opentelemetry/api";
import { WorkersSDK } from "@cloudflare/workers-honeycomb-logger";
export default {
async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
const sdk = new WorkersSDK(request, env, ctx, {
apiKey: env.HONEYCOMB_API_KEY,
dataset: "my-worker",
});
const tracer = trace.getTracer("my-worker");
return tracer.startActiveSpan("fetch", async (span) => {
try {
span.setAttribute("http.method", request.method);
span.setAttribute("http.url", request.url);
const response = await handleRequest(request, env);
span.setAttribute("http.status_code", response.status);
span.end();
return response;
} catch (error) {
span.recordException(error as Error);
span.end();
throw error;
}
});
},
};
```
### Export to Datadog
```typescript
export default {
async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
const startTime = Date.now();
try {
const response = await handleRequest(request, env);
const duration = Date.now() - startTime;
// Send trace to Datadog
ctx.waitUntil(
fetch("https://http-intake.logs.datadoghq.com/v1/input", {
method: "POST",
headers: {
"Content-Type": "application/json",
"DD-API-KEY": env.DATADOG_API_KEY,
},
body: JSON.stringify({
ddsource: "cloudflare-workers",
service: "my-worker",
message: "Request completed",
duration,
status: response.status,
url: request.url,
method: request.method,
}),
})
);
return response;
} catch (error) {
// Log error to Datadog
ctx.waitUntil(
fetch("https://http-intake.logs.datadoghq.com/v1/input", {
method: "POST",
headers: {
"Content-Type": "application/json",
"DD-API-KEY": env.DATADOG_API_KEY,
},
body: JSON.stringify({
ddsource: "cloudflare-workers",
service: "my-worker",
status: "error",
error: {
message: (error as Error).message,
stack: (error as Error).stack,
},
}),
})
);
throw error;
}
},
};
```
## Error Tracking
### Error Boundaries
Catch and track errors globally.
```typescript
export default {
async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
try {
return await handleRequest(request, env, ctx);
} catch (error) {
// Log error
console.error("Unhandled error:", error);
// Send to error tracking service
ctx.waitUntil(reportError(error as Error, request, env));
// Return error response
return Response.json(
{
error: "Internal server error",
requestId: crypto.randomUUID(),
},
{ status: 500 }
);
}
},
};
async function reportError(error: Error, request: Request, env: Env) {
await fetch("https://errors.example.com/report", {
method: "POST",
headers: {
"Content-Type": "application/json",
"Authorization": `Bearer ${env.ERROR_TOKEN}`,
},
body: JSON.stringify({
error: {
message: error.message,
stack: error.stack,
name: error.name,
},
request: {
url: request.url,
method: request.method,
headers: Object.fromEntries(request.headers),
},
timestamp: Date.now(),
}),
});
}
```
### Sentry Integration
```bash
npm install @sentry/browser
```
```typescript
import * as Sentry from "@sentry/browser";
export default {
async fetch(request: Request, env: Env): Promise<Response> {
Sentry.init({
dsn: env.SENTRY_DSN,
environment: env.ENVIRONMENT,
});
try {
return await handleRequest(request, env);
} catch (error) {
Sentry.captureException(error);
throw error;
}
},
};
```
## Performance Monitoring
### Request Timing
Track request performance.
```typescript
export default {
async fetch(request: Request, env: Env): Promise<Response> {
const timings = {
start: Date.now(),
auth: 0,
database: 0,
external: 0,
total: 0,
};
// Auth
const authStart = Date.now();
const user = await authenticate(request, env);
timings.auth = Date.now() - authStart;
// Database
const dbStart = Date.now();
const data = await env.DB.prepare("SELECT * FROM users WHERE id = ?")
.bind(user.id)
.first();
timings.database = Date.now() - dbStart;
// External API
const apiStart = Date.now();
const externalData = await fetch("https://api.example.com/data");
timings.external = Date.now() - apiStart;
timings.total = Date.now() - timings.start;
// Log timings
console.log("Performance:", timings);
// Add to response headers
return Response.json(data, {
headers: {
"X-Timing-Auth": String(timings.auth),
"X-Timing-Database": String(timings.database),
"X-Timing-External": String(timings.external),
"X-Timing-Total": String(timings.total),
},
});
},
};
```
### Performance API
Use the Performance API for detailed timing.
```typescript
export default {
async fetch(request: Request): Promise<Response> {
performance.mark("start");
performance.mark("db-start");
await queryDatabase();
performance.mark("db-end");
performance.measure("database", "db-start", "db-end");
performance.mark("api-start");
await fetchExternal();
performance.mark("api-end");
performance.measure("external", "api-start", "api-end");
performance.mark("end");
performance.measure("total", "start", "end");
// Get measurements
const measurements = performance.getEntriesByType("measure");
console.log("Performance measurements:", measurements);
return Response.json({ ok: true });
},
};
```
## Debugging
### Local Debugging
Debug Workers locally with DevTools.
```bash
# Start with inspector
wrangler dev --inspector
# Connect Chrome DevTools
# Open chrome://inspect in Chrome
# Click "inspect" on your Worker
```
**Features:**
- Set breakpoints
- Step through code
- Inspect variables
- View console logs
- Profile performance
### Remote Debugging
Debug production Workers.
**Using console.log:**
```typescript
export default {
async fetch(request: Request, env: Env): Promise<Response> {
console.log("Request:", {
url: request.url,
method: request.method,
headers: Object.fromEntries(request.headers),
});
const response = await handleRequest(request, env);
console.log("Response:", {
status: response.status,
headers: Object.fromEntries(response.headers),
});
return response;
},
};
```
**View logs:**
```bash
wrangler tail --format pretty
```
### Source Maps
Enable source maps for better error traces.
**tsconfig.json:**
```json
{
"compilerOptions": {
"sourceMap": true
}
}
```
**wrangler.toml:**
```toml
upload_source_maps = true
```
### Debugging Tips
1. **Use structured logging** - JSON format for easier parsing
2. **Log request IDs** - Track requests across services
3. **Time operations** - Identify performance bottlenecks
4. **Test locally first** - Use `wrangler dev` before deploying
5. **Use staging environment** - Test in production-like environment
6. **Monitor after deploy** - Watch logs and metrics after deployment
## Alerting
### Custom Alerts
Send alerts based on metrics.
```typescript
export default {
  /**
   * Wrap the real handler with alerting: slow requests and failures are
   * reported via sendAlert in the background (never blocking the response).
   */
  async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise<Response> {
    // Record when the request started (the original example referenced an
    // undeclared `startTime`).
    const startTime = Date.now();
    try {
      const response = await handleRequest(request, env);

      // Alert on slow requests (> 5s) without delaying the response.
      const duration = Date.now() - startTime;
      if (duration > 5000) {
        ctx.waitUntil(sendAlert("Slow request", { duration, url: request.url }, env));
      }
      return response;
    } catch (error) {
      // `error` is unknown under strict TS — narrow before reading .message.
      const message = error instanceof Error ? error.message : String(error);
      ctx.waitUntil(sendAlert("Request error", { error: message }, env));
      throw error;
    }
  },
};
async function sendAlert(message: string, data: any, env: Env) {
// Send to Slack
await fetch(env.SLACK_WEBHOOK, {
method: "POST",
body: JSON.stringify({
text: `🚨 ${message}`,
blocks: [
{
type: "section",
text: { type: "mrkdwn", text: `*${message}*` },
},
{
type: "section",
text: { type: "mrkdwn", text: `\`\`\`${JSON.stringify(data, null, 2)}\`\`\`` },
},
],
}),
});
}
```
## Health Checks
Implement health check endpoints.
```typescript
export default {
async fetch(request: Request, env: Env): Promise<Response> {
const url = new URL(request.url);
if (url.pathname === "/health") {
return healthCheck(env);
}
return handleRequest(request, env);
},
};
async function healthCheck(env: Env): Promise<Response> {
const checks = {
database: false,
kv: false,
external: false,
};
// Check database
try {
await env.DB.prepare("SELECT 1").first();
checks.database = true;
} catch (error) {
console.error("Database check failed:", error);
}
// Check KV
try {
await env.MY_KV.get("health-check");
checks.kv = true;
} catch (error) {
console.error("KV check failed:", error);
}
// Check external API
try {
const response = await fetch("https://api.example.com/health", {
signal: AbortSignal.timeout(2000),
});
checks.external = response.ok;
} catch (error) {
console.error("External API check failed:", error);
}
const allHealthy = Object.values(checks).every((c) => c);
return Response.json(
{ healthy: allHealthy, checks },
{ status: allHealthy ? 200 : 503 }
);
}
```
## Additional Resources
- **Observability**: https://developers.cloudflare.com/workers/observability/
- **Logs**: https://developers.cloudflare.com/workers/observability/logs/
- **Metrics**: https://developers.cloudflare.com/workers/observability/metrics-and-analytics/
- **Traces**: https://developers.cloudflare.com/workers/observability/traces/
- **Dev Tools**: https://developers.cloudflare.com/workers/observability/dev-tools/

View File

@@ -0,0 +1,713 @@
# Wrangler and Deployment Guide
Wrangler is the official CLI for developing, testing, and deploying Cloudflare Workers.
## Installation
```bash
# NPM (global)
npm install -g wrangler
# NPM (project-local)
npm install --save-dev wrangler
# Verify installation
wrangler --version
```
## Authentication
```bash
# Login via browser (recommended)
wrangler login
# Or use API token
export CLOUDFLARE_API_TOKEN=your-token
```
## Essential Commands
### Project Initialization
```bash
# Initialize new project
wrangler init my-worker
# With template
wrangler init my-worker --template cloudflare/workers-sdk
# Interactive setup with C3
npm create cloudflare@latest my-worker
```
### Development
```bash
# Start local dev server
wrangler dev
# Custom port
wrangler dev --port 8080
# With remote resources (bindings)
wrangler dev --remote
# Local mode (no network requests to Cloudflare)
wrangler dev --local
# Test worker (experimental)
wrangler dev --test-scheduled
```
### Deployment
```bash
# Deploy to production
wrangler deploy
# Deploy to specific environment
wrangler deploy --env staging
wrangler deploy --env production
# Dry run (validate without deploying)
wrangler deploy --dry-run
# Deploy specific file
wrangler deploy src/index.ts
# Deploy with message
wrangler deploy --message "Fix authentication bug"
```
### Version Management
```bash
# List versions
wrangler versions list
# View specific version
wrangler versions view <version-id>
# Deploy specific version
wrangler versions deploy <version-id>
# Rollback to previous version
wrangler rollback
# Gradual rollout
wrangler versions deploy <version-id> --percentage 10
```
## Configuration (wrangler.toml)
### Basic Structure
```toml
#:schema node_modules/wrangler/config-schema.json
name = "my-worker"
main = "src/index.ts"
compatibility_date = "2025-01-01"
# Account/Zone (usually auto-detected)
account_id = "your-account-id"
# Workers.dev subdomain
workers_dev = true
# Or custom domain
routes = [
{ pattern = "example.com/*", zone_name = "example.com" },
{ pattern = "api.example.com/*", zone_name = "example.com" }
]
```
### Environment Variables
```toml
# Non-sensitive variables
[vars]
ENVIRONMENT = "production"
API_ENDPOINT = "https://api.example.com"
FEATURE_FLAGS = '{"newUI": true}'
# Per-environment
[env.staging.vars]
ENVIRONMENT = "staging"
API_ENDPOINT = "https://staging-api.example.com"
```
### Secrets
```bash
# Set secret via CLI (not in wrangler.toml!)
wrangler secret put API_KEY
# Enter value when prompted
# List secrets
wrangler secret list
# Delete secret
wrangler secret delete API_KEY
# Bulk import from .env
wrangler secret bulk .env.production
```
### Bindings Configuration
**KV:**
```toml
[[kv_namespaces]]
binding = "MY_KV"
id = "your-namespace-id"
preview_id = "preview-namespace-id"
```
**D1:**
```toml
[[d1_databases]]
binding = "DB"
database_name = "production-db"
database_id = "xxxx-xxxx-xxxx"
```
**R2:**
```toml
[[r2_buckets]]
binding = "MY_BUCKET"
bucket_name = "my-bucket"
preview_bucket_name = "my-bucket-preview"
```
**Durable Objects:**
```toml
[[durable_objects.bindings]]
name = "COUNTER"
class_name = "Counter"
script_name = "my-worker"
[[migrations]]
tag = "v1"
new_classes = ["Counter"]
```
**Queues:**
```toml
[[queues.producers]]
binding = "MY_QUEUE"
queue = "my-queue"
[[queues.consumers]]
queue = "my-queue"
max_batch_size = 10
max_batch_timeout = 30
max_retries = 3
dead_letter_queue = "my-dlq"
```
**Service Bindings:**
```toml
[[services]]
binding = "AUTH_SERVICE"
service = "auth-worker"
environment = "production"
```
**Workers AI:**
```toml
[ai]
binding = "AI"
```
### Cron Triggers
```toml
[triggers]
crons = [
"0 0 * * *", # Daily at midnight
"*/15 * * * *", # Every 15 minutes
"0 9 * * 1-5" # Weekdays at 9 AM
]
```
### Static Assets
```toml
[assets]
directory = "./public"
binding = "ASSETS"
# HTML handling
html_handling = "auto-trailing-slash" # or "drop-trailing-slash", "none"
# Not found handling
not_found_handling = "single-page-application" # or "404-page", "none"
```
### Compatibility
```toml
# Compatibility date (required)
compatibility_date = "2025-01-01"
# Compatibility flags
compatibility_flags = [
"nodejs_compat",
"transformstream_enable_standard_constructor"
]
```
### Custom Builds
```toml
[build]
command = "npm run build"
watch_dirs = ["src", "public"]
[build.upload]
format = "modules"
dir = "dist"
main = "./index.js"
```
## Multi-Environment Setup
### Environment Structure
```toml
# Global settings
name = "my-worker"
main = "src/index.ts"
compatibility_date = "2025-01-01"
# Default/production
[vars]
ENVIRONMENT = "production"
[[kv_namespaces]]
binding = "CACHE"
id = "prod-kv-id"
# Staging environment
[env.staging]
name = "my-worker-staging"
vars = { ENVIRONMENT = "staging" }
[[env.staging.kv_namespaces]]
binding = "CACHE"
id = "staging-kv-id"
# Development environment
[env.dev]
name = "my-worker-dev"
vars = { ENVIRONMENT = "development" }
[[env.dev.kv_namespaces]]
binding = "CACHE"
id = "dev-kv-id"
```
### Deploying Environments
```bash
# Deploy to production (default)
wrangler deploy
# Deploy to staging
wrangler deploy --env staging
# Deploy to dev
wrangler deploy --env dev
```
## CI/CD Integration
### GitHub Actions
**.github/workflows/deploy.yml:**
```yaml
name: Deploy Worker
on:
push:
branches: [main]
jobs:
deploy:
runs-on: ubuntu-latest
name: Deploy
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: '18'
- name: Install dependencies
run: npm ci
- name: Deploy to Cloudflare Workers
uses: cloudflare/wrangler-action@v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
```
**Multi-environment:**
```yaml
name: Deploy Workers
on:
push:
branches: [main, staging]
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: '18'
- name: Install dependencies
run: npm ci
- name: Deploy to staging
if: github.ref == 'refs/heads/staging'
uses: cloudflare/wrangler-action@v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
environment: 'staging'
- name: Deploy to production
if: github.ref == 'refs/heads/main'
uses: cloudflare/wrangler-action@v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
```
### GitLab CI/CD
**.gitlab-ci.yml:**
```yaml
stages:
- deploy
deploy_production:
stage: deploy
image: node:18
script:
- npm ci
- npx wrangler deploy
only:
- main
variables:
CLOUDFLARE_API_TOKEN: $CLOUDFLARE_API_TOKEN
CLOUDFLARE_ACCOUNT_ID: $CLOUDFLARE_ACCOUNT_ID
```
## Workers Builds (Git Integration)
Enable automatic deployments on git push via the dashboard.
**Setup:**
1. Connect your GitHub/GitLab repository
2. Configure build settings
3. Set environment variables
4. Enable branch deployments
**Benefits:**
- Automatic builds on push
- Preview deployments for PRs
- Build caching
- Deployment history
## Versioning & Gradual Deployments
### Versions
Workers automatically create versions on each deployment.
```bash
# List versions
wrangler versions list
# View specific version
wrangler versions view <version-id>
# Deploy specific version
wrangler versions deploy <version-id>
```
### Gradual Rollouts
Incrementally deploy new versions to reduce risk.
**Via Wrangler:**
```bash
# Deploy to 10% of traffic
wrangler versions deploy <version-id> --percentage 10
# Increase to 50%
wrangler versions deploy <version-id> --percentage 50
# Full rollout
wrangler versions deploy <version-id> --percentage 100
```
**Via Configuration:**
```toml
[[workflows.deployments]]
version_id = "new-version-id"
percentage = 10
[[workflows.deployments]]
version_id = "old-version-id"
percentage = 90
```
### Rollback
```bash
# Rollback to previous version
wrangler rollback
# List rollback history
wrangler rollback --list
# Rollback to specific version
wrangler versions deploy <previous-version-id>
```
## Resource Management
### KV Namespaces
```bash
# Create namespace
wrangler kv:namespace create "MY_KV"
wrangler kv:namespace create "MY_KV" --preview
# List namespaces
wrangler kv:namespace list
# Delete namespace
wrangler kv:namespace delete --namespace-id=<id>
# Put key-value
wrangler kv:key put "key" "value" --namespace-id=<id>
# Get value
wrangler kv:key get "key" --namespace-id=<id>
# List keys
wrangler kv:key list --namespace-id=<id>
# Delete key
wrangler kv:key delete "key" --namespace-id=<id>
# Bulk operations
wrangler kv:bulk put data.json --namespace-id=<id>
wrangler kv:bulk delete keys.json --namespace-id=<id>
```
### D1 Databases
```bash
# Create database
wrangler d1 create my-database
# List databases
wrangler d1 list
# Execute SQL
wrangler d1 execute my-database --command="SELECT * FROM users"
# Execute from file
wrangler d1 execute my-database --file=schema.sql
# Migrations
wrangler d1 migrations create my-database "add-users-table"
wrangler d1 migrations apply my-database
wrangler d1 migrations list my-database
# Export database
wrangler d1 export my-database --output=backup.sql
# Time Travel (restore)
wrangler d1 time-travel restore my-database --timestamp=<timestamp>
```
### R2 Buckets
```bash
# Create bucket
wrangler r2 bucket create my-bucket
# List buckets
wrangler r2 bucket list
# Delete bucket
wrangler r2 bucket delete my-bucket
# Put object
wrangler r2 object put my-bucket/file.txt --file=./file.txt
# Get object
wrangler r2 object get my-bucket/file.txt --file=./downloaded.txt
# List objects
wrangler r2 object list my-bucket
# Delete object
wrangler r2 object delete my-bucket/file.txt
```
### Queues
```bash
# Create queue
wrangler queues create my-queue
# List queues
wrangler queues list
# Delete queue
wrangler queues delete my-queue
# Send test message
wrangler queues send my-queue '{"test": "message"}'
```
## Debugging & Troubleshooting
### Tail Logs (Real-time)
```bash
# Tail logs from production
wrangler tail
# Tail specific environment
wrangler tail --env staging
# Filter by status
wrangler tail --status error
# Filter by method
wrangler tail --method POST
# Pretty print
wrangler tail --format pretty
```
### Deployment Issues
**Version conflicts:**
```bash
# Force overwrite
wrangler deploy --force
```
**Bundle size issues:**
```bash
# Check bundle size
wrangler deploy --dry-run --outdir=dist
# Optimize
npm run build -- --minify
```
**Authentication issues:**
```bash
# Re-login
wrangler login
# Use API token
export CLOUDFLARE_API_TOKEN=your-token
```
## Best Practices
### Configuration Management
1. **Use environments** for staging/production
2. **Store secrets in Wrangler**, not in config files
3. **Use compatibility dates** to lock runtime behavior
4. **Version control** your wrangler.toml
### Deployment Strategy
1. **Test locally** with `wrangler dev`
2. **Deploy to staging** first
3. **Use gradual rollouts** for production
4. **Monitor logs** during deployment
5. **Keep previous versions** for quick rollback
### CI/CD Best Practices
1. **Separate staging and production** workflows
2. **Use scoped API tokens** with minimal permissions
3. **Run tests** before deployment
4. **Tag releases** in git
5. **Notify team** on deployments
### Performance Optimization
1. **Minimize bundle size** - Tree-shake unused code
2. **Use custom builds** for complex projects
3. **Enable build caching** in CI/CD
4. **Optimize dependencies** - Use smaller packages
## Advanced Features
### Custom Domains
```toml
routes = [
{ pattern = "api.example.com/*", zone_name = "example.com", custom_domain = true }
]
```
```bash
# Add custom domain via CLI
wrangler domains add api.example.com
```
### Workers for Platforms
Deploy customer-provided Workers on your infrastructure.
```toml
[[dispatch_namespaces]]
binding = "DISPATCHER"
namespace = "my-namespace"
outbound = { service = "my-worker" }
```
### Smart Placement
Automatically place Workers near data sources.
```toml
[placement]
mode = "smart"
```
## Additional Resources
- **Wrangler Docs**: https://developers.cloudflare.com/workers/wrangler/
- **Configuration**: https://developers.cloudflare.com/workers/wrangler/configuration/
- **Commands**: https://developers.cloudflare.com/workers/wrangler/commands/
- **CI/CD**: https://developers.cloudflare.com/workers/ci-cd/
- **GitHub Actions**: https://github.com/cloudflare/wrangler-action