Initial commit
.claude-plugin/plugin.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "name": "cloudflare-durable-objects",
  "description": "Build stateful Durable Objects for real-time apps, WebSocket servers, coordination, and persistent state. Use when: implementing chat rooms, multiplayer games, rate limiting, session management, WebSocket hibernation, or troubleshooting class export, migration, WebSocket state loss, or binding errors.",
  "version": "1.0.0",
  "author": {
    "name": "Jeremy Dawes",
    "email": "jeremy@jezweb.net"
  },
  "skills": [
    "./"
  ]
}
README.md (new file, 3 lines)
@@ -0,0 +1,3 @@
# cloudflare-durable-objects

Build stateful Durable Objects for real-time apps, WebSocket servers, coordination, and persistent state. Use when: implementing chat rooms, multiplayer games, rate limiting, session management, WebSocket hibernation, or troubleshooting class export, migration, WebSocket state loss, or binding errors.
SKILL.md (new file, 772 lines)
@@ -0,0 +1,772 @@
---
name: cloudflare-durable-objects
description: |
  Build stateful Durable Objects for real-time apps, WebSocket servers, coordination, and persistent state. Use when: implementing chat rooms, multiplayer games, rate limiting, session management, WebSocket hibernation, or troubleshooting class export, migration, WebSocket state loss, or binding errors.
license: MIT
---

# Cloudflare Durable Objects

**Status**: Production Ready ✅
**Last Updated**: 2025-11-23
**Dependencies**: cloudflare-worker-base (recommended)
**Latest Versions**: wrangler@4.50.0, @cloudflare/workers-types@4.20251121.0
**Official Docs**: https://developers.cloudflare.com/durable-objects/

**Recent Updates (2025)**:
- **Oct 2025**: WebSocket message size 1 MiB → 32 MiB, Data Studio UI for SQLite DOs (view/edit storage in dashboard)
- **Aug 2025**: `getByName()` API shortcut for named DOs
- **June 2025**: @cloudflare/actors library (beta) - recommended SDK with migrations, alarms, Actor class pattern
- **May 2025**: Python Workers support for Durable Objects
- **April 2025**: SQLite GA with 10GB storage (beta → GA, 1GB → 10GB), Free tier access
- **Feb 2025**: PRAGMA optimize support, improved error diagnostics with reference IDs

---

## Quick Start

**Scaffold new DO project:**
```bash
npm create cloudflare@latest my-durable-app -- --template=cloudflare/durable-objects-template --ts
```

**Or add to existing Worker:**

```typescript
// src/counter.ts - Durable Object class
import { DurableObject } from 'cloudflare:workers';

export class Counter extends DurableObject {
  async increment(): Promise<number> {
    let value = (await this.ctx.storage.get<number>('value')) || 0;
    await this.ctx.storage.put('value', ++value);
    return value;
  }
}
export default Counter; // CRITICAL: Export required
```

```jsonc
// wrangler.jsonc - Configuration
{
  "durable_objects": {
    "bindings": [{ "name": "COUNTER", "class_name": "Counter" }]
  },
  "migrations": [
    { "tag": "v1", "new_sqlite_classes": ["Counter"] } // SQLite backend (10GB limit)
  ]
}
```

```typescript
// src/index.ts - Worker
import { Counter } from './counter';
export { Counter };

export default {
  async fetch(request: Request, env: { COUNTER: DurableObjectNamespace<Counter> }) {
    const stub = env.COUNTER.getByName('global-counter'); // Aug 2025: getByName() shortcut
    return new Response(`Count: ${await stub.increment()}`);
  }
};
```

---

## DO Class Essentials

```typescript
import { DurableObject } from 'cloudflare:workers';

export class MyDO extends DurableObject {
  constructor(ctx: DurableObjectState, env: Env) {
    super(ctx, env); // REQUIRED first line

    // Load state before requests (optional)
    ctx.blockConcurrencyWhile(async () => {
      this.value = await ctx.storage.get('key') || defaultValue;
    });
  }

  // RPC methods (recommended)
  async myMethod(): Promise<string> { return 'Hello'; }

  // HTTP fetch handler (optional)
  async fetch(request: Request): Promise<Response> { return new Response('OK'); }
}

export default MyDO; // CRITICAL: Export required

// Worker must export DO class too
import { MyDO } from './my-do';
export { MyDO };
```

**Constructor Rules:**
- ✅ Call `super(ctx, env)` first
- ✅ Keep minimal - heavy work blocks hibernation wake
- ✅ Use `ctx.blockConcurrencyWhile()` for storage initialization
- ❌ Never `setTimeout`/`setInterval` (use alarms)
- ❌ Don't rely on in-memory state with WebSockets (persist to storage)

---

## Storage API

**Two backends available:**
- **SQLite** (recommended): 10GB storage, SQL queries, atomic operations, PITR
- **KV**: 128MB storage, key-value only

**Enable SQLite in migrations:**
```jsonc
{ "migrations": [{ "tag": "v1", "new_sqlite_classes": ["MyDO"] }] }
```

### SQL API (SQLite backend)

```typescript
export class MyDO extends DurableObject {
  sql: SqlStorage;

  constructor(ctx: DurableObjectState, env: Env) {
    super(ctx, env);
    this.sql = ctx.storage.sql;

    this.sql.exec(`
      CREATE TABLE IF NOT EXISTS messages (id INTEGER PRIMARY KEY, text TEXT, created_at INTEGER);
      CREATE INDEX IF NOT EXISTS idx_created ON messages(created_at);
      PRAGMA optimize; -- Feb 2025: Query performance optimization
    `);
  }

  async addMessage(text: string): Promise<number> {
    const cursor = this.sql.exec('INSERT INTO messages (text, created_at) VALUES (?, ?) RETURNING id', text, Date.now());
    return cursor.one<{ id: number }>().id;
  }

  async getMessages(limit = 50): Promise<any[]> {
    return this.sql.exec('SELECT * FROM messages ORDER BY created_at DESC LIMIT ?', limit).toArray();
  }
}
```

**SQL Methods:**
- `sql.exec(query, ...params)` → cursor
- `cursor.one<T>()` → single row (throws if none)
- `cursor.one<T>({ allowNone: true })` → row or null
- `cursor.toArray<T>()` → all rows
- `ctx.storage.transactionSync(() => { ... })` → atomic multi-statement

**Rules:** Always use `?` placeholders, create indexes, use PRAGMA optimize after schema changes
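
A minimal sketch of `transactionSync()` in practice (the `accounts` table and `transfer()` method are illustrative, not part of this skill's templates): every statement inside the callback commits or rolls back together.

```typescript
// Illustrative only - assumes an `accounts` table created elsewhere in this DO.
transfer(fromId: string, toId: string, amount: number): void {
  // If any statement throws, transactionSync() rolls back all of them.
  this.ctx.storage.transactionSync(() => {
    this.sql.exec('UPDATE accounts SET balance = balance - ? WHERE id = ?', amount, fromId);
    this.sql.exec('UPDATE accounts SET balance = balance + ? WHERE id = ?', amount, toId);
  });
}
```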

### Key-Value API (both backends)

```typescript
// Single operations
await this.ctx.storage.put('key', value);
const value = await this.ctx.storage.get<T>('key');
await this.ctx.storage.delete('key');

// Batch operations
await this.ctx.storage.put({ key1: val1, key2: val2 });
const map = await this.ctx.storage.get(['key1', 'key2']);
await this.ctx.storage.delete(['key1', 'key2']);

// List and delete all
const entries = await this.ctx.storage.list({ prefix: 'user:', limit: 100 });
await this.ctx.storage.deleteAll(); // Atomic on SQLite only

// Transactions
await this.ctx.storage.transaction(async (txn) => {
  await txn.put('key1', val1);
  await txn.put('key2', val2);
});
```

**Storage Limits:** SQLite 10GB (April 2025 GA) | KV 128MB

---

## WebSocket Hibernation API

**Capabilities:**
- Thousands of WebSocket connections per instance
- Hibernate when idle (~10s of no activity) to save costs
- Auto wake-up when messages arrive
- **Message size limit**: 32 MiB (Oct 2025, up from 1 MiB)

**How it works:**
1. Active → handles messages
2. Idle → ~10s of no activity
3. Hibernation → in-memory state **cleared**, WebSockets stay connected
4. Wake → message arrives → constructor runs → handler called

**CRITICAL:** In-memory state is **lost on hibernation**. Use `serializeAttachment()` to persist per-WebSocket metadata.

### Hibernation-Safe Pattern

```typescript
export class ChatRoom extends DurableObject {
  sessions: Map<WebSocket, { userId: string; username: string }>;

  constructor(ctx: DurableObjectState, env: Env) {
    super(ctx, env);
    this.sessions = new Map();

    // CRITICAL: Restore WebSocket metadata after hibernation
    ctx.getWebSockets().forEach((ws) => {
      this.sessions.set(ws, ws.deserializeAttachment());
    });
  }

  async fetch(request: Request): Promise<Response> {
    const pair = new WebSocketPair();
    const [client, server] = Object.values(pair);

    const url = new URL(request.url);
    const metadata = { userId: url.searchParams.get('userId'), username: url.searchParams.get('username') };

    // CRITICAL: Use ctx.acceptWebSocket(), NOT ws.accept()
    this.ctx.acceptWebSocket(server);
    server.serializeAttachment(metadata); // Persist across hibernation
    this.sessions.set(server, metadata);

    return new Response(null, { status: 101, webSocket: client });
  }

  async webSocketMessage(ws: WebSocket, message: string | ArrayBuffer): Promise<void> {
    const session = this.sessions.get(ws);
    // Handle message (max 32 MiB since Oct 2025)
  }

  async webSocketClose(ws: WebSocket, code: number, reason: string, wasClean: boolean): Promise<void> {
    this.sessions.delete(ws);
    ws.close(code, 'Closing');
  }

  async webSocketError(ws: WebSocket, error: any): Promise<void> {
    this.sessions.delete(ws);
  }
}
```

**Hibernation Rules:**
- ✅ `ctx.acceptWebSocket(ws)` - enables hibernation
- ✅ `ws.serializeAttachment(data)` - persist metadata
- ✅ `ctx.getWebSockets().forEach()` - restore in constructor
- ✅ Use alarms instead of `setTimeout`/`setInterval`
- ❌ `ws.accept()` - standard API, no hibernation
- ❌ `setTimeout`/`setInterval` - prevents hibernation
- ❌ In-progress `fetch()` - blocks hibernation
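
Fan-out to connected clients is left implicit above; here is a hedged sketch of a broadcast helper for the same `ChatRoom` class (the `broadcast` method name is illustrative):

```typescript
// Illustrative helper inside ChatRoom: send to every hibernatable socket.
broadcast(message: string, except?: WebSocket): void {
  // ctx.getWebSockets() returns every socket accepted via acceptWebSocket(),
  // including sockets restored after a hibernation wake-up.
  for (const ws of this.ctx.getWebSockets()) {
    if (ws === except) continue;
    try {
      ws.send(message);
    } catch {
      // Socket already closed; webSocketClose/webSocketError handles cleanup.
    }
  }
}
```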

---

## Alarms API

Schedule a DO to wake itself at a future time. **Use for:** batching, cleanup, reminders, periodic tasks.

```typescript
export class Batcher extends DurableObject {
  async addItem(item: string): Promise<void> {
    // Add to buffer
    const buffer = await this.ctx.storage.get<string[]>('buffer') || [];
    buffer.push(item);
    await this.ctx.storage.put('buffer', buffer);

    // Schedule alarm if not set
    if ((await this.ctx.storage.getAlarm()) === null) {
      await this.ctx.storage.setAlarm(Date.now() + 10000); // 10 seconds
    }
  }

  async alarm(info: { retryCount: number; isRetry: boolean }): Promise<void> {
    if (info.retryCount > 3) return; // Give up after 3 retries

    const buffer = await this.ctx.storage.get<string[]>('buffer') || [];
    await this.processBatch(buffer);
    await this.ctx.storage.put('buffer', []);
    // Alarm auto-deleted after success
  }
}
```

**API Methods:**
- `await ctx.storage.setAlarm(Date.now() + 60000)` - set alarm (overwrites existing)
- `await ctx.storage.getAlarm()` - get timestamp or null
- `await ctx.storage.deleteAlarm()` - cancel alarm
- `async alarm(info)` - handler called when alarm fires

**Behavior:**
- ✅ At-least-once execution, auto-retries (up to 6x, exponential backoff)
- ✅ Survives hibernation/eviction
- ✅ Auto-deleted after success
- ⚠️ One alarm per DO (new alarm overwrites) - see the sketch below for multiplexing several jobs
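
Because each DO has a single alarm, multiple scheduled jobs have to share it. A hedged sketch (the `jobs` storage key and `runJob()` method are illustrative): persist every job's due time and always point the alarm at the earliest one.

```typescript
// Illustrative only: one alarm multiplexed across several stored jobs.
async scheduleJob(id: string, runAt: number): Promise<void> {
  const jobs = (await this.ctx.storage.get<Record<string, number>>('jobs')) || {};
  jobs[id] = runAt;
  await this.ctx.storage.put('jobs', jobs);

  // Keep the single alarm pointed at the earliest due time.
  const current = await this.ctx.storage.getAlarm();
  const next = Math.min(...Object.values(jobs));
  if (current === null || next < current) await this.ctx.storage.setAlarm(next);
}

async alarm(): Promise<void> {
  const jobs = (await this.ctx.storage.get<Record<string, number>>('jobs')) || {};
  const now = Date.now();
  for (const [id, runAt] of Object.entries(jobs)) {
    if (runAt <= now) {
      await this.runJob(id); // illustrative, should be idempotent
      delete jobs[id];
    }
  }
  await this.ctx.storage.put('jobs', jobs);
  const remaining = Object.values(jobs);
  if (remaining.length > 0) await this.ctx.storage.setAlarm(Math.min(...remaining));
}
```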

---

## RPC vs HTTP Fetch

**RPC (Recommended):** Direct method calls, type-safe, simple

```typescript
// DO class
export class Counter extends DurableObject {
  async increment(): Promise<number> {
    let value = (await this.ctx.storage.get<number>('count')) || 0;
    await this.ctx.storage.put('count', ++value);
    return value;
  }
}

// Worker calls
const stub = env.COUNTER.getByName('my-counter');
const count = await stub.increment(); // Type-safe!
```

**HTTP Fetch:** Request/response pattern, required for WebSocket upgrades

```typescript
// DO class
export class Counter extends DurableObject {
  async fetch(request: Request): Promise<Response> {
    const url = new URL(request.url);
    if (url.pathname === '/increment') {
      let value = (await this.ctx.storage.get<number>('count')) || 0;
      await this.ctx.storage.put('count', ++value);
      return new Response(JSON.stringify({ count: value }));
    }
    return new Response('Not found', { status: 404 });
  }
}

// Worker calls
const stub = env.COUNTER.getByName('my-counter');
const response = await stub.fetch('https://fake-host/increment', { method: 'POST' });
const data = await response.json();
```

**When to use:** RPC for new projects (simpler), HTTP Fetch for WebSocket upgrades or complex routing
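
Since WebSocket upgrades are the main reason to keep a `fetch()` handler, here is a hedged sketch of the Worker side forwarding the original Request to the DO so the ChatRoom pattern above can accept it (the `/rooms/:name` route and `CHAT_ROOM` binding are illustrative):

```typescript
// Illustrative Worker route: pass the upgrade request straight to the DO's fetch().
export default {
  async fetch(request: Request, env: { CHAT_ROOM: DurableObjectNamespace<ChatRoom> }) {
    const url = new URL(request.url);
    const match = url.pathname.match(/^\/rooms\/([^/]+)$/);
    if (match && request.headers.get('Upgrade') === 'websocket') {
      const stub = env.CHAT_ROOM.getByName(match[1]);
      return stub.fetch(request); // DO returns the 101 response carrying the client socket
    }
    return new Response('Not found', { status: 404 });
  }
};
```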

---

## Getting DO Stubs

**Three ways to get IDs:**

1. **`idFromName(name)`** - Consistent routing (same name = same DO)
   ```typescript
   const stub = env.CHAT_ROOM.getByName('room-123'); // Aug 2025: Shortcut for idFromName + get
   // Use for: chat rooms, user sessions, per-tenant logic, singletons
   ```

2. **`newUniqueId()`** - Random unique ID (must store for reuse)
   ```typescript
   const id = env.MY_DO.newUniqueId({ jurisdiction: 'eu' }); // Optional: EU compliance
   const idString = id.toString(); // Save to KV/D1 for later
   ```

3. **`idFromString(idString)`** - Recreate from saved ID
   ```typescript
   const id = env.MY_DO.idFromString(await env.KV.get('session:123'));
   const stub = env.MY_DO.get(id);
   ```

**Location hints (best-effort):**
```typescript
const stub = env.MY_DO.get(id, { locationHint: 'enam' }); // wnam, enam, sam, weur, eeur, apac, oc, afr, me
```

**Jurisdiction (strict enforcement):**
```typescript
const id = env.MY_DO.newUniqueId({ jurisdiction: 'eu' }); // Options: 'eu', 'fedramp'
// Cannot combine with location hints, higher latency outside jurisdiction
```
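
Tying options 2 and 3 together, a hedged sketch of a session flow that creates a unique DO once, saves its ID in KV, and reuses it later (the `KV` and `MY_DO` bindings and key naming are illustrative):

```typescript
// Illustrative only: one unique DO per session, ID persisted in Workers KV.
async function getSessionStub(env: Env, sessionId: string) {
  let idString = await env.KV.get(`session:${sessionId}`);
  if (!idString) {
    idString = env.MY_DO.newUniqueId().toString();
    await env.KV.put(`session:${sessionId}`, idString);
  }
  return env.MY_DO.get(env.MY_DO.idFromString(idString));
}
```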

---

## Migrations

**Required for:** create, rename, delete, transfer DO classes

**1. Create:**
```jsonc
{ "migrations": [{ "tag": "v1", "new_sqlite_classes": ["Counter"] }] } // SQLite 10GB
// Or: "new_classes": ["Counter"] // KV 128MB (legacy)
```

**2. Rename:**
```jsonc
{ "migrations": [
  { "tag": "v1", "new_sqlite_classes": ["OldName"] },
  { "tag": "v2", "renamed_classes": [{ "from": "OldName", "to": "NewName" }] }
]}
```

**3. Delete:**
```jsonc
{ "migrations": [
  { "tag": "v1", "new_sqlite_classes": ["Counter"] },
  { "tag": "v2", "deleted_classes": ["Counter"] } // Immediate deletion, cannot undo
]}
```

**4. Transfer:**
```jsonc
{ "migrations": [{ "tag": "v1", "transferred_classes": [
  { "from": "OldClass", "from_script": "old-worker", "to": "NewClass" }
]}]}
```

**Migration Rules:**
- ❌ No gradual rollout - migrations are atomic (all instances migrate at once)
- ❌ Tags cannot be reused or reordered - they are unique and append-only
- ❌ Cannot enable SQLite on existing KV-backed DOs
- ✅ Code changes and storage schema changes don't need migrations (only class create/rename/delete/transfer does)
- ✅ Class names globally unique per account

---

## Common Patterns

**Rate Limiting:**
```typescript
async checkLimit(userId: string, limit: number, window: number): Promise<boolean> {
  const requests = (await this.ctx.storage.get<number[]>(`rate:${userId}`)) || [];
  const valid = requests.filter(t => Date.now() - t < window);
  if (valid.length >= limit) return false;
  valid.push(Date.now());
  await this.ctx.storage.put(`rate:${userId}`, valid);
  return true;
}
```

**Session Management with TTL:**
```typescript
async set(key: string, value: any, ttl?: number): Promise<void> {
  const expiresAt = ttl ? Date.now() + ttl : null;
  this.sql.exec('INSERT OR REPLACE INTO session (key, value, expires_at) VALUES (?, ?, ?)',
    key, JSON.stringify(value), expiresAt);
}

async alarm(): Promise<void> {
  this.sql.exec('DELETE FROM session WHERE expires_at < ?', Date.now());
  await this.ctx.storage.setAlarm(Date.now() + 3600000); // Hourly cleanup
}
```

**Leader Election:**
```typescript
async electLeader(workerId: string): Promise<boolean> {
  try {
    this.sql.exec('INSERT INTO leader (id, worker_id, elected_at) VALUES (1, ?, ?)', workerId, Date.now());
    return true;
  } catch { return false; } // Already has leader
}
```

**Multi-DO Coordination:**
```typescript
// Coordinator delegates to child DOs
const gameRoom = env.GAME_ROOM.getByName(gameId);
await gameRoom.initialize();
await this.ctx.storage.put(`game:${gameId}`, { created: Date.now() });
```
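
For the rate limiter, the calling Worker typically routes each user to their own DO instance; a hedged sketch (the `RATE_LIMITER` binding name and `userId` variable are illustrative):

```typescript
// Illustrative Worker-side usage of checkLimit() via RPC, one DO per user.
const limiter = env.RATE_LIMITER.getByName(`user:${userId}`);
const allowed = await limiter.checkLimit(userId, 100, 60_000); // 100 requests per minute
if (!allowed) {
  return new Response('Too Many Requests', { status: 429 });
}
```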

---

## Critical Rules

### Always Do

✅ **Export DO class** from Worker
```typescript
export class MyDO extends DurableObject { }
export default MyDO; // Required
```

✅ **Call `super(ctx, env)`** in constructor
```typescript
constructor(ctx: DurableObjectState, env: Env) {
  super(ctx, env); // Required first line
}
```

✅ **Use `new_sqlite_classes`** for new DOs
```jsonc
{ "tag": "v1", "new_sqlite_classes": ["MyDO"] }
```

✅ **Use `ctx.acceptWebSocket()`** for hibernation
```typescript
this.ctx.acceptWebSocket(server); // Enables hibernation
```

✅ **Persist critical state** to storage (not just memory)
```typescript
await this.ctx.storage.put('important', value);
```

✅ **Use alarms** instead of setTimeout/setInterval
```typescript
await this.ctx.storage.setAlarm(Date.now() + 60000);
```

✅ **Use parameterized SQL queries**
```typescript
this.sql.exec('SELECT * FROM table WHERE id = ?', id);
```

✅ **Minimize constructor work**
```typescript
constructor(ctx, env) {
  super(ctx, env);
  // Minimal initialization only
  ctx.blockConcurrencyWhile(async () => {
    // Load from storage
  });
}
```

### Never Do

❌ **Create DO without migration**
```jsonc
// Missing migrations array = error
```

❌ **Forget to export DO class**
```typescript
class MyDO extends DurableObject { }
// Missing: export default MyDO;
```

❌ **Use `setTimeout` or `setInterval`**
```typescript
setTimeout(() => {}, 1000); // Prevents hibernation
```

❌ **Rely only on in-memory state** with WebSockets
```typescript
// ❌ WRONG: this.sessions will be lost on hibernation
// ✅ CORRECT: Use serializeAttachment()
```

❌ **Deploy migrations gradually**
```bash
# Migrations are atomic - cannot use gradual rollout
```

❌ **Enable SQLite on existing KV-backed DO**
```jsonc
// Not supported - must create new DO class instead
```

❌ **Use standard WebSocket API** expecting hibernation
```typescript
ws.accept(); // ❌ No hibernation
this.ctx.acceptWebSocket(ws); // ✅ Hibernation enabled
```

❌ **Assume location hints are guaranteed**
```typescript
// Location hints are best-effort only
```

---

## Known Issues Prevention

This skill prevents **15+ documented issues**:

### Issue #1: Class Not Exported
**Error**: `"binding not found"` or `"Class X not found"`
**Source**: https://developers.cloudflare.com/durable-objects/get-started/
**Why It Happens**: DO class not exported from Worker
**Prevention**:
```typescript
export class MyDO extends DurableObject { }
export default MyDO; // ← Required
```

### Issue #2: Missing Migration
**Error**: `"migrations required"` or `"no migration found for class"`
**Source**: https://developers.cloudflare.com/durable-objects/reference/durable-objects-migrations/
**Why It Happens**: Created DO class without migration entry
**Prevention**: Always add migration when creating new DO class
```jsonc
{
  "migrations": [
    { "tag": "v1", "new_sqlite_classes": ["MyDO"] }
  ]
}
```

### Issue #3: Wrong Migration Type (KV vs SQLite)
**Error**: Schema errors, storage API mismatch
**Source**: https://developers.cloudflare.com/durable-objects/api/sqlite-storage-api/
**Why It Happens**: Used `new_classes` instead of `new_sqlite_classes`
**Prevention**: Use `new_sqlite_classes` for SQLite backend (recommended)

### Issue #4: Constructor Overhead Blocks Hibernation Wake
**Error**: Slow hibernation wake-up times
**Source**: https://developers.cloudflare.com/durable-objects/best-practices/access-durable-objects-storage/
**Why It Happens**: Heavy work in constructor
**Prevention**: Minimize constructor, use `blockConcurrencyWhile()`
```typescript
constructor(ctx, env) {
  super(ctx, env);
  ctx.blockConcurrencyWhile(async () => {
    // Load from storage
  });
}
```

### Issue #5: setTimeout Breaks Hibernation
**Error**: DO never hibernates, high duration charges
**Source**: https://developers.cloudflare.com/durable-objects/concepts/durable-object-lifecycle/
**Why It Happens**: `setTimeout`/`setInterval` prevents hibernation
**Prevention**: Use alarms API instead
```typescript
// ❌ WRONG
setTimeout(() => {}, 1000);

// ✅ CORRECT
await this.ctx.storage.setAlarm(Date.now() + 1000);
```

### Issue #6: In-Memory State Lost on Hibernation
**Error**: WebSocket metadata lost, state reset unexpectedly
**Source**: https://developers.cloudflare.com/durable-objects/best-practices/websockets/
**Why It Happens**: Relied on in-memory state that's cleared on hibernation
**Prevention**: Use `serializeAttachment()` for WebSocket metadata
```typescript
ws.serializeAttachment({ userId, username });

// Restore in constructor
ctx.getWebSockets().forEach(ws => {
  const metadata = ws.deserializeAttachment();
  this.sessions.set(ws, metadata);
});
```

### Issue #7: Outgoing WebSocket Cannot Hibernate
**Error**: High charges despite hibernation API
**Source**: https://developers.cloudflare.com/durable-objects/best-practices/websockets/
**Why It Happens**: Outgoing WebSockets don't support hibernation
**Prevention**: Only use hibernation for server-side (incoming) WebSockets

### Issue #8: Global Uniqueness Confusion
**Error**: Unexpected DO class name conflicts
**Source**: https://developers.cloudflare.com/durable-objects/platform/known-issues/#global-uniqueness
**Why It Happens**: DO class names are globally unique per account
**Prevention**: Understand DO class names are shared across all Workers in account

### Issue #9: Partial deleteAll on KV Backend
**Error**: Storage not fully deleted, billing continues
**Source**: https://developers.cloudflare.com/durable-objects/api/legacy-kv-storage-api/
**Why It Happens**: KV backend `deleteAll()` can fail partially
**Prevention**: Use SQLite backend for atomic deleteAll

### Issue #10: Binding Name Mismatch
**Error**: Runtime error accessing DO binding
**Source**: https://developers.cloudflare.com/durable-objects/get-started/
**Why It Happens**: Binding name in wrangler.jsonc doesn't match code
**Prevention**: Ensure consistency
```jsonc
{ "bindings": [{ "name": "MY_DO", "class_name": "MyDO" }] }
```
```typescript
env.MY_DO.getByName('instance'); // Must match binding name
```

### Issue #11: State Size Exceeded
**Error**: `"state limit exceeded"` or storage errors
**Source**: https://developers.cloudflare.com/durable-objects/platform/pricing/
**Why It Happens**: Exceeded 10GB (SQLite) or 128MB (KV) limit
**Prevention**: Monitor storage size, implement cleanup with alarms

### Issue #12: Migrations Are Atomic (No Gradual Rollout)
**Error**: Gradual deployment blocked
**Source**: https://developers.cloudflare.com/workers/configuration/versions-and-deployments/gradual-deployments/
**Why It Happens**: Tried to use gradual rollout with migrations
**Prevention**: Migrations deploy atomically across all instances - don't combine them with gradual deployments

### Issue #13: Location Hint Ignored
**Error**: DO created in wrong region
**Source**: https://developers.cloudflare.com/durable-objects/reference/data-location/
**Why It Happens**: Location hints are best-effort, not guaranteed
**Prevention**: Use jurisdiction for strict requirements

### Issue #14: Alarm Retry Failures
**Error**: Tasks lost after alarm failures
**Source**: https://developers.cloudflare.com/durable-objects/api/alarms/
**Why It Happens**: Alarm handler throws errors repeatedly
**Prevention**: Implement idempotent alarm handlers
```typescript
async alarm(info: { retryCount: number }): Promise<void> {
  if (info.retryCount > 3) {
    console.error('Giving up after 3 retries');
    return;
  }
  // Idempotent operation
}
```

### Issue #15: Fetch Blocks Hibernation
**Error**: DO never hibernates despite using hibernation API
**Source**: https://developers.cloudflare.com/durable-objects/concepts/durable-object-lifecycle/
**Why It Happens**: In-progress `fetch()` requests prevent hibernation
**Prevention**: Ensure all async I/O completes before idle period

---

## Configuration & Types

**wrangler.jsonc:**
```jsonc
{
  "compatibility_date": "2025-11-23",
  "durable_objects": {
    "bindings": [{ "name": "COUNTER", "class_name": "CounterV2" }] // must match the current (post-migration) class name
  },
  "migrations": [
    { "tag": "v1", "new_sqlite_classes": ["Counter"] },
    { "tag": "v2", "renamed_classes": [{ "from": "Counter", "to": "CounterV2" }] }
  ]
}
```

**TypeScript:**
```typescript
import { DurableObject } from 'cloudflare:workers';
// DurableObjectState, DurableObjectNamespace and SqlStorage are ambient types from @cloudflare/workers-types

interface Env { MY_DO: DurableObjectNamespace<MyDurableObject>; }

export class MyDurableObject extends DurableObject<Env> {
  sql: SqlStorage;

  constructor(ctx: DurableObjectState, env: Env) {
    super(ctx, env);
    this.sql = ctx.storage.sql;
  }
}
```

---

## Official Documentation

- **Durable Objects**: https://developers.cloudflare.com/durable-objects/
- **State API (SQL)**: https://developers.cloudflare.com/durable-objects/api/sqlite-storage-api/
- **WebSocket Hibernation**: https://developers.cloudflare.com/durable-objects/best-practices/websockets/
- **Alarms API**: https://developers.cloudflare.com/durable-objects/api/alarms/
- **Migrations**: https://developers.cloudflare.com/durable-objects/reference/durable-objects-migrations/
- **Best Practices**: https://developers.cloudflare.com/durable-objects/best-practices/
- **Pricing**: https://developers.cloudflare.com/durable-objects/platform/pricing/

---

**Questions? Issues?**

1. Check `references/top-errors.md` for common problems
2. Review `templates/` for working examples
3. Consult official docs: https://developers.cloudflare.com/durable-objects/
4. Verify migrations configuration carefully
plugin.lock.json (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
{
|
||||
"$schema": "internal://schemas/plugin.lock.v1.json",
|
||||
"pluginId": "gh:jezweb/claude-skills:skills/cloudflare-durable-objects",
|
||||
"normalized": {
|
||||
"repo": null,
|
||||
"ref": "refs/tags/v20251128.0",
|
||||
"commit": "bd54584035138ef578fb89d3d284e07c15306826",
|
||||
"treeHash": "6b0151edc6322a9750a7760f562693e3fb80fb74804cbfd1f5a2e0cdd6f54e75",
|
||||
"generatedAt": "2025-11-28T10:18:57.446597Z",
|
||||
"toolVersion": "publish_plugins.py@0.2.0"
|
||||
},
|
||||
"origin": {
|
||||
"remote": "git@github.com:zhongweili/42plugin-data.git",
|
||||
"branch": "master",
|
||||
"commit": "aa1497ed0949fd50e99e70d6324a29c5b34f9390",
|
||||
"repoRoot": "/Users/zhongweili/projects/openmind/42plugin-data"
|
||||
},
|
||||
"manifest": {
|
||||
"name": "cloudflare-durable-objects",
|
||||
"description": "Build stateful Durable Objects for real-time apps, WebSocket servers, coordination, and persistent state. Use when: implementing chat rooms, multiplayer games, rate limiting, session management, WebSocket hibernation, or troubleshooting class export, migration, WebSocket state loss, or binding errors.",
|
||||
"version": "1.0.0"
|
||||
},
|
||||
"content": {
|
||||
"files": [
|
||||
{
|
||||
"path": "README.md",
|
||||
"sha256": "987e9b8930459ae498e07a08d9a3e98859a0c343bdab742c2a76f4a2e2c1e532"
|
||||
},
|
||||
{
|
||||
"path": "SKILL.md",
|
||||
"sha256": "caf9bad7bde2e01d37b07ecb41c38f2725ee76da2402b6a18c89011128256557"
|
||||
},
|
||||
{
|
||||
"path": "references/rpc-patterns.md",
|
||||
"sha256": "0e39da72c906d8c21be7f5d572d1c64c03af352f7120821721165a874d7f50d6"
|
||||
},
|
||||
{
|
||||
"path": "references/best-practices.md",
|
||||
"sha256": "6e4a3b9eaac543f3b40bd63f62090a168e77a2a23b5d5535f74193f38e33baac"
|
||||
},
|
||||
{
|
||||
"path": "references/top-errors.md",
|
||||
"sha256": "c8178461b6f407d5c09ea0c90e14155fee174c27fca78b4cd62cdae3406fdc72"
|
||||
},
|
||||
{
|
||||
"path": "references/wrangler-commands.md",
|
||||
"sha256": "04a4544d2a1fc694d57da1382f53fc90bcb4fa0a926074f5b1d5995b3e883dd5"
|
||||
},
|
||||
{
|
||||
"path": "references/migrations-guide.md",
|
||||
"sha256": "e6afe9f6d31bc5750ed00d585f8674192d335e84bce7e65fae5b027e778978d2"
|
||||
},
|
||||
{
|
||||
"path": "references/alarms-api.md",
|
||||
"sha256": "f1719e254a79caf241806d7f5e6f17e1ad34365f641d91f54af2c26718799740"
|
||||
},
|
||||
{
|
||||
"path": "references/state-api-reference.md",
|
||||
"sha256": "2ecf02876b1b2d0907b94481ec42b431442d99d818e385086b78e63ccd1b705e"
|
||||
},
|
||||
{
|
||||
"path": "references/websocket-hibernation.md",
|
||||
"sha256": "c2a2e7f97bae4bfd4a5e05133fac562f54120ba46e330bf1d8e64693ef497d94"
|
||||
},
|
||||
{
|
||||
"path": "scripts/check-versions.sh",
|
||||
"sha256": "8d9db5d81cca270a8646c8cece9369e20b4fbc4d827435055e5cc53fe4158024"
|
||||
},
|
||||
{
|
||||
"path": ".claude-plugin/plugin.json",
|
||||
"sha256": "165532a81d4b3190b62c3a0118ecf8e993e8dc85d2e5335145e369b335e4e16c"
|
||||
},
|
||||
{
|
||||
"path": "templates/multi-do-coordination.ts",
|
||||
"sha256": "439c3848e19bd5c2018d2d7588fb8e1eb41dcfa4d19c4882fe4d42eda4f737b2"
|
||||
},
|
||||
{
|
||||
"path": "templates/rpc-vs-fetch.ts",
|
||||
"sha256": "83c10d208220cc427d5e89c28d8b968e522e420311aea7a6882af1b1fc4851e8"
|
||||
},
|
||||
{
|
||||
"path": "templates/location-hints.ts",
|
||||
"sha256": "b1a5eddb0137b279fb409512a5d71ca6f798d7d1cb15eff5bb5dca3f72939831"
|
||||
},
|
||||
{
|
||||
"path": "templates/websocket-hibernation-do.ts",
|
||||
"sha256": "cecd459e95b5bedf10f041a68354dadf4c0d1a03ddd4518e8ae3ec2684438c0e"
|
||||
},
|
||||
{
|
||||
"path": "templates/state-api-patterns.ts",
|
||||
"sha256": "f90baa12092d041dbd3ae306dcaf2af4720d19817ff7792891730d8843ce3c1d"
|
||||
},
|
||||
{
|
||||
"path": "templates/alarms-api-do.ts",
|
||||
"sha256": "2478bebb34010ded0917eaf57c17fc70be04296181da51038833642aaed9238a"
|
||||
},
|
||||
{
|
||||
"path": "templates/package.json",
|
||||
"sha256": "80015b91b861158f08c0d21d02b551f2107e543e4857c842406e37558b84380d"
|
||||
},
|
||||
{
|
||||
"path": "templates/basic-do.ts",
|
||||
"sha256": "cab0380dc5203c7119d4f6ae2d7353171ee07339f3eab772902bb193450ca802"
|
||||
},
|
||||
{
|
||||
"path": "templates/wrangler-do-config.jsonc",
|
||||
"sha256": "3adcc7f03bb8327e5e3ae5e8cf31320568a42d9f33645f9e7deac79f4f8175d8"
|
||||
}
|
||||
],
|
||||
"dirSha256": "6b0151edc6322a9750a7760f562693e3fb80fb74804cbfd1f5a2e0cdd6f54e75"
|
||||
},
|
||||
"security": {
|
||||
"scannedAt": null,
|
||||
"scannerVersion": null,
|
||||
"flags": []
|
||||
}
|
||||
}
|
||||
references/alarms-api.md (new file, 306 lines)
@@ -0,0 +1,306 @@
|
||||
# Alarms API - Scheduled Tasks
|
||||
|
||||
Complete guide to scheduling future tasks with alarms.
|
||||
|
||||
---
|
||||
|
||||
## What are Alarms?
|
||||
|
||||
Alarms allow Durable Objects to **schedule themselves** to wake up at a specific time in the future.
|
||||
|
||||
**Use Cases:**
|
||||
- Batching (accumulate items, process in bulk)
|
||||
- Cleanup (delete old data periodically)
|
||||
- Reminders (notifications, alerts)
|
||||
- Delayed operations (rate limiting reset)
|
||||
- Periodic tasks (health checks, sync)
|
||||
|
||||
---
|
||||
|
||||
## Set Alarm
|
||||
|
||||
### `storage.setAlarm(time)`
|
||||
|
||||
```typescript
|
||||
// Fire in 10 seconds
|
||||
await this.ctx.storage.setAlarm(Date.now() + 10000);
|
||||
|
||||
// Fire at specific date/time
|
||||
await this.ctx.storage.setAlarm(new Date('2025-12-31T23:59:59Z'));
|
||||
|
||||
// Fire in 1 hour
|
||||
await this.ctx.storage.setAlarm(Date.now() + 3600000);
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `time` (number | Date): Unix timestamp (ms) or Date object
|
||||
|
||||
**Behavior:**
|
||||
- ✅ **Only ONE alarm per DO** - setting new alarm overwrites previous
|
||||
- ✅ **Persists across hibernation** - survives DO eviction
|
||||
- ✅ **Guaranteed at-least-once execution**
|
||||
|
||||
---
|
||||
|
||||
## Alarm Handler
|
||||
|
||||
### `alarm(alarmInfo)`
|
||||
|
||||
Called when alarm fires (or retries).
|
||||
|
||||
```typescript
|
||||
async alarm(alarmInfo: { retryCount: number; isRetry: boolean }): Promise<void> {
|
||||
console.log(`Alarm fired (retry: ${alarmInfo.isRetry}, count: ${alarmInfo.retryCount})`);
|
||||
|
||||
// Do work
|
||||
await this.processBatch();
|
||||
|
||||
// Alarm is automatically deleted after successful execution
|
||||
}
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `alarmInfo.retryCount` (number): Number of retries (0 on first attempt)
|
||||
- `alarmInfo.isRetry` (boolean): True if this is a retry
|
||||
|
||||
**CRITICAL:**
|
||||
- ✅ **Implement idempotent operations** (safe to retry)
|
||||
- ✅ **Limit retry attempts** (avoid infinite retries)
|
||||
- ❌ **Don't throw errors lightly** (triggers automatic retry)
|
||||
|
||||
---
|
||||
|
||||
## Get Alarm
|
||||
|
||||
### `storage.getAlarm()`
|
||||
|
||||
Get current alarm time (null if not set).
|
||||
|
||||
```typescript
|
||||
const alarmTime = await this.ctx.storage.getAlarm();
|
||||
|
||||
if (alarmTime === null) {
|
||||
// No alarm set
|
||||
await this.ctx.storage.setAlarm(Date.now() + 60000);
|
||||
} else {
|
||||
console.log(`Alarm scheduled for ${new Date(alarmTime).toISOString()}`);
|
||||
}
|
||||
```
|
||||
|
||||
**Returns:** Promise<number | null> (Unix timestamp in ms)
|
||||
|
||||
---
|
||||
|
||||
## Delete Alarm
|
||||
|
||||
### `storage.deleteAlarm()`
|
||||
|
||||
Cancel scheduled alarm.
|
||||
|
||||
```typescript
|
||||
await this.ctx.storage.deleteAlarm();
|
||||
```
|
||||
|
||||
**When to use:**
|
||||
- Cancel scheduled task
|
||||
- Before deleting DO (if using `deleteAll()`)
|
||||
|
||||
---
|
||||
|
||||
## Retry Behavior
|
||||
|
||||
**Automatic Retries:**
|
||||
- Up to **6 retries** on failure
|
||||
- Exponential backoff: **2s, 4s, 8s, 16s, 32s, 64s**
|
||||
- Retries if `alarm()` throws uncaught exception
|
||||
|
||||
**Example with retry limit:**
|
||||
|
||||
```typescript
|
||||
async alarm(alarmInfo: { retryCount: number; isRetry: boolean }): Promise<void> {
|
||||
if (alarmInfo.retryCount > 3) {
|
||||
console.error('Alarm failed after 3 retries, giving up');
|
||||
// Clean up to avoid infinite retries
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await this.sendNotification();
|
||||
} catch (error) {
|
||||
console.error('Alarm failed:', error);
|
||||
throw error; // Will trigger retry
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Pattern 1: Batching
|
||||
|
||||
Accumulate items, process in bulk.
|
||||
|
||||
```typescript
|
||||
async addItem(item: string): Promise<void> {
|
||||
this.buffer.push(item);
|
||||
await this.ctx.storage.put('buffer', this.buffer);
|
||||
|
||||
// Schedule alarm if not already set
|
||||
const alarm = await this.ctx.storage.getAlarm();
|
||||
if (alarm === null) {
|
||||
await this.ctx.storage.setAlarm(Date.now() + 10000); // 10s
|
||||
}
|
||||
}
|
||||
|
||||
async alarm(): Promise<void> {
|
||||
this.buffer = await this.ctx.storage.get('buffer') || [];
|
||||
|
||||
if (this.buffer.length > 0) {
|
||||
await this.processBatch(this.buffer);
|
||||
this.buffer = [];
|
||||
await this.ctx.storage.put('buffer', []);
|
||||
}
|
||||
|
||||
// Alarm automatically deleted after success
|
||||
}
|
||||
```
|
||||
|
||||
### Pattern 2: Periodic Cleanup
|
||||
|
||||
Run cleanup every hour.
|
||||
|
||||
```typescript
|
||||
constructor(ctx: DurableObjectState, env: Env) {
|
||||
super(ctx, env);
|
||||
|
||||
// Schedule first cleanup
|
||||
ctx.blockConcurrencyWhile(async () => {
|
||||
const alarm = await ctx.storage.getAlarm();
|
||||
if (alarm === null) {
|
||||
await ctx.storage.setAlarm(Date.now() + 3600000); // 1 hour
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async alarm(): Promise<void> {
|
||||
// Cleanup old data
|
||||
await this.cleanup();
|
||||
|
||||
// Schedule next cleanup
|
||||
await this.ctx.storage.setAlarm(Date.now() + 3600000);
|
||||
}
|
||||
```
|
||||
|
||||
### Pattern 3: Delayed Operation
|
||||
|
||||
Execute task after delay.
|
||||
|
||||
```typescript
|
||||
async scheduleTask(task: string, delayMs: number): Promise<void> {
|
||||
await this.ctx.storage.put('pendingTask', task);
|
||||
await this.ctx.storage.setAlarm(Date.now() + delayMs);
|
||||
}
|
||||
|
||||
async alarm(): Promise<void> {
|
||||
const task = await this.ctx.storage.get('pendingTask');
|
||||
|
||||
if (task) {
|
||||
await this.executeTask(task);
|
||||
await this.ctx.storage.delete('pendingTask');
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Pattern 4: Reminder/Notification
|
||||
|
||||
One-time reminder.
|
||||
|
||||
```typescript
|
||||
async setReminder(message: string, fireAt: Date): Promise<void> {
|
||||
await this.ctx.storage.put('reminder', { message, fireAt: fireAt.getTime() });
|
||||
await this.ctx.storage.setAlarm(fireAt);
|
||||
}
|
||||
|
||||
async alarm(): Promise<void> {
|
||||
const reminder = await this.ctx.storage.get('reminder');
|
||||
|
||||
if (reminder) {
|
||||
await this.sendNotification(reminder.message);
|
||||
await this.ctx.storage.delete('reminder');
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Limitations
|
||||
|
||||
⚠️ **One alarm per DO**
|
||||
- Setting new alarm overwrites previous
|
||||
- Use storage to track multiple pending tasks
|
||||
|
||||
⚠️ **No cron syntax**
|
||||
- Alarm is one-time (but can reschedule in handler)
|
||||
- For periodic tasks, reschedule in `alarm()` handler
|
||||
|
||||
⚠️ **Minimum precision: ~1 second**
|
||||
- Don't expect millisecond precision
|
||||
- Designed for longer delays (seconds to hours)
|
||||
|
||||
---
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Idempotent Operations
|
||||
|
||||
```typescript
|
||||
// ✅ GOOD: Idempotent (safe to retry)
|
||||
async alarm(): Promise<void> {
|
||||
const messageId = await this.ctx.storage.get('messageId');
|
||||
|
||||
// Check if already sent (idempotent)
|
||||
const sent = await this.checkIfSent(messageId);
|
||||
if (sent) {
|
||||
return;
|
||||
}
|
||||
|
||||
await this.sendMessage(messageId);
|
||||
await this.markAsSent(messageId);
|
||||
}
|
||||
|
||||
// ❌ BAD: Not idempotent (duplicate sends on retry)
|
||||
async alarm(): Promise<void> {
|
||||
await this.sendMessage(); // Will send duplicate if retried
|
||||
}
|
||||
```
|
||||
|
||||
### Limit Retries
|
||||
|
||||
```typescript
|
||||
async alarm(info: { retryCount: number }): Promise<void> {
|
||||
if (info.retryCount > 3) {
|
||||
console.error('Giving up after 3 retries');
|
||||
return;
|
||||
}
|
||||
|
||||
// Try operation
|
||||
await this.doWork();
|
||||
}
|
||||
```
|
||||
|
||||
### Clean Up Before `deleteAll()`
|
||||
|
||||
```typescript
|
||||
async destroy(): Promise<void> {
|
||||
// Delete alarm first
|
||||
await this.ctx.storage.deleteAlarm();
|
||||
|
||||
// Then delete all storage
|
||||
await this.ctx.storage.deleteAll();
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
**Official Docs**: https://developers.cloudflare.com/durable-objects/api/alarms/
|
||||
references/best-practices.md (new file, 416 lines)
@@ -0,0 +1,416 @@
|
||||
# Durable Objects Best Practices
|
||||
|
||||
Production patterns and optimization strategies.
|
||||
|
||||
---
|
||||
|
||||
## Performance
|
||||
|
||||
### Minimize Constructor Work
|
||||
|
||||
Heavy work in constructor delays request handling and hibernation wake-up.
|
||||
|
||||
```typescript
|
||||
// ✅ GOOD
|
||||
constructor(ctx: DurableObjectState, env: Env) {
|
||||
super(ctx, env);
|
||||
|
||||
// Minimal initialization
|
||||
this.sessions = new Map();
|
||||
|
||||
// Load from storage with blockConcurrencyWhile
|
||||
ctx.blockConcurrencyWhile(async () => {
|
||||
this.data = await ctx.storage.get('data') || defaultData;
|
||||
});
|
||||
}
|
||||
|
||||
// ❌ BAD
|
||||
constructor(ctx: DurableObjectState, env: Env) {
|
||||
super(ctx, env);
|
||||
|
||||
// Expensive operations delay all requests
|
||||
await this.loadMassiveDataset();
|
||||
await this.computeComplexState();
|
||||
}
|
||||
```
|
||||
|
||||
### Use Indexes for SQL Queries
|
||||
|
||||
```typescript
|
||||
// Create indexes for frequently queried columns
|
||||
this.sql.exec(`
|
||||
CREATE INDEX IF NOT EXISTS idx_user_email ON users(email);
|
||||
CREATE INDEX IF NOT EXISTS idx_created_at ON messages(created_at);
|
||||
`);
|
||||
|
||||
// Use EXPLAIN QUERY PLAN to verify index usage
|
||||
const plan = this.sql.exec('EXPLAIN QUERY PLAN SELECT * FROM users WHERE email = ?', email);
|
||||
```
|
||||
|
||||
### Batch Operations
|
||||
|
||||
```typescript
|
||||
// ✅ GOOD: Batch inserts
|
||||
this.sql.exec(`INSERT INTO messages (text, user_id) VALUES ${rows.map(() => '(?, ?)').join(', ')}`, ...rows.flatMap(r => [r.text, r.userId]));
|
||||
|
||||
// ❌ BAD: Individual inserts
|
||||
for (const row of rows) {
|
||||
this.sql.exec('INSERT INTO messages (text, user_id) VALUES (?, ?)', row.text, row.userId);
|
||||
}
|
||||
```
|
||||
|
||||
### Use Transactions
|
||||
|
||||
```typescript
|
||||
// Atomic multi-step operations
|
||||
this.ctx.storage.transactionSync(() => {
|
||||
this.sql.exec('UPDATE users SET balance = balance - ? WHERE id = ?', amount, senderId);
|
||||
this.sql.exec('UPDATE users SET balance = balance + ? WHERE id = ?', amount, receiverId);
|
||||
this.sql.exec('INSERT INTO transactions ...');
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Cost Optimization
|
||||
|
||||
### Use WebSocket Hibernation
|
||||
|
||||
```typescript
|
||||
// ✅ GOOD: Hibernates when idle (~90% cost savings)
|
||||
this.ctx.acceptWebSocket(server);
|
||||
|
||||
// ❌ BAD: Never hibernates (high duration charges)
|
||||
server.accept();
|
||||
```
|
||||
|
||||
### Use Alarms, Not setTimeout
|
||||
|
||||
```typescript
|
||||
// ✅ GOOD: Allows hibernation
|
||||
await this.ctx.storage.setAlarm(Date.now() + 60000);
|
||||
|
||||
// ❌ BAD: Prevents hibernation
|
||||
setTimeout(() => this.doWork(), 60000);
|
||||
```
|
||||
|
||||
### Minimize Storage Size
|
||||
|
||||
```typescript
|
||||
// Periodic cleanup with alarms
|
||||
async alarm(): Promise<void> {
|
||||
const oneDayAgo = Date.now() - (24 * 60 * 60 * 1000);
|
||||
|
||||
this.sql.exec('DELETE FROM messages WHERE created_at < ?', oneDayAgo);
|
||||
|
||||
// Schedule next cleanup
|
||||
await this.ctx.storage.setAlarm(Date.now() + 3600000);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Reliability
|
||||
|
||||
### Implement Idempotent Operations
|
||||
|
||||
```typescript
|
||||
// ✅ GOOD: Idempotent (safe to retry)
|
||||
async processPayment(paymentId: string, amount: number): Promise<void> {
|
||||
// Check if already processed
|
||||
const existing = await this.ctx.storage.get(`payment:${paymentId}`);
|
||||
if (existing) {
|
||||
return; // Already processed
|
||||
}
|
||||
|
||||
// Process payment
|
||||
await this.chargeCustomer(amount);
|
||||
|
||||
// Mark as processed
|
||||
await this.ctx.storage.put(`payment:${paymentId}`, { processed: true, amount });
|
||||
}
|
||||
|
||||
// ❌ BAD: Not idempotent (duplicate charges on retry)
|
||||
async processPayment(amount: number): Promise<void> {
|
||||
await this.chargeCustomer(amount);
|
||||
}
|
||||
```
|
||||
|
||||
### Limit Alarm Retries
|
||||
|
||||
```typescript
|
||||
async alarm(info: { retryCount: number }): Promise<void> {
|
||||
if (info.retryCount > 3) {
|
||||
console.error('Giving up after 3 retries');
|
||||
await this.logFailure();
|
||||
return;
|
||||
}
|
||||
|
||||
await this.doWork();
|
||||
}
|
||||
```
|
||||
|
||||
### Graceful Error Handling
|
||||
|
||||
```typescript
|
||||
async processMessage(message: string): Promise<void> {
|
||||
try {
|
||||
await this.handleMessage(message);
|
||||
} catch (error) {
|
||||
console.error('Message processing failed:', error);
|
||||
|
||||
// Store failed message for retry
|
||||
await this.ctx.storage.put(`failed:${Date.now()}`, message);
|
||||
|
||||
// Don't throw - prevents retry storm
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Security
|
||||
|
||||
### Validate Input
|
||||
|
||||
```typescript
|
||||
async createUser(email: string, username: string): Promise<void> {
|
||||
// Validate input
|
||||
if (!email || !email.includes('@')) {
|
||||
throw new Error('Invalid email');
|
||||
}
|
||||
|
||||
if (!username || username.length < 3) {
|
||||
throw new Error('Invalid username');
|
||||
}
|
||||
|
||||
// Use parameterized queries (prevents SQL injection)
|
||||
this.sql.exec(
|
||||
'INSERT INTO users (email, username) VALUES (?, ?)',
|
||||
email,
|
||||
username
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Use Parameterized Queries
|
||||
|
||||
```typescript
|
||||
// ✅ GOOD: Parameterized (safe from SQL injection)
|
||||
this.sql.exec('SELECT * FROM users WHERE email = ?', userEmail);
|
||||
|
||||
// ❌ BAD: String concatenation (SQL injection risk)
|
||||
this.sql.exec(`SELECT * FROM users WHERE email = '${userEmail}'`);
|
||||
```
|
||||
|
||||
### Authenticate Requests
|
||||
|
||||
```typescript
|
||||
async fetch(request: Request): Promise<Response> {
|
||||
const authHeader = request.headers.get('Authorization');
|
||||
|
||||
if (!authHeader || !this.validateToken(authHeader)) {
|
||||
return new Response('Unauthorized', { status: 401 });
|
||||
}
|
||||
|
||||
// Handle authenticated request
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Data Management
|
||||
|
||||
### Monitor Storage Size
|
||||
|
||||
```typescript
|
||||
async getStorageSize(): Promise<number> {
|
||||
// Approximate size (sum of all values)
|
||||
const map = await this.ctx.storage.list();
|
||||
|
||||
let size = 0;
|
||||
for (const value of map.values()) {
|
||||
size += JSON.stringify(value).length;
|
||||
}
|
||||
|
||||
return size;
|
||||
}
|
||||
|
||||
async checkStorageLimit(): Promise<void> {
|
||||
const size = await this.getStorageSize();
|
||||
|
||||
if (size > 9_000_000_000) { // 9GB (90% of the 10GB SQLite limit)
|
||||
console.warn('Storage approaching limit');
|
||||
await this.triggerCleanup();
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Cleanup Old Data
|
||||
|
||||
```typescript
|
||||
// Regular cleanup with alarms
|
||||
async alarm(): Promise<void> {
|
||||
const cutoff = Date.now() - (30 * 24 * 60 * 60 * 1000); // 30 days
|
||||
|
||||
this.sql.exec('DELETE FROM messages WHERE created_at < ?', cutoff);
|
||||
|
||||
// Schedule next cleanup
|
||||
await this.ctx.storage.setAlarm(Date.now() + 86400000); // 24 hours
|
||||
}
|
||||
```
|
||||
|
||||
### Backup Critical Data
|
||||
|
||||
```typescript
|
||||
async backup(): Promise<void> {
|
||||
// Export to R2 or D1
|
||||
const data = await this.exportData();
|
||||
|
||||
await this.env.BUCKET.put(`backup-${Date.now()}.json`, JSON.stringify(data));
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Testing
|
||||
|
||||
### Local Development
|
||||
|
||||
```bash
|
||||
# Start local dev server
|
||||
npx wrangler dev
|
||||
|
||||
# Test with curl
|
||||
curl -X POST http://localhost:8787/api/increment
|
||||
```
|
||||
|
||||
### Integration Tests
|
||||
|
||||
```typescript
|
||||
// Test DO behavior
|
||||
describe('Counter DO', () => {
|
||||
it('should increment', async () => {
|
||||
const stub = env.COUNTER.getByName('test-counter');
|
||||
|
||||
const count1 = await stub.increment();
|
||||
expect(count1).toBe(1);
|
||||
|
||||
const count2 = await stub.increment();
|
||||
expect(count2).toBe(2);
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
### Simulate Hibernation
|
||||
|
||||
```typescript
|
||||
// Test hibernation wake-up
|
||||
constructor(ctx, env) {
|
||||
super(ctx, env);
|
||||
|
||||
console.log('DO woke up!', {
|
||||
websockets: ctx.getWebSockets().length,
|
||||
});
|
||||
|
||||
// Restore state
|
||||
ctx.getWebSockets().forEach(ws => {
|
||||
const metadata = ws.deserializeAttachment();
|
||||
this.sessions.set(ws, metadata);
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Monitoring
|
||||
|
||||
### Log Important Events
|
||||
|
||||
```typescript
|
||||
async importantOperation(): Promise<void> {
|
||||
console.log('Starting important operation', {
|
||||
doId: this.ctx.id.toString(),
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
|
||||
await this.doWork();
|
||||
|
||||
console.log('Important operation completed');
|
||||
}
|
||||
```
|
||||
|
||||
### Track Metrics
|
||||
|
||||
```typescript
|
||||
async recordMetric(metric: string, value: number): Promise<void> {
|
||||
// Store metrics
|
||||
await this.ctx.storage.put(`metric:${metric}:${Date.now()}`, value);
|
||||
|
||||
// Or send to Analytics Engine
|
||||
// await this.env.ANALYTICS.writeDataPoint({
|
||||
// indexes: [metric],
|
||||
// doubles: [value],
|
||||
// });
|
||||
}
|
||||
```
|
||||
|
||||
### Use Tail Logs
|
||||
|
||||
```bash
|
||||
# Tail live logs
|
||||
npx wrangler tail
|
||||
|
||||
# Filter by DO
|
||||
npx wrangler tail --search "DurableObject"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Rate Limiting
|
||||
|
||||
```typescript
|
||||
async checkRateLimit(userId: string, limit: number, window: number): Promise<boolean> {
|
||||
const key = `rate:${userId}`;
|
||||
const now = Date.now();
|
||||
|
||||
const requests = await this.ctx.storage.get<number[]>(key) || [];
|
||||
|
||||
const validRequests = requests.filter(t => now - t < window);
|
||||
|
||||
if (validRequests.length >= limit) {
|
||||
return false; // Rate limited
|
||||
}
|
||||
|
||||
validRequests.push(now);
|
||||
await this.ctx.storage.put(key, validRequests);
|
||||
|
||||
return true;
|
||||
}
|
||||
```
|
||||
|
||||
### Leader Election
|
||||
|
||||
```typescript
|
||||
async electLeader(workerId: string): Promise<boolean> {
|
||||
try {
|
||||
this.sql.exec(
|
||||
'INSERT INTO leader (id, worker_id) VALUES (1, ?)',
|
||||
workerId
|
||||
);
|
||||
return true; // Became leader
|
||||
} catch {
|
||||
return false; // Someone else is leader
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Session Management
|
||||
|
||||
See `templates/state-api-patterns.ts` for complete example.
|
||||
|
||||
---
|
||||
|
||||
**Official Docs**: https://developers.cloudflare.com/durable-objects/best-practices/
|
||||
references/migrations-guide.md (new file, 384 lines)
@@ -0,0 +1,384 @@
|
||||
# Durable Objects Migrations Guide
|
||||
|
||||
Complete guide to managing DO class lifecycles with migrations.
|
||||
|
||||
---
|
||||
|
||||
## Why Migrations?
|
||||
|
||||
Migrations tell Cloudflare Workers runtime about changes to Durable Object classes:
|
||||
|
||||
**Required for:**
|
||||
- ✅ Creating new DO class
|
||||
- ✅ Renaming DO class
|
||||
- ✅ Deleting DO class
|
||||
- ✅ Transferring DO class to another Worker
|
||||
|
||||
**NOT required for:**
|
||||
- ❌ Code changes to existing DO class
|
||||
- ❌ Storage schema changes within DO
|
||||
|
||||
---
|
||||
|
||||
## Migration Types
|
||||
|
||||
### 1. Create New DO Class
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"durable_objects": {
|
||||
"bindings": [
|
||||
{
|
||||
"name": "COUNTER",
|
||||
"class_name": "Counter"
|
||||
}
|
||||
]
|
||||
},
|
||||
"migrations": [
|
||||
{
|
||||
"tag": "v1", // Unique migration identifier
|
||||
"new_sqlite_classes": [ // SQLite backend (recommended)
|
||||
"Counter"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**For KV backend (legacy):**
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"migrations": [
|
||||
{
|
||||
"tag": "v1",
|
||||
"new_classes": ["Counter"] // KV backend (128MB limit)
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**CRITICAL:**
|
||||
- ✅ Use `new_sqlite_classes` for new DOs (10GB storage, atomic operations)
|
||||
- ❌ **Cannot** change KV backend to SQLite after deployment
|
||||
|
||||
---
|
||||
|
||||
### 2. Rename DO Class
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"durable_objects": {
|
||||
"bindings": [
|
||||
{
|
||||
"name": "MY_DO",
|
||||
"class_name": "NewClassName" // Updated class name
|
||||
}
|
||||
]
|
||||
},
|
||||
"migrations": [
|
||||
{
|
||||
"tag": "v1",
|
||||
"new_sqlite_classes": ["OldClassName"]
|
||||
},
|
||||
{
|
||||
"tag": "v2", // New migration tag
|
||||
"renamed_classes": [
|
||||
{
|
||||
"from": "OldClassName",
|
||||
"to": "NewClassName"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**What happens:**
|
||||
- ✅ All existing DO instances keep their data
|
||||
- ✅ Old bindings automatically forward to new class
|
||||
- ✅ `idFromName('foo')` still routes to same instance
|
||||
- ⚠️ **Must export new class** in Worker code
|
||||
|
||||
---
|
||||
|
||||
### 3. Delete DO Class
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"migrations": [
|
||||
{
|
||||
"tag": "v1",
|
||||
"new_sqlite_classes": ["Counter"]
|
||||
},
|
||||
{
|
||||
"tag": "v2",
|
||||
"deleted_classes": ["Counter"] // Mark for deletion
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**What happens:**
|
||||
- ✅ All DO instances **immediately deleted**
|
||||
- ✅ All storage **permanently deleted**
|
||||
- ⚠️ **CANNOT UNDO** - data is gone forever
|
||||
|
||||
**Before deleting:**
|
||||
1. Export data if needed
|
||||
2. Update Workers that reference this DO
|
||||
3. Consider rename instead (if migrating)
|
||||
|
||||
---
|
||||
|
||||
### 4. Transfer DO Class to Another Worker
|
||||
|
||||
**Destination Worker:**
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"durable_objects": {
|
||||
"bindings": [
|
||||
{
|
||||
"name": "TRANSFERRED_DO",
|
||||
"class_name": "TransferredClass"
|
||||
}
|
||||
]
|
||||
},
|
||||
"migrations": [
|
||||
{
|
||||
"tag": "v1",
|
||||
"transferred_classes": [
|
||||
{
|
||||
"from": "OriginalClass",
|
||||
"from_script": "original-worker", // Source Worker name
|
||||
"to": "TransferredClass"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**What happens:**
|
||||
- ✅ DO instances move to new Worker
|
||||
- ✅ All storage is transferred
|
||||
- ✅ Old bindings automatically forward to new Worker
|
||||
- ⚠️ **Must export new class** in destination Worker
|
||||
|
||||
---
|
||||
|
||||
## Migration Rules
|
||||
|
||||
### Tags Must Be Unique
|
||||
|
||||
```jsonc
|
||||
// ✅ CORRECT
|
||||
{
|
||||
"migrations": [
|
||||
{ "tag": "v1", "new_sqlite_classes": ["A"] },
|
||||
{ "tag": "v2", "new_sqlite_classes": ["B"] },
|
||||
{ "tag": "v3", "renamed_classes": [{ "from": "A", "to": "C" }] }
|
||||
]
|
||||
}
|
||||
|
||||
// ❌ WRONG: Duplicate tag
|
||||
{
|
||||
"migrations": [
|
||||
{ "tag": "v1", "new_sqlite_classes": ["A"] },
|
||||
{ "tag": "v1", "new_sqlite_classes": ["B"] } // ERROR
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Tags Are Append-Only
|
||||
|
||||
```jsonc
|
||||
// ✅ CORRECT: Add new tag
|
||||
{
|
||||
"migrations": [
|
||||
{ "tag": "v1", ... },
|
||||
{ "tag": "v2", ... } // Append
|
||||
]
|
||||
}
|
||||
|
||||
// ❌ WRONG: Remove or reorder
|
||||
{
|
||||
"migrations": [
|
||||
{ "tag": "v2", ... } // Can't remove v1
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Migrations Are Atomic
|
||||
|
||||
⚠️ **Cannot use gradual deployments** with migrations
|
||||
|
||||
- All DO instances migrate at once when you deploy
|
||||
- No partial rollout support
|
||||
- Use canary releases at Worker level, not DO level
|
||||
|
||||
---
|
||||
|
||||
## Migration Gotchas
|
||||
|
||||
### Global Uniqueness
|
||||
|
||||
DO class names are **globally unique per account**.
|
||||
|
||||
```typescript
|
||||
// Worker A
|
||||
export class Counter extends DurableObject { }
|
||||
|
||||
// Worker B
|
||||
export class Counter extends DurableObject { }
|
||||
// ❌ ERROR: Class name "Counter" already exists in account
|
||||
```
|
||||
|
||||
**Solution:** Use unique class names (e.g., prefix with Worker name)
|
||||
|
||||
```typescript
|
||||
// Worker A
|
||||
export class CounterA extends DurableObject { }
|
||||
|
||||
// Worker B
|
||||
export class CounterB extends DurableObject { }
|
||||
```
|
||||
|
||||
### Cannot Enable SQLite on Existing KV-backed DO
|
||||
|
||||
```jsonc
|
||||
// Deployed with:
|
||||
{ "tag": "v1", "new_classes": ["Counter"] } // KV backend
|
||||
|
||||
// ❌ WRONG: Cannot change to SQLite
|
||||
{ "tag": "v2", "renamed_classes": [{ "from": "Counter", "to": "CounterSQLite" }] }
|
||||
{ "tag": "v3", "new_sqlite_classes": ["CounterSQLite"] }
|
||||
|
||||
// ✅ CORRECT: Create new class instead
|
||||
{ "tag": "v2", "new_sqlite_classes": ["CounterV2"] }
|
||||
// Then migrate data from Counter to CounterV2
|
||||
```
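One way to move the data is a one-off Worker routine that reads from the old instance and writes to the new one over RPC (a sketch — the `OLD_COUNTER`/`NEW_COUNTER` bindings and the `exportState`/`importState` methods are hypothetical helpers you would add, not platform APIs):

```typescript
// Hypothetical one-off migration helper in a Worker
async function migrateCounter(name: string, env: Env): Promise<void> {
  const oldStub = env.OLD_COUNTER.getByName(name); // KV-backed Counter
  const newStub = env.NEW_COUNTER.getByName(name); // SQLite-backed CounterV2

  const state = await oldStub.exportState();  // RPC method added to Counter
  await newStub.importState(state);           // RPC method added to CounterV2
}
```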
|
||||
|
||||
### Code Changes Don't Need Migrations
|
||||
|
||||
```typescript
|
||||
// ✅ CORRECT: Just deploy code changes
|
||||
export class Counter extends DurableObject {
|
||||
async increment(): Promise<number> {
|
||||
// Changed implementation
|
||||
let value = await this.ctx.storage.get<number>('count') || 0;
|
||||
value += 2; // Changed from += 1
|
||||
await this.ctx.storage.put('count', value);
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
// No migration needed - deploy directly
|
||||
```
|
||||
|
||||
Only schema changes (new/rename/delete/transfer) need migrations.
|
||||
|
||||
---
|
||||
|
||||
## Environment-Specific Migrations
|
||||
|
||||
You can define migrations per environment:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
// Top-level (default) migrations
|
||||
"migrations": [
|
||||
{ "tag": "v1", "new_sqlite_classes": ["Counter"] }
|
||||
],
|
||||
|
||||
"env": {
|
||||
"production": {
|
||||
// Production-specific migrations override top-level
|
||||
"migrations": [
|
||||
{ "tag": "v1", "new_sqlite_classes": ["Counter"] },
|
||||
{ "tag": "v2", "new_sqlite_classes": ["Analytics"] }
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Rules:**
|
||||
- If migration defined at environment level, it overrides top-level
|
||||
- If NOT defined at environment level, inherits top-level
|
||||
|
||||
---
|
||||
|
||||
## Migration Workflow
|
||||
|
||||
### Example: Rename DO Class
|
||||
|
||||
**Step 1:** Current state (v1)
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"durable_objects": {
|
||||
"bindings": [{ "name": "MY_DO", "class_name": "OldName" }]
|
||||
},
|
||||
"migrations": [
|
||||
{ "tag": "v1", "new_sqlite_classes": ["OldName"] }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Step 2:** Update wrangler.jsonc
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"durable_objects": {
|
||||
"bindings": [{ "name": "MY_DO", "class_name": "NewName" }]
|
||||
},
|
||||
"migrations": [
|
||||
{ "tag": "v1", "new_sqlite_classes": ["OldName"] },
|
||||
{ "tag": "v2", "renamed_classes": [{ "from": "OldName", "to": "NewName" }] }
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Step 3:** Update Worker code
|
||||
|
||||
```typescript
|
||||
// Rename class
|
||||
export class NewName extends DurableObject { }
|
||||
export default NewName;
|
||||
```
|
||||
|
||||
**Step 4:** Deploy
|
||||
|
||||
```bash
|
||||
npx wrangler deploy
|
||||
```
|
||||
|
||||
Migration applies atomically on deploy.
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Error: "Migration tag already exists"
|
||||
|
||||
**Cause:** Trying to reuse a migration tag
|
||||
|
||||
**Solution:** Use a new, unique tag
|
||||
|
||||
### Error: "Class not found"
|
||||
|
||||
**Cause:** Class not exported from Worker
|
||||
|
||||
**Solution:** Ensure `export default MyDOClass;`
|
||||
|
||||
### Error: "Cannot enable SQLite on existing class"
|
||||
|
||||
**Cause:** Trying to migrate KV-backed DO to SQLite
|
||||
|
||||
**Solution:** Create new SQLite-backed class, migrate data manually
|
||||
|
||||
---
|
||||
|
||||
**Official Docs**: https://developers.cloudflare.com/durable-objects/reference/durable-objects-migrations/
|
||||
306
references/rpc-patterns.md
Normal file
306
references/rpc-patterns.md
Normal file
@@ -0,0 +1,306 @@
|
||||
# RPC vs Fetch Patterns - Decision Guide
|
||||
|
||||
When to use RPC methods vs HTTP fetch handler.
|
||||
|
||||
---
|
||||
|
||||
## Quick Decision Matrix
|
||||
|
||||
| Requirement | Use | Why |
|
||||
|-------------|-----|-----|
|
||||
| **New project (compat_date >= 2024-04-03)** | RPC | Simpler, type-safe |
|
||||
| **Type safety important** | RPC | TypeScript knows method signatures |
|
||||
| **Simple method calls** | RPC | Less boilerplate |
|
||||
| **WebSocket upgrade needed** | Fetch | Requires HTTP upgrade |
|
||||
| **Complex HTTP routing** | Fetch | Full request/response control |
|
||||
| **Need headers, cookies, status codes** | Fetch | HTTP-specific features |
|
||||
| **Legacy compatibility** | Fetch | Pre-2024-04-03 projects |
|
||||
| **Auto-serialization wanted** | RPC | Handles structured data automatically |
|
||||
|
||||
---
|
||||
|
||||
## RPC Pattern (Recommended)
|
||||
|
||||
### Enable RPC
|
||||
|
||||
Set compatibility date `>= 2024-04-03`:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"compatibility_date": "2025-10-22"
|
||||
}
|
||||
```
|
||||
|
||||
### Define RPC Methods
|
||||
|
||||
```typescript
|
||||
export class MyDO extends DurableObject {
|
||||
// Public methods are automatically exposed as RPC
|
||||
async increment(): Promise<number> {
|
||||
// ...
|
||||
}
|
||||
|
||||
async get(): Promise<number> {
|
||||
// ...
|
||||
}
|
||||
|
||||
// Private methods are NOT exposed
|
||||
private async internalHelper(): Promise<void> {
|
||||
// ...
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Call from Worker
|
||||
|
||||
```typescript
|
||||
const stub = env.MY_DO.getByName('my-instance');
|
||||
|
||||
// Direct method calls
|
||||
const count = await stub.increment();
|
||||
const value = await stub.get();
|
||||
```
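The type safety comes from typing the binding with the DO class, so TypeScript knows the stub's method signatures (a sketch; `MY_DO` matches the binding used above):

```typescript
interface Env {
  MY_DO: DurableObjectNamespace<MyDO>; // stub.increment() / stub.get() are now type-checked
}
```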
|
||||
|
||||
### Advantages
|
||||
|
||||
✅ **Type-safe** - TypeScript knows method signatures
|
||||
✅ **Less boilerplate** - No HTTP ceremony
|
||||
✅ **Auto-serialization** - Structured data works seamlessly
|
||||
✅ **Exception propagation** - Errors thrown in DO received in Worker
|
||||
|
||||
### Limitations
|
||||
|
||||
❌ Cannot use HTTP-specific features (headers, status codes)
|
||||
❌ Cannot handle WebSocket upgrades
|
||||
❌ Requires compat_date >= 2024-04-03
|
||||
|
||||
---
|
||||
|
||||
## HTTP Fetch Pattern
|
||||
|
||||
### Define fetch() Handler
|
||||
|
||||
```typescript
|
||||
export class MyDO extends DurableObject {
|
||||
async fetch(request: Request): Promise<Response> {
|
||||
const url = new URL(request.url);
|
||||
|
||||
if (url.pathname === '/increment' && request.method === 'POST') {
|
||||
// ...
|
||||
return new Response(JSON.stringify({ count }), {
|
||||
headers: { 'content-type': 'application/json' },
|
||||
});
|
||||
}
|
||||
|
||||
return new Response('Not found', { status: 404 });
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Call from Worker
|
||||
|
||||
```typescript
|
||||
const stub = env.MY_DO.getByName('my-instance');
|
||||
|
||||
const response = await stub.fetch('https://fake-host/increment', {
|
||||
method: 'POST',
|
||||
});
|
||||
|
||||
const data = await response.json();
|
||||
```
|
||||
|
||||
### Advantages
|
||||
|
||||
✅ **Full HTTP control** - Headers, cookies, status codes
|
||||
✅ **WebSocket upgrades** - Required for WebSocket server
|
||||
✅ **Complex routing** - Use path, method, headers for routing
|
||||
✅ **Legacy compatible** - Works with pre-2024-04-03
|
||||
|
||||
### Limitations
|
||||
|
||||
❌ More boilerplate - Manual JSON parsing, response creation
|
||||
❌ No type safety - Worker doesn't know what methods exist
|
||||
❌ Manual error handling - Must parse HTTP status codes
|
||||
|
||||
---
|
||||
|
||||
## Hybrid Pattern (Both)
|
||||
|
||||
Use both RPC and fetch() in same DO:
|
||||
|
||||
```typescript
|
||||
export class MyDO extends DurableObject {
|
||||
// RPC method for simple calls
|
||||
async getStatus(): Promise<{ active: boolean }> {
|
||||
return { active: true };
|
||||
}
|
||||
|
||||
// Fetch for WebSocket upgrade
|
||||
async fetch(request: Request): Promise<Response> {
|
||||
const upgradeHeader = request.headers.get('Upgrade');
|
||||
|
||||
if (upgradeHeader === 'websocket') {
|
||||
// Handle WebSocket upgrade
|
||||
const pair = new WebSocketPair();
|
||||
const [client, server] = Object.values(pair);
|
||||
|
||||
this.ctx.acceptWebSocket(server);
|
||||
|
||||
return new Response(null, {
|
||||
status: 101,
|
||||
webSocket: client,
|
||||
});
|
||||
}
|
||||
|
||||
return new Response('Not found', { status: 404 });
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Call from Worker:**
|
||||
|
||||
```typescript
|
||||
const stub = env.MY_DO.getByName('my-instance');
|
||||
|
||||
// Use RPC for status
|
||||
const status = await stub.getStatus();
|
||||
|
||||
// Use fetch for WebSocket upgrade
|
||||
const response = await stub.fetch(request);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## RPC Serialization
|
||||
|
||||
**What works:**
|
||||
- ✅ Primitives (string, number, boolean, null)
|
||||
- ✅ Objects (plain objects)
|
||||
- ✅ Arrays
|
||||
- ✅ Nested structures
|
||||
- ✅ Date objects
|
||||
- ✅ ArrayBuffer, Uint8Array, etc.
|
||||
|
||||
**What doesn't work:**
|
||||
- ❌ Functions
|
||||
- ❌ Symbols
|
||||
- ❌ Circular references
|
||||
- ❌ Arbitrary class instances (only structured-clone-compatible types such as Date, Map, Set, and ArrayBuffer survive)
|
||||
|
||||
**Example:**
|
||||
|
||||
```typescript
|
||||
// ✅ WORKS
|
||||
async getData(): Promise<{ users: string[]; count: number }> {
|
||||
return {
|
||||
users: ['alice', 'bob'],
|
||||
count: 2,
|
||||
};
|
||||
}
|
||||
|
||||
// ❌ DOESN'T WORK
|
||||
async getFunction(): Promise<() => void> {
|
||||
return () => console.log('hello'); // Functions not serializable
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Error Handling
|
||||
|
||||
### RPC Error Handling
|
||||
|
||||
```typescript
|
||||
// In DO
|
||||
async doWork(): Promise<void> {
|
||||
if (somethingWrong) {
|
||||
throw new Error('Something went wrong');
|
||||
}
|
||||
}
|
||||
|
||||
// In Worker
|
||||
try {
|
||||
await stub.doWork();
|
||||
} catch (error) {
|
||||
console.error('RPC error:', error.message);
|
||||
// Error propagated from DO
|
||||
}
|
||||
```
|
||||
|
||||
### Fetch Error Handling
|
||||
|
||||
```typescript
|
||||
// In DO
|
||||
async fetch(request: Request): Promise<Response> {
|
||||
if (somethingWrong) {
|
||||
return new Response(JSON.stringify({ error: 'Something went wrong' }), {
|
||||
status: 500,
|
||||
});
|
||||
}
|
||||
|
||||
return new Response('OK');
|
||||
}
|
||||
|
||||
// In Worker
|
||||
const response = await stub.fetch(request);
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json();
|
||||
console.error('Fetch error:', error);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Migration from Fetch to RPC
|
||||
|
||||
**Before (Fetch):**
|
||||
|
||||
```typescript
|
||||
export class Counter extends DurableObject {
|
||||
async fetch(request: Request): Promise<Response> {
|
||||
const url = new URL(request.url);
|
||||
|
||||
if (url.pathname === '/increment') {
|
||||
let count = await this.ctx.storage.get<number>('count') || 0;
|
||||
count += 1;
|
||||
await this.ctx.storage.put('count', count);
|
||||
|
||||
return new Response(JSON.stringify({ count }), {
|
||||
headers: { 'content-type': 'application/json' },
|
||||
});
|
||||
}
|
||||
|
||||
return new Response('Not found', { status: 404 });
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**After (RPC):**
|
||||
|
||||
```typescript
|
||||
export class Counter extends DurableObject {
|
||||
async increment(): Promise<number> {
|
||||
let count = await this.ctx.storage.get<number>('count') || 0;
|
||||
count += 1;
|
||||
await this.ctx.storage.put('count', count);
|
||||
return count;
|
||||
}
|
||||
}
|
||||
|
||||
// Worker before:
|
||||
const response = await stub.fetch('https://fake-host/increment');
|
||||
const { count } = await response.json();
|
||||
|
||||
// Worker after:
|
||||
const count = await stub.increment();
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- ✅ ~60% less code
|
||||
- ✅ Type-safe
|
||||
- ✅ Cleaner, more maintainable
|
||||
|
||||
---
|
||||
|
||||
**Official Docs**: https://developers.cloudflare.com/durable-objects/best-practices/create-durable-object-stubs-and-send-requests/
|
||||
293
references/state-api-reference.md
Normal file
293
references/state-api-reference.md
Normal file
@@ -0,0 +1,293 @@
|
||||
# Durable Objects State API Reference
|
||||
|
||||
Complete reference for the State API (SQL and Key-Value storage).
|
||||
|
||||
---
|
||||
|
||||
## SQL API (SQLite Backend)
|
||||
|
||||
Access via `ctx.storage.sql` (requires the SQLite backend in your migration). The examples below assume `this.sql = this.ctx.storage.sql` is assigned in the constructor.
|
||||
|
||||
### `exec(query, ...params)`
|
||||
|
||||
Execute SQL query with optional parameters. Returns cursor.
|
||||
|
||||
```typescript
|
||||
// Insert with RETURNING
|
||||
const cursor = this.sql.exec(
|
||||
'INSERT INTO users (name, email) VALUES (?, ?) RETURNING id',
|
||||
'Alice',
|
||||
'alice@example.com'
|
||||
);
|
||||
|
||||
// SELECT
|
||||
const cursor = this.sql.exec('SELECT * FROM users WHERE id = ?', userId);
|
||||
|
||||
// UPDATE
|
||||
this.sql.exec('UPDATE users SET email = ? WHERE id = ?', newEmail, userId);
|
||||
|
||||
// DELETE
|
||||
this.sql.exec('DELETE FROM users WHERE id = ?', userId);
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `query` (string): SQL query with `?` placeholders
|
||||
- `...params` (any[]): Values to bind to placeholders
|
||||
|
||||
**Returns:** `SqlCursor`
|
||||
|
||||
### Cursor Methods
|
||||
|
||||
```typescript
|
||||
// Get single row (throws if 0 or >1 rows)
|
||||
const row = cursor.one<{ id: number; name: string }>();
|
||||
|
||||
// Get single row, or null when there are no rows (one() throws unless exactly one row is returned)
|
||||
const maybeRow = cursor.toArray<RowType>()[0] ?? null;
|
||||
|
||||
// Get all rows as array
|
||||
const rows = cursor.toArray<RowType>();
|
||||
|
||||
// Iterate cursor
|
||||
for (const row of cursor) {
|
||||
console.log(row.name);
|
||||
}
|
||||
```
|
||||
|
||||
### Transactions (Synchronous)
|
||||
|
||||
```typescript
|
||||
this.ctx.storage.transactionSync(() => {
|
||||
this.sql.exec('INSERT INTO table1 ...');
|
||||
this.sql.exec('UPDATE table2 ...');
|
||||
// All or nothing - atomic
|
||||
});
|
||||
```
|
||||
|
||||
**CRITICAL:** Must be synchronous (no `async`/`await` inside).
|
||||
|
||||
---
|
||||
|
||||
## Key-Value API
|
||||
|
||||
Available on both SQLite and KV backends via `ctx.storage`.
|
||||
|
||||
### `get(key)` / `get(keys[])`
|
||||
|
||||
Get single or multiple values.
|
||||
|
||||
```typescript
|
||||
// Get single value
|
||||
const value = await this.ctx.storage.get<number>('count');
|
||||
|
||||
// Get multiple values (returns Map)
|
||||
const map = await this.ctx.storage.get<string>(['key1', 'key2', 'key3']);
|
||||
|
||||
// Iterate Map
|
||||
for (const [key, value] of map.entries()) {
|
||||
console.log(key, value);
|
||||
}
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `key` (string): Key to retrieve
|
||||
- `keys` (string[]): Array of keys to retrieve
|
||||
|
||||
**Returns:** Promise<value> or Promise<Map<string, value>>
|
||||
|
||||
### `put(key, value)` / `put(entries)`
|
||||
|
||||
Put single or multiple values.
|
||||
|
||||
```typescript
|
||||
// Put single value
|
||||
await this.ctx.storage.put('count', 42);
|
||||
|
||||
// Put multiple values
|
||||
await this.ctx.storage.put({
|
||||
key1: 'value1',
|
||||
key2: 'value2',
|
||||
key3: 'value3',
|
||||
});
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `key` (string): Key to store
|
||||
- `value` (any): Value to store (must be serializable)
|
||||
- `entries` (Record<string, any>): Object with key-value pairs
|
||||
|
||||
**Returns:** Promise<void>
|
||||
|
||||
### `delete(key)` / `delete(keys[])`
|
||||
|
||||
Delete single or multiple keys.
|
||||
|
||||
```typescript
|
||||
// Delete single key
|
||||
await this.ctx.storage.delete('key1');
|
||||
|
||||
// Delete multiple keys
|
||||
await this.ctx.storage.delete(['key1', 'key2', 'key3']);
|
||||
```
|
||||
|
||||
**Returns:** Promise<boolean> (true if deleted)
|
||||
|
||||
### `list(options)`
|
||||
|
||||
List keys with optional filtering.
|
||||
|
||||
```typescript
|
||||
// List all keys
|
||||
const map = await this.ctx.storage.list();
|
||||
|
||||
// List with prefix
|
||||
const map = await this.ctx.storage.list({ prefix: 'user:' });
|
||||
|
||||
// List with limit
|
||||
const map = await this.ctx.storage.list({ limit: 100 });
|
||||
|
||||
// List in reverse order
|
||||
const map = await this.ctx.storage.list({ reverse: true });
|
||||
|
||||
// List with start/end range
|
||||
const map = await this.ctx.storage.list({
|
||||
start: 'user:a',
|
||||
end: 'user:z',
|
||||
});
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `prefix` (string): Filter keys by prefix
|
||||
- `limit` (number): Max keys to return
|
||||
- `reverse` (boolean): Reverse order
|
||||
- `start` (string): Start key (inclusive)
|
||||
- `end` (string): End key (exclusive)
|
||||
|
||||
**Returns:** Promise<Map<string, any>>
|
||||
|
||||
### `deleteAll()`
|
||||
|
||||
Delete all storage (DO will cease to exist after shutdown).
|
||||
|
||||
```typescript
|
||||
// Delete alarm first
|
||||
await this.ctx.storage.deleteAlarm();
|
||||
|
||||
// Delete all storage
|
||||
await this.ctx.storage.deleteAll();
|
||||
```
|
||||
|
||||
**CRITICAL:**
|
||||
- ✅ Atomic on SQLite backend
|
||||
- ⚠️ May be partial on KV backend
|
||||
|
||||
**Returns:** Promise<void>
|
||||
|
||||
### `transaction(callback)`
|
||||
|
||||
Async transaction for KV operations.
|
||||
|
||||
```typescript
|
||||
await this.ctx.storage.transaction(async (txn) => {
|
||||
const value = await txn.get('count');
|
||||
await txn.put('count', value + 1);
|
||||
await txn.put('lastUpdate', Date.now());
|
||||
// All or nothing
|
||||
});
|
||||
```
|
||||
|
||||
**Returns:** Promise<any> (callback return value)
|
||||
|
||||
---
|
||||
|
||||
## Alarms API
|
||||
|
||||
### `setAlarm(time)`
|
||||
|
||||
Schedule alarm to fire at specific time.
|
||||
|
||||
```typescript
|
||||
// Fire in 60 seconds
|
||||
await this.ctx.storage.setAlarm(Date.now() + 60000);
|
||||
|
||||
// Fire at specific date
|
||||
await this.ctx.storage.setAlarm(new Date('2025-12-31T23:59:59Z'));
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `time` (number | Date): Timestamp or Date to fire
|
||||
|
||||
**Returns:** Promise<void>
|
||||
|
||||
### `getAlarm()`
|
||||
|
||||
Get current alarm time (null if not set).
|
||||
|
||||
```typescript
|
||||
const alarmTime = await this.ctx.storage.getAlarm();
|
||||
|
||||
if (alarmTime) {
|
||||
console.log(`Alarm scheduled for ${new Date(alarmTime).toISOString()}`);
|
||||
}
|
||||
```
|
||||
|
||||
**Returns:** Promise<number | null>
|
||||
|
||||
### `deleteAlarm()`
|
||||
|
||||
Delete scheduled alarm.
|
||||
|
||||
```typescript
|
||||
await this.ctx.storage.deleteAlarm();
|
||||
```
|
||||
|
||||
**Returns:** Promise<void>
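When a scheduled alarm fires, the runtime calls the DO's `alarm()` handler (retried with backoff on failure); a minimal sketch (`cleanupExpiredRows()` is a hypothetical helper):

```typescript
async alarm(): Promise<void> {
  await this.cleanupExpiredRows(); // do the scheduled work (hypothetical helper)
  await this.ctx.storage.setAlarm(Date.now() + 60 * 60 * 1000); // re-schedule for periodic work
}
```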
|
||||
|
||||
---
|
||||
|
||||
## Storage Limits
|
||||
|
||||
| Backend | Max Storage | deleteAll() Atomic |
|
||||
|---------|-------------|-------------------|
|
||||
| SQLite | 10 GB | ✅ Yes |
|
||||
| KV | 128 MB | ❌ No (may be partial) |
|
||||
|
||||
---
|
||||
|
||||
## Best Practices
|
||||
|
||||
✅ **Always use parameterized queries** (SQL)
|
||||
```typescript
|
||||
// ✅ CORRECT
|
||||
this.sql.exec('SELECT * FROM users WHERE id = ?', userId);
|
||||
|
||||
// ❌ WRONG (SQL injection risk)
|
||||
this.sql.exec(`SELECT * FROM users WHERE id = ${userId}`);
|
||||
```
|
||||
|
||||
✅ **Use transactions for multi-step operations**
|
||||
```typescript
|
||||
this.ctx.storage.transactionSync(() => {
|
||||
this.sql.exec('INSERT ...');
|
||||
this.sql.exec('UPDATE ...');
|
||||
});
|
||||
```
|
||||
|
||||
✅ **Create indexes for frequently queried columns**
|
||||
```typescript
|
||||
this.sql.exec('CREATE INDEX idx_user_email ON users(email)');
|
||||
```
|
||||
|
||||
✅ **Monitor storage size** (before approaching the 10 GB limit)
|
||||
```typescript
|
||||
const size = await this.estimateStorageSize();
|
||||
if (size > 9_000_000_000) { // 9 GB (90% of the 10 GB limit)
|
||||
await this.cleanup();
|
||||
}
|
||||
```
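On the SQLite backend, `ctx.storage.sql.databaseSize` reports the current database size in bytes — one way to implement the `estimateStorageSize()` helper assumed above:

```typescript
async estimateStorageSize(): Promise<number> {
  return this.ctx.storage.sql.databaseSize; // bytes used by the SQLite database
}
```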
|
||||
|
||||
---
|
||||
|
||||
**Official Docs**:
|
||||
- SQL API: https://developers.cloudflare.com/durable-objects/api/sqlite-storage-api/
|
||||
- KV API: https://developers.cloudflare.com/durable-objects/api/legacy-kv-storage-api/
|
||||
427
references/top-errors.md
Normal file
427
references/top-errors.md
Normal file
@@ -0,0 +1,427 @@
|
||||
# Top 15+ Documented Errors and Solutions
|
||||
|
||||
Complete reference for common Durable Objects errors and how to prevent them.
|
||||
|
||||
---
|
||||
|
||||
## 1. Class Not Exported
|
||||
|
||||
**Error:** `"binding not found"`, `"Class X not found"`
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/get-started/
|
||||
|
||||
**Why It Happens:** Durable Object class not exported from Worker
|
||||
|
||||
**Solution:**
|
||||
```typescript
|
||||
export class MyDO extends DurableObject { }
|
||||
|
||||
// CRITICAL: Export as default
|
||||
export default MyDO;
|
||||
|
||||
// In Worker, also export for Wrangler
|
||||
export { MyDO };
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 2. Missing Migration
|
||||
|
||||
**Error:** `"migrations required"`, `"no migration found for class"`
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/reference/durable-objects-migrations/
|
||||
|
||||
**Why It Happens:** Created DO class without migration entry
|
||||
|
||||
**Solution:** Always add migration when creating new DO class
|
||||
```jsonc
|
||||
{
|
||||
"migrations": [
|
||||
{
|
||||
"tag": "v1",
|
||||
"new_sqlite_classes": ["MyDO"]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 3. Wrong Migration Type (KV vs SQLite)
|
||||
|
||||
**Error:** Schema errors, storage API mismatch
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/api/sqlite-storage-api/
|
||||
|
||||
**Why It Happens:** Used `new_classes` instead of `new_sqlite_classes`
|
||||
|
||||
**Solution:** Use `new_sqlite_classes` for SQLite backend (recommended)
|
||||
```jsonc
|
||||
{
|
||||
"migrations": [
|
||||
{
|
||||
"tag": "v1",
|
||||
"new_sqlite_classes": ["MyDO"] // ← SQLite (1GB, atomic)
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 4. Constructor Overhead Blocks Hibernation Wake
|
||||
|
||||
**Error:** Slow hibernation wake-up times, high latency
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/best-practices/access-durable-objects-storage/
|
||||
|
||||
**Why It Happens:** Heavy work in constructor delays all requests
|
||||
|
||||
**Solution:** Minimize constructor, use `blockConcurrencyWhile()`
|
||||
```typescript
|
||||
constructor(ctx, env) {
|
||||
super(ctx, env);
|
||||
|
||||
// Minimal initialization
|
||||
this.sessions = new Map();
|
||||
|
||||
// Load from storage (blocks requests until complete)
|
||||
ctx.blockConcurrencyWhile(async () => {
|
||||
this.data = await ctx.storage.get('data');
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 5. setTimeout Breaks Hibernation
|
||||
|
||||
**Error:** DO never hibernates, high duration charges
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/concepts/durable-object-lifecycle/
|
||||
|
||||
**Why It Happens:** `setTimeout`/`setInterval` prevents hibernation
|
||||
|
||||
**Solution:** Use alarms API instead
|
||||
```typescript
|
||||
// ❌ WRONG: Prevents hibernation
|
||||
setTimeout(() => this.doWork(), 60000);
|
||||
|
||||
// ✅ CORRECT: Allows hibernation
|
||||
await this.ctx.storage.setAlarm(Date.now() + 60000);
|
||||
|
||||
async alarm() {
|
||||
this.doWork();
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 6. In-Memory State Lost on Hibernation
|
||||
|
||||
**Error:** WebSocket metadata lost, state reset unexpectedly
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/best-practices/websockets/
|
||||
|
||||
**Why It Happens:** Relied on in-memory state that's cleared on hibernation
|
||||
|
||||
**Solution:** Use `serializeAttachment()` for WebSocket metadata
|
||||
```typescript
|
||||
// Persist metadata
|
||||
ws.serializeAttachment({ userId, username });
|
||||
|
||||
// Restore in constructor
|
||||
constructor(ctx, env) {
|
||||
super(ctx, env);
|
||||
|
||||
this.sessions = new Map();
|
||||
|
||||
ctx.getWebSockets().forEach(ws => {
|
||||
const metadata = ws.deserializeAttachment();
|
||||
this.sessions.set(ws, metadata);
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 7. Outgoing WebSocket Cannot Hibernate
|
||||
|
||||
**Error:** High charges despite using hibernation API
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/best-practices/websockets/
|
||||
|
||||
**Why It Happens:** Outgoing WebSockets don't support hibernation
|
||||
|
||||
**Solution:** Only use hibernation for server-side (incoming) WebSockets
|
||||
|
||||
**Note:** DO must be WebSocket server, not client.
|
||||
|
||||
---
|
||||
|
||||
## 8. Global Uniqueness Confusion
|
||||
|
||||
**Error:** Unexpected DO class name conflicts across Workers
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/platform/known-issues/#global-uniqueness
|
||||
|
||||
**Why It Happens:** DO class names are globally unique per account
|
||||
|
||||
**Solution:** Understand scope and use unique class names
|
||||
```typescript
|
||||
// Worker A
|
||||
export class CounterA extends DurableObject { }
|
||||
|
||||
// Worker B
|
||||
export class CounterB extends DurableObject { }
|
||||
|
||||
// ❌ WRONG: Both use "Counter" → conflict
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 9. Partial deleteAll on KV Backend
|
||||
|
||||
**Error:** Storage not fully deleted, billing continues
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/api/legacy-kv-storage-api/
|
||||
|
||||
**Why It Happens:** KV backend `deleteAll()` can fail partially
|
||||
|
||||
**Solution:** Use SQLite backend for atomic deleteAll
|
||||
```jsonc
|
||||
{
|
||||
"migrations": [
|
||||
{ "tag": "v1", "new_sqlite_classes": ["MyDO"] } // Atomic operations
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 10. Binding Name Mismatch
|
||||
|
||||
**Error:** Runtime error accessing DO binding, `undefined`
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/get-started/
|
||||
|
||||
**Why It Happens:** Binding name in wrangler.jsonc doesn't match code
|
||||
|
||||
**Solution:** Ensure consistency
|
||||
```jsonc
|
||||
{
|
||||
"durable_objects": {
|
||||
"bindings": [
|
||||
{ "name": "MY_DO", "class_name": "MyDO" }
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
```typescript
|
||||
// Must match binding name
|
||||
env.MY_DO.getByName('instance');
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 11. State Size Exceeded
|
||||
|
||||
**Error:** `"state limit exceeded"`, storage errors
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/platform/pricing/
|
||||
|
||||
**Why It Happens:** Exceeded the 10 GB (SQLite) or 128 MB (KV) storage limit
|
||||
|
||||
**Solution:** Monitor storage size, implement cleanup
|
||||
```typescript
|
||||
async checkStorageSize(): Promise<void> {
|
||||
const size = await this.estimateSize();
|
||||
|
||||
  if (size > 9_000_000_000) { // 9 GB (90% of the 10 GB limit)
|
||||
await this.cleanup();
|
||||
}
|
||||
}
|
||||
|
||||
async alarm() {
|
||||
// Periodic cleanup
|
||||
const cutoff = Date.now() - (30 * 24 * 60 * 60 * 1000);
|
||||
this.sql.exec('DELETE FROM messages WHERE created_at < ?', cutoff);
|
||||
|
||||
await this.ctx.storage.setAlarm(Date.now() + 86400000);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 12. Migration Not Atomic
|
||||
|
||||
**Error:** Gradual deployment blocked, migration errors
|
||||
|
||||
**Source:** https://developers.cloudflare.com/workers/configuration/versions-and-deployments/gradual-deployments/
|
||||
|
||||
**Why It Happens:** Tried to use gradual rollout with migrations
|
||||
|
||||
**Solution:** Understand migrations deploy atomically
|
||||
- All DO instances migrate at once
|
||||
- Cannot use gradual deployment with migrations
|
||||
- Test thoroughly before deploying
|
||||
|
||||
---
|
||||
|
||||
## 13. Location Hint Ignored
|
||||
|
||||
**Error:** DO created in wrong region, higher latency
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/reference/data-location/
|
||||
|
||||
**Why It Happens:** Location hints are best-effort, not guaranteed
|
||||
|
||||
**Solution:** Use jurisdiction for strict requirements
|
||||
```typescript
|
||||
// ⚠️ Best-effort (not guaranteed)
|
||||
const stub = env.MY_DO.get(id, { locationHint: 'enam' });
|
||||
|
||||
// ✅ Strictly enforced
|
||||
const euId = env.MY_DO.newUniqueId({ jurisdiction: 'eu' });
|
||||
const stub = env.MY_DO.get(euId);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 14. Alarm Retry Failures
|
||||
|
||||
**Error:** Tasks lost after repeated alarm failures
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/api/alarms/
|
||||
|
||||
**Why It Happens:** Alarm handler throws errors repeatedly, exhausts retries
|
||||
|
||||
**Solution:** Implement idempotent alarm handlers with retry limits
|
||||
```typescript
|
||||
async alarm(info: { retryCount: number }): Promise<void> {
|
||||
if (info.retryCount > 3) {
|
||||
console.error('Giving up after 3 retries');
|
||||
// Log failure, clean up state
|
||||
await this.logFailure();
|
||||
return;
|
||||
}
|
||||
|
||||
// Idempotent operation (safe to retry)
|
||||
await this.processWithIdempotency();
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 15. Fetch Blocks Hibernation
|
||||
|
||||
**Error:** DO never hibernates despite using hibernation API
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/concepts/durable-object-lifecycle/
|
||||
|
||||
**Why It Happens:** In-progress `fetch()` requests prevent hibernation
|
||||
|
||||
**Solution:** Ensure all async I/O completes before idle period
|
||||
```typescript
|
||||
async webSocketMessage(ws: WebSocket, message: string): Promise<void> {
|
||||
// ✅ GOOD: Await all I/O before returning
|
||||
const response = await fetch('https://api.example.com/data');
|
||||
const data = await response.json();
|
||||
ws.send(JSON.stringify(data));
|
||||
// Handler completes → can hibernate
|
||||
|
||||
// ❌ BAD: Background fetch prevents hibernation
|
||||
this.ctx.waitUntil(
|
||||
fetch('https://api.example.com/log').then(r => r.json())
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 16. Cannot Enable SQLite on Existing KV DO
|
||||
|
||||
**Error:** Migration fails, schema errors
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/reference/durable-objects-migrations/
|
||||
|
||||
**Why It Happens:** Attempted to migrate existing KV-backed DO to SQLite
|
||||
|
||||
**Solution:** Create new SQLite-backed DO class, migrate data manually
|
||||
```jsonc
|
||||
// ❌ WRONG: Cannot change existing DO backend
|
||||
{
|
||||
"migrations": [
|
||||
{ "tag": "v1", "new_classes": ["Counter"] }, // KV backend
|
||||
{ "tag": "v2", "renamed_classes": [{ "from": "Counter", "to": "CounterSQLite" }] }
|
||||
// This doesn't change backend!
|
||||
]
|
||||
}
|
||||
|
||||
// ✅ CORRECT: Create new class
|
||||
{
|
||||
"migrations": [
|
||||
{ "tag": "v1", "new_classes": ["Counter"] },
|
||||
{ "tag": "v2", "new_sqlite_classes": ["CounterV2"] }
|
||||
]
|
||||
}
|
||||
|
||||
// Then migrate data from Counter to CounterV2
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 17. SQL Injection Vulnerability
|
||||
|
||||
**Error:** Security vulnerability, data breach
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/api/sqlite-storage-api/
|
||||
|
||||
**Why It Happens:** String concatenation in SQL queries
|
||||
|
||||
**Solution:** Always use parameterized queries
|
||||
```typescript
|
||||
// ❌ WRONG: SQL injection risk
|
||||
this.sql.exec(`SELECT * FROM users WHERE email = '${userEmail}'`);
|
||||
|
||||
// ✅ CORRECT: Parameterized query
|
||||
this.sql.exec('SELECT * FROM users WHERE email = ?', userEmail);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 18. Standard WebSocket API Used
|
||||
|
||||
**Error:** High duration charges, no hibernation
|
||||
|
||||
**Source:** https://developers.cloudflare.com/durable-objects/best-practices/websockets/
|
||||
|
||||
**Why It Happens:** Used `ws.accept()` instead of `ctx.acceptWebSocket()`
|
||||
|
||||
**Solution:** Use hibernation API
|
||||
```typescript
|
||||
// ❌ WRONG: Standard API, no hibernation
|
||||
server.accept();
|
||||
|
||||
// ✅ CORRECT: Hibernation API
|
||||
this.ctx.acceptWebSocket(server);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Quick Error Lookup
|
||||
|
||||
| Error Message | Issue # | Quick Fix |
|
||||
|---------------|---------|-----------|
|
||||
| "binding not found" | #1 | Export DO class |
|
||||
| "migrations required" | #2 | Add migration |
|
||||
| Slow wake-up | #4 | Minimize constructor |
|
||||
| High duration charges | #5, #15 | Use alarms, await I/O |
|
||||
| State lost | #6 | serializeAttachment |
|
||||
| "state limit exceeded" | #11 | Implement cleanup |
|
||||
| "SQL injection" | #17 | Parameterized queries |
|
||||
|
||||
---
|
||||
|
||||
**For more help:** Check official docs and GitHub issues at https://github.com/cloudflare/workerd/issues
|
||||
289
references/websocket-hibernation.md
Normal file
289
references/websocket-hibernation.md
Normal file
@@ -0,0 +1,289 @@
|
||||
# WebSocket Hibernation API Deep Dive
|
||||
|
||||
Complete guide to WebSocket hibernation for cost savings.
|
||||
|
||||
---
|
||||
|
||||
## Why WebSocket Hibernation?
|
||||
|
||||
Traditional WebSocket connections keep the Durable Object **active in memory**, incurring duration charges even when idle.
|
||||
|
||||
**With Hibernation:**
|
||||
- ✅ DO hibernates when idle (~10 seconds no activity)
|
||||
- ✅ WebSocket clients **stay connected** to Cloudflare edge
|
||||
- ✅ DO wakes up automatically when messages arrive
|
||||
- ✅ **Massive cost savings** for long-lived connections
|
||||
|
||||
**Cost Example:**
|
||||
- 1000 WebSocket connections for 1 hour
|
||||
- Without hibernation: ~$0.50/hour (the DO stays active for the full hour)
|
||||
- With hibernation: ~$0.05/hour (assuming ~90% idle time)
|
||||
- **~90% savings**
|
||||
|
||||
---
|
||||
|
||||
## Hibernation Lifecycle
|
||||
|
||||
```
|
||||
1. ACTIVE → DO in memory, handling messages
|
||||
2. IDLE → No messages for ~10 seconds
|
||||
3. HIBERNATE → In-memory state cleared, WebSockets stay connected
|
||||
4. WAKE → New message → constructor runs → handler called
|
||||
```
|
||||
|
||||
**CRITICAL:** In-memory state is **LOST** on hibernation!
|
||||
|
||||
---
|
||||
|
||||
## Enable Hibernation
|
||||
|
||||
### Use `ctx.acceptWebSocket()`
|
||||
|
||||
```typescript
|
||||
// ✅ CORRECT: Enables hibernation
|
||||
this.ctx.acceptWebSocket(server);
|
||||
|
||||
// ❌ WRONG: Standard API, NO hibernation
|
||||
server.accept();
|
||||
```
|
||||
|
||||
**Only works for server-side (incoming) WebSockets.**
|
||||
|
||||
---
|
||||
|
||||
## Handler Methods
|
||||
|
||||
### `webSocketMessage(ws, message)`
|
||||
|
||||
Called when WebSocket receives a message (even if hibernated).
|
||||
|
||||
```typescript
|
||||
async webSocketMessage(ws: WebSocket, message: string | ArrayBuffer): Promise<void> {
|
||||
if (typeof message === 'string') {
|
||||
const data = JSON.parse(message);
|
||||
// Handle message
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `ws` (WebSocket): The WebSocket that received the message
|
||||
- `message` (string | ArrayBuffer): The message data
|
||||
|
||||
### `webSocketClose(ws, code, reason, wasClean)`
|
||||
|
||||
Called when WebSocket closes.
|
||||
|
||||
```typescript
|
||||
async webSocketClose(ws: WebSocket, code: number, reason: string, wasClean: boolean): Promise<void> {
|
||||
// Cleanup
|
||||
this.sessions.delete(ws);
|
||||
|
||||
// Close the WebSocket
|
||||
ws.close(code, 'Durable Object closing WebSocket');
|
||||
}
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
- `ws` (WebSocket): The WebSocket that closed
|
||||
- `code` (number): Close code
|
||||
- `reason` (string): Close reason
|
||||
- `wasClean` (boolean): True if closed cleanly
|
||||
|
||||
### `webSocketError(ws, error)`
|
||||
|
||||
Called on WebSocket errors (not disconnections).
|
||||
|
||||
```typescript
|
||||
async webSocketError(ws: WebSocket, error: any): Promise<void> {
|
||||
console.error('WebSocket error:', error);
|
||||
this.sessions.delete(ws);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Persist Metadata with Attachments
|
||||
|
||||
Use `serializeAttachment()` / `deserializeAttachment()` to persist per-WebSocket metadata across hibernation.
|
||||
|
||||
### Serialize on Accept
|
||||
|
||||
```typescript
|
||||
const metadata = { userId: '123', username: 'Alice' };
|
||||
|
||||
// Persist metadata
|
||||
server.serializeAttachment(metadata);
|
||||
|
||||
// Track in-memory
|
||||
this.sessions.set(server, metadata);
|
||||
```
|
||||
|
||||
### Deserialize in Constructor
|
||||
|
||||
```typescript
|
||||
constructor(ctx: DurableObjectState, env: Env) {
|
||||
super(ctx, env);
|
||||
|
||||
// Restore WebSocket connections after hibernation
|
||||
this.sessions = new Map();
|
||||
|
||||
ctx.getWebSockets().forEach((ws) => {
|
||||
// Restore metadata
|
||||
const metadata = ws.deserializeAttachment();
|
||||
this.sessions.set(ws, metadata);
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
**CRITICAL:** Metadata is **persisted to storage**, not just memory.
|
||||
|
||||
---
|
||||
|
||||
## Get Active WebSockets
|
||||
|
||||
```typescript
|
||||
// Get all WebSockets accepted by this DO
|
||||
const webSockets = this.ctx.getWebSockets();
|
||||
|
||||
console.log(`${webSockets.length} active connections`);
|
||||
|
||||
// Filter by tag (if tagged)
|
||||
const taggedWs = this.ctx.getWebSockets('room:123');
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Tag WebSockets (Optional)
|
||||
|
||||
Tag WebSockets for grouping (e.g., by room, channel).
|
||||
|
||||
```typescript
|
||||
// Accept with tag
|
||||
this.ctx.acceptWebSocket(server, ['room:123']);
|
||||
|
||||
// Get by tag
|
||||
const roomSockets = this.ctx.getWebSockets('room:123');
|
||||
|
||||
// Get all tags
|
||||
const tags = ws.getTags();
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## When Hibernation Does NOT Occur
|
||||
|
||||
Hibernation is **blocked** if:
|
||||
|
||||
❌ `setTimeout` or `setInterval` callbacks are pending
|
||||
❌ In-progress `fetch()` request (awaited I/O)
|
||||
❌ Standard WebSocket API used (not hibernation API)
|
||||
❌ Request/event still being processed
|
||||
❌ Outgoing WebSocket (DO is client, not server)
|
||||
|
||||
---
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Minimize Constructor Work
|
||||
|
||||
Heavy work in constructor **delays wake-up**.
|
||||
|
||||
```typescript
|
||||
// ✅ GOOD: Minimal constructor
|
||||
constructor(ctx, env) {
|
||||
super(ctx, env);
|
||||
|
||||
this.sessions = new Map();
|
||||
|
||||
ctx.getWebSockets().forEach((ws) => {
|
||||
const metadata = ws.deserializeAttachment();
|
||||
this.sessions.set(ws, metadata);
|
||||
});
|
||||
}
|
||||
|
||||
// ❌ BAD: Heavy work delays wake-up
|
||||
constructor(ctx, env) {
|
||||
super(ctx, env);
|
||||
|
||||
// Don't do expensive I/O here
|
||||
await this.loadLotsOfData();
|
||||
}
|
||||
```
|
||||
|
||||
### Use Alarms, Not setTimeout
|
||||
|
||||
```typescript
|
||||
// ❌ WRONG: Prevents hibernation
|
||||
setTimeout(() => {
|
||||
this.doSomething();
|
||||
}, 60000);
|
||||
|
||||
// ✅ CORRECT: Use alarms
|
||||
await this.ctx.storage.setAlarm(Date.now() + 60000);
|
||||
|
||||
async alarm() {
|
||||
this.doSomething();
|
||||
}
|
||||
```
|
||||
|
||||
### Persist Critical State
|
||||
|
||||
```typescript
|
||||
// ❌ WRONG: Only in-memory (lost on hibernation)
|
||||
this.userCount = 42;
|
||||
|
||||
// ✅ CORRECT: Persist to storage
|
||||
await this.ctx.storage.put('userCount', 42);
|
||||
|
||||
// Or use serializeAttachment for per-WebSocket data
|
||||
ws.serializeAttachment({ userId, username });
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Debugging Hibernation
|
||||
|
||||
### Check if DO is Hibernating
|
||||
|
||||
```typescript
|
||||
// Log in constructor
|
||||
constructor(ctx, env) {
|
||||
super(ctx, env);
|
||||
console.log('DO woke up! Active WebSockets:', ctx.getWebSockets().length);
|
||||
}
|
||||
|
||||
// If you see this log frequently, DO is hibernating
|
||||
```
|
||||
|
||||
### Common Issues
|
||||
|
||||
**Issue:** DO never hibernates (high duration charges)
|
||||
|
||||
**Possible Causes:**
|
||||
- `setTimeout`/`setInterval` active
|
||||
- In-progress `fetch()` requests
|
||||
- Standard WebSocket API used (`ws.accept()` instead of `ctx.acceptWebSocket()`)
|
||||
|
||||
**Solution:** Check for blocking operations, use alarms instead.
|
||||
|
||||
---
|
||||
|
||||
## Limitations
|
||||
|
||||
⚠️ **Hibernation only for server-side WebSockets**
|
||||
- DO must be WebSocket server (accept connections)
|
||||
- Outgoing WebSockets (DO as client) **cannot hibernate**
|
||||
|
||||
⚠️ **In-memory state is lost**
|
||||
- Restore state in constructor
|
||||
- Use `serializeAttachment()` for per-WebSocket metadata
|
||||
- Use storage for DO-wide state
|
||||
|
||||
⚠️ **No WebSocket Standard API** with hibernation
|
||||
- Cannot use `addEventListener('message', ...)`
|
||||
- Must use handler methods (`webSocketMessage`, etc.)
|
||||
|
||||
---
|
||||
|
||||
**Official Docs**: https://developers.cloudflare.com/durable-objects/best-practices/websockets/
|
||||
221
references/wrangler-commands.md
Normal file
221
references/wrangler-commands.md
Normal file
@@ -0,0 +1,221 @@
|
||||
# Wrangler CLI Commands for Durable Objects
|
||||
|
||||
Complete reference for managing Durable Objects with wrangler CLI.
|
||||
|
||||
---
|
||||
|
||||
## Development Commands
|
||||
|
||||
### Dev Server
|
||||
|
||||
```bash
|
||||
# Start local dev server
|
||||
npx wrangler dev
|
||||
|
||||
# Dev with remote Durable Objects (not local)
|
||||
npx wrangler dev --remote
|
||||
|
||||
# Dev with specific port
|
||||
npx wrangler dev --port 8787
|
||||
```
|
||||
|
||||
### Deployment
|
||||
|
||||
```bash
|
||||
# Deploy to production
|
||||
npx wrangler deploy
|
||||
|
||||
# Deploy specific environment
|
||||
npx wrangler deploy --env production
|
||||
|
||||
# Dry run (show what would be deployed)
|
||||
npx wrangler deploy --dry-run
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Durable Objects Commands
|
||||
|
||||
### List DO Namespaces
|
||||
|
||||
```bash
|
||||
# List all DO namespaces in account
|
||||
npx wrangler durable-objects namespace list
|
||||
```
|
||||
|
||||
### View and Delete DO Instances

Wrangler currently has no dedicated subcommands for individual DO instances. Instead:

- **Inspect storage**: Use the dashboard (Data Studio for SQLite-backed DOs) or the Cloudflare REST API to list the objects in a namespace.
- **Delete one instance's storage**: Call `await this.ctx.storage.deleteAll()` from inside the DO (delete any pending alarm first).
- **Delete every instance of a class**: Add a `deleted_classes` migration and deploy (see `migrations-guide.md`). This permanently deletes all storage for that class.
|
||||
|
||||
---
|
||||
|
||||
## Logs and Debugging
|
||||
|
||||
### Tail Logs
|
||||
|
||||
```bash
|
||||
# Tail logs from deployed Worker
|
||||
npx wrangler tail
|
||||
|
||||
# Tail with filter
|
||||
npx wrangler tail --format pretty
|
||||
|
||||
# Tail specific DO
|
||||
npx wrangler tail --search "DurableObject"
|
||||
```
|
||||
|
||||
### Filter Logs
|
||||
|
||||
```bash
|
||||
# Filter by outcome (ok, error, canceled)
npx wrangler tail --status error

# Filter by HTTP method
npx wrangler tail --method POST
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Type Generation
|
||||
|
||||
### Generate TypeScript Types
|
||||
|
||||
```bash
|
||||
# Generate types for bindings
|
||||
npx wrangler types
|
||||
|
||||
# This creates worker-configuration.d.ts with:
|
||||
# - DurableObjectNamespace types
|
||||
# - Env interface
|
||||
# - Binding types
|
||||
```
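The generated `worker-configuration.d.ts` ends up containing an `Env` interface along these lines (sketch; names depend on your bindings):

```typescript
interface Env {
  COUNTER: DurableObjectNamespace<Counter>; // from the "COUNTER" binding in wrangler.jsonc
}
```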
|
||||
|
||||
---
|
||||
|
||||
## Migrations
|
||||
|
||||
**Migrations are configured in `wrangler.jsonc`, not via CLI commands.**
|
||||
|
||||
Example migration workflow:
|
||||
|
||||
1. Edit `wrangler.jsonc` to add migration
|
||||
2. Run `npx wrangler deploy`
|
||||
3. Migration applies atomically on deploy
|
||||
|
||||
See `migrations-guide.md` for detailed migration patterns.
|
||||
|
||||
---
|
||||
|
||||
## Useful Flags
|
||||
|
||||
### Common Flags
|
||||
|
||||
```bash
|
||||
# Show help
|
||||
npx wrangler --help
|
||||
npx wrangler deploy --help
|
||||
|
||||
# Specify config file
|
||||
npx wrangler deploy --config wrangler.production.jsonc
|
||||
|
||||
# Specify environment
|
||||
npx wrangler deploy --env staging
|
||||
|
||||
# Verbose output
|
||||
npx wrangler deploy --verbose
|
||||
|
||||
# Compatibility date
|
||||
npx wrangler deploy --compatibility-date 2025-10-22
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Example Workflows
|
||||
|
||||
### Initial Setup
|
||||
|
||||
```bash
|
||||
# 1. Initialize project
|
||||
npm create cloudflare@latest my-do-app -- \
|
||||
--template=cloudflare/durable-objects-template \
|
||||
--ts --git --deploy false
|
||||
|
||||
cd my-do-app
|
||||
|
||||
# 2. Install dependencies
|
||||
npm install
|
||||
|
||||
# 3. Start dev server
|
||||
npm run dev
|
||||
|
||||
# 4. Deploy
|
||||
npm run deploy
|
||||
```
|
||||
|
||||
### Update and Deploy
|
||||
|
||||
```bash
|
||||
# 1. Make code changes
|
||||
# 2. Test locally
|
||||
npm run dev
|
||||
|
||||
# 3. Deploy
|
||||
npm run deploy
|
||||
|
||||
# 4. Tail logs
|
||||
npx wrangler tail
|
||||
```
|
||||
|
||||
### Add New DO Class
|
||||
|
||||
```bash
|
||||
# 1. Create DO class file (e.g., src/counter.ts)
|
||||
# 2. Update wrangler.jsonc:
|
||||
# - Add binding
|
||||
# - Add migration
|
||||
# 3. Deploy
|
||||
npm run deploy
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Check Deployment Status
|
||||
|
||||
```bash
|
||||
npx wrangler deployments list
|
||||
```
|
||||
|
||||
### Rollback Deployment
|
||||
|
||||
```bash
|
||||
# Cloudflare automatically keeps recent versions
|
||||
npx wrangler rollback   # or roll back from the dashboard
|
||||
```
|
||||
|
||||
### Clear Local Cache
|
||||
|
||||
```bash
|
||||
rm -rf .wrangler
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
**Official Docs**: https://developers.cloudflare.com/workers/wrangler/commands/
|
||||
26
scripts/check-versions.sh
Executable file
26
scripts/check-versions.sh
Executable file
@@ -0,0 +1,26 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Check Versions Script for Cloudflare Durable Objects Skill
|
||||
# Verifies that package versions are current
|
||||
|
||||
echo "Checking package versions for cloudflare-durable-objects skill..."
|
||||
echo ""
|
||||
|
||||
# Check wrangler
|
||||
echo "📦 wrangler:"
|
||||
npm view wrangler version
|
||||
echo ""
|
||||
|
||||
# Check @cloudflare/workers-types
|
||||
echo "📦 @cloudflare/workers-types:"
|
||||
npm view @cloudflare/workers-types version
|
||||
echo ""
|
||||
|
||||
# Check TypeScript
|
||||
echo "📦 typescript:"
|
||||
npm view typescript version
|
||||
echo ""
|
||||
|
||||
echo "✅ Version check complete"
|
||||
echo ""
|
||||
echo "Update templates/package.json if newer versions are available."
|
||||
232
templates/alarms-api-do.ts
Normal file
232
templates/alarms-api-do.ts
Normal file
@@ -0,0 +1,232 @@
|
||||
/**
|
||||
* Alarms API Example: Batcher
|
||||
*
|
||||
* Demonstrates:
|
||||
* - storage.setAlarm() to schedule future tasks
|
||||
* - alarm() handler method
|
||||
* - Guaranteed at-least-once execution
|
||||
* - Retry behavior
|
||||
* - Idempotent alarm patterns
|
||||
*/
|
||||
|
||||
import { DurableObject } from 'cloudflare:workers';
// DurableObjectState and DurableObjectNamespace are ambient types from @cloudflare/workers-types
|
||||
|
||||
interface Env {
|
||||
BATCHER: DurableObjectNamespace<Batcher>;
|
||||
// Example: API to send batch to
|
||||
// API_ENDPOINT: string;
|
||||
}
|
||||
|
||||
interface AlarmInfo {
|
||||
retryCount: number;
|
||||
isRetry: boolean;
|
||||
}
|
||||
|
||||
export class Batcher extends DurableObject<Env> {
|
||||
buffer: string[];
|
||||
|
||||
constructor(ctx: DurableObjectState, env: Env) {
|
||||
super(ctx, env);
|
||||
|
||||
// Restore buffer from storage
|
||||
ctx.blockConcurrencyWhile(async () => {
|
||||
this.buffer = await ctx.storage.get<string[]>('buffer') || [];
|
||||
console.log(`Batcher constructor: restored ${this.buffer.length} items`);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Add item to batch
|
||||
*/
|
||||
async addItem(item: string): Promise<void> {
|
||||
this.buffer.push(item);
|
||||
await this.ctx.storage.put('buffer', this.buffer);
|
||||
|
||||
// Schedule alarm for 10 seconds from now (if not already set)
|
||||
const currentAlarm = await this.ctx.storage.getAlarm();
|
||||
|
||||
if (currentAlarm === null) {
|
||||
// No alarm set - schedule one
|
||||
await this.ctx.storage.setAlarm(Date.now() + 10000);
|
||||
console.log(`Alarm scheduled for ${new Date(Date.now() + 10000).toISOString()}`);
|
||||
} else {
|
||||
console.log(`Alarm already scheduled for ${new Date(currentAlarm).toISOString()}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Alarm handler - called when alarm fires
|
||||
*
|
||||
* CRITICAL:
|
||||
* - Guaranteed at-least-once execution
|
||||
* - Retried up to 6 times with exponential backoff (2s, 4s, 8s, ...)
|
||||
* - Implement idempotent operations
|
||||
*/
|
||||
async alarm(alarmInfo: AlarmInfo): Promise<void> {
|
||||
console.log(`Alarm fired (retry: ${alarmInfo.isRetry}, count: ${alarmInfo.retryCount})`);
|
||||
|
||||
// Reload buffer from storage (may have changed since constructor)
|
||||
this.buffer = await this.ctx.storage.get<string[]>('buffer') || [];
|
||||
|
||||
if (this.buffer.length === 0) {
|
||||
console.log('No items to process');
|
||||
return; // Alarm will be deleted automatically
|
||||
}
|
||||
|
||||
// Limit retries
|
||||
if (alarmInfo.retryCount > 3) {
|
||||
console.error('Alarm failed after 3 retries, giving up');
|
||||
// Still clear buffer to avoid infinite retries
|
||||
this.buffer = [];
|
||||
await this.ctx.storage.put('buffer', []);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Process batch (idempotent operation)
|
||||
await this.processBatch(this.buffer);
|
||||
|
||||
// Clear buffer after successful processing
|
||||
this.buffer = [];
|
||||
await this.ctx.storage.put('buffer', []);
|
||||
|
||||
console.log('Batch processed successfully');
|
||||
|
||||
// Alarm is automatically deleted after successful execution
|
||||
|
||||
} catch (error) {
|
||||
console.error('Batch processing failed:', error);
|
||||
// Throwing error will trigger retry
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Process batch - idempotent operation
|
||||
*/
|
||||
private async processBatch(items: string[]): Promise<void> {
|
||||
console.log(`Processing batch of ${items.length} items:`, items);
|
||||
|
||||
// Example: Send to external API
|
||||
// const response = await fetch(this.env.API_ENDPOINT, {
|
||||
// method: 'POST',
|
||||
// headers: { 'content-type': 'application/json' },
|
||||
// body: JSON.stringify({ items }),
|
||||
// });
|
||||
//
|
||||
// if (!response.ok) {
|
||||
// throw new Error(`API error: ${response.status}`);
|
||||
// }
|
||||
|
||||
// Simulate processing
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current alarm time (if set)
|
||||
*/
|
||||
async getAlarmTime(): Promise<number | null> {
|
||||
return await this.ctx.storage.getAlarm();
|
||||
}
|
||||
|
||||
/**
|
||||
* Cancel alarm
|
||||
*/
|
||||
async cancelAlarm(): Promise<void> {
|
||||
await this.ctx.storage.deleteAlarm();
|
||||
console.log('Alarm cancelled');
|
||||
}
|
||||
}
|
||||
|
||||
// CRITICAL: Export the class
|
||||
export default Batcher;
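/**
 * Example Worker usage (a sketch; assumes the BATCHER binding declared in Env above):
 *
 *   const stub = env.BATCHER.getByName('ingest');
 *   await stub.addItem(JSON.stringify({ event: 'click' }));
 *   // Buffered items are flushed by alarm() ~10 seconds after the first addItem()
 */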
|
||||
|
||||
/**
|
||||
* Alternative pattern: Periodic cleanup with alarms
|
||||
*/
|
||||
export class PeriodicCleaner extends DurableObject {
|
||||
constructor(ctx: DurableObjectState, env: any) {
|
||||
super(ctx, env);
|
||||
|
||||
// Schedule alarm on first run
|
||||
ctx.blockConcurrencyWhile(async () => {
|
||||
const alarm = await ctx.storage.getAlarm();
|
||||
if (alarm === null) {
|
||||
// Schedule first cleanup in 1 hour
|
||||
await ctx.storage.setAlarm(Date.now() + 3600000);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Periodic cleanup alarm
|
||||
*/
|
||||
async alarm(): Promise<void> {
|
||||
console.log('Running periodic cleanup');
|
||||
|
||||
try {
|
||||
// Cleanup expired data
|
||||
await this.cleanup();
|
||||
|
||||
// Schedule next cleanup in 1 hour
|
||||
await this.ctx.storage.setAlarm(Date.now() + 3600000);
|
||||
|
||||
} catch (error) {
|
||||
console.error('Cleanup failed:', error);
|
||||
throw error; // Will retry
|
||||
}
|
||||
}
|
||||
|
||||
private async cleanup(): Promise<void> {
|
||||
const now = Date.now();
|
||||
const oneDayAgo = now - (24 * 60 * 60 * 1000);
|
||||
|
||||
// Get all keys
|
||||
const map = await this.ctx.storage.list();
|
||||
|
||||
// Delete old entries
|
||||
const keysToDelete: string[] = [];
|
||||
|
||||
for (const [key, value] of map.entries()) {
|
||||
if (typeof value === 'object' && value !== null && 'timestamp' in value) {
|
||||
if ((value as any).timestamp < oneDayAgo) {
|
||||
keysToDelete.push(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (keysToDelete.length > 0) {
|
||||
await this.ctx.storage.delete(keysToDelete);
|
||||
console.log(`Deleted ${keysToDelete.length} old entries`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Alternative pattern: Reminder/notification with alarms
|
||||
*/
|
||||
export class ReminderDO extends DurableObject {
|
||||
async setReminder(message: string, fireAt: Date): Promise<void> {
|
||||
// Store reminder data
|
||||
await this.ctx.storage.put('reminder', { message, fireAt: fireAt.getTime() });
|
||||
|
||||
// Schedule alarm
|
||||
await this.ctx.storage.setAlarm(fireAt);
|
||||
|
||||
console.log(`Reminder set for ${fireAt.toISOString()}`);
|
||||
}
|
||||
|
||||
async alarm(): Promise<void> {
|
||||
const reminder = await this.ctx.storage.get<{ message: string; fireAt: number }>('reminder');
|
||||
|
||||
if (reminder) {
|
||||
console.log(`REMINDER: ${reminder.message}`);
|
||||
|
||||
// Send notification (e.g., via email, webhook, etc.)
|
||||
// await this.sendNotification(reminder.message);
|
||||
|
||||
// Clear reminder
|
||||
await this.ctx.storage.delete('reminder');
|
||||
}
|
||||
}
|
||||
}
|
||||
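/**
 * Minimal usage sketch: a Worker route that schedules a reminder through
 * ReminderDO. The REMINDER binding name and the 'reminder:demo' key are
 * assumptions for illustration and are not defined in this template.
 */
interface ReminderEnv {
  REMINDER: DurableObjectNamespace<ReminderDO>;
}

export const reminderWorker = {
  async fetch(request: Request, env: ReminderEnv): Promise<Response> {
    const { message, fireAt } = await request.json<{ message: string; fireAt: string }>();

    // getByName() creates the DO on first use and reuses it afterwards
    const stub = env.REMINDER.getByName('reminder:demo');
    await stub.setReminder(message, new Date(fireAt));

    return new Response(JSON.stringify({ scheduled: true }), {
      headers: { 'content-type': 'application/json' },
    });
  },
};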
107
templates/basic-do.ts
Normal file
107
templates/basic-do.ts
Normal file
@@ -0,0 +1,107 @@
/**
 * Basic Durable Object Example: Counter
 *
 * Demonstrates:
 * - DurableObject class structure
 * - RPC methods (recommended pattern)
 * - Key-value storage API
 * - State persistence
 */

import { DurableObject } from 'cloudflare:workers';

// DurableObjectState and DurableObjectNamespace come from @cloudflare/workers-types globals

interface Env {
  COUNTER: DurableObjectNamespace<Counter>;
}

export class Counter extends DurableObject<Env> {
  constructor(ctx: DurableObjectState, env: Env) {
    super(ctx, env);

    // Optional: Initialize from storage
    // Use blockConcurrencyWhile to load before handling requests
    ctx.blockConcurrencyWhile(async () => {
      const value = await ctx.storage.get<number>('value');
      if (value === undefined) {
        // First time initialization
        await ctx.storage.put('value', 0);
      }
    });
  }

  // RPC method: increment counter
  async increment(): Promise<number> {
    let value = await this.ctx.storage.get<number>('value') || 0;
    value += 1;
    await this.ctx.storage.put('value', value);
    return value;
  }

  // RPC method: decrement counter
  async decrement(): Promise<number> {
    let value = await this.ctx.storage.get<number>('value') || 0;
    value -= 1;
    await this.ctx.storage.put('value', value);
    return value;
  }

  // RPC method: get current value
  async get(): Promise<number> {
    return await this.ctx.storage.get<number>('value') || 0;
  }

  // RPC method: reset counter
  async reset(): Promise<void> {
    await this.ctx.storage.put('value', 0);
  }
}

// CRITICAL: The DO class must be exported from the entry module.
// `export class Counter` above already satisfies this; a module can only have
// one default export, which is reserved for the Worker below.

/**
 * Worker that uses the Counter DO
 */
export default {
  async fetch(request: Request, env: Env): Promise<Response> {
    const url = new URL(request.url);

    // Get DO stub (using named DO for global counter)
    const id = env.COUNTER.idFromName('global-counter');
    const stub = env.COUNTER.get(id);

    // Or use shortcut for named DOs:
    // const stub = env.COUNTER.getByName('global-counter');

    // Route requests
    if (url.pathname === '/increment' && request.method === 'POST') {
      const count = await stub.increment();
      return new Response(JSON.stringify({ count }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    if (url.pathname === '/decrement' && request.method === 'POST') {
      const count = await stub.decrement();
      return new Response(JSON.stringify({ count }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    if (url.pathname === '/reset' && request.method === 'POST') {
      await stub.reset();
      return new Response(JSON.stringify({ count: 0 }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    if (url.pathname === '/get' && request.method === 'GET') {
      const count = await stub.get();
      return new Response(JSON.stringify({ count }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    return new Response('Not found', { status: 404 });
  },
};
238
templates/location-hints.ts
Normal file
238
templates/location-hints.ts
Normal file
@@ -0,0 +1,238 @@
/**
 * Location Hints and Geographic Routing
 *
 * Demonstrates:
 * - Location hints for geographic routing
 * - Jurisdiction restrictions (EU, FedRAMP)
 * - When to use each approach
 * - Limitations and best practices
 */

import { DurableObject } from 'cloudflare:workers';

interface Env {
  USER_DATA: DurableObjectNamespace<UserDataDO>;
}

export class UserDataDO extends DurableObject<Env> {
  async storeUserData(data: any): Promise<void> {
    await this.ctx.storage.put('userData', data);
  }

  async getUserData(): Promise<any> {
    return await this.ctx.storage.get('userData');
  }
}

// CRITICAL: The DO class must be exported; `export class UserDataDO` above
// covers it. The single default export is reserved for the Worker below.

/**
 * Worker demonstrating location hints and jurisdiction
 */
export default {
  async fetch(request: Request, env: Env): Promise<Response> {
    const url = new URL(request.url);
    const userId = url.searchParams.get('userId');

    if (!userId) {
      return new Response('Missing userId', { status: 400 });
    }

    // Pattern 1: Location Hints (Best-Effort)
    // Use when you want to create DO near user's location for lower latency

    // Get user's location from request
    const userRegion = request.cf?.continent as string || 'NA';

    // Map continent to location hint
    const locationHint = getLocationHint(userRegion);

    // Create DO with location hint
    const id = env.USER_DATA.idFromName(userId);
    const stub = env.USER_DATA.get(id, { locationHint });

    // ⚠️ Location hint only affects FIRST creation
    // Subsequent access uses existing DO location

    await stub.storeUserData({ userId, region: userRegion });

    return new Response(JSON.stringify({ success: true, locationHint }), {
      headers: { 'content-type': 'application/json' },
    });
  },
};

/**
 * Map user continent to Cloudflare location hint
 */
function getLocationHint(continent: string): DurableObjectLocationHint {
  switch (continent) {
    case 'NA':
      return 'enam'; // Eastern North America
    case 'EU':
      return 'weur'; // Western Europe
    case 'AS':
      return 'apac'; // Asia-Pacific
    case 'SA':
      return 'sam'; // South America
    case 'AF':
      return 'afr'; // Africa
    case 'OC':
      return 'oc'; // Oceania
    default:
      return 'enam'; // Default
  }
}

/**
 * Available location hints:
 *
 * - 'wnam' - Western North America
 * - 'enam' - Eastern North America
 * - 'sam'  - South America
 * - 'weur' - Western Europe
 * - 'eeur' - Eastern Europe
 * - 'apac' - Asia-Pacific
 * - 'oc'   - Oceania
 * - 'afr'  - Africa
 * - 'me'   - Middle East
 */

/**
 * Pattern 2: Jurisdiction Restriction (Strictly Enforced)
 * Use for regulatory compliance (GDPR, FedRAMP)
 */
export const jurisdictionWorker = {
  async fetch(request: Request, env: Env): Promise<Response> {
    const url = new URL(request.url);
    const userId = url.searchParams.get('userId');
    const requireEU = url.searchParams.get('requireEU') === 'true';

    if (!userId) {
      return new Response('Missing userId', { status: 400 });
    }

    if (requireEU) {
      // STRICT: DO MUST stay in EU
      // Cannot combine jurisdiction with location hints
      const euId = env.USER_DATA.newUniqueId({ jurisdiction: 'eu' });
      const stub = env.USER_DATA.get(euId);

      // Store ID for future access
      // await env.KV.put(`user:${userId}`, euId.toString());

      await stub.storeUserData({ userId, jurisdiction: 'eu' });

      return new Response(JSON.stringify({
        success: true,
        jurisdiction: 'eu',
        id: euId.toString(),
      }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    // Non-EU user: use location hint
    const id = env.USER_DATA.idFromName(userId);
    const stub = env.USER_DATA.get(id, { locationHint: 'enam' });

    await stub.storeUserData({ userId });

    return new Response(JSON.stringify({ success: true }), {
      headers: { 'content-type': 'application/json' },
    });
  },
};

/**
 * Available jurisdictions:
 *
 * - 'eu' - European Union (GDPR compliance)
 * - 'fedramp' - FedRAMP (US government)
 */

/**
 * Location Hints vs Jurisdiction: Decision Matrix
 *
 * | Requirement | Use |
 * |-------------|-----|
 * | Lower latency (nice-to-have) | Location hints |
 * | Data residency (MUST) | Jurisdiction |
 * | Regulatory compliance (GDPR, FedRAMP) | Jurisdiction |
 * | Optimize for user location | Location hints |
 * | Strict data sovereignty | Jurisdiction |
 * | Performance optimization | Location hints |
 */

/**
 * CRITICAL Limitations:
 *
 * ❌ Location hints are BEST-EFFORT (not guaranteed)
 * ❌ Location hints only affect FIRST creation
 * ❌ Cannot move existing DOs to new location
 * ❌ Cannot combine jurisdiction with location hints
 *
 * ✅ Jurisdiction is STRICTLY ENFORCED
 * ✅ Jurisdiction guarantees data never leaves region
 */

/**
 * Example: Multi-region routing based on user IP
 */
export const multiRegionWorker = {
  async fetch(request: Request, env: Env): Promise<Response> {
    const userId = new URL(request.url).searchParams.get('userId');

    if (!userId) {
      return new Response('Missing userId', { status: 400 });
    }

    // Get user's country from request
    const country = request.cf?.country as string;

    // Determine if user is in EU
    const euCountries = ['DE', 'FR', 'IT', 'ES', 'NL', 'BE', 'AT', 'PL', 'PT', 'GR', /* ... */];
    const isEU = euCountries.includes(country);

    if (isEU) {
      // EU user: MUST use jurisdiction
      const euId = env.USER_DATA.newUniqueId({ jurisdiction: 'eu' });
      const stub = env.USER_DATA.get(euId);

      // Store ID for future access
      // await env.DB.prepare('INSERT INTO user_do_ids (user_id, do_id) VALUES (?, ?)')
      //   .bind(userId, euId.toString())
      //   .run();

      await stub.storeUserData({ userId, jurisdiction: 'eu' });

      return new Response(JSON.stringify({ region: 'EU', id: euId.toString() }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    // Non-EU user: use location hint for optimization
    const locationHint = getLocationHintFromCountry(country);
    const id = env.USER_DATA.idFromName(userId);
    const stub = env.USER_DATA.get(id, { locationHint });

    await stub.storeUserData({ userId });

    return new Response(JSON.stringify({ region: locationHint }), {
      headers: { 'content-type': 'application/json' },
    });
  },
};

function getLocationHintFromCountry(country: string): DurableObjectLocationHint {
  // Simplified mapping - expand as needed
  if (['US', 'CA', 'MX'].includes(country)) return 'enam';
  if (['GB', 'FR', 'DE', 'ES', 'IT'].includes(country)) return 'weur';
  if (['CN', 'JP', 'KR', 'SG', 'IN'].includes(country)) return 'apac';
  if (['BR', 'AR', 'CL'].includes(country)) return 'sam';
  if (['AU', 'NZ'].includes(country)) return 'oc';
  if (['ZA', 'EG', 'KE'].includes(country)) return 'afr';

  return 'enam'; // Default
}
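/**
 * Follow-up sketch: fetching a jurisdiction-restricted DO again on a later
 * request. Because newUniqueId() is random rather than derived from the user
 * ID, the stringified ID has to be persisted somewhere (the commented-out KV /
 * D1 lines above) and rebuilt with idFromString(). The KV binding name here is
 * an assumption for illustration.
 */
export async function getEuUserStub(
  env: Env & { KV: KVNamespace },
  userId: string
): Promise<DurableObjectStub<UserDataDO> | null> {
  const storedId = await env.KV.get(`user:${userId}`);
  if (!storedId) return null;

  // The jurisdiction is encoded in the ID itself, so the rebuilt stub
  // still resolves to the EU-resident object
  const id = env.USER_DATA.idFromString(storedId);
  return env.USER_DATA.get(id);
}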
283
templates/multi-do-coordination.ts
Normal file
283
templates/multi-do-coordination.ts
Normal file
@@ -0,0 +1,283 @@
/**
 * Multi-DO Coordination Example
 *
 * Demonstrates:
 * - Multiple DO instances working together
 * - Inter-DO communication via RPC
 * - Coordinator pattern
 * - Hierarchical DO structures
 */

import { DurableObject } from 'cloudflare:workers';

interface Env {
  GAME_COORDINATOR: DurableObjectNamespace<GameCoordinator>;
  GAME_ROOM: DurableObjectNamespace<GameRoom>;
  PLAYER: DurableObjectNamespace<Player>;
}

/**
 * Coordinator DO: Manages multiple game rooms
 */
export class GameCoordinator extends DurableObject<Env> {
  async createGame(gameId: string): Promise<void> {
    // Get game room DO (creates if doesn't exist)
    const gameRoom = this.env.GAME_ROOM.getByName(gameId);

    // Initialize game room
    await gameRoom.initialize();

    // Track in coordinator
    await this.ctx.storage.put(`game:${gameId}`, {
      id: gameId,
      created: Date.now(),
      status: 'waiting',
    });

    console.log(`Game created: ${gameId}`);
  }

  async listGames(): Promise<any[]> {
    const games = await this.ctx.storage.list({ prefix: 'game:' });

    return Array.from(games.values());
  }

  async deleteGame(gameId: string): Promise<void> {
    // Get game room DO
    const gameRoom = this.env.GAME_ROOM.getByName(gameId);

    // Tell game room to clean up
    await gameRoom.cleanup();

    // Remove from coordinator
    await this.ctx.storage.delete(`game:${gameId}`);

    console.log(`Game deleted: ${gameId}`);
  }

  async getGameStatus(gameId: string): Promise<any> {
    const gameRoom = this.env.GAME_ROOM.getByName(gameId);
    return await gameRoom.getStatus();
  }
}

/**
 * Game Room DO: Manages players in a single game
 */
export class GameRoom extends DurableObject<Env> {
  async initialize(): Promise<void> {
    await this.ctx.storage.put('state', {
      players: [],
      started: false,
      created: Date.now(),
    });
  }

  async addPlayer(playerId: string, playerName: string): Promise<void> {
    const state = await this.ctx.storage.get<any>('state');

    if (!state) {
      await this.initialize();
      return this.addPlayer(playerId, playerName);
    }

    // Check if player already in game
    if (state.players.some((p: any) => p.id === playerId)) {
      throw new Error('Player already in game');
    }

    // Add player
    state.players.push({ id: playerId, name: playerName, joined: Date.now() });
    await this.ctx.storage.put('state', state);

    // Notify player DO
    const playerDO = this.env.PLAYER.getByName(playerId);
    await playerDO.joinedGame(this.ctx.id.toString());

    console.log(`Player ${playerName} joined game`);
  }

  async removePlayer(playerId: string): Promise<void> {
    const state = await this.ctx.storage.get<any>('state');

    if (!state) {
      return;
    }

    // Remove player
    state.players = state.players.filter((p: any) => p.id !== playerId);
    await this.ctx.storage.put('state', state);

    // Notify player DO
    const playerDO = this.env.PLAYER.getByName(playerId);
    await playerDO.leftGame(this.ctx.id.toString());

    console.log(`Player ${playerId} left game`);
  }

  async startGame(): Promise<void> {
    const state = await this.ctx.storage.get<any>('state');

    if (!state) {
      throw new Error('Game not initialized');
    }

    if (state.players.length < 2) {
      throw new Error('Not enough players');
    }

    state.started = true;
    state.startedAt = Date.now();
    await this.ctx.storage.put('state', state);

    // Notify all players
    for (const player of state.players) {
      const playerDO = this.env.PLAYER.getByName(player.id);
      await playerDO.gameStarted(this.ctx.id.toString());
    }

    console.log('Game started');
  }

  async getStatus(): Promise<any> {
    const state = await this.ctx.storage.get<any>('state');
    return state || { players: [], started: false };
  }

  async cleanup(): Promise<void> {
    const state = await this.ctx.storage.get<any>('state');

    if (state) {
      // Notify all players
      for (const player of state.players) {
        const playerDO = this.env.PLAYER.getByName(player.id);
        await playerDO.gameEnded(this.ctx.id.toString());
      }
    }

    // Delete all storage
    await this.ctx.storage.deleteAll();

    console.log('Game room cleaned up');
  }
}

/**
 * Player DO: Manages individual player state
 */
export class Player extends DurableObject<Env> {
  async joinedGame(gameId: string): Promise<void> {
    // Track which game player is in
    await this.ctx.storage.put('currentGame', gameId);

    console.log(`Player tracking: joined game ${gameId}`);
  }

  async leftGame(gameId: string): Promise<void> {
    const currentGame = await this.ctx.storage.get<string>('currentGame');

    if (currentGame === gameId) {
      await this.ctx.storage.delete('currentGame');
    }

    console.log(`Player tracking: left game ${gameId}`);
  }

  async gameStarted(gameId: string): Promise<void> {
    console.log(`Player notified: game ${gameId} started`);

    // Update player stats
    const stats = await this.ctx.storage.get<any>('stats') || { gamesPlayed: 0 };
    stats.gamesPlayed += 1;
    await this.ctx.storage.put('stats', stats);
  }

  async gameEnded(gameId: string): Promise<void> {
    console.log(`Player notified: game ${gameId} ended`);

    const currentGame = await this.ctx.storage.get<string>('currentGame');

    if (currentGame === gameId) {
      await this.ctx.storage.delete('currentGame');
    }
  }

  async getStats(): Promise<any> {
    return await this.ctx.storage.get('stats') || { gamesPlayed: 0 };
  }

  async getCurrentGame(): Promise<string | null> {
    return await this.ctx.storage.get<string>('currentGame') || null;
  }
}

// CRITICAL: All three DO classes must be exported; the `export class`
// declarations above cover this. The single default export is reserved for
// the orchestrating Worker below.

/**
 * Worker that orchestrates multiple DOs
 */
export default {
  async fetch(request: Request, env: Env): Promise<Response> {
    const url = new URL(request.url);

    // Global coordinator (singleton)
    const coordinator = env.GAME_COORDINATOR.getByName('global');

    if (url.pathname === '/games/create' && request.method === 'POST') {
      const { gameId } = await request.json<{ gameId: string }>();

      await coordinator.createGame(gameId);

      return new Response(JSON.stringify({ success: true }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    if (url.pathname === '/games/list' && request.method === 'GET') {
      const games = await coordinator.listGames();

      return new Response(JSON.stringify({ games }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    if (url.pathname.startsWith('/games/') && url.pathname.endsWith('/join')) {
      const gameId = url.pathname.split('/')[2];
      const { playerId, playerName } = await request.json<{ playerId: string; playerName: string }>();

      const gameRoom = env.GAME_ROOM.getByName(gameId);
      await gameRoom.addPlayer(playerId, playerName);

      return new Response(JSON.stringify({ success: true }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    if (url.pathname.startsWith('/games/') && url.pathname.endsWith('/start')) {
      const gameId = url.pathname.split('/')[2];

      const gameRoom = env.GAME_ROOM.getByName(gameId);
      await gameRoom.startGame();

      return new Response(JSON.stringify({ success: true }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    if (url.pathname.startsWith('/players/') && url.pathname.endsWith('/stats')) {
      const playerId = url.pathname.split('/')[2];

      const player = env.PLAYER.getByName(playerId);
      const stats = await player.getStats();

      return new Response(JSON.stringify({ stats }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    return new Response('Not found', { status: 404 });
  },
};
17
templates/package.json
Normal file
17
templates/package.json
Normal file
@@ -0,0 +1,17 @@
{
  "name": "cloudflare-durable-objects-examples",
  "version": "1.0.0",
  "private": true,
  "type": "module",
  "scripts": {
    "dev": "wrangler dev",
    "deploy": "wrangler deploy",
    "tail": "wrangler tail",
    "types": "wrangler types"
  },
  "devDependencies": {
    "@cloudflare/workers-types": "^4.20251014.0",
    "wrangler": "^4.43.0",
    "typescript": "^5.7.2"
  }
}
233
templates/rpc-vs-fetch.ts
Normal file
233
templates/rpc-vs-fetch.ts
Normal file
@@ -0,0 +1,233 @@
/**
 * RPC vs HTTP Fetch Patterns
 *
 * Demonstrates:
 * - RPC methods (recommended for compat_date >= 2024-04-03)
 * - HTTP fetch handler (for HTTP flows or legacy compatibility)
 * - When to use each pattern
 */

import { DurableObject } from 'cloudflare:workers';

interface Env {
  RPC_EXAMPLE: DurableObjectNamespace<RpcExample>;
  FETCH_EXAMPLE: DurableObjectNamespace<FetchExample>;
}

/**
 * Pattern 1: RPC Methods (Recommended)
 *
 * ✅ Use when:
 * - New project (compat_date >= 2024-04-03)
 * - Type safety is important
 * - Simple method calls (not HTTP-specific logic)
 * - Auto-serialization of structured data
 */
export class RpcExample extends DurableObject<Env> {
  count: number = 0;

  constructor(ctx: DurableObjectState, env: Env) {
    super(ctx, env);

    ctx.blockConcurrencyWhile(async () => {
      this.count = await ctx.storage.get<number>('count') || 0;
    });
  }

  // Public RPC methods (automatically exposed)
  async increment(): Promise<number> {
    this.count += 1;
    await this.ctx.storage.put('count', this.count);
    return this.count;
  }

  async decrement(): Promise<number> {
    this.count -= 1;
    await this.ctx.storage.put('count', this.count);
    return this.count;
  }

  async get(): Promise<number> {
    return this.count;
  }

  async reset(): Promise<void> {
    this.count = 0;
    await this.ctx.storage.put('count', 0);
  }

  // Complex return types work seamlessly
  async getStats(): Promise<{ count: number; timestamp: number }> {
    return {
      count: this.count,
      timestamp: Date.now(),
    };
  }

  // Methods can accept complex parameters
  async addMultiple(numbers: number[]): Promise<number> {
    const sum = numbers.reduce((acc, n) => acc + n, 0);
    this.count += sum;
    await this.ctx.storage.put('count', this.count);
    return this.count;
  }
}

/**
 * Worker using RPC pattern
 */
export const rpcWorker = {
  async fetch(request: Request, env: Env): Promise<Response> {
    // Get stub
    const stub = env.RPC_EXAMPLE.getByName('my-counter');

    // Call RPC methods directly (type-safe)
    const count = await stub.increment();
    const stats = await stub.getStats();

    return new Response(JSON.stringify({ count, stats }), {
      headers: { 'content-type': 'application/json' },
    });
  },
};

/**
 * Pattern 2: HTTP Fetch Handler (Legacy / HTTP-specific flows)
 *
 * ✅ Use when:
 * - Need HTTP request/response pattern
 * - Complex routing logic
 * - WebSocket upgrade (requires fetch)
 * - Legacy compatibility (pre-2024-04-03)
 */
export class FetchExample extends DurableObject<Env> {
  count: number = 0;

  constructor(ctx: DurableObjectState, env: Env) {
    super(ctx, env);

    ctx.blockConcurrencyWhile(async () => {
      this.count = await ctx.storage.get<number>('count') || 0;
    });
  }

  async fetch(request: Request): Promise<Response> {
    const url = new URL(request.url);

    // Route based on path
    if (url.pathname === '/increment' && request.method === 'POST') {
      this.count += 1;
      await this.ctx.storage.put('count', this.count);

      return new Response(JSON.stringify({ count: this.count }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    if (url.pathname === '/decrement' && request.method === 'POST') {
      this.count -= 1;
      await this.ctx.storage.put('count', this.count);

      return new Response(JSON.stringify({ count: this.count }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    if (url.pathname === '/get' && request.method === 'GET') {
      return new Response(JSON.stringify({ count: this.count }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    if (url.pathname === '/reset' && request.method === 'POST') {
      this.count = 0;
      await this.ctx.storage.put('count', 0);

      return new Response(JSON.stringify({ count: 0 }), {
        headers: { 'content-type': 'application/json' },
      });
    }

    // Complex HTTP logic (headers, cookies, etc.)
    if (url.pathname === '/stats' && request.method === 'GET') {
      const authHeader = request.headers.get('Authorization');

      if (!authHeader) {
        return new Response('Unauthorized', { status: 401 });
      }

      return new Response(JSON.stringify({
        count: this.count,
        timestamp: Date.now(),
      }), {
        headers: {
          'content-type': 'application/json',
          'cache-control': 'no-cache',
        },
      });
    }

    return new Response('Not found', { status: 404 });
  }
}

/**
 * Worker using HTTP fetch pattern
 */
export const fetchWorker = {
  async fetch(request: Request, env: Env): Promise<Response> {
    // Get stub
    const stub = env.FETCH_EXAMPLE.getByName('my-counter');

    // Call fetch method (HTTP-style)
    const response = await stub.fetch('https://fake-host/increment', {
      method: 'POST',
    });

    const data = await response.json();

    return new Response(JSON.stringify(data), {
      headers: { 'content-type': 'application/json' },
    });
  },
};

/**
 * Pattern 3: Hybrid (RPC + Fetch)
 *
 * Use both patterns in the same DO:
 * - RPC for simple method calls
 * - fetch() for WebSocket upgrades or HTTP-specific logic
 */
export class HybridExample extends DurableObject<Env> {
  // RPC method
  async getStatus(): Promise<{ active: boolean; connections: number }> {
    return {
      active: true,
      connections: this.ctx.getWebSockets().length,
    };
  }

  // HTTP fetch for WebSocket upgrade
  async fetch(request: Request): Promise<Response> {
    const upgradeHeader = request.headers.get('Upgrade');

    if (upgradeHeader === 'websocket') {
      // WebSocket upgrade logic
      const webSocketPair = new WebSocketPair();
      const [client, server] = Object.values(webSocketPair);

      this.ctx.acceptWebSocket(server);

      return new Response(null, {
        status: 101,
        webSocket: client,
      });
    }

    return new Response('Not found', { status: 404 });
  }
}

// CRITICAL: Export the classes — RpcExample, FetchExample, and HybridExample
// are exported above; RpcExample is also the default export for convenience.
export default RpcExample;
227
templates/state-api-patterns.ts
Normal file
227
templates/state-api-patterns.ts
Normal file
@@ -0,0 +1,227 @@
/**
 * State API Patterns Example
 *
 * Demonstrates:
 * - SQL API (SQLite backend)
 * - Key-Value API (available on both SQLite and KV backends)
 * - Transactions
 * - Combining SQL and KV storage
 */

import { DurableObject } from 'cloudflare:workers';

// DurableObjectState and SqlStorage come from @cloudflare/workers-types globals

interface Env {
  STORAGE_EXAMPLE: DurableObjectNamespace<StorageExample>;
}

export class StorageExample extends DurableObject<Env> {
  sql: SqlStorage;

  constructor(ctx: DurableObjectState, env: Env) {
    super(ctx, env);

    // Access SQL storage
    this.sql = ctx.storage.sql;

    // Create tables on first run
    this.sql.exec(`
      CREATE TABLE IF NOT EXISTS users (
        user_id INTEGER PRIMARY KEY AUTOINCREMENT,
        email TEXT NOT NULL UNIQUE,
        username TEXT NOT NULL,
        created_at INTEGER NOT NULL,
        last_activity INTEGER
      );

      CREATE TABLE IF NOT EXISTS messages (
        message_id INTEGER PRIMARY KEY AUTOINCREMENT,
        user_id INTEGER NOT NULL,
        text TEXT NOT NULL,
        created_at INTEGER NOT NULL,
        FOREIGN KEY (user_id) REFERENCES users(user_id)
      );

      CREATE INDEX IF NOT EXISTS idx_messages_user_id ON messages(user_id);
      CREATE INDEX IF NOT EXISTS idx_messages_created_at ON messages(created_at);
    `);
  }

  /**
   * SQL API Example: Insert with RETURNING
   */
  async createUser(email: string, username: string): Promise<number> {
    const cursor = this.sql.exec<{ user_id: number }>(
      'INSERT INTO users (email, username, created_at) VALUES (?, ?, ?) RETURNING user_id',
      email,
      username,
      Date.now()
    );

    return cursor.one().user_id;
  }

  /**
   * SQL API Example: Query with parameters
   */
  async getUserByEmail(email: string): Promise<any | null> {
    const cursor = this.sql.exec(
      'SELECT * FROM users WHERE email = ?',
      email
    );

    // one() throws unless exactly one row matched, so use toArray() when
    // "no match" is a valid outcome
    const rows = cursor.toArray();
    return rows.length > 0 ? rows[0] : null;
  }

  /**
   * SQL API Example: Query multiple rows
   */
  async getRecentMessages(limit: number = 50): Promise<any[]> {
    const cursor = this.sql.exec(
      `SELECT m.*, u.username
       FROM messages m
       JOIN users u ON m.user_id = u.user_id
       ORDER BY m.created_at DESC
       LIMIT ?`,
      limit
    );

    return cursor.toArray();
  }

  /**
   * SQL API Example: Transaction (synchronous)
   */
  async createUserWithMessage(email: string, username: string, messageText: string): Promise<void> {
    this.ctx.storage.transactionSync(() => {
      // Insert user
      const userCursor = this.sql.exec<{ user_id: number }>(
        'INSERT INTO users (email, username, created_at) VALUES (?, ?, ?) RETURNING user_id',
        email,
        username,
        Date.now()
      );
      const { user_id } = userCursor.one();

      // Insert message
      this.sql.exec(
        'INSERT INTO messages (user_id, text, created_at) VALUES (?, ?, ?)',
        user_id,
        messageText,
        Date.now()
      );

      // All or nothing - if either fails, both are rolled back
    });
  }

  /**
   * SQL API Example: Iterate cursor
   */
  async getAllUsers(): Promise<string[]> {
    const cursor = this.sql.exec('SELECT username FROM users');

    const usernames: string[] = [];
    for (const row of cursor) {
      usernames.push(row.username as string);
    }

    return usernames;
  }

  /**
   * Key-Value API Example: Get/Put single value
   */
  async setConfig(key: string, value: any): Promise<void> {
    await this.ctx.storage.put(`config:${key}`, value);
  }

  async getConfig(key: string): Promise<any> {
    return await this.ctx.storage.get(`config:${key}`);
  }

  /**
   * Key-Value API Example: Get/Put multiple values
   */
  async setConfigs(configs: Record<string, any>): Promise<void> {
    const entries: Record<string, any> = {};

    for (const [key, value] of Object.entries(configs)) {
      entries[`config:${key}`] = value;
    }

    await this.ctx.storage.put(entries);
  }

  async getConfigs(): Promise<Record<string, any>> {
    const map = await this.ctx.storage.list({ prefix: 'config:' });

    const configs: Record<string, any> = {};
    for (const [key, value] of map.entries()) {
      const configKey = key.replace('config:', '');
      configs[configKey] = value;
    }

    return configs;
  }

  /**
   * Key-Value API Example: Delete
   */
  async deleteConfig(key: string): Promise<void> {
    await this.ctx.storage.delete(`config:${key}`);
  }

  /**
   * Key-Value API Example: List with pagination
   */
  async listKeys(prefix: string, limit: number = 100): Promise<string[]> {
    const map = await this.ctx.storage.list({ prefix, limit });
    return Array.from(map.keys());
  }

  /**
   * Key-Value API Example: Async transaction
   */
  async updateMultipleConfigs(updates: Record<string, any>): Promise<void> {
    await this.ctx.storage.transaction(async (txn) => {
      for (const [key, value] of Object.entries(updates)) {
        await txn.put(`config:${key}`, value);
      }
      // All or nothing
    });
  }

  /**
   * Combining SQL and KV: Mixed storage patterns
   */
  async recordUserActivity(userId: number, activity: string): Promise<void> {
    // Store structured data in SQL (last_activity column defined above)
    this.sql.exec(
      'UPDATE users SET last_activity = ? WHERE user_id = ?',
      Date.now(),
      userId
    );

    // Store ephemeral data in KV (faster access)
    await this.ctx.storage.put(`activity:${userId}`, {
      type: activity,
      timestamp: Date.now(),
    });
  }

  /**
   * Delete all storage (DO will cease to exist after shutdown)
   */
  async deleteAllStorage(): Promise<void> {
    // Delete alarm first (if set)
    await this.ctx.storage.deleteAlarm();

    // Delete all storage (atomic on SQLite backend)
    await this.ctx.storage.deleteAll();

    // After this, DO will not exist once it shuts down
  }
}

// CRITICAL: Export the class
export default StorageExample;
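/**
 * Usage sketch: a Worker route that exercises the SQL and KV methods above
 * through the STORAGE_EXAMPLE binding. The instance name and sample values
 * are illustrative only.
 */
export const storageWorker = {
  async fetch(request: Request, env: Env): Promise<Response> {
    const stub = env.STORAGE_EXAMPLE.getByName('demo');

    // Reuse the user if it already exists to avoid violating the UNIQUE email constraint
    const existing = await stub.getUserByEmail('ada@example.com');
    const userId = existing ? existing.user_id : await stub.createUser('ada@example.com', 'ada');

    await stub.setConfig('theme', 'dark');

    return new Response(
      JSON.stringify({ userId, configs: await stub.getConfigs() }),
      { headers: { 'content-type': 'application/json' } }
    );
  },
};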
225
templates/websocket-hibernation-do.ts
Normal file
225
templates/websocket-hibernation-do.ts
Normal file
@@ -0,0 +1,225 @@
/**
 * WebSocket Hibernation Example: Chat Room
 *
 * Demonstrates:
 * - WebSocket Hibernation API
 * - ctx.acceptWebSocket() for cost savings
 * - WebSocket handler methods (webSocketMessage, webSocketClose, webSocketError)
 * - serializeAttachment / deserializeAttachment for metadata persistence
 * - State restoration in constructor after hibernation
 */

import { DurableObject } from 'cloudflare:workers';

interface Env {
  CHAT_ROOM: DurableObjectNamespace<ChatRoom>;
}

interface SessionMetadata {
  userId: string;
  username: string;
}

export class ChatRoom extends DurableObject<Env> {
  // In-memory state (restored after hibernation)
  sessions: Map<WebSocket, SessionMetadata>;

  constructor(ctx: DurableObjectState, env: Env) {
    super(ctx, env);

    // Restore WebSocket connections after hibernation
    this.sessions = new Map();

    // Get all active WebSockets and restore their metadata
    ctx.getWebSockets().forEach((ws) => {
      // Deserialize metadata (persisted via serializeAttachment)
      const metadata = ws.deserializeAttachment() as SessionMetadata;
      this.sessions.set(ws, metadata);
    });

    console.log(`ChatRoom constructor: restored ${this.sessions.size} connections`);
  }

  /**
   * Accept WebSocket connections
   */
  async fetch(request: Request): Promise<Response> {
    // Expect WebSocket upgrade request
    const upgradeHeader = request.headers.get('Upgrade');
    if (upgradeHeader !== 'websocket') {
      return new Response('Expected Upgrade: websocket', { status: 426 });
    }

    if (request.method !== 'GET') {
      return new Response('Expected GET method', { status: 400 });
    }

    // Get user info from URL parameters
    const url = new URL(request.url);
    const userId = url.searchParams.get('userId') || 'anonymous';
    const username = url.searchParams.get('username') || 'Anonymous';

    // Create WebSocket pair
    const webSocketPair = new WebSocketPair();
    const [client, server] = Object.values(webSocketPair);

    // CRITICAL: Use ctx.acceptWebSocket (NOT ws.accept())
    // This enables hibernation to save costs
    this.ctx.acceptWebSocket(server);

    // Serialize metadata to persist across hibernation
    const metadata: SessionMetadata = { userId, username };
    server.serializeAttachment(metadata);

    // Track in-memory (will be restored after hibernation)
    this.sessions.set(server, metadata);

    // Notify others that user joined
    this.broadcast({
      type: 'system',
      text: `${username} joined the room`,
      timestamp: Date.now(),
    }, server);

    // Send welcome message to new user
    server.send(JSON.stringify({
      type: 'system',
      text: `Welcome to the chat room! ${this.sessions.size} user(s) online.`,
      timestamp: Date.now(),
    }));

    // Return client WebSocket to browser
    return new Response(null, {
      status: 101,
      webSocket: client,
    });
  }

  /**
   * Called when WebSocket receives a message
   * This method is called even if the DO was hibernated
   */
  async webSocketMessage(ws: WebSocket, message: string | ArrayBuffer): Promise<void> {
    const session = this.sessions.get(ws);

    if (!session) {
      console.error('WebSocket not found in sessions');
      return;
    }

    // Handle text messages
    if (typeof message === 'string') {
      try {
        const data = JSON.parse(message);

        if (data.type === 'chat') {
          // Broadcast chat message to all connections
          this.broadcast({
            type: 'chat',
            userId: session.userId,
            username: session.username,
            text: data.text,
            timestamp: Date.now(),
          });
        }

        if (data.type === 'typing') {
          // Broadcast typing indicator to others
          this.broadcast({
            type: 'typing',
            userId: session.userId,
            username: session.username,
          }, ws);
        }
      } catch (error) {
        console.error('Failed to parse message:', error);
      }
    }
  }

  /**
   * Called when WebSocket closes
   */
  async webSocketClose(ws: WebSocket, code: number, reason: string, wasClean: boolean): Promise<void> {
    const session = this.sessions.get(ws);

    // Close the WebSocket
    ws.close(code, 'Durable Object closing WebSocket');

    // Remove from sessions
    this.sessions.delete(ws);

    // Notify others
    if (session) {
      this.broadcast({
        type: 'system',
        text: `${session.username} left the room`,
        timestamp: Date.now(),
      });
    }

    console.log(`WebSocket closed: ${session?.username || 'unknown'}, code: ${code}, clean: ${wasClean}`);
  }

  /**
   * Called on WebSocket errors
   */
  async webSocketError(ws: WebSocket, error: any): Promise<void> {
    console.error('WebSocket error:', error);

    const session = this.sessions.get(ws);
    this.sessions.delete(ws);

    if (session) {
      this.broadcast({
        type: 'system',
        text: `${session.username} disconnected (error)`,
        timestamp: Date.now(),
      });
    }
  }

  /**
   * Broadcast message to all connections (except sender)
   */
  private broadcast(message: any, except?: WebSocket): void {
    const messageStr = JSON.stringify(message);

    this.sessions.forEach((session, ws) => {
      if (ws !== except && ws.readyState === WebSocket.OPEN) {
        ws.send(messageStr);
      }
    });
  }
}

// CRITICAL: The ChatRoom class must be exported; `export class ChatRoom` above
// covers it. The default export is reserved for the routing Worker below.

/**
 * Worker that creates and routes to chat rooms
 */
export default {
  async fetch(request: Request, env: Env): Promise<Response> {
    const url = new URL(request.url);

    // Extract room ID from path (e.g., /room/abc123)
    const match = url.pathname.match(/^\/room\/([^/]+)/);

    if (!match) {
      return new Response('Usage: /room/{roomId}?userId={userId}&username={username}', {
        status: 400,
      });
    }

    const roomId = match[1];

    // Get or create chat room DO
    const id = env.CHAT_ROOM.idFromName(roomId);
    const stub = env.CHAT_ROOM.get(id);

    // Forward request to DO
    return stub.fetch(request);
  },
};
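/**
 * Browser-side sketch: connecting a client to the routing Worker above. The
 * host name is a placeholder; the path and query parameters match what the
 * Worker and ChatRoom.fetch() read, and the outgoing message matches the
 * { type: 'chat' } branch in webSocketMessage().
 */
function connectToChatRoom(roomId: string, userId: string, username: string): WebSocket {
  const url = `wss://your-worker.example.workers.dev/room/${roomId}` +
    `?userId=${encodeURIComponent(userId)}&username=${encodeURIComponent(username)}`;
  const ws = new WebSocket(url);

  ws.addEventListener('open', () => {
    ws.send(JSON.stringify({ type: 'chat', text: 'Hello from the client!' }));
  });

  ws.addEventListener('message', (event) => {
    const data = JSON.parse(event.data as string);
    console.log(`[${data.type}]`, data.text ?? data);
  });

  return ws;
}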
98
templates/wrangler-do-config.jsonc
Normal file
98
templates/wrangler-do-config.jsonc
Normal file
@@ -0,0 +1,98 @@
{
  "$schema": "node_modules/wrangler/config-schema.json",
  "name": "my-durable-objects-worker",
  "main": "src/index.ts",
  "compatibility_date": "2025-10-22",
  "compatibility_flags": ["nodejs_compat"],

  // Durable Objects Configuration
  "durable_objects": {
    "bindings": [
      {
        // Simple counter example
        "name": "COUNTER",
        "class_name": "Counter"
      },
      {
        // WebSocket chat room example
        "name": "CHAT_ROOM",
        "class_name": "ChatRoom"
      },
      {
        // Rate limiter example
        "name": "RATE_LIMITER",
        "class_name": "RateLimiter"
      },
      {
        // Session management example
        "name": "USER_SESSION",
        "class_name": "UserSession"
      }
    ]
  },

  // Migrations (REQUIRED for all DO changes)
  "migrations": [
    {
      // Initial migration: Create new DO classes
      "tag": "v1",
      "new_sqlite_classes": [
        "Counter",
        "ChatRoom",
        "RateLimiter",
        "UserSession"
      ]
    }
    // Example: Rename a DO class
    // {
    //   "tag": "v2",
    //   "renamed_classes": [
    //     {
    //       "from": "Counter",
    //       "to": "CounterV2"
    //     }
    //   ]
    // }

    // Example: Delete a DO class
    // {
    //   "tag": "v3",
    //   "deleted_classes": ["OldClass"]
    // }

    // Example: Transfer a DO class from another Worker
    // {
    //   "tag": "v4",
    //   "transferred_classes": [
    //     {
    //       "from": "OriginalClass",
    //       "from_script": "original-worker-name",
    //       "to": "TransferredClass"
    //     }
    //   ]
    // }
  ],

  // Optional: Integrations with other Cloudflare services
  "d1_databases": [
    {
      "binding": "DB",
      "database_name": "my-database",
      "database_id": "your-database-id"
    }
  ],

  "kv_namespaces": [
    {
      "binding": "KV",
      "id": "your-kv-namespace-id"
    }
  ],

  "r2_buckets": [
    {
      "binding": "BUCKET",
      "bucket_name": "my-bucket"
    }
  ]
}
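/**
 * Companion sketch: the Env interface a Worker built against the config above
 * would see. In practice `wrangler types` (see templates/package.json)
 * generates this automatically; the concrete DO class types live in the other
 * templates in this directory.
 */
interface WorkerEnv {
  // Durable Object bindings
  COUNTER: DurableObjectNamespace;
  CHAT_ROOM: DurableObjectNamespace;
  RATE_LIMITER: DurableObjectNamespace;
  USER_SESSION: DurableObjectNamespace;

  // Other Cloudflare bindings declared in the config
  DB: D1Database;
  KV: KVNamespace;
  BUCKET: R2Bucket;
}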