Initial commit

12  .claude-plugin/plugin.json  Normal file

@@ -0,0 +1,12 @@
{
  "name": "cloudflare-d1",
  "description": "Build with D1 serverless SQLite database on Cloudflare's edge. Use when: creating databases, writing SQL migrations, querying D1 from Workers, handling relational data, or troubleshooting D1_ERROR, statement too long, migration failures, or query performance issues.",
  "version": "1.0.0",
  "author": {
    "name": "Jeremy Dawes",
    "email": "jeremy@jezweb.net"
  },
  "skills": [
    "./"
  ]
}
3  README.md  Normal file

@@ -0,0 +1,3 @@
# cloudflare-d1

Build with D1 serverless SQLite database on Cloudflare's edge. Use when: creating databases, writing SQL migrations, querying D1 from Workers, handling relational data, or troubleshooting D1_ERROR, statement too long, migration failures, or query performance issues.
505  SKILL.md  Normal file

@@ -0,0 +1,505 @@
---
name: cloudflare-d1
description: |
  Build with D1 serverless SQLite database on Cloudflare's edge. Use when: creating databases, writing SQL migrations, querying D1 from Workers, handling relational data, or troubleshooting D1_ERROR, statement too long, migration failures, or query performance issues.
license: MIT
---

# Cloudflare D1 Database

**Status**: Production Ready ✅
**Last Updated**: 2025-11-23
**Dependencies**: cloudflare-worker-base (for Worker setup)
**Latest Versions**: wrangler@4.50.0, @cloudflare/workers-types@4.20251121.0

**Recent Updates (2025)**:
- **Nov 2025**: Jurisdiction support (data localization compliance), remote bindings GA (wrangler@4.37.0+), automatic resource provisioning
- **Sept 2025**: Automatic read-only query retries (up to 2 attempts), remote bindings public beta
- **July 2025**: Storage limits increased (250GB → 1TB), alpha backup access removed, REST API 50-500ms faster
- **May 2025**: HTTP API permissions security fix (D1:Edit required for writes)
- **April 2025**: Read replication public beta (read-only replicas across regions)
- **Feb 2025**: PRAGMA optimize support, read-only access permission bug fix
- **Jan 2025**: Free tier limits enforcement (Feb 10 start), Worker API 40-60% faster queries

---

## Quick Start (5 Minutes)

### 1. Create D1 Database

```bash
# Create a new D1 database
npx wrangler d1 create my-database

# Output includes database_id - save this!
# ✅ Successfully created DB 'my-database'
#
# [[d1_databases]]
# binding = "DB"
# database_name = "my-database"
# database_id = "<UUID>"
```

### 2. Configure Bindings

Add to your `wrangler.jsonc`:

```jsonc
{
  "name": "my-worker",
  "main": "src/index.ts",
  "compatibility_date": "2025-10-11",
  "d1_databases": [
    {
      "binding": "DB",                  // Available as env.DB in your Worker
      "database_name": "my-database",   // Name from wrangler d1 create
      "database_id": "<UUID>",          // ID from wrangler d1 create
      "preview_database_id": "local-db" // For local development
    }
  ]
}
```

**CRITICAL:**
- `binding` is how you access the database in code (`env.DB`)
- `database_id` is the production database UUID
- `preview_database_id` is for local dev (can be any string)
- **Never commit real `database_id` values to public repos** - use environment variables or secrets

### 3. Create Your First Migration

```bash
# Create migration file
npx wrangler d1 migrations create my-database create_users_table

# This creates: migrations/0001_create_users_table.sql
```

Edit the migration file:

```sql
-- migrations/0001_create_users_table.sql
DROP TABLE IF EXISTS users;
CREATE TABLE IF NOT EXISTS users (
  user_id INTEGER PRIMARY KEY AUTOINCREMENT,
  email TEXT NOT NULL UNIQUE,
  username TEXT NOT NULL,
  created_at INTEGER NOT NULL,
  updated_at INTEGER
);

-- Create index for common queries
CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);

-- Optimize database
PRAGMA optimize;
```

### 4. Apply Migration

```bash
# Apply locally first (for testing)
npx wrangler d1 migrations apply my-database --local

# Apply to production when ready
npx wrangler d1 migrations apply my-database --remote
```

### 5. Query from Your Worker

```typescript
// src/index.ts
import { Hono } from 'hono';

type Bindings = {
  DB: D1Database;
};

const app = new Hono<{ Bindings: Bindings }>();

app.get('/api/users/:email', async (c) => {
  const email = c.req.param('email');

  try {
    // ALWAYS use prepared statements with bind()
    const result = await c.env.DB.prepare(
      'SELECT * FROM users WHERE email = ?'
    )
      .bind(email)
      .first();

    if (!result) {
      return c.json({ error: 'User not found' }, 404);
    }

    return c.json(result);
  } catch (error: any) {
    console.error('D1 Error:', error.message);
    return c.json({ error: 'Database error' }, 500);
  }
});

export default app;
```

---

## D1 Migrations System

### Migration Workflow

```bash
# 1. Create migration
npx wrangler d1 migrations create <DATABASE_NAME> <MIGRATION_NAME>

# 2. List unapplied migrations
npx wrangler d1 migrations list <DATABASE_NAME> --local
npx wrangler d1 migrations list <DATABASE_NAME> --remote

# 3. Apply migrations
npx wrangler d1 migrations apply <DATABASE_NAME> --local   # Test locally
npx wrangler d1 migrations apply <DATABASE_NAME> --remote  # Deploy to production
```

### Migration File Naming

Migrations are automatically versioned:

```
migrations/
├── 0000_initial_schema.sql
├── 0001_add_users_table.sql
├── 0002_add_posts_table.sql
└── 0003_add_indexes.sql
```

**Rules:**
- Files are executed in sequential order
- Each migration runs once (tracked in `d1_migrations` table)
- Failed migrations roll back (transactional)
- Can't modify or delete applied migrations

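The tracking table can be inspected like any other table; a minimal sketch, assuming the default `d1_migrations` name (use `SELECT *` first, since the exact columns are an internal detail):

```typescript
// List applied migrations from the tracking table (default: d1_migrations;
// the table name is configurable -- see the next section)
const { results } = await env.DB.prepare('SELECT * FROM d1_migrations').all();
console.log(results); // One row per applied migration file
```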

### Custom Migration Configuration

```jsonc
{
  "d1_databases": [
    {
      "binding": "DB",
      "database_name": "my-database",
      "database_id": "<UUID>",
      "migrations_dir": "db/migrations",      // Custom directory (default: migrations/)
      "migrations_table": "schema_migrations" // Custom tracking table (default: d1_migrations)
    }
  ]
}
```

### Migration Best Practices

#### ✅ Always Do:

```sql
-- Use IF NOT EXISTS to make migrations idempotent
CREATE TABLE IF NOT EXISTS users (...);
CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);

-- Run PRAGMA optimize after schema changes
PRAGMA optimize;

-- Write data migrations as plain statements; each migration file already
-- runs in its own transaction, so no BEGIN/COMMIT is needed
UPDATE users SET updated_at = unixepoch() WHERE updated_at IS NULL;
```

#### ❌ Never Do:

```sql
-- DON'T include BEGIN TRANSACTION at start (D1 handles this)
BEGIN TRANSACTION; -- ❌ Remove this

-- DON'T use MySQL/PostgreSQL syntax
ALTER TABLE users MODIFY COLUMN email VARCHAR(255); -- ❌ Not SQLite

-- DON'T create tables without IF NOT EXISTS
CREATE TABLE users (...); -- ❌ Fails if table exists
```

### Handling Foreign Keys in Migrations

```sql
-- Temporarily disable foreign key checks during schema changes
PRAGMA defer_foreign_keys = true;

-- Make schema changes that would violate foreign keys
ALTER TABLE posts DROP COLUMN author_id;
ALTER TABLE posts ADD COLUMN user_id INTEGER REFERENCES users(user_id);

-- Foreign keys re-enabled automatically at end of migration
```

---

## D1 Workers API

**Type Definitions:**
```typescript
interface Env { DB: D1Database; }
type Bindings = { DB: D1Database; };
const app = new Hono<{ Bindings: Bindings }>();
```

**prepare() - PRIMARY METHOD (always use for user input):**
```typescript
const user = await env.DB.prepare('SELECT * FROM users WHERE email = ?')
  .bind(email).first();
```
Why: Prevents SQL injection, reusable, better performance, type-safe

**Query Result Methods:**
- `.all()` → `{ results, meta }` - Get all rows
- `.first()` → row object or null - Get first row
- `.first('column')` → value - Get single column value (e.g., COUNT)
- `.run()` → `{ success, meta }` - Execute INSERT/UPDATE/DELETE (no results)

**batch() - CRITICAL FOR PERFORMANCE:**
```typescript
const results = await env.DB.batch([
  env.DB.prepare('SELECT * FROM users WHERE user_id = ?').bind(1),
  env.DB.prepare('SELECT * FROM posts WHERE user_id = ?').bind(1)
]);
```
- Executes sequentially, single network round trip
- If one fails, remaining statements don't execute
- Use for: bulk inserts, fetching related data

**exec() - AVOID IN PRODUCTION:**
```typescript
await env.DB.exec('SELECT * FROM users;'); // Only for migrations/maintenance
```
- ❌ Never use with user input (SQL injection risk)
- ✅ Only use for: migration files, one-off tasks

---

## Query Patterns

### Basic CRUD Operations

```typescript
// CREATE
const { meta } = await env.DB.prepare(
  'INSERT INTO users (email, username, created_at) VALUES (?, ?, ?)'
).bind(email, username, Date.now()).run();
const newUserId = meta.last_row_id;

// READ (single)
const user = await env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
  .bind(userId).first();

// READ (multiple)
const { results } = await env.DB.prepare('SELECT * FROM users LIMIT ?')
  .bind(10).all();

// UPDATE (destructure under a new name -- `meta` is already declared above)
const { meta: updateMeta } = await env.DB.prepare('UPDATE users SET username = ? WHERE user_id = ?')
  .bind(newUsername, userId).run();
const rowsAffected = updateMeta.rows_written;

// DELETE
await env.DB.prepare('DELETE FROM users WHERE user_id = ?').bind(userId).run();

// COUNT
const count = await env.DB.prepare('SELECT COUNT(*) as total FROM users').first('total');

// EXISTS check
const exists = await env.DB.prepare('SELECT 1 FROM users WHERE email = ? LIMIT 1')
  .bind(email).first();
```

### Pagination Pattern

```typescript
const page = parseInt(c.req.query('page') || '1');
const limit = 20;
const offset = (page - 1) * limit;

const [countResult, usersResult] = await c.env.DB.batch([
  c.env.DB.prepare('SELECT COUNT(*) as total FROM users'),
  c.env.DB.prepare('SELECT * FROM users ORDER BY created_at DESC LIMIT ? OFFSET ?')
    .bind(limit, offset)
]);

return c.json({
  users: usersResult.results,
  pagination: { page, limit, total: countResult.results[0].total }
});
```

### Batch Pattern (Pseudo-Transactions)

```typescript
// D1 doesn't support multi-statement transactions, but batch() provides sequential execution
await env.DB.batch([
  env.DB.prepare('UPDATE users SET credits = credits - ? WHERE user_id = ?').bind(amount, fromUserId),
  env.DB.prepare('UPDATE users SET credits = credits + ? WHERE user_id = ?').bind(amount, toUserId),
  env.DB.prepare('INSERT INTO transactions (from_user, to_user, amount) VALUES (?, ?, ?)').bind(fromUserId, toUserId, amount)
]);
// If any statement fails, batch stops (transaction-like behavior)
```

---

## Error Handling

**Common Error Types:**
- `D1_ERROR` - General D1 error
- `D1_EXEC_ERROR` - SQL syntax error
- `D1_TYPE_ERROR` - Type mismatch (undefined instead of null)
- `D1_COLUMN_NOTFOUND` - Column doesn't exist

**Common Errors and Fixes:**

| Error | Cause | Solution |
|-------|-------|----------|
| **Statement too long** | Large INSERT with 1000+ rows | Break into batches of 100-250 using `batch()` |
| **Too many requests queued** | Individual queries in loop | Use `batch()` instead of loop |
| **D1_TYPE_ERROR** | Using `undefined` in bind | Use `null` for optional values: `.bind(email, bio \|\| null)` |
| **Transaction conflicts** | BEGIN TRANSACTION in migration | Remove BEGIN/COMMIT (D1 handles automatically) |
| **Foreign key violations** | Schema changes break constraints | Use `PRAGMA defer_foreign_keys = true` |

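For the **Statement too long** row above, a minimal chunking sketch (the `insertInChunks` helper and the 100-row chunk size are illustrative, not part of D1's API):

```typescript
// Split a large row set into small batches so no single INSERT
// grows past D1's statement-size limit
async function insertInChunks(
  db: D1Database,
  rows: { email: string; username: string }[],
  chunkSize = 100
) {
  for (let i = 0; i < rows.length; i += chunkSize) {
    const chunk = rows.slice(i, i + chunkSize);
    // One prepared statement per row, all sent in a single round trip
    await db.batch(
      chunk.map((r) =>
        db.prepare('INSERT INTO users (email, username, created_at) VALUES (?, ?, ?)')
          .bind(r.email, r.username, Date.now())
      )
    );
  }
}
```
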
**Automatic Retries (Sept 2025):**
D1 automatically retries read-only queries (SELECT, EXPLAIN, WITH) up to 2 times on retryable errors. Check `meta.total_attempts` in the response for the retry count.
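
A minimal sketch of reading that counter, assuming the `total_attempts` field described above (treat it as informational and verify against current response metadata):

```typescript
// Read queries report how many attempts were needed
const result = await env.DB.prepare('SELECT * FROM users LIMIT 10').all();
const attempts = (result.meta as any).total_attempts;
if (attempts > 1) {
  console.log(`Query succeeded after ${attempts} attempts`);
}
```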

---

## Performance Optimization

**Index Best Practices:**
- ✅ Index columns in WHERE clauses: `CREATE INDEX idx_users_email ON users(email)`
- ✅ Index foreign keys: `CREATE INDEX idx_posts_user_id ON posts(user_id)`
- ✅ Index columns for sorting: `CREATE INDEX idx_posts_created_at ON posts(created_at DESC)`
- ✅ Multi-column indexes: `CREATE INDEX idx_posts_user_published ON posts(user_id, published)`
- ✅ Partial indexes: `CREATE INDEX idx_users_active ON users(email) WHERE deleted = 0`
- ✅ Test with: `EXPLAIN QUERY PLAN SELECT ...`

**PRAGMA optimize (Feb 2025):**
```sql
CREATE INDEX idx_users_email ON users(email);
PRAGMA optimize; -- Run after schema changes
```

**Query Optimization:**
- ✅ Use specific columns (not `SELECT *`)
- ✅ Always include LIMIT on large result sets
- ✅ Use indexes for WHERE conditions
- ❌ Avoid functions in WHERE (can't use indexes): `WHERE LOWER(email)` → store lowercase instead

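A sketch of the "store lowercase instead" advice: normalize on write, then bind the normalized value on read so the plain index on `users(email)` stays usable (the `input` variable stands in for whatever the caller supplies):

```typescript
// Normalize case on write...
const normalized = email.trim().toLowerCase();
await env.DB.prepare('INSERT INTO users (email, username, created_at) VALUES (?, ?, ?)')
  .bind(normalized, username, Date.now())
  .run();

// ...then lookups bind the normalized value directly -- no function in WHERE
const user = await env.DB.prepare('SELECT user_id, email FROM users WHERE email = ?')
  .bind(input.trim().toLowerCase())
  .first();
```
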
---

## Local Development

**Local vs Remote (Nov 2025 - Remote Bindings GA):**
```bash
# Local database (automatic creation)
npx wrangler d1 migrations apply my-database --local
npx wrangler d1 execute my-database --local --command "SELECT * FROM users"

# Remote database
npx wrangler d1 execute my-database --remote --command "SELECT * FROM users"

# Remote bindings (wrangler@4.37.0+) - connect local Worker to deployed D1
# Add to wrangler.jsonc: { "binding": "DB", "remote": true }
```

**Local Database Location:**
`.wrangler/state/v3/d1/miniflare-D1DatabaseObject/<database_id>.sqlite`

**Seed Local Database:**
```bash
npx wrangler d1 execute my-database --local --file=seed.sql
```

---

## Best Practices Summary

### ✅ Always Do:

1. **Use prepared statements** with `.bind()` for user input
2. **Use `.batch()`** for multiple queries (reduces latency)
3. **Create indexes** on frequently queried columns
4. **Run `PRAGMA optimize`** after schema changes
5. **Use `IF NOT EXISTS`** in migrations for idempotency
6. **Test migrations locally** before applying to production
7. **Handle errors gracefully** with try/catch
8. **Use `null`** instead of `undefined` for optional values
9. **Validate input** before binding to queries
10. **Check `meta.rows_written`** after UPDATE/DELETE

### ❌ Never Do:

1. **Never use `.exec()`** with user input (SQL injection risk)
2. **Never hardcode `database_id`** in public repos
3. **Never use `undefined`** in bind parameters (causes D1_TYPE_ERROR)
4. **Never fire individual queries in loops** (use batch instead)
5. **Never forget `LIMIT`** on potentially large result sets
6. **Never use `SELECT *`** in production (specify columns)
7. **Never include `BEGIN TRANSACTION`** in migration files
8. **Never modify applied migrations** (create new ones)
9. **Never skip error handling** on database operations
10. **Never assume queries succeed** (always check results)

---

## Known Issues Prevented

| Issue | Description | How to Avoid |
|-------|-------------|--------------|
| **Statement too long** | Large INSERT statements exceed D1 limits | Break into batches of 100-250 rows |
| **Transaction conflicts** | `BEGIN TRANSACTION` in migration files | Remove BEGIN/COMMIT (D1 handles this) |
| **Foreign key violations** | Schema changes break foreign key constraints | Use `PRAGMA defer_foreign_keys = true` |
| **Rate limiting / queue overload** | Too many individual queries | Use `batch()` instead of loops |
| **Memory limit exceeded** | Query loads too much data into memory | Add LIMIT, paginate results, shard queries |
| **Type mismatch errors** | Using `undefined` instead of `null` | Always use `null` for optional values |

---

## Wrangler Commands Reference

```bash
# Database management
wrangler d1 create <DATABASE_NAME>
wrangler d1 list
wrangler d1 delete <DATABASE_NAME>
wrangler d1 info <DATABASE_NAME>

# Migrations
wrangler d1 migrations create <DATABASE_NAME> <MIGRATION_NAME>
wrangler d1 migrations list <DATABASE_NAME> --local|--remote
wrangler d1 migrations apply <DATABASE_NAME> --local|--remote

# Execute queries
wrangler d1 execute <DATABASE_NAME> --local|--remote --command "SELECT * FROM users"
wrangler d1 execute <DATABASE_NAME> --local|--remote --file=./query.sql

# Time Travel (view historical data)
wrangler d1 time-travel info <DATABASE_NAME> --timestamp "2025-10-20"
wrangler d1 time-travel restore <DATABASE_NAME> --timestamp "2025-10-20"
```

---

## Official Documentation

- **D1 Overview**: https://developers.cloudflare.com/d1/
- **Get Started**: https://developers.cloudflare.com/d1/get-started/
- **Migrations**: https://developers.cloudflare.com/d1/reference/migrations/
- **Workers API**: https://developers.cloudflare.com/d1/worker-api/
- **Best Practices**: https://developers.cloudflare.com/d1/best-practices/
- **Wrangler Commands**: https://developers.cloudflare.com/workers/wrangler/commands/#d1

---

**Ready to build with D1!** 🚀
65  plugin.lock.json  Normal file

@@ -0,0 +1,65 @@
{
  "$schema": "internal://schemas/plugin.lock.v1.json",
  "pluginId": "gh:jezweb/claude-skills:skills/cloudflare-d1",
  "normalized": {
    "repo": null,
    "ref": "refs/tags/v20251128.0",
    "commit": "5c240a15cde58ee5bfffa543f52cb14e68f217aa",
    "treeHash": "a7949b99834f4c0c7e61aa2d3a7e90cd76031a73bfe866012563b56959e21cf9",
    "generatedAt": "2025-11-28T10:18:55.948066Z",
    "toolVersion": "publish_plugins.py@0.2.0"
  },
  "origin": {
    "remote": "git@github.com:zhongweili/42plugin-data.git",
    "branch": "master",
    "commit": "aa1497ed0949fd50e99e70d6324a29c5b34f9390",
    "repoRoot": "/Users/zhongweili/projects/openmind/42plugin-data"
  },
  "manifest": {
    "name": "cloudflare-d1",
    "description": "Build with D1 serverless SQLite database on Cloudflare's edge. Use when: creating databases, writing SQL migrations, querying D1 from Workers, handling relational data, or troubleshooting D1_ERROR, statement too long, migration failures, or query performance issues.",
    "version": "1.0.0"
  },
  "content": {
    "files": [
      {
        "path": "README.md",
        "sha256": "66466b23a4e99d1dea1b15a90933e875c6236a3dc987742c3e90a4275473b8ba"
      },
      {
        "path": "SKILL.md",
        "sha256": "5ba914560fd1c634962db91846fab4eeb997b187972c2f6be7f9d8b87da6ce52"
      },
      {
        "path": "references/best-practices.md",
        "sha256": "b7ee17509c8b2191552cf43fe60c37c8faaa171d3e3ef3cdc2715ecbe4a96e9c"
      },
      {
        "path": "references/query-patterns.md",
        "sha256": "73d285af581c03bc8abcf17a160a35d70d2353cacc4251f7f540b427cadc0de5"
      },
      {
        "path": ".claude-plugin/plugin.json",
        "sha256": "3f3dfc294a47f14511721a8cdd443c54a6d460312dc6d51bc0ac83dc521d6841"
      },
      {
        "path": "templates/d1-setup-migration.sh",
        "sha256": "95223ccd6cd5f72acbbbbb70363a3aef2e215db01410865548a2d99562e99519"
      },
      {
        "path": "templates/schema-example.sql",
        "sha256": "16d69eec5e6152a80b27baa2d42d4421eba849216decedfe4cc1a45e3aa52281"
      },
      {
        "path": "templates/d1-worker-queries.ts",
        "sha256": "90b3c39bade6af186df08b29851e14c81fab97d1e78550a1db6b7c00b1769f9e"
      }
    ],
    "dirSha256": "a7949b99834f4c0c7e61aa2d3a7e90cd76031a73bfe866012563b56959e21cf9"
  },
  "security": {
    "scannedAt": null,
    "scannerVersion": null,
    "flags": []
  }
}
652  references/best-practices.md  Normal file

@@ -0,0 +1,652 @@
# D1 Best Practices

**Production-ready patterns for Cloudflare D1**

---

## Table of Contents

1. [Security](#security)
2. [Performance](#performance)
3. [Migrations](#migrations)
4. [Error Handling](#error-handling)
5. [Data Modeling](#data-modeling)
6. [Testing](#testing)
7. [Deployment](#deployment)

---

## Security

### Always Use Prepared Statements

```typescript
// ❌ NEVER: SQL injection vulnerability
const email = c.req.query('email');
await env.DB.exec(`SELECT * FROM users WHERE email = '${email}'`);

// ✅ ALWAYS: Safe prepared statement
const user = await env.DB.prepare('SELECT * FROM users WHERE email = ?')
  .bind(email)
  .first();
```

**Why?** User input like `'; DROP TABLE users; --` would execute in the first example!

### Use null Instead of undefined

```typescript
// ❌ WRONG: undefined causes D1_TYPE_ERROR
await env.DB.prepare('INSERT INTO users (email, bio) VALUES (?, ?)')
  .bind(email, undefined);

// ✅ CORRECT: Use null for optional values
await env.DB.prepare('INSERT INTO users (email, bio) VALUES (?, ?)')
  .bind(email, bio || null);
```

### Never Commit Sensitive IDs

```jsonc
// ❌ WRONG: Database ID in public repo
{
  "d1_databases": [
    {
      "database_id": "a1b2c3d4-e5f6-7890-abcd-ef1234567890" // ❌
    }
  ]
}

// ✅ BETTER: Use environment variable or secret
{
  "d1_databases": [
    {
      "database_id": "$D1_DATABASE_ID" // Reference env var
    }
  ]
}
```

Or use wrangler secrets:

```bash
npx wrangler secret put D1_DATABASE_ID
```

### Validate Input Before Binding

```typescript
// ✅ Validate email format
function isValidEmail(email: string): boolean {
  return /^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(email);
}

app.post('/api/users', async (c) => {
  const { email } = await c.req.json();

  if (!isValidEmail(email)) {
    return c.json({ error: 'Invalid email format' }, 400);
  }

  // Now safe to use
  const user = await c.env.DB.prepare('INSERT INTO users (email) VALUES (?)')
    .bind(email)
    .run();
});
```

---

## Performance

### Use Batch for Multiple Queries

```typescript
// ❌ BAD: 3 network round trips (~150ms)
const user = await env.DB.prepare('SELECT * FROM users WHERE user_id = ?').bind(1).first();
const posts = await env.DB.prepare('SELECT * FROM posts WHERE user_id = ?').bind(1).all();
const comments = await env.DB.prepare('SELECT * FROM comments WHERE user_id = ?').bind(1).all();

// ✅ GOOD: 1 network round trip (~50ms)
const [userResult, postsResult, commentsResult] = await env.DB.batch([
  env.DB.prepare('SELECT * FROM users WHERE user_id = ?').bind(1),
  env.DB.prepare('SELECT * FROM posts WHERE user_id = ?').bind(1),
  env.DB.prepare('SELECT * FROM comments WHERE user_id = ?').bind(1)
]);

const user = userResult.results[0];
const posts = postsResult.results;
const comments = commentsResult.results;
```

**Performance win: 3x faster!**

### Create Indexes for WHERE Clauses

```sql
-- ❌ Slow: Full table scan
SELECT * FROM posts WHERE user_id = 123;

-- ✅ Fast: Create index first
CREATE INDEX IF NOT EXISTS idx_posts_user_id ON posts(user_id);

-- Now this query is fast
SELECT * FROM posts WHERE user_id = 123;
```

**Verify index is being used:**

```sql
EXPLAIN QUERY PLAN SELECT * FROM posts WHERE user_id = 123;
-- Should see: SEARCH posts USING INDEX idx_posts_user_id
```

### Run PRAGMA optimize After Schema Changes

```sql
-- After creating indexes or altering schema
PRAGMA optimize;
```

This collects statistics that help the query planner choose the best execution plan.

### Select Only Needed Columns

```typescript
// ❌ Bad: Fetches all columns (wastes bandwidth)
const users = await env.DB.prepare('SELECT * FROM users').all();

// ✅ Good: Only fetch what you need
const users = await env.DB.prepare('SELECT user_id, email, username FROM users').all();
```

### Always Use LIMIT

```typescript
// ❌ Dangerous: Could return millions of rows
const posts = await env.DB.prepare('SELECT * FROM posts WHERE published = 1').all();

// ✅ Safe: Limit result set
const posts = await env.DB.prepare(
  'SELECT * FROM posts WHERE published = 1 LIMIT 100'
).all();
```

### Use Partial Indexes

```sql
-- Index only published posts (smaller index, faster writes)
CREATE INDEX idx_posts_published ON posts(created_at DESC)
WHERE published = 1;

-- Index only active users (exclude deleted)
CREATE INDEX idx_users_active ON users(email)
WHERE deleted_at IS NULL;
```

Benefits:
- ✅ Smaller indexes (faster queries)
- ✅ Fewer index updates (faster writes)
- ✅ Only index relevant data

---

## Migrations

### Make Migrations Idempotent

```sql
-- ✅ ALWAYS use IF NOT EXISTS
CREATE TABLE IF NOT EXISTS users (
  user_id INTEGER PRIMARY KEY,
  email TEXT NOT NULL
);

CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);

-- ✅ Use IF EXISTS for drops
DROP TABLE IF EXISTS temp_table;
```

**Why?** Re-running a migration won't fail if it's already applied.

### Never Modify Applied Migrations

```bash
# ❌ WRONG: Editing applied migration
vim migrations/0001_create_users.sql  # Already applied!

# ✅ CORRECT: Create new migration
npx wrangler d1 migrations create my-database add_users_bio_column
```

**Why?** D1 tracks which migrations have been applied. Modifying them causes inconsistencies.

### Test Migrations Locally First

```bash
# 1. Apply to local database
npx wrangler d1 migrations apply my-database --local

# 2. Test queries locally
npx wrangler d1 execute my-database --local --command "SELECT * FROM users"

# 3. Only then apply to production
npx wrangler d1 migrations apply my-database --remote
```

### Handle Foreign Keys Carefully

```sql
-- Disable foreign key checks temporarily during schema changes
PRAGMA defer_foreign_keys = true;

-- Make schema changes that would violate foreign keys
ALTER TABLE posts DROP COLUMN old_user_id;
ALTER TABLE posts ADD COLUMN user_id INTEGER REFERENCES users(user_id);

-- Foreign keys re-enabled automatically at end of migration
```

### Break Large Data Migrations into Batches

```sql
-- ❌ BAD: Single massive INSERT (causes "statement too long")
INSERT INTO users (email) VALUES
('user1@example.com'),
('user2@example.com'),
... -- 10,000 more rows

-- ✅ GOOD: Split into batches of 100-250 rows
-- File: 0001_migrate_users_batch1.sql
INSERT INTO users (email) VALUES
('user1@example.com'),
... -- 100 rows

-- File: 0002_migrate_users_batch2.sql
INSERT INTO users (email) VALUES
('user101@example.com'),
... -- next 100 rows
```

---

## Error Handling

### Check for Errors After Every Query

```typescript
try {
  const result = await env.DB.prepare('INSERT INTO users (email) VALUES (?)')
    .bind(email)
    .run();

  if (!result.success) {
    console.error('Insert failed');
    return c.json({ error: 'Failed to create user' }, 500);
  }

  // Success!
  const userId = result.meta.last_row_id;

} catch (error: any) {
  console.error('Database error:', error.message);
  return c.json({ error: 'Database operation failed' }, 500);
}
```

### Implement Retry Logic for Transient Errors

```typescript
async function queryWithRetry<T>(
  queryFn: () => Promise<T>,
  maxRetries = 3
): Promise<T> {
  for (let attempt = 0; attempt < maxRetries; attempt++) {
    try {
      return await queryFn();
    } catch (error: any) {
      const message = error.message;

      // Check if error is retryable
      const isRetryable =
        message.includes('Network connection lost') ||
        message.includes('storage caused object to be reset') ||
        message.includes('reset because its code was updated');

      if (!isRetryable || attempt === maxRetries - 1) {
        throw error;
      }

      // Exponential backoff: 1s, 2s, 4s
      const delay = Math.min(1000 * Math.pow(2, attempt), 5000);
      await new Promise(resolve => setTimeout(resolve, delay));
    }
  }

  throw new Error('Max retries exceeded');
}

// Usage
const user = await queryWithRetry(() =>
  env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
    .bind(userId)
    .first()
);
```

### Handle Common D1 Errors

```typescript
// Inside a Hono route handler, where c is the request context
try {
  await env.DB.prepare(query).bind(...params).run();
} catch (error: any) {
  const message = error.message;

  if (message.includes('D1_ERROR')) {
    // D1-specific error
    console.error('D1 error:', message);
  } else if (message.includes('UNIQUE constraint failed')) {
    // Duplicate key error
    return c.json({ error: 'Email already exists' }, 409);
  } else if (message.includes('FOREIGN KEY constraint failed')) {
    // Invalid foreign key
    return c.json({ error: 'Invalid user reference' }, 400);
  } else {
    // Unknown error
    console.error('Unknown database error:', message);
    return c.json({ error: 'Database operation failed' }, 500);
  }
}
```

---

## Data Modeling

### Use Appropriate Data Types

```sql
CREATE TABLE users (
  user_id INTEGER PRIMARY KEY AUTOINCREMENT, -- Auto-incrementing ID
  email TEXT NOT NULL,                       -- String
  username TEXT NOT NULL,
  age INTEGER,                               -- Number
  balance REAL,                              -- Decimal/float
  is_active INTEGER DEFAULT 1,               -- Boolean (0 or 1)
  metadata TEXT,                             -- JSON (stored as TEXT)
  created_at INTEGER NOT NULL                -- Unix timestamp
);
```

**SQLite has 5 types**: NULL, INTEGER, REAL, TEXT, BLOB

### Store Timestamps as Unix Epoch

```sql
-- ✅ RECOMMENDED: Unix timestamp (INTEGER)
created_at INTEGER NOT NULL DEFAULT (unixepoch())

-- ❌ AVOID: ISO 8601 strings (harder to query/compare)
created_at TEXT NOT NULL DEFAULT (datetime('now'))
```

**Why?** Unix timestamps are easier to compare, filter, and work with in JavaScript. Note the units: SQLite's `unixepoch()` returns seconds, while JavaScript's `Date.now()` returns milliseconds, so pick one unit for a column and stick to it:

```typescript
// Easy to work with
const timestamp = Date.now(); // e.g. 1698000000000 (milliseconds)
const date = new Date(timestamp);

// Easy to query
const recentPosts = await env.DB.prepare(
  'SELECT * FROM posts WHERE created_at > ?'
).bind(Date.now() - 86400000).all(); // Last 24 hours
```

### Store JSON as TEXT

```sql
CREATE TABLE users (
  user_id INTEGER PRIMARY KEY,
  email TEXT NOT NULL,
  settings TEXT -- Store JSON here
);
```

```typescript
// Insert JSON
const settings = { theme: 'dark', language: 'en' };
await env.DB.prepare('INSERT INTO users (email, settings) VALUES (?, ?)')
  .bind(email, JSON.stringify(settings))
  .run();

// Read JSON back (new name -- `settings` is already declared above)
const user = await env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
  .bind(userId)
  .first();

const savedSettings = JSON.parse(user.settings);
console.log(savedSettings.theme); // 'dark'
```

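SQLite's JSON functions can also filter on fields inside the TEXT column directly; a small sketch using `json_extract` (part of SQLite's JSON1 functions, which D1 inherits):

```typescript
// Find users whose stored settings have theme = 'dark',
// without parsing JSON in application code
const { results } = await env.DB.prepare(
  "SELECT user_id, email FROM users WHERE json_extract(settings, '$.theme') = ?"
).bind('dark').all();
```
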
### Use Soft Deletes

```sql
CREATE TABLE users (
  user_id INTEGER PRIMARY KEY,
  email TEXT NOT NULL,
  deleted_at INTEGER -- NULL = active, timestamp = deleted
);

-- Index for active users only
CREATE INDEX idx_users_active ON users(user_id)
WHERE deleted_at IS NULL;
```

```typescript
// Soft delete
await env.DB.prepare('UPDATE users SET deleted_at = ? WHERE user_id = ?')
  .bind(Date.now(), userId)
  .run();

// Query only active users
const activeUsers = await env.DB.prepare(
  'SELECT * FROM users WHERE deleted_at IS NULL'
).all();
```

### Normalize Related Data

```sql
-- ✅ GOOD: Normalized (users in separate table)
CREATE TABLE posts (
  post_id INTEGER PRIMARY KEY,
  user_id INTEGER NOT NULL,
  title TEXT NOT NULL,
  FOREIGN KEY (user_id) REFERENCES users(user_id)
);

-- ❌ BAD: Denormalized (user data duplicated in every post)
CREATE TABLE posts (
  post_id INTEGER PRIMARY KEY,
  user_email TEXT NOT NULL,
  user_name TEXT NOT NULL,
  title TEXT NOT NULL
);
```

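With the normalized schema, related data comes back with a join at read time; a sketch assuming the `users` and `posts` tables above:

```typescript
// Fetch posts together with their author's details in one query
const { results } = await env.DB.prepare(
  `SELECT p.post_id, p.title, u.username, u.email
   FROM posts p
   JOIN users u ON u.user_id = p.user_id
   WHERE p.user_id = ?
   LIMIT 50`
)
  .bind(userId)
  .all();
```
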
---

## Testing

### Test Migrations Locally

```bash
# 1. Create local database
npx wrangler d1 migrations apply my-database --local

# 2. Seed with test data
npx wrangler d1 execute my-database --local --file=seed.sql

# 3. Run test queries
npx wrangler d1 execute my-database --local --command "SELECT COUNT(*) FROM users"
```

### Use Separate Databases for Development

```jsonc
{
  "d1_databases": [
    {
      "binding": "DB",
      "database_name": "my-app-prod",
      "database_id": "<PROD_UUID>",
      "preview_database_id": "local-dev" // Local only
    }
  ]
}
```

**Benefits:**
- ✅ Never accidentally modify production data
- ✅ Fast local development (no network latency)
- ✅ Can reset local DB anytime

### Backup Before Major Migrations

```bash
# Export current database
npx wrangler d1 export my-database --remote --output=backup-$(date +%Y%m%d).sql

# Apply migration
npx wrangler d1 migrations apply my-database --remote

# If something goes wrong, restore from backup
npx wrangler d1 execute my-database --remote --file=backup-20251021.sql
```

---

## Deployment

### Use Preview Databases for Testing

```jsonc
{
  "d1_databases": [
    {
      "binding": "DB",
      "database_name": "my-app-prod",
      "database_id": "<PROD_UUID>",
      "preview_database_id": "<PREVIEW_UUID>" // Separate preview database
    }
  ]
}
```

Deploy preview:

```bash
npx wrangler deploy --env preview
```

### Apply Migrations Before Deploying Code

```bash
# 1. Apply migrations first
npx wrangler d1 migrations apply my-database --remote

# 2. Then deploy Worker code
npx wrangler deploy
```

**Why?** Ensures database schema is ready before code expects it.

### Monitor Query Performance

```typescript
app.get('/api/users', async (c) => {
  const start = Date.now();

  const { results, meta } = await c.env.DB.prepare('SELECT * FROM users LIMIT 100')
    .all();

  const duration = Date.now() - start;

  // Log slow queries
  if (duration > 100) {
    console.warn(`Slow query: ${duration}ms, rows_read: ${meta.rows_read}`);
  }

  return c.json({ users: results });
});
```

### Use Time Travel for Data Recovery

```bash
# View database state 2 hours ago
npx wrangler d1 time-travel info my-database --timestamp "2025-10-21T10:00:00Z"

# Restore database to 2 hours ago
npx wrangler d1 time-travel restore my-database --timestamp "2025-10-21T10:00:00Z"
```

**Note**: Time Travel available for last 30 days.

---

## Summary Checklist

### Security ✅
- [ ] Always use `.prepare().bind()` for user input
- [ ] Use `null` instead of `undefined`
- [ ] Validate input before binding
- [ ] Never commit database IDs to public repos

### Performance ✅
- [ ] Use `.batch()` for multiple queries
- [ ] Create indexes on filtered columns
- [ ] Run `PRAGMA optimize` after schema changes
- [ ] Select only needed columns
- [ ] Always use `LIMIT`

### Migrations ✅
- [ ] Make migrations idempotent (IF NOT EXISTS)
- [ ] Never modify applied migrations
- [ ] Test locally before production
- [ ] Break large data migrations into batches

### Error Handling ✅
- [ ] Wrap queries in try/catch
- [ ] Implement retry logic for transient errors
- [ ] Check `result.success` and `meta.rows_written`
- [ ] Log errors with context

### Data Modeling ✅
- [ ] Use appropriate SQLite data types
- [ ] Store timestamps as Unix epoch (INTEGER)
- [ ] Use soft deletes (deleted_at column)
- [ ] Normalize related data with foreign keys

### Testing ✅
- [ ] Test migrations locally first
- [ ] Use separate development/production databases
- [ ] Backup before major migrations

### Deployment ✅
- [ ] Apply migrations before deploying code
- [ ] Use preview databases for testing
- [ ] Monitor query performance
- [ ] Use Time Travel for recovery

---

## Official Documentation

- **Best Practices**: https://developers.cloudflare.com/d1/best-practices/
- **Indexes**: https://developers.cloudflare.com/d1/best-practices/use-indexes/
- **Local Development**: https://developers.cloudflare.com/d1/best-practices/local-development/
- **Retry Queries**: https://developers.cloudflare.com/d1/best-practices/retry-queries/
- **Time Travel**: https://developers.cloudflare.com/d1/reference/time-travel/
587  references/query-patterns.md  Normal file

@@ -0,0 +1,587 @@
|
|||||||
|
# D1 Query Patterns Reference
|
||||||
|
|
||||||
|
**Complete guide to all D1 Workers API methods with examples**
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table of Contents
|
||||||
|
|
||||||
|
1. [D1 API Methods Overview](#d1-api-methods-overview)
|
||||||
|
2. [prepare() - Prepared Statements](#prepare---prepared-statements)
|
||||||
|
3. [Query Result Methods](#query-result-methods)
|
||||||
|
4. [batch() - Multiple Queries](#batch---multiple-queries)
|
||||||
|
5. [exec() - Raw SQL](#exec---raw-sql)
|
||||||
|
6. [Common Query Patterns](#common-query-patterns)
|
||||||
|
7. [Performance Tips](#performance-tips)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## D1 API Methods Overview
|
||||||
|
|
||||||
|
| Method | Use Case | Returns Results | Safe for User Input |
|
||||||
|
|--------|----------|-----------------|---------------------|
|
||||||
|
| `.prepare().bind()` | **Primary method** for queries | Yes | ✅ Yes (prevents SQL injection) |
|
||||||
|
| `.batch()` | Multiple queries in one round trip | Yes | ✅ Yes (if using prepare) |
|
||||||
|
| `.exec()` | Raw SQL execution | No | ❌ No (SQL injection risk) |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## prepare() - Prepared Statements
|
||||||
|
|
||||||
|
**Primary method for all queries with user input.**
|
||||||
|
|
||||||
|
### Basic Syntax
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const stmt = env.DB.prepare(sql);
|
||||||
|
const bound = stmt.bind(...parameters);
|
||||||
|
const result = await bound.all(); // or .first(), .run()
|
||||||
|
```
|
||||||
|
|
||||||
|
### Method Chaining (Most Common)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const result = await env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
|
||||||
|
.bind(userId)
|
||||||
|
.first();
|
||||||
|
```
|
||||||
|
|
||||||
|
### Parameter Binding
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Single parameter
|
||||||
|
const user = await env.DB.prepare('SELECT * FROM users WHERE email = ?')
|
||||||
|
.bind('user@example.com')
|
||||||
|
.first();
|
||||||
|
|
||||||
|
// Multiple parameters
|
||||||
|
const posts = await env.DB.prepare(
|
||||||
|
'SELECT * FROM posts WHERE user_id = ? AND published = ? LIMIT ?'
|
||||||
|
)
|
||||||
|
.bind(userId, 1, 10)
|
||||||
|
.all();
|
||||||
|
|
||||||
|
// Use null for optional values (NEVER undefined)
|
||||||
|
const updated = await env.DB.prepare(
|
||||||
|
'UPDATE users SET bio = ?, avatar_url = ? WHERE user_id = ?'
|
||||||
|
)
|
||||||
|
.bind(bio || null, avatarUrl || null, userId)
|
||||||
|
.run();
|
||||||
|
```
|
||||||
|
|
||||||
|
### Why use prepare()?
|
||||||
|
|
||||||
|
- ✅ **SQL injection protection** - Parameters are safely escaped
|
||||||
|
- ✅ **Performance** - Query plans can be cached
|
||||||
|
- ✅ **Reusability** - Same statement, different parameters
|
||||||
|
- ✅ **Type safety** - Works with TypeScript generics
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Query Result Methods
|
||||||
|
|
||||||
|
### .all() - Get All Rows
|
||||||
|
|
||||||
|
Returns all matching rows as an array.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const { results, meta } = await env.DB.prepare('SELECT * FROM users')
|
||||||
|
.all();
|
||||||
|
|
||||||
|
console.log(results); // Array of row objects
|
||||||
|
console.log(meta); // { duration, rows_read, rows_written }
|
||||||
|
```
|
||||||
|
|
||||||
|
**With Type Safety:**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
interface User {
|
||||||
|
user_id: number;
|
||||||
|
email: string;
|
||||||
|
username: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const { results } = await env.DB.prepare('SELECT * FROM users')
|
||||||
|
.all<User>();
|
||||||
|
|
||||||
|
// results is now typed as User[]
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response Structure:**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
{
|
||||||
|
success: true,
|
||||||
|
results: [
|
||||||
|
{ user_id: 1, email: 'alice@example.com', username: 'alice' },
|
||||||
|
{ user_id: 2, email: 'bob@example.com', username: 'bob' }
|
||||||
|
],
|
||||||
|
meta: {
|
||||||
|
duration: 2.5, // Milliseconds
|
||||||
|
rows_read: 2, // Rows scanned
|
||||||
|
rows_written: 0 // Rows modified
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### .first() - Get First Row
|
||||||
|
|
||||||
|
Returns the first row or `null` if no results.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const user = await env.DB.prepare('SELECT * FROM users WHERE email = ?')
|
||||||
|
.bind('alice@example.com')
|
||||||
|
.first();
|
||||||
|
|
||||||
|
if (!user) {
|
||||||
|
return c.json({ error: 'User not found' }, 404);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**With Type Safety:**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const user = await env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
|
||||||
|
.bind(userId)
|
||||||
|
.first<User>();
|
||||||
|
|
||||||
|
// user is typed as User | null
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note**: `.first()` doesn't add `LIMIT 1` automatically. For better performance:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ✅ Better: Add LIMIT 1 yourself
|
||||||
|
const user = await env.DB.prepare('SELECT * FROM users WHERE email = ? LIMIT 1')
|
||||||
|
.bind(email)
|
||||||
|
.first();
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### .first(column) - Get Single Column Value
|
||||||
|
|
||||||
|
Returns the value of a specific column from the first row.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Get count
|
||||||
|
const total = await env.DB.prepare('SELECT COUNT(*) as total FROM users')
|
||||||
|
.first('total');
|
||||||
|
|
||||||
|
console.log(total); // 42 (just the number, not an object)
|
||||||
|
|
||||||
|
// Get specific field
|
||||||
|
const email = await env.DB.prepare('SELECT email FROM users WHERE user_id = ?')
|
||||||
|
.bind(userId)
|
||||||
|
.first('email');
|
||||||
|
|
||||||
|
console.log(email); // 'user@example.com'
|
||||||
|
```
|
||||||
|
|
||||||
|
**Use Cases:**
|
||||||
|
- Counting rows
|
||||||
|
- Checking existence (SELECT 1)
|
||||||
|
- Getting single values (MAX, MIN, AVG)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### .run() - Execute Without Results
|
||||||
|
|
||||||
|
Used for INSERT, UPDATE, DELETE when you don't need the data back.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const { success, meta } = await env.DB.prepare(
|
||||||
|
'INSERT INTO users (email, username, created_at) VALUES (?, ?, ?)'
|
||||||
|
)
|
||||||
|
.bind(email, username, Date.now())
|
||||||
|
.run();
|
||||||
|
|
||||||
|
console.log(success); // true/false
|
||||||
|
console.log(meta.last_row_id); // ID of inserted row
|
||||||
|
console.log(meta.rows_written); // Number of rows affected
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response Structure:**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
{
|
||||||
|
success: true,
|
||||||
|
meta: {
|
||||||
|
duration: 1.2,
|
||||||
|
rows_read: 0,
|
||||||
|
rows_written: 1,
|
||||||
|
last_row_id: 42 // Only for INSERT with AUTOINCREMENT
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Check if rows were affected:**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const result = await env.DB.prepare('DELETE FROM users WHERE user_id = ?')
|
||||||
|
.bind(userId)
|
||||||
|
.run();
|
||||||
|
|
||||||
|
if (result.meta.rows_written === 0) {
|
||||||
|
return c.json({ error: 'User not found' }, 404);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## batch() - Multiple Queries
|
||||||
|
|
||||||
|
**CRITICAL FOR PERFORMANCE**: Execute multiple queries in one network round trip.
|
||||||
|
|
||||||
|
### Basic Batch
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const [users, posts, comments] = await env.DB.batch([
|
||||||
|
env.DB.prepare('SELECT * FROM users LIMIT 10'),
|
||||||
|
env.DB.prepare('SELECT * FROM posts LIMIT 10'),
|
||||||
|
env.DB.prepare('SELECT * FROM comments LIMIT 10')
|
||||||
|
]);
|
||||||
|
|
||||||
|
console.log(users.results); // User rows
|
||||||
|
console.log(posts.results); // Post rows
|
||||||
|
console.log(comments.results); // Comment rows
|
||||||
|
```
|
||||||
|
|
||||||
|
### Batch with Parameters
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const stmt1 = env.DB.prepare('SELECT * FROM users WHERE user_id = ?').bind(1);
|
||||||
|
const stmt2 = env.DB.prepare('SELECT * FROM users WHERE user_id = ?').bind(2);
|
||||||
|
const stmt3 = env.DB.prepare('SELECT * FROM posts WHERE user_id = ?').bind(1);
|
||||||
|
|
||||||
|
const results = await env.DB.batch([stmt1, stmt2, stmt3]);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Bulk Insert with Batch
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const users = [
|
||||||
|
{ email: 'user1@example.com', username: 'user1' },
|
||||||
|
{ email: 'user2@example.com', username: 'user2' },
|
||||||
|
{ email: 'user3@example.com', username: 'user3' }
|
||||||
|
];
|
||||||
|
|
||||||
|
const inserts = users.map(u =>
|
||||||
|
env.DB.prepare('INSERT INTO users (email, username, created_at) VALUES (?, ?, ?)')
|
||||||
|
.bind(u.email, u.username, Date.now())
|
||||||
|
);
|
||||||
|
|
||||||
|
const results = await env.DB.batch(inserts);
|
||||||
|
|
||||||
|
const successCount = results.filter(r => r.success).length;
|
||||||
|
console.log(`Inserted ${successCount} users`);
|
||||||
|
```

### Transaction-like Behavior

```typescript
// Statements execute sequentially inside a single transaction:
// if one fails, the whole batch is rolled back
await env.DB.batch([
  // Deduct credits from user 1
  env.DB.prepare('UPDATE users SET credits = credits - ? WHERE user_id = ?')
    .bind(100, userId1),

  // Add credits to user 2
  env.DB.prepare('UPDATE users SET credits = credits + ? WHERE user_id = ?')
    .bind(100, userId2),

  // Record transaction
  env.DB.prepare('INSERT INTO transactions (from_user, to_user, amount) VALUES (?, ?, ?)')
    .bind(userId1, userId2, 100)
]);
```

**Batch Behavior:**

- Executes statements **sequentially** (in order)
- Runs as a single SQL transaction: if one statement fails, that statement returns an error and the whole sequence is rolled back
- All statements in one **network round trip** (huge performance win)
- A failed batch surfaces as a thrown error; handle it as shown below
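
A minimal sketch of handling a failed batch, reusing the credit-transfer statements above (the `c.json` response helper matches the Hono-style handlers used elsewhere in this skill):

```typescript
try {
  await env.DB.batch([
    env.DB.prepare('UPDATE users SET credits = credits - ? WHERE user_id = ?')
      .bind(100, userId1),
    env.DB.prepare('UPDATE users SET credits = credits + ? WHERE user_id = ?')
      .bind(100, userId2),
    env.DB.prepare('INSERT INTO transactions (from_user, to_user, amount) VALUES (?, ?, ?)')
      .bind(userId1, userId2, 100)
  ]);
} catch (error: any) {
  // The error message identifies the failing statement; because the
  // batch is transactional, no partial writes persist
  console.error('Transfer failed:', error.message);
  return c.json({ error: 'Transfer failed' }, 500);
}
```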

### Batch Performance Comparison

```typescript
// ❌ BAD: 10 separate queries = 10 network round trips
for (let i = 0; i < 10; i++) {
  await env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
    .bind(i)
    .first();
}
// ~500ms total latency

// ✅ GOOD: 1 batch query = 1 network round trip
const userIds = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
const queries = userIds.map(id =>
  env.DB.prepare('SELECT * FROM users WHERE user_id = ?').bind(id)
);
const results = await env.DB.batch(queries);
// ~50ms total latency
```
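
When every statement reads the same shape of row, a single `IN` query is often simpler still. A minimal sketch (the placeholder string is generated to match the id count; values are still bound, so this stays injection-safe):

```typescript
const userIds = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
const placeholders = userIds.map(() => '?').join(', ');

const { results } = await env.DB.prepare(
  `SELECT * FROM users WHERE user_id IN (${placeholders})`
)
  .bind(...userIds)
  .all();
```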

---

## exec() - Raw SQL

**AVOID IN PRODUCTION**. Only use for migrations and one-off tasks.

### Basic Exec

```typescript
const result = await env.DB.exec('SELECT * FROM users');

console.log(result);
// { count: 1, duration: 2.5 }
```

**NOTE**: `exec()` does **not return data**, only count and duration!

### Multiple Statements

```typescript
const result = await env.DB.exec(`
  DROP TABLE IF EXISTS temp_users;
  CREATE TABLE temp_users (user_id INTEGER PRIMARY KEY);
  INSERT INTO temp_users VALUES (1), (2), (3);
`);

console.log(result);
// { count: 3, duration: 5.2 }
```

### ⚠️ NEVER Use exec() For:

```typescript
// ❌ NEVER: SQL injection vulnerability
const email = userInput;
await env.DB.exec(`SELECT * FROM users WHERE email = '${email}'`);

// ✅ ALWAYS: Use prepared statements instead
await env.DB.prepare('SELECT * FROM users WHERE email = ?')
  .bind(email)
  .first();
```

### ✅ ONLY Use exec() For:

- Running migration files locally
- One-off maintenance tasks such as `PRAGMA optimize` (see the sketch below)
- Database initialization scripts
- CLI tools (not production Workers)
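
A minimal sketch of the maintenance case, assuming a cron trigger is configured in `wrangler.jsonc` (the schedule and the `Env` shape are assumptions for illustration):

```typescript
interface Env {
  DB: D1Database;
}

export default {
  // Runs on the configured cron schedule and refreshes the query
  // planner's statistics; exec() fits because we only need the
  // count/duration result, not rows
  async scheduled(controller: ScheduledController, env: Env, ctx: ExecutionContext) {
    const result = await env.DB.exec('PRAGMA optimize');
    console.log(`PRAGMA optimize completed in ${result.duration}ms`);
  }
};
```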

---

## Common Query Patterns

### Existence Check

```typescript
// Check if email exists
const exists = await env.DB.prepare('SELECT 1 FROM users WHERE email = ? LIMIT 1')
  .bind(email)
  .first();

if (exists) {
  return c.json({ error: 'Email already registered' }, 409);
}
```

### Get or Create

```typescript
// Try to find user
let user = await env.DB.prepare('SELECT * FROM users WHERE email = ?')
  .bind(email)
  .first<User>();

// Create if doesn't exist
if (!user) {
  const result = await env.DB.prepare(
    'INSERT INTO users (email, username, created_at) VALUES (?, ?, ?)'
  )
    .bind(email, username, Date.now())
    .run();

  const userId = result.meta.last_row_id;

  user = await env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
    .bind(userId)
    .first<User>();
}
```

### Pagination

```typescript
const page = 1;
const limit = 20;
const offset = (page - 1) * limit;

const [countResult, dataResult] = await env.DB.batch([
  env.DB.prepare('SELECT COUNT(*) as total FROM posts WHERE published = 1'),
  env.DB.prepare(
    'SELECT * FROM posts WHERE published = 1 ORDER BY created_at DESC LIMIT ? OFFSET ?'
  ).bind(limit, offset)
]);

const total = countResult.results[0].total;
const posts = dataResult.results;

return {
  posts,
  pagination: {
    page,
    limit,
    total,
    pages: Math.ceil(total / limit)
  }
};
```

### Upsert (INSERT or UPDATE)

```typescript
// SQLite 3.24.0+ supports UPSERT
await env.DB.prepare(`
  INSERT INTO user_settings (user_id, theme, language)
  VALUES (?, ?, ?)
  ON CONFLICT(user_id) DO UPDATE SET
    theme = excluded.theme,
    language = excluded.language,
    updated_at = unixepoch()
`)
  .bind(userId, theme, language)
  .run();
```
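
A sketch of fetching the upserted row in the same statement, assuming `RETURNING` support (SQLite 3.35+, which D1's SQLite build provides), against the same `user_settings` table:

```typescript
const saved = await env.DB.prepare(`
  INSERT INTO user_settings (user_id, theme, language)
  VALUES (?, ?, ?)
  ON CONFLICT(user_id) DO UPDATE SET
    theme = excluded.theme,
    language = excluded.language
  RETURNING *
`)
  .bind(userId, theme, language)
  .first();
```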

### Bulk Upsert

```typescript
const settings = [
  { user_id: 1, theme: 'dark', language: 'en' },
  { user_id: 2, theme: 'light', language: 'es' }
];

const upserts = settings.map(s =>
  env.DB.prepare(`
    INSERT INTO user_settings (user_id, theme, language)
    VALUES (?, ?, ?)
    ON CONFLICT(user_id) DO UPDATE SET
      theme = excluded.theme,
      language = excluded.language
  `).bind(s.user_id, s.theme, s.language)
);

await env.DB.batch(upserts);
```

---

## Performance Tips

### Use SELECT Column Names (Not SELECT *)

```typescript
// ❌ Bad: Fetches all columns
const users = await env.DB.prepare('SELECT * FROM users').all();

// ✅ Good: Only fetch needed columns
const users = await env.DB.prepare('SELECT user_id, email, username FROM users').all();
```

### Always Use LIMIT

```typescript
// ❌ Bad: Could return millions of rows
const posts = await env.DB.prepare('SELECT * FROM posts').all();

// ✅ Good: Limit result set
const posts = await env.DB.prepare('SELECT * FROM posts LIMIT 100').all();
```

### Use Indexes

```sql
-- Create index for common queries
CREATE INDEX IF NOT EXISTS idx_posts_published_created
ON posts(published, created_at DESC)
WHERE published = 1;
```

```typescript
// Query will use the index
const posts = await env.DB.prepare(
  'SELECT * FROM posts WHERE published = 1 ORDER BY created_at DESC LIMIT 10'
).all();
```

### Check Index Usage

```sql
EXPLAIN QUERY PLAN SELECT * FROM posts WHERE published = 1;
-- Should see: SEARCH posts USING INDEX idx_posts_published_created
```
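
You can run the plan check from the CLI against your local database (substitute your own database name):

```bash
npx wrangler d1 execute my-database --local \
  --command "EXPLAIN QUERY PLAN SELECT * FROM posts WHERE published = 1"
```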

### Batch Instead of Loop

```typescript
// ❌ Bad: Multiple network round trips
for (const id of userIds) {
  const user = await env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
    .bind(id)
    .first();
}

// ✅ Good: One network round trip
const queries = userIds.map(id =>
  env.DB.prepare('SELECT * FROM users WHERE user_id = ?').bind(id)
);
const results = await env.DB.batch(queries);
```

---

## Meta Object Reference

Every D1 query returns a `meta` object with execution details:

```typescript
{
  duration: 2.5,    // Query execution time in milliseconds
  rows_read: 100,   // Number of rows scanned
  rows_written: 1,  // Number of rows modified (INSERT/UPDATE/DELETE)
  last_row_id: 42,  // ID of last inserted row (INSERT only)
  changes: 1        // Rows affected (UPDATE/DELETE only)
}
```

### Using Meta for Debugging

```typescript
const result = await env.DB.prepare('SELECT * FROM large_table WHERE status = ?')
  .bind('active')
  .all();

console.log(`Query took ${result.meta.duration}ms`);
console.log(`Scanned ${result.meta.rows_read} rows`);
console.log(`Returned ${result.results.length} rows`);

// If rows_read is much higher than results.length, add an index!
if (result.meta.rows_read > result.results.length * 10) {
  console.warn('Query is inefficient - consider adding an index');
}
```

---

## Official Documentation

- **Workers API**: https://developers.cloudflare.com/d1/worker-api/
- **Prepared Statements**: https://developers.cloudflare.com/d1/worker-api/prepared-statements/
- **Return Object**: https://developers.cloudflare.com/d1/worker-api/return-object/

174
templates/d1-setup-migration.sh
Executable file
@@ -0,0 +1,174 @@

#!/bin/bash
#
# Cloudflare D1 Setup and Migration Workflow
#
# This script demonstrates the complete D1 workflow:
#   1. Create a D1 database
#   2. Configure bindings
#   3. Create and apply migrations
#   4. Query the database
#
# Usage:
#   chmod +x d1-setup-migration.sh
#   ./d1-setup-migration.sh my-app-database
#

set -e  # Exit on error

DATABASE_NAME="${1:-my-database}"

echo "========================================="
echo "Cloudflare D1 Setup and Migration"
echo "========================================="
echo ""

# Step 1: Create D1 Database
echo "📦 Step 1: Creating D1 database '$DATABASE_NAME'..."
echo ""

npx wrangler d1 create "$DATABASE_NAME"

echo ""
echo "✅ Database created!"
echo ""
echo "📝 IMPORTANT: Copy the output above and add to your wrangler.jsonc:"
echo ""
echo '  {
    "d1_databases": [
      {
        "binding": "DB",
        "database_name": "'"$DATABASE_NAME"'",
        "database_id": "<UUID_FROM_OUTPUT_ABOVE>",
        "preview_database_id": "local-dev-db"
      }
    ]
  }'
echo ""
read -p "Press ENTER when you've added the binding to wrangler.jsonc..."

# Step 2: Create Migrations Directory
echo ""
echo "📁 Step 2: Setting up migrations directory..."
mkdir -p migrations

# Step 3: Create Initial Migration
echo ""
echo "🔨 Step 3: Creating initial migration..."
echo ""

npx wrangler d1 migrations create "$DATABASE_NAME" create_initial_schema

# Find the created migration file (most recent .sql file in migrations/)
MIGRATION_FILE=$(ls -t migrations/*.sql | head -n1)

echo ""
echo "✅ Migration file created: $MIGRATION_FILE"
echo ""
echo "📝 Add your schema to this file. Example:"
echo ""
echo "  DROP TABLE IF EXISTS users;
  CREATE TABLE IF NOT EXISTS users (
    user_id INTEGER PRIMARY KEY AUTOINCREMENT,
    email TEXT NOT NULL UNIQUE,
    username TEXT NOT NULL,
    created_at INTEGER NOT NULL,
    updated_at INTEGER
  );

  CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);

  PRAGMA optimize;"
echo ""
read -p "Press ENTER when you've edited the migration file..."

# Step 4: Apply Migration Locally
echo ""
echo "🔧 Step 4: Applying migration to LOCAL database..."
echo ""

npx wrangler d1 migrations apply "$DATABASE_NAME" --local

echo ""
echo "✅ Local migration applied!"

# Step 5: Verify Local Database
echo ""
echo "🔍 Step 5: Verifying local database..."
echo ""

npx wrangler d1 execute "$DATABASE_NAME" --local --command "SELECT name FROM sqlite_master WHERE type='table'"

# Step 6: Seed Local Database (Optional)
echo ""
echo "🌱 Step 6: Would you like to seed the local database with test data?"
read -p "Seed database? (y/n): " -n 1 -r
echo ""

if [[ $REPLY =~ ^[Yy]$ ]]; then
  echo "Creating seed data..."

  cat > seed.sql << 'EOF'
-- Seed data for testing
INSERT INTO users (email, username, created_at) VALUES
  ('alice@example.com', 'alice', unixepoch()),
  ('bob@example.com', 'bob', unixepoch()),
  ('charlie@example.com', 'charlie', unixepoch());
EOF

  npx wrangler d1 execute "$DATABASE_NAME" --local --file=seed.sql

  echo ""
  echo "✅ Seed data inserted!"
  echo ""
  echo "🔍 Verifying data..."
  npx wrangler d1 execute "$DATABASE_NAME" --local --command "SELECT * FROM users"
fi

# Step 7: Apply to Production (Optional)
echo ""
echo "🚀 Step 7: Ready to apply migration to PRODUCTION?"
echo ""
echo "⚠️  WARNING: This will modify your production database!"
read -p "Apply to production? (y/n): " -n 1 -r
echo ""

if [[ $REPLY =~ ^[Yy]$ ]]; then
  echo "Applying migration to production..."
  npx wrangler d1 migrations apply "$DATABASE_NAME" --remote

  echo ""
  echo "✅ Production migration applied!"
else
  echo "Skipping production migration."
  echo ""
  echo "To apply later, run:"
  echo "  npx wrangler d1 migrations apply $DATABASE_NAME --remote"
fi

# Summary
echo ""
echo "========================================="
echo "✅ D1 Setup Complete!"
echo "========================================="
echo ""
echo "Database: $DATABASE_NAME"
echo "Local database: ✅"
echo "Migrations: ✅"
echo ""
echo "📚 Next steps:"
echo ""
echo "1. Start dev server:"
echo "   npm run dev"
echo ""
echo "2. Query from your Worker:"
echo '   const user = await env.DB.prepare("SELECT * FROM users WHERE email = ?")
     .bind("alice@example.com")
     .first();'
echo ""
echo "3. Create more migrations as needed:"
echo "   npx wrangler d1 migrations create $DATABASE_NAME <migration_name>"
echo ""
echo "4. View all tables:"
echo "   npx wrangler d1 execute $DATABASE_NAME --local --command \"SELECT name FROM sqlite_master WHERE type='table'\""
echo ""
echo "========================================="

591
templates/d1-worker-queries.ts
Normal file
@@ -0,0 +1,591 @@

/**
 * Cloudflare D1 Worker Query Examples
 *
 * This file demonstrates type-safe D1 queries in a Cloudflare Worker with Hono.
 *
 * Topics covered:
 * - Type definitions for D1 bindings
 * - CRUD operations (Create, Read, Update, Delete)
 * - Batch queries for performance
 * - Error handling and validation
 * - Pagination patterns
 * - JOIN queries
 * - Transaction-like behavior
 *
 * Usage:
 * 1. Copy relevant patterns to your Worker
 * 2. Update table/column names to match your schema
 * 3. Add proper input validation
 */

import { Hono } from 'hono';

// ============================================
// Type Definitions
// ============================================

interface Env {
  DB: D1Database;
  // ... other bindings
}

type Bindings = {
  DB: D1Database;
};

interface User {
  user_id: number;
  email: string;
  username: string;
  full_name: string | null;
  created_at: number;
  updated_at: number | null;
}

interface Post {
  post_id: number;
  user_id: number;
  title: string;
  content: string;
  slug: string;
  published: number;
  created_at: number;
  published_at: number | null;
}

interface PostWithAuthor extends Post {
  author_name: string;
  author_email: string;
}

// ============================================
// App Setup
// ============================================

const app = new Hono<{ Bindings: Bindings }>();

// ============================================
// CREATE Operations
// ============================================

// Create a new user
app.post('/api/users', async (c) => {
  try {
    const { email, username, full_name } = await c.req.json();

    // Validate input
    if (!email || !username) {
      return c.json({ error: 'Email and username are required' }, 400);
    }

    // Check if email already exists
    const existing = await c.env.DB.prepare(
      'SELECT user_id FROM users WHERE email = ? LIMIT 1'
    )
      .bind(email)
      .first();

    if (existing) {
      return c.json({ error: 'Email already registered' }, 409);
    }

    // Insert new user
    const result = await c.env.DB.prepare(
      'INSERT INTO users (email, username, full_name, created_at) VALUES (?, ?, ?, ?)'
    )
      .bind(email, username, full_name || null, Date.now())
      .run();

    const userId = result.meta.last_row_id;

    // Fetch the created user
    const user = await c.env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
      .bind(userId)
      .first<User>();

    return c.json({ user }, 201);
  } catch (error: any) {
    console.error('Error creating user:', error.message);
    return c.json({ error: 'Failed to create user' }, 500);
  }
});

// Bulk insert with batch()
app.post('/api/users/bulk', async (c) => {
  try {
    const { users } = await c.req.json();

    if (!Array.isArray(users) || users.length === 0) {
      return c.json({ error: 'Invalid users array' }, 400);
    }

    // Create batch of insert statements
    const inserts = users.map(user =>
      c.env.DB.prepare(
        'INSERT INTO users (email, username, full_name, created_at) VALUES (?, ?, ?, ?)'
      ).bind(user.email, user.username, user.full_name || null, Date.now())
    );

    // Execute all inserts in one batch
    const results = await c.env.DB.batch(inserts);

    const insertedCount = results.filter(r => r.success).length;

    return c.json({
      message: `Inserted ${insertedCount} users`,
      count: insertedCount
    }, 201);
  } catch (error: any) {
    console.error('Error bulk inserting users:', error.message);
    return c.json({ error: 'Failed to insert users' }, 500);
  }
});

// ============================================
// READ Operations
// ============================================

// Get single user by ID
app.get('/api/users/:id', async (c) => {
  try {
    const userId = parseInt(c.req.param('id'));

    const user = await c.env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
      .bind(userId)
      .first<User>();

    if (!user) {
      return c.json({ error: 'User not found' }, 404);
    }

    return c.json({ user });
  } catch (error: any) {
    console.error('Error fetching user:', error.message);
    return c.json({ error: 'Failed to fetch user' }, 500);
  }
});

// Get user by email
app.get('/api/users/email/:email', async (c) => {
  try {
    const email = c.req.param('email');

    const user = await c.env.DB.prepare('SELECT * FROM users WHERE email = ?')
      .bind(email)
      .first<User>();

    if (!user) {
      return c.json({ error: 'User not found' }, 404);
    }

    return c.json({ user });
  } catch (error: any) {
    console.error('Error fetching user:', error.message);
    return c.json({ error: 'Failed to fetch user' }, 500);
  }
});

// List users with pagination
app.get('/api/users', async (c) => {
  try {
    const page = parseInt(c.req.query('page') || '1');
    const limit = Math.min(parseInt(c.req.query('limit') || '20'), 100); // Max 100
    const offset = (page - 1) * limit;

    // Use batch to get count and users in one round trip
    const [countResult, usersResult] = await c.env.DB.batch([
      c.env.DB.prepare('SELECT COUNT(*) as total FROM users WHERE deleted_at IS NULL'),
      c.env.DB.prepare(
        'SELECT * FROM users WHERE deleted_at IS NULL ORDER BY created_at DESC LIMIT ? OFFSET ?'
      ).bind(limit, offset)
    ]);

    const total = (countResult.results[0] as any).total as number;
    const users = usersResult.results as User[];

    return c.json({
      users,
      pagination: {
        page,
        limit,
        total,
        pages: Math.ceil(total / limit)
      }
    });
  } catch (error: any) {
    console.error('Error listing users:', error.message);
    return c.json({ error: 'Failed to list users' }, 500);
  }
});

// ============================================
// UPDATE Operations
// ============================================

// Update user
app.put('/api/users/:id', async (c) => {
  try {
    const userId = parseInt(c.req.param('id'));
    const { username, full_name, bio } = await c.req.json();

    // Build dynamic update query
    const updates: string[] = [];
    const values: any[] = [];

    if (username !== undefined) {
      updates.push('username = ?');
      values.push(username);
    }
    if (full_name !== undefined) {
      updates.push('full_name = ?');
      values.push(full_name);
    }
    if (bio !== undefined) {
      updates.push('bio = ?');
      values.push(bio);
    }

    if (updates.length === 0) {
      return c.json({ error: 'No fields to update' }, 400);
    }

    // Add updated_at
    updates.push('updated_at = ?');
    values.push(Date.now());

    // Add user_id for WHERE clause
    values.push(userId);

    const sql = `UPDATE users SET ${updates.join(', ')} WHERE user_id = ?`;

    const result = await c.env.DB.prepare(sql).bind(...values).run();

    if (result.meta.rows_written === 0) {
      return c.json({ error: 'User not found' }, 404);
    }

    // Fetch updated user
    const user = await c.env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
      .bind(userId)
      .first<User>();

    return c.json({ user });
  } catch (error: any) {
    console.error('Error updating user:', error.message);
    return c.json({ error: 'Failed to update user' }, 500);
  }
});

// Increment post view count (simple update)
app.post('/api/posts/:id/view', async (c) => {
  try {
    const postId = parseInt(c.req.param('id'));

    const result = await c.env.DB.prepare(
      'UPDATE posts SET view_count = view_count + 1 WHERE post_id = ?'
    )
      .bind(postId)
      .run();

    if (result.meta.rows_written === 0) {
      return c.json({ error: 'Post not found' }, 404);
    }

    return c.json({ success: true });
  } catch (error: any) {
    console.error('Error incrementing view count:', error.message);
    return c.json({ error: 'Failed to update view count' }, 500);
  }
});

// ============================================
// DELETE Operations
// ============================================

// Soft delete user
app.delete('/api/users/:id', async (c) => {
  try {
    const userId = parseInt(c.req.param('id'));

    const result = await c.env.DB.prepare(
      'UPDATE users SET deleted_at = ? WHERE user_id = ? AND deleted_at IS NULL'
    )
      .bind(Date.now(), userId)
      .run();

    if (result.meta.rows_written === 0) {
      return c.json({ error: 'User not found or already deleted' }, 404);
    }

    return c.json({ success: true });
  } catch (error: any) {
    console.error('Error deleting user:', error.message);
    return c.json({ error: 'Failed to delete user' }, 500);
  }
});

// Hard delete post
app.delete('/api/posts/:id/permanent', async (c) => {
  try {
    const postId = parseInt(c.req.param('id'));

    const result = await c.env.DB.prepare('DELETE FROM posts WHERE post_id = ?')
      .bind(postId)
      .run();

    if (result.meta.rows_written === 0) {
      return c.json({ error: 'Post not found' }, 404);
    }

    return c.json({ success: true });
  } catch (error: any) {
    console.error('Error deleting post:', error.message);
    return c.json({ error: 'Failed to delete post' }, 500);
  }
});

// ============================================
// JOIN Queries
// ============================================

// Get posts with author information
app.get('/api/posts', async (c) => {
  try {
    const limit = Math.min(parseInt(c.req.query('limit') || '20'), 100);

    const { results } = await c.env.DB.prepare(`
      SELECT
        posts.*,
        users.username as author_name,
        users.email as author_email
      FROM posts
      INNER JOIN users ON posts.user_id = users.user_id
      WHERE posts.published = 1
        AND users.deleted_at IS NULL
      ORDER BY posts.published_at DESC
      LIMIT ?
    `)
      .bind(limit)
      .all<PostWithAuthor>();

    return c.json({ posts: results });
  } catch (error: any) {
    console.error('Error fetching posts:', error.message);
    return c.json({ error: 'Failed to fetch posts' }, 500);
  }
});

// Get post with author and tags
app.get('/api/posts/:slug', async (c) => {
  try {
    const slug = c.req.param('slug');

    // Use batch to get post+author and tags in one round trip
    const [postResult, tagsResult] = await c.env.DB.batch([
      // Get post with author
      c.env.DB.prepare(`
        SELECT
          posts.*,
          users.username as author_name,
          users.email as author_email
        FROM posts
        INNER JOIN users ON posts.user_id = users.user_id
        WHERE posts.slug = ?
        LIMIT 1
      `).bind(slug),

      // Get post's tags
      c.env.DB.prepare(`
        SELECT tags.*
        FROM tags
        INNER JOIN post_tags ON tags.tag_id = post_tags.tag_id
        INNER JOIN posts ON post_tags.post_id = posts.post_id
        WHERE posts.slug = ?
      `).bind(slug)
    ]);

    const post = postResult.results[0] as PostWithAuthor | undefined;

    if (!post) {
      return c.json({ error: 'Post not found' }, 404);
    }

    const tags = tagsResult.results;

    return c.json({ post, tags });
  } catch (error: any) {
    console.error('Error fetching post:', error.message);
    return c.json({ error: 'Failed to fetch post' }, 500);
  }
});

// ============================================
// Transaction-like Behavior with Batch
// ============================================

// Publish post (update post + record event)
app.post('/api/posts/:id/publish', async (c) => {
  try {
    const postId = parseInt(c.req.param('id'));
    const now = Date.now();

    // Execute multiple related updates in one batch
    const results = await c.env.DB.batch([
      // Update post status
      c.env.DB.prepare(
        'UPDATE posts SET published = 1, published_at = ?, updated_at = ? WHERE post_id = ?'
      ).bind(now, now, postId),

      // Record publish event (example analytics table)
      c.env.DB.prepare(
        'INSERT INTO post_events (post_id, event_type, created_at) VALUES (?, ?, ?)'
      ).bind(postId, 'published', now)
    ]);

    // Check if post update succeeded
    if (results[0].meta.rows_written === 0) {
      return c.json({ error: 'Post not found' }, 404);
    }

    return c.json({ success: true });
  } catch (error: any) {
    console.error('Error publishing post:', error.message);
    return c.json({ error: 'Failed to publish post' }, 500);
  }
});

// ============================================
// Advanced Patterns
// ============================================

// Search posts by keyword (simple full-text search)
app.get('/api/posts/search', async (c) => {
  try {
    const query = c.req.query('q') || '';
    const limit = Math.min(parseInt(c.req.query('limit') || '20'), 100);

    if (query.length < 2) {
      return c.json({ error: 'Query must be at least 2 characters' }, 400);
    }

    const searchTerm = `%${query}%`;

    const { results } = await c.env.DB.prepare(`
      SELECT
        posts.*,
        users.username as author_name
      FROM posts
      INNER JOIN users ON posts.user_id = users.user_id
      WHERE posts.published = 1
        AND (posts.title LIKE ? OR posts.content LIKE ?)
      ORDER BY posts.published_at DESC
      LIMIT ?
    `)
      .bind(searchTerm, searchTerm, limit)
      .all<PostWithAuthor>();

    return c.json({ posts: results, query });
  } catch (error: any) {
    console.error('Error searching posts:', error.message);
    return c.json({ error: 'Failed to search posts' }, 500);
  }
});

// Get user stats (multiple aggregations in batch)
app.get('/api/users/:id/stats', async (c) => {
  try {
    const userId = parseInt(c.req.param('id'));

    const [userResult, statsResults] = await c.env.DB.batch([
      // Get user
      c.env.DB.prepare('SELECT * FROM users WHERE user_id = ?').bind(userId),

      // Get all stats in one query with UNION
      c.env.DB.prepare(`
        SELECT 'posts' as metric, COUNT(*) as count FROM posts WHERE user_id = ?
        UNION ALL
        SELECT 'comments', COUNT(*) FROM comments WHERE user_id = ?
        UNION ALL
        SELECT 'published_posts', COUNT(*) FROM posts WHERE user_id = ? AND published = 1
      `).bind(userId, userId, userId)
    ]);

    const user = userResult.results[0] as User | undefined;

    if (!user) {
      return c.json({ error: 'User not found' }, 404);
    }

    // Parse stats results
    const stats: Record<string, number> = {};
    for (const row of statsResults.results as any[]) {
      stats[row.metric] = row.count;
    }

    return c.json({ user, stats });
  } catch (error: any) {
    console.error('Error fetching user stats:', error.message);
    return c.json({ error: 'Failed to fetch user stats' }, 500);
  }
});

// ============================================
// Error Handling Example with Retry
// ============================================

async function queryWithRetry<T>(
  queryFn: () => Promise<T>,
  maxRetries = 3
): Promise<T> {
  for (let attempt = 0; attempt < maxRetries; attempt++) {
    try {
      return await queryFn();
    } catch (error: any) {
      const message = error.message;

      // Check if error is retryable
      const isRetryable =
        message.includes('Network connection lost') ||
        message.includes('storage caused object to be reset') ||
        message.includes('reset because its code was updated');

      if (!isRetryable || attempt === maxRetries - 1) {
        throw error;
      }

      // Exponential backoff
      const delay = Math.min(1000 * Math.pow(2, attempt), 5000);
      await new Promise(resolve => setTimeout(resolve, delay));
    }
  }

  throw new Error('Retry logic failed');
}

// Example usage with retry
app.get('/api/users/:id/with-retry', async (c) => {
  try {
    const userId = parseInt(c.req.param('id'));

    const user = await queryWithRetry(() =>
      c.env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
        .bind(userId)
        .first<User>()
    );

    if (!user) {
      return c.json({ error: 'User not found' }, 404);
    }

    return c.json({ user });
  } catch (error: any) {
    console.error('Error fetching user (with retry):', error.message);
    return c.json({ error: 'Failed to fetch user' }, 500);
  }
});

// ============================================
// Export App
// ============================================

export default app;

248
templates/schema-example.sql
Normal file
@@ -0,0 +1,248 @@

-- Cloudflare D1 Schema Example
-- Production-ready database schema with best practices
--
-- This file demonstrates:
-- - Proper table creation with constraints
-- - Primary and foreign keys
-- - Indexes for performance
-- - Sample data for testing
--
-- Apply with:
-- npx wrangler d1 execute my-database --local --file=schema-example.sql

-- ============================================
-- Users Table
-- ============================================

DROP TABLE IF EXISTS users;
CREATE TABLE IF NOT EXISTS users (
  user_id INTEGER PRIMARY KEY AUTOINCREMENT,
  email TEXT NOT NULL UNIQUE,
  username TEXT NOT NULL,
  full_name TEXT,
  bio TEXT,
  avatar_url TEXT,
  created_at INTEGER NOT NULL DEFAULT (unixepoch()),
  updated_at INTEGER,
  deleted_at INTEGER -- Soft delete pattern
);

-- Index for email lookups (login, registration checks)
CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);

-- Index for filtering out deleted users
CREATE INDEX IF NOT EXISTS idx_users_active ON users(user_id) WHERE deleted_at IS NULL;

-- ============================================
-- Posts Table
-- ============================================

DROP TABLE IF EXISTS posts;
CREATE TABLE IF NOT EXISTS posts (
  post_id INTEGER PRIMARY KEY AUTOINCREMENT,
  user_id INTEGER NOT NULL,
  title TEXT NOT NULL,
  content TEXT NOT NULL,
  slug TEXT NOT NULL UNIQUE,
  published INTEGER NOT NULL DEFAULT 0, -- 0 = draft, 1 = published
  view_count INTEGER NOT NULL DEFAULT 0,
  created_at INTEGER NOT NULL DEFAULT (unixepoch()),
  updated_at INTEGER,
  published_at INTEGER,

  -- Foreign key constraint
  FOREIGN KEY (user_id) REFERENCES users(user_id) ON DELETE CASCADE
);

-- Index for user's posts
CREATE INDEX IF NOT EXISTS idx_posts_user_id ON posts(user_id);

-- Index for published posts (most common query)
CREATE INDEX IF NOT EXISTS idx_posts_published_created ON posts(published, created_at DESC)
WHERE published = 1;

-- Index for slug lookups (e.g., /blog/my-post-slug)
CREATE INDEX IF NOT EXISTS idx_posts_slug ON posts(slug);

-- ============================================
-- Comments Table
-- ============================================

DROP TABLE IF EXISTS comments;
CREATE TABLE IF NOT EXISTS comments (
  comment_id INTEGER PRIMARY KEY AUTOINCREMENT,
  post_id INTEGER NOT NULL,
  user_id INTEGER NOT NULL,
  parent_comment_id INTEGER, -- For threaded comments (NULL = top-level)
  content TEXT NOT NULL,
  created_at INTEGER NOT NULL DEFAULT (unixepoch()),
  updated_at INTEGER,
  deleted_at INTEGER,

  -- Foreign keys
  FOREIGN KEY (post_id) REFERENCES posts(post_id) ON DELETE CASCADE,
  FOREIGN KEY (user_id) REFERENCES users(user_id) ON DELETE CASCADE,
  FOREIGN KEY (parent_comment_id) REFERENCES comments(comment_id) ON DELETE CASCADE
);

-- Index for post's comments
CREATE INDEX IF NOT EXISTS idx_comments_post_id ON comments(post_id);

-- Index for user's comments
CREATE INDEX IF NOT EXISTS idx_comments_user_id ON comments(user_id);

-- Index for threaded replies
CREATE INDEX IF NOT EXISTS idx_comments_parent ON comments(parent_comment_id)
WHERE parent_comment_id IS NOT NULL;

-- ============================================
-- Tags Table (Many-to-Many Example)
-- ============================================

DROP TABLE IF EXISTS tags;
CREATE TABLE IF NOT EXISTS tags (
  tag_id INTEGER PRIMARY KEY AUTOINCREMENT,
  name TEXT NOT NULL UNIQUE,
  slug TEXT NOT NULL UNIQUE,
  created_at INTEGER NOT NULL DEFAULT (unixepoch())
);

-- Index for tag lookups
CREATE INDEX IF NOT EXISTS idx_tags_slug ON tags(slug);

-- ============================================
-- Post Tags Junction Table
-- ============================================

DROP TABLE IF EXISTS post_tags;
CREATE TABLE IF NOT EXISTS post_tags (
  post_id INTEGER NOT NULL,
  tag_id INTEGER NOT NULL,
  created_at INTEGER NOT NULL DEFAULT (unixepoch()),

  -- Composite primary key
  PRIMARY KEY (post_id, tag_id),

  -- Foreign keys
  FOREIGN KEY (post_id) REFERENCES posts(post_id) ON DELETE CASCADE,
  FOREIGN KEY (tag_id) REFERENCES tags(tag_id) ON DELETE CASCADE
);

-- Index for finding posts by tag
CREATE INDEX IF NOT EXISTS idx_post_tags_tag_id ON post_tags(tag_id);

-- ============================================
-- Sessions Table (Example: Auth Sessions)
-- ============================================

DROP TABLE IF EXISTS sessions;
CREATE TABLE IF NOT EXISTS sessions (
  session_id TEXT PRIMARY KEY, -- UUID or random token
  user_id INTEGER NOT NULL,
  ip_address TEXT,
  user_agent TEXT,
  created_at INTEGER NOT NULL DEFAULT (unixepoch()),
  expires_at INTEGER NOT NULL,
  last_activity_at INTEGER NOT NULL DEFAULT (unixepoch()),

  FOREIGN KEY (user_id) REFERENCES users(user_id) ON DELETE CASCADE
);

-- Index for session cleanup (delete expired sessions)
CREATE INDEX IF NOT EXISTS idx_sessions_expires ON sessions(expires_at);

-- Index for user's sessions
CREATE INDEX IF NOT EXISTS idx_sessions_user_id ON sessions(user_id);

-- ============================================
-- Analytics Table (High-Write Pattern)
-- ============================================

DROP TABLE IF EXISTS page_views;
CREATE TABLE IF NOT EXISTS page_views (
  view_id INTEGER PRIMARY KEY AUTOINCREMENT,
  post_id INTEGER,
  user_id INTEGER, -- NULL for anonymous views
  ip_address TEXT,
  referrer TEXT,
  user_agent TEXT,
  viewed_at INTEGER NOT NULL DEFAULT (unixepoch())
);

-- Index for per-post view lookups over time
-- NOTE: SQLite prohibits non-deterministic functions such as unixepoch()
-- in partial index WHERE clauses, so a rolling "last 30 days" partial index
-- cannot be expressed here; a plain composite index covers the same queries
CREATE INDEX IF NOT EXISTS idx_page_views_post_time ON page_views(post_id, viewed_at);

-- ============================================
-- Optimize Database
-- ============================================

-- Run PRAGMA optimize to collect statistics for the query planner
PRAGMA optimize;

-- ============================================
-- Sample Seed Data (Optional - for testing)
-- ============================================

-- Insert test users
INSERT INTO users (email, username, full_name, bio) VALUES
  ('alice@example.com', 'alice', 'Alice Johnson', 'Software engineer and blogger'),
  ('bob@example.com', 'bob', 'Bob Smith', 'Tech enthusiast'),
  ('charlie@example.com', 'charlie', 'Charlie Brown', 'Writer and photographer');

-- Insert test tags
INSERT INTO tags (name, slug) VALUES
  ('JavaScript', 'javascript'),
  ('TypeScript', 'typescript'),
  ('Cloudflare', 'cloudflare'),
  ('Web Development', 'web-development'),
  ('Tutorial', 'tutorial');

-- Insert test posts
INSERT INTO posts (user_id, title, content, slug, published, published_at) VALUES
  (1, 'Getting Started with D1', 'Learn how to use Cloudflare D1 database...', 'getting-started-with-d1', 1, unixepoch()),
  (1, 'Building APIs with Hono', 'Hono is a lightweight web framework...', 'building-apis-with-hono', 1, unixepoch() - 86400),
  (2, 'My First Draft', 'This is a draft post...', 'my-first-draft', 0, NULL);

-- Link posts to tags
INSERT INTO post_tags (post_id, tag_id) VALUES
  (1, 3), -- Getting Started with D1 -> Cloudflare
  (1, 5), -- Getting Started with D1 -> Tutorial
  (2, 1), -- Building APIs with Hono -> JavaScript
  (2, 3), -- Building APIs with Hono -> Cloudflare
  (2, 5); -- Building APIs with Hono -> Tutorial

-- Insert test comments
INSERT INTO comments (post_id, user_id, content) VALUES
  (1, 2, 'Great tutorial! Really helpful.'),
  (1, 3, 'Thanks for sharing this!'),
  (2, 3, 'Looking forward to more content on Hono.');

-- Insert threaded reply
INSERT INTO comments (post_id, user_id, parent_comment_id, content) VALUES
  (1, 1, 1, 'Glad you found it useful!');

-- ============================================
-- Verification Queries
-- ============================================

-- Count records in each table
SELECT 'users' as table_name, COUNT(*) as count FROM users
UNION ALL
SELECT 'posts', COUNT(*) FROM posts
UNION ALL
SELECT 'comments', COUNT(*) FROM comments
UNION ALL
SELECT 'tags', COUNT(*) FROM tags
UNION ALL
SELECT 'post_tags', COUNT(*) FROM post_tags;

-- List all tables and indexes
SELECT
  type,
  name,
  tbl_name as table_name
FROM sqlite_master
WHERE type IN ('table', 'index')
  AND name NOT LIKE 'sqlite_%'
ORDER BY type, tbl_name, name;