Initial commit
This commit is contained in:
652
references/best-practices.md
Normal file
652
references/best-practices.md
Normal file
@@ -0,0 +1,652 @@
|
||||
# D1 Best Practices
|
||||
|
||||
**Production-ready patterns for Cloudflare D1**
|
||||
|
||||
---
|
||||
|
||||
## Table of Contents
|
||||
|
||||
1. [Security](#security)
|
||||
2. [Performance](#performance)
|
||||
3. [Migrations](#migrations)
|
||||
4. [Error Handling](#error-handling)
|
||||
5. [Data Modeling](#data-modeling)
|
||||
6. [Testing](#testing)
|
||||
7. [Deployment](#deployment)
|
||||
|
||||
---
|
||||
|
||||
## Security
|
||||
|
||||
### Always Use Prepared Statements
|
||||
|
||||
```typescript
|
||||
// ❌ NEVER: SQL injection vulnerability
|
||||
const email = c.req.query('email');
|
||||
await env.DB.exec(`SELECT * FROM users WHERE email = '${email}'`);
|
||||
|
||||
// ✅ ALWAYS: Safe prepared statement
|
||||
const user = await env.DB.prepare('SELECT * FROM users WHERE email = ?')
|
||||
.bind(email)
|
||||
.first();
|
||||
```
|
||||
|
||||
**Why?** User input like `'; DROP TABLE users; --` would execute in the first example!
|
||||
|
||||
### Use null Instead of undefined
|
||||
|
||||
```typescript
|
||||
// ❌ WRONG: undefined causes D1_TYPE_ERROR
|
||||
await env.DB.prepare('INSERT INTO users (email, bio) VALUES (?, ?)')
|
||||
.bind(email, undefined);
|
||||
|
||||
// ✅ CORRECT: Use null for optional values
|
||||
await env.DB.prepare('INSERT INTO users (email, bio) VALUES (?, ?)')
|
||||
.bind(email, bio || null);
|
||||
```
|
||||
|
||||
### Never Commit Sensitive IDs
|
||||
|
||||
```jsonc
|
||||
// ❌ WRONG: Database ID in public repo
|
||||
{
|
||||
"d1_databases": [
|
||||
{
|
||||
"database_id": "a1b2c3d4-e5f6-7890-abcd-ef1234567890" // ❌
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
// ✅ BETTER: Use environment variable or secret
|
||||
{
|
||||
"d1_databases": [
|
||||
{
|
||||
"database_id": "$D1_DATABASE_ID" // Reference env var
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Or use wrangler secrets:
|
||||
|
||||
```bash
|
||||
npx wrangler secret put D1_DATABASE_ID
|
||||
```
|
||||
|
||||
### Validate Input Before Binding
|
||||
|
||||
```typescript
|
||||
// ✅ Validate email format
|
||||
function isValidEmail(email: string): boolean {
|
||||
return /^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(email);
|
||||
}
|
||||
|
||||
app.post('/api/users', async (c) => {
|
||||
const { email } = await c.req.json();
|
||||
|
||||
if (!isValidEmail(email)) {
|
||||
return c.json({ error: 'Invalid email format' }, 400);
|
||||
}
|
||||
|
||||
// Now safe to use
|
||||
const user = await c.env.DB.prepare('INSERT INTO users (email) VALUES (?)')
|
||||
.bind(email)
|
||||
.run();
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Performance
|
||||
|
||||
### Use Batch for Multiple Queries
|
||||
|
||||
```typescript
|
||||
// ❌ BAD: 3 network round trips (~150ms)
|
||||
const user = await env.DB.prepare('SELECT * FROM users WHERE user_id = ?').bind(1).first();
|
||||
const posts = await env.DB.prepare('SELECT * FROM posts WHERE user_id = ?').bind(1).all();
|
||||
const comments = await env.DB.prepare('SELECT * FROM comments WHERE user_id = ?').bind(1).all();
|
||||
|
||||
// ✅ GOOD: 1 network round trip (~50ms)
|
||||
const [userResult, postsResult, commentsResult] = await env.DB.batch([
|
||||
env.DB.prepare('SELECT * FROM users WHERE user_id = ?').bind(1),
|
||||
env.DB.prepare('SELECT * FROM posts WHERE user_id = ?').bind(1),
|
||||
env.DB.prepare('SELECT * FROM comments WHERE user_id = ?').bind(1)
|
||||
]);
|
||||
|
||||
const user = userResult.results[0];
|
||||
const posts = postsResult.results;
|
||||
const comments = commentsResult.results;
|
||||
```
|
||||
|
||||
**Performance win: 3x faster!**
|
||||
|
||||
### Create Indexes for WHERE Clauses
|
||||
|
||||
```sql
|
||||
-- ❌ Slow: Full table scan
|
||||
SELECT * FROM posts WHERE user_id = 123;
|
||||
|
||||
-- ✅ Fast: Create index first
|
||||
CREATE INDEX IF NOT EXISTS idx_posts_user_id ON posts(user_id);
|
||||
|
||||
-- Now this query is fast
|
||||
SELECT * FROM posts WHERE user_id = 123;
|
||||
```
|
||||
|
||||
**Verify index is being used:**
|
||||
|
||||
```sql
|
||||
EXPLAIN QUERY PLAN SELECT * FROM posts WHERE user_id = 123;
|
||||
-- Should see: SEARCH posts USING INDEX idx_posts_user_id
|
||||
```
|
||||
|
||||
### Run PRAGMA optimize After Schema Changes
|
||||
|
||||
```sql
|
||||
-- After creating indexes or altering schema
|
||||
PRAGMA optimize;
|
||||
```
|
||||
|
||||
This collects statistics that help the query planner choose the best execution plan.
|
||||
|
||||
### Select Only Needed Columns
|
||||
|
||||
```typescript
|
||||
// ❌ Bad: Fetches all columns (wastes bandwidth)
|
||||
const users = await env.DB.prepare('SELECT * FROM users').all();
|
||||
|
||||
// ✅ Good: Only fetch what you need
|
||||
const users = await env.DB.prepare('SELECT user_id, email, username FROM users').all();
|
||||
```
|
||||
|
||||
### Always Use LIMIT
|
||||
|
||||
```typescript
|
||||
// ❌ Dangerous: Could return millions of rows
|
||||
const posts = await env.DB.prepare('SELECT * FROM posts WHERE published = 1').all();
|
||||
|
||||
// ✅ Safe: Limit result set
|
||||
const posts = await env.DB.prepare(
|
||||
'SELECT * FROM posts WHERE published = 1 LIMIT 100'
|
||||
).all();
|
||||
```
|
||||
|
||||
### Use Partial Indexes
|
||||
|
||||
```sql
|
||||
-- Index only published posts (smaller index, faster writes)
|
||||
CREATE INDEX idx_posts_published ON posts(created_at DESC)
|
||||
WHERE published = 1;
|
||||
|
||||
-- Index only active users (exclude deleted)
|
||||
CREATE INDEX idx_users_active ON users(email)
|
||||
WHERE deleted_at IS NULL;
|
||||
```
|
||||
|
||||
Benefits:
|
||||
- ✅ Smaller indexes (faster queries)
|
||||
- ✅ Fewer index updates (faster writes)
|
||||
- ✅ Only index relevant data
|
||||
|
||||
---
|
||||
|
||||
## Migrations
|
||||
|
||||
### Make Migrations Idempotent
|
||||
|
||||
```sql
|
||||
-- ✅ ALWAYS use IF NOT EXISTS
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
user_id INTEGER PRIMARY KEY,
|
||||
email TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_users_email ON users(email);
|
||||
|
||||
-- ✅ Use IF EXISTS for drops
|
||||
DROP TABLE IF EXISTS temp_table;
|
||||
```
|
||||
|
||||
**Why?** Re-running a migration won't fail if it's already applied.
|
||||
|
||||
### Never Modify Applied Migrations
|
||||
|
||||
```bash
|
||||
# ❌ WRONG: Editing applied migration
|
||||
vim migrations/0001_create_users.sql # Already applied!
|
||||
|
||||
# ✅ CORRECT: Create new migration
|
||||
npx wrangler d1 migrations create my-database add_users_bio_column
|
||||
```
|
||||
|
||||
**Why?** D1 tracks which migrations have been applied. Modifying them causes inconsistencies.
|
||||
|
||||
### Test Migrations Locally First
|
||||
|
||||
```bash
|
||||
# 1. Apply to local database
|
||||
npx wrangler d1 migrations apply my-database --local
|
||||
|
||||
# 2. Test queries locally
|
||||
npx wrangler d1 execute my-database --local --command "SELECT * FROM users"
|
||||
|
||||
# 3. Only then apply to production
|
||||
npx wrangler d1 migrations apply my-database --remote
|
||||
```
|
||||
|
||||
### Handle Foreign Keys Carefully
|
||||
|
||||
```sql
|
||||
-- Disable foreign key checks temporarily during schema changes
|
||||
PRAGMA defer_foreign_keys = true;
|
||||
|
||||
-- Make schema changes that would violate foreign keys
|
||||
ALTER TABLE posts DROP COLUMN old_user_id;
|
||||
ALTER TABLE posts ADD COLUMN user_id INTEGER REFERENCES users(user_id);
|
||||
|
||||
-- Foreign keys re-enabled automatically at end of migration
|
||||
```
|
||||
|
||||
### Break Large Data Migrations into Batches
|
||||
|
||||
```sql
|
||||
-- ❌ BAD: Single massive INSERT (causes "statement too long")
|
||||
INSERT INTO users (email) VALUES
|
||||
('user1@example.com'),
|
||||
('user2@example.com'),
|
||||
... -- 10,000 more rows
|
||||
|
||||
-- ✅ GOOD: Split into batches of 100-250 rows
|
||||
-- File: 0001_migrate_users_batch1.sql
|
||||
INSERT INTO users (email) VALUES
|
||||
('user1@example.com'),
|
||||
... -- 100 rows
|
||||
|
||||
-- File: 0002_migrate_users_batch2.sql
|
||||
INSERT INTO users (email) VALUES
|
||||
('user101@example.com'),
|
||||
... -- next 100 rows
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Error Handling
|
||||
|
||||
### Check for Errors After Every Query
|
||||
|
||||
```typescript
|
||||
try {
|
||||
const result = await env.DB.prepare('INSERT INTO users (email) VALUES (?)')
|
||||
.bind(email)
|
||||
.run();
|
||||
|
||||
if (!result.success) {
|
||||
console.error('Insert failed');
|
||||
return c.json({ error: 'Failed to create user' }, 500);
|
||||
}
|
||||
|
||||
// Success!
|
||||
const userId = result.meta.last_row_id;
|
||||
|
||||
} catch (error: any) {
|
||||
console.error('Database error:', error.message);
|
||||
return c.json({ error: 'Database operation failed' }, 500);
|
||||
}
|
||||
```
|
||||
|
||||
### Implement Retry Logic for Transient Errors
|
||||
|
||||
```typescript
|
||||
async function queryWithRetry<T>(
|
||||
queryFn: () => Promise<T>,
|
||||
maxRetries = 3
|
||||
): Promise<T> {
|
||||
for (let attempt = 0; attempt < maxRetries; attempt++) {
|
||||
try {
|
||||
return await queryFn();
|
||||
} catch (error: any) {
|
||||
const message = error.message;
|
||||
|
||||
// Check if error is retryable
|
||||
const isRetryable =
|
||||
message.includes('Network connection lost') ||
|
||||
message.includes('storage caused object to be reset') ||
|
||||
message.includes('reset because its code was updated');
|
||||
|
||||
if (!isRetryable || attempt === maxRetries - 1) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Exponential backoff: 1s, 2s, 4s
|
||||
const delay = Math.min(1000 * Math.pow(2, attempt), 5000);
|
||||
await new Promise(resolve => setTimeout(resolve, delay));
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error('Max retries exceeded');
|
||||
}
|
||||
|
||||
// Usage
|
||||
const user = await queryWithRetry(() =>
|
||||
env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
|
||||
.bind(userId)
|
||||
.first()
|
||||
);
|
||||
```
|
||||
|
||||
### Handle Common D1 Errors
|
||||
|
||||
```typescript
|
||||
try {
|
||||
await env.DB.prepare(query).bind(...params).run();
|
||||
} catch (error: any) {
|
||||
const message = error.message;
|
||||
|
||||
if (message.includes('D1_ERROR')) {
|
||||
// D1-specific error
|
||||
console.error('D1 error:', message);
|
||||
} else if (message.includes('UNIQUE constraint failed')) {
|
||||
// Duplicate key error
|
||||
return c.json({ error: 'Email already exists' }, 409);
|
||||
} else if (message.includes('FOREIGN KEY constraint failed')) {
|
||||
// Invalid foreign key
|
||||
return c.json({ error: 'Invalid user reference' }, 400);
|
||||
} else {
|
||||
// Unknown error
|
||||
console.error('Unknown database error:', message);
|
||||
return c.json({ error: 'Database operation failed' }, 500);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Data Modeling
|
||||
|
||||
### Use Appropriate Data Types
|
||||
|
||||
```sql
|
||||
CREATE TABLE users (
|
||||
user_id INTEGER PRIMARY KEY AUTOINCREMENT, -- Auto-incrementing ID
|
||||
email TEXT NOT NULL, -- String
|
||||
username TEXT NOT NULL,
|
||||
age INTEGER, -- Number
|
||||
balance REAL, -- Decimal/float
|
||||
is_active INTEGER DEFAULT 1, -- Boolean (0 or 1)
|
||||
metadata TEXT, -- JSON (stored as TEXT)
|
||||
created_at INTEGER NOT NULL -- Unix timestamp
|
||||
);
|
||||
```
|
||||
|
||||
**SQLite has 5 types**: NULL, INTEGER, REAL, TEXT, BLOB
|
||||
|
||||
### Store Timestamps as Unix Epoch
|
||||
|
||||
```sql
|
||||
-- ✅ RECOMMENDED: Unix timestamp (INTEGER)
|
||||
created_at INTEGER NOT NULL DEFAULT (unixepoch())
|
||||
|
||||
-- ❌ AVOID: ISO 8601 strings (harder to query/compare)
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
```
|
||||
|
||||
**Why?** Unix timestamps are easier to compare, filter, and work with in JavaScript. Mind the units, though: SQLite's `unixepoch()` returns **seconds** while JavaScript's `Date.now()` returns **milliseconds** — pick one unit per column and convert consistently, or range queries will silently miss rows:
|
||||
|
||||
```typescript
|
||||
// Easy to work with
|
||||
const timestamp = Date.now(); // e.g. 1698000000000 (milliseconds since epoch)
|
||||
const date = new Date(timestamp);
|
||||
|
||||
// Easy to query
|
||||
const recentPosts = await env.DB.prepare(
|
||||
'SELECT * FROM posts WHERE created_at > ?'
|
||||
).bind(Date.now() - 86400000).all(); // Last 24 hours
|
||||
```
|
||||
|
||||
### Store JSON as TEXT
|
||||
|
||||
```sql
|
||||
CREATE TABLE users (
|
||||
user_id INTEGER PRIMARY KEY,
|
||||
email TEXT NOT NULL,
|
||||
settings TEXT -- Store JSON here
|
||||
);
|
||||
```
|
||||
|
||||
```typescript
|
||||
// Insert JSON
|
||||
const settings = { theme: 'dark', language: 'en' };
|
||||
await env.DB.prepare('INSERT INTO users (email, settings) VALUES (?, ?)')
|
||||
.bind(email, JSON.stringify(settings))
|
||||
.run();
|
||||
|
||||
// Read JSON
|
||||
const user = await env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
|
||||
.bind(userId)
|
||||
.first();
|
||||
|
||||
const settings = JSON.parse(user.settings);
|
||||
console.log(settings.theme); // 'dark'
|
||||
```
|
||||
|
||||
### Use Soft Deletes
|
||||
|
||||
```sql
|
||||
CREATE TABLE users (
|
||||
user_id INTEGER PRIMARY KEY,
|
||||
email TEXT NOT NULL,
|
||||
deleted_at INTEGER -- NULL = active, timestamp = deleted
|
||||
);
|
||||
|
||||
-- Index for active users only
|
||||
CREATE INDEX idx_users_active ON users(user_id)
|
||||
WHERE deleted_at IS NULL;
|
||||
```
|
||||
|
||||
```typescript
|
||||
// Soft delete
|
||||
await env.DB.prepare('UPDATE users SET deleted_at = ? WHERE user_id = ?')
|
||||
.bind(Date.now(), userId)
|
||||
.run();
|
||||
|
||||
// Query only active users
|
||||
const activeUsers = await env.DB.prepare(
|
||||
'SELECT * FROM users WHERE deleted_at IS NULL'
|
||||
).all();
|
||||
```
|
||||
|
||||
### Normalize Related Data
|
||||
|
||||
```sql
|
||||
-- ✅ GOOD: Normalized (users in separate table)
|
||||
CREATE TABLE posts (
|
||||
post_id INTEGER PRIMARY KEY,
|
||||
user_id INTEGER NOT NULL,
|
||||
title TEXT NOT NULL,
|
||||
FOREIGN KEY (user_id) REFERENCES users(user_id)
|
||||
);
|
||||
|
||||
-- ❌ BAD: Denormalized (user data duplicated in every post)
|
||||
CREATE TABLE posts (
|
||||
post_id INTEGER PRIMARY KEY,
|
||||
user_email TEXT NOT NULL,
|
||||
user_name TEXT NOT NULL,
|
||||
title TEXT NOT NULL
|
||||
);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Testing
|
||||
|
||||
### Test Migrations Locally
|
||||
|
||||
```bash
|
||||
# 1. Create local database
|
||||
npx wrangler d1 migrations apply my-database --local
|
||||
|
||||
# 2. Seed with test data
|
||||
npx wrangler d1 execute my-database --local --file=seed.sql
|
||||
|
||||
# 3. Run test queries
|
||||
npx wrangler d1 execute my-database --local --command "SELECT COUNT(*) FROM users"
|
||||
```
|
||||
|
||||
### Use Separate Databases for Development
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"d1_databases": [
|
||||
{
|
||||
"binding": "DB",
|
||||
"database_name": "my-app-prod",
|
||||
"database_id": "<PROD_UUID>",
|
||||
"preview_database_id": "local-dev" // Local only
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- ✅ Never accidentally modify production data
|
||||
- ✅ Fast local development (no network latency)
|
||||
- ✅ Can reset local DB anytime
|
||||
|
||||
### Backup Before Major Migrations
|
||||
|
||||
```bash
|
||||
# Export current database
|
||||
npx wrangler d1 export my-database --remote --output=backup-$(date +%Y%m%d).sql
|
||||
|
||||
# Apply migration
|
||||
npx wrangler d1 migrations apply my-database --remote
|
||||
|
||||
# If something goes wrong, restore from backup
|
||||
npx wrangler d1 execute my-database --remote --file=backup-20251021.sql
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Deployment
|
||||
|
||||
### Use Preview Databases for Testing
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"d1_databases": [
|
||||
{
|
||||
"binding": "DB",
|
||||
"database_name": "my-app-prod",
|
||||
"database_id": "<PROD_UUID>",
|
||||
"preview_database_id": "<PREVIEW_UUID>" // Separate preview database
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Deploy preview:
|
||||
|
||||
```bash
|
||||
npx wrangler deploy --env preview
|
||||
```
|
||||
|
||||
### Apply Migrations Before Deploying Code
|
||||
|
||||
```bash
|
||||
# 1. Apply migrations first
|
||||
npx wrangler d1 migrations apply my-database --remote
|
||||
|
||||
# 2. Then deploy Worker code
|
||||
npx wrangler deploy
|
||||
```
|
||||
|
||||
**Why?** Ensures database schema is ready before code expects it.
|
||||
|
||||
### Monitor Query Performance
|
||||
|
||||
```typescript
|
||||
app.get('/api/users', async (c) => {
|
||||
const start = Date.now();
|
||||
|
||||
const { results, meta } = await c.env.DB.prepare('SELECT * FROM users LIMIT 100')
|
||||
.all();
|
||||
|
||||
const duration = Date.now() - start;
|
||||
|
||||
// Log slow queries
|
||||
if (duration > 100) {
|
||||
console.warn(`Slow query: ${duration}ms, rows_read: ${meta.rows_read}`);
|
||||
}
|
||||
|
||||
return c.json({ users: results });
|
||||
});
|
||||
```
|
||||
|
||||
### Use Time Travel for Data Recovery
|
||||
|
||||
```bash
|
||||
# View database state 2 hours ago
|
||||
npx wrangler d1 time-travel info my-database --timestamp "2025-10-21T10:00:00Z"
|
||||
|
||||
# Restore database to 2 hours ago
|
||||
npx wrangler d1 time-travel restore my-database --timestamp "2025-10-21T10:00:00Z"
|
||||
```
|
||||
|
||||
**Note**: Time Travel retains history for the last 30 days on the Workers Paid plan (7 days on the Free plan).
|
||||
|
||||
---
|
||||
|
||||
## Summary Checklist
|
||||
|
||||
### Security ✅
|
||||
- [ ] Always use `.prepare().bind()` for user input
|
||||
- [ ] Use `null` instead of `undefined`
|
||||
- [ ] Validate input before binding
|
||||
- [ ] Never commit database IDs to public repos
|
||||
|
||||
### Performance ✅
|
||||
- [ ] Use `.batch()` for multiple queries
|
||||
- [ ] Create indexes on filtered columns
|
||||
- [ ] Run `PRAGMA optimize` after schema changes
|
||||
- [ ] Select only needed columns
|
||||
- [ ] Always use `LIMIT`
|
||||
|
||||
### Migrations ✅
|
||||
- [ ] Make migrations idempotent (IF NOT EXISTS)
|
||||
- [ ] Never modify applied migrations
|
||||
- [ ] Test locally before production
|
||||
- [ ] Break large data migrations into batches
|
||||
|
||||
### Error Handling ✅
|
||||
- [ ] Wrap queries in try/catch
|
||||
- [ ] Implement retry logic for transient errors
|
||||
- [ ] Check `result.success` and `meta.rows_written`
|
||||
- [ ] Log errors with context
|
||||
|
||||
### Data Modeling ✅
|
||||
- [ ] Use appropriate SQLite data types
|
||||
- [ ] Store timestamps as Unix epoch (INTEGER)
|
||||
- [ ] Use soft deletes (deleted_at column)
|
||||
- [ ] Normalize related data with foreign keys
|
||||
|
||||
### Testing ✅
|
||||
- [ ] Test migrations locally first
|
||||
- [ ] Use separate development/production databases
|
||||
- [ ] Backup before major migrations
|
||||
|
||||
### Deployment ✅
|
||||
- [ ] Apply migrations before deploying code
|
||||
- [ ] Use preview databases for testing
|
||||
- [ ] Monitor query performance
|
||||
- [ ] Use Time Travel for recovery
|
||||
|
||||
---
|
||||
|
||||
## Official Documentation
|
||||
|
||||
- **Best Practices**: https://developers.cloudflare.com/d1/best-practices/
|
||||
- **Indexes**: https://developers.cloudflare.com/d1/best-practices/use-indexes/
|
||||
- **Local Development**: https://developers.cloudflare.com/d1/best-practices/local-development/
|
||||
- **Retry Queries**: https://developers.cloudflare.com/d1/best-practices/retry-queries/
|
||||
- **Time Travel**: https://developers.cloudflare.com/d1/reference/time-travel/
|
||||
587
references/query-patterns.md
Normal file
587
references/query-patterns.md
Normal file
@@ -0,0 +1,587 @@
|
||||
# D1 Query Patterns Reference
|
||||
|
||||
**Complete guide to all D1 Workers API methods with examples**
|
||||
|
||||
---
|
||||
|
||||
## Table of Contents
|
||||
|
||||
1. [D1 API Methods Overview](#d1-api-methods-overview)
|
||||
2. [prepare() - Prepared Statements](#prepare---prepared-statements)
|
||||
3. [Query Result Methods](#query-result-methods)
|
||||
4. [batch() - Multiple Queries](#batch---multiple-queries)
|
||||
5. [exec() - Raw SQL](#exec---raw-sql)
|
||||
6. [Common Query Patterns](#common-query-patterns)
|
||||
7. [Performance Tips](#performance-tips)
|
||||
|
||||
---
|
||||
|
||||
## D1 API Methods Overview
|
||||
|
||||
| Method | Use Case | Returns Results | Safe for User Input |
|
||||
|--------|----------|-----------------|---------------------|
|
||||
| `.prepare().bind()` | **Primary method** for queries | Yes | ✅ Yes (prevents SQL injection) |
|
||||
| `.batch()` | Multiple queries in one round trip | Yes | ✅ Yes (if using prepare) |
|
||||
| `.exec()` | Raw SQL execution | No | ❌ No (SQL injection risk) |
|
||||
|
||||
---
|
||||
|
||||
## prepare() - Prepared Statements
|
||||
|
||||
**Primary method for all queries with user input.**
|
||||
|
||||
### Basic Syntax
|
||||
|
||||
```typescript
|
||||
const stmt = env.DB.prepare(sql);
|
||||
const bound = stmt.bind(...parameters);
|
||||
const result = await bound.all(); // or .first(), .run()
|
||||
```
|
||||
|
||||
### Method Chaining (Most Common)
|
||||
|
||||
```typescript
|
||||
const result = await env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
|
||||
.bind(userId)
|
||||
.first();
|
||||
```
|
||||
|
||||
### Parameter Binding
|
||||
|
||||
```typescript
|
||||
// Single parameter
|
||||
const user = await env.DB.prepare('SELECT * FROM users WHERE email = ?')
|
||||
.bind('user@example.com')
|
||||
.first();
|
||||
|
||||
// Multiple parameters
|
||||
const posts = await env.DB.prepare(
|
||||
'SELECT * FROM posts WHERE user_id = ? AND published = ? LIMIT ?'
|
||||
)
|
||||
.bind(userId, 1, 10)
|
||||
.all();
|
||||
|
||||
// Use null for optional values (NEVER undefined)
|
||||
const updated = await env.DB.prepare(
|
||||
'UPDATE users SET bio = ?, avatar_url = ? WHERE user_id = ?'
|
||||
)
|
||||
.bind(bio || null, avatarUrl || null, userId)
|
||||
.run();
|
||||
```
|
||||
|
||||
### Why use prepare()?
|
||||
|
||||
- ✅ **SQL injection protection** - Parameters are safely escaped
|
||||
- ✅ **Performance** - Query plans can be cached
|
||||
- ✅ **Reusability** - Same statement, different parameters
|
||||
- ✅ **Type safety** - Works with TypeScript generics
|
||||
|
||||
---
|
||||
|
||||
## Query Result Methods
|
||||
|
||||
### .all() - Get All Rows
|
||||
|
||||
Returns all matching rows as an array.
|
||||
|
||||
```typescript
|
||||
const { results, meta } = await env.DB.prepare('SELECT * FROM users')
|
||||
.all();
|
||||
|
||||
console.log(results); // Array of row objects
|
||||
console.log(meta); // { duration, rows_read, rows_written }
|
||||
```
|
||||
|
||||
**With Type Safety:**
|
||||
|
||||
```typescript
|
||||
interface User {
|
||||
user_id: number;
|
||||
email: string;
|
||||
username: string;
|
||||
}
|
||||
|
||||
const { results } = await env.DB.prepare('SELECT * FROM users')
|
||||
.all<User>();
|
||||
|
||||
// results is now typed as User[]
|
||||
```
|
||||
|
||||
**Response Structure:**
|
||||
|
||||
```typescript
|
||||
{
|
||||
success: true,
|
||||
results: [
|
||||
{ user_id: 1, email: 'alice@example.com', username: 'alice' },
|
||||
{ user_id: 2, email: 'bob@example.com', username: 'bob' }
|
||||
],
|
||||
meta: {
|
||||
duration: 2.5, // Milliseconds
|
||||
rows_read: 2, // Rows scanned
|
||||
rows_written: 0 // Rows modified
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### .first() - Get First Row
|
||||
|
||||
Returns the first row or `null` if no results.
|
||||
|
||||
```typescript
|
||||
const user = await env.DB.prepare('SELECT * FROM users WHERE email = ?')
|
||||
.bind('alice@example.com')
|
||||
.first();
|
||||
|
||||
if (!user) {
|
||||
return c.json({ error: 'User not found' }, 404);
|
||||
}
|
||||
```
|
||||
|
||||
**With Type Safety:**
|
||||
|
||||
```typescript
|
||||
const user = await env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
|
||||
.bind(userId)
|
||||
.first<User>();
|
||||
|
||||
// user is typed as User | null
|
||||
```
|
||||
|
||||
**Note**: `.first()` doesn't add `LIMIT 1` automatically. For better performance:
|
||||
|
||||
```typescript
|
||||
// ✅ Better: Add LIMIT 1 yourself
|
||||
const user = await env.DB.prepare('SELECT * FROM users WHERE email = ? LIMIT 1')
|
||||
.bind(email)
|
||||
.first();
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### .first(column) - Get Single Column Value
|
||||
|
||||
Returns the value of a specific column from the first row.
|
||||
|
||||
```typescript
|
||||
// Get count
|
||||
const total = await env.DB.prepare('SELECT COUNT(*) as total FROM users')
|
||||
.first('total');
|
||||
|
||||
console.log(total); // 42 (just the number, not an object)
|
||||
|
||||
// Get specific field
|
||||
const email = await env.DB.prepare('SELECT email FROM users WHERE user_id = ?')
|
||||
.bind(userId)
|
||||
.first('email');
|
||||
|
||||
console.log(email); // 'user@example.com'
|
||||
```
|
||||
|
||||
**Use Cases:**
|
||||
- Counting rows
|
||||
- Checking existence (SELECT 1)
|
||||
- Getting single values (MAX, MIN, AVG)
|
||||
|
||||
---
|
||||
|
||||
### .run() - Execute Without Results
|
||||
|
||||
Used for INSERT, UPDATE, DELETE when you don't need the data back.
|
||||
|
||||
```typescript
|
||||
const { success, meta } = await env.DB.prepare(
|
||||
'INSERT INTO users (email, username, created_at) VALUES (?, ?, ?)'
|
||||
)
|
||||
.bind(email, username, Date.now())
|
||||
.run();
|
||||
|
||||
console.log(success); // true/false
|
||||
console.log(meta.last_row_id); // ID of inserted row
|
||||
console.log(meta.rows_written); // Number of rows affected
|
||||
```
|
||||
|
||||
**Response Structure:**
|
||||
|
||||
```typescript
|
||||
{
|
||||
success: true,
|
||||
meta: {
|
||||
duration: 1.2,
|
||||
rows_read: 0,
|
||||
rows_written: 1,
|
||||
    last_row_id: 42 // rowid of the inserted row (set for any rowid-table INSERT, not only AUTOINCREMENT)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Check if rows were affected:**
|
||||
|
||||
```typescript
|
||||
const result = await env.DB.prepare('DELETE FROM users WHERE user_id = ?')
|
||||
.bind(userId)
|
||||
.run();
|
||||
|
||||
if (result.meta.rows_written === 0) {
|
||||
return c.json({ error: 'User not found' }, 404);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## batch() - Multiple Queries
|
||||
|
||||
**CRITICAL FOR PERFORMANCE**: Execute multiple queries in one network round trip.
|
||||
|
||||
### Basic Batch
|
||||
|
||||
```typescript
|
||||
const [users, posts, comments] = await env.DB.batch([
|
||||
env.DB.prepare('SELECT * FROM users LIMIT 10'),
|
||||
env.DB.prepare('SELECT * FROM posts LIMIT 10'),
|
||||
env.DB.prepare('SELECT * FROM comments LIMIT 10')
|
||||
]);
|
||||
|
||||
console.log(users.results); // User rows
|
||||
console.log(posts.results); // Post rows
|
||||
console.log(comments.results); // Comment rows
|
||||
```
|
||||
|
||||
### Batch with Parameters
|
||||
|
||||
```typescript
|
||||
const stmt1 = env.DB.prepare('SELECT * FROM users WHERE user_id = ?').bind(1);
|
||||
const stmt2 = env.DB.prepare('SELECT * FROM users WHERE user_id = ?').bind(2);
|
||||
const stmt3 = env.DB.prepare('SELECT * FROM posts WHERE user_id = ?').bind(1);
|
||||
|
||||
const results = await env.DB.batch([stmt1, stmt2, stmt3]);
|
||||
```
|
||||
|
||||
### Bulk Insert with Batch
|
||||
|
||||
```typescript
|
||||
const users = [
|
||||
{ email: 'user1@example.com', username: 'user1' },
|
||||
{ email: 'user2@example.com', username: 'user2' },
|
||||
{ email: 'user3@example.com', username: 'user3' }
|
||||
];
|
||||
|
||||
const inserts = users.map(u =>
|
||||
env.DB.prepare('INSERT INTO users (email, username, created_at) VALUES (?, ?, ?)')
|
||||
.bind(u.email, u.username, Date.now())
|
||||
);
|
||||
|
||||
const results = await env.DB.batch(inserts);
|
||||
|
||||
const successCount = results.filter(r => r.success).length;
|
||||
console.log(`Inserted ${successCount} users`);
|
||||
```
|
||||
|
||||
### Transaction-like Behavior
|
||||
|
||||
```typescript
|
||||
// All statements execute sequentially
|
||||
// If one fails, remaining statements don't execute
|
||||
await env.DB.batch([
|
||||
// Deduct credits from user 1
|
||||
env.DB.prepare('UPDATE users SET credits = credits - ? WHERE user_id = ?')
|
||||
.bind(100, userId1),
|
||||
|
||||
// Add credits to user 2
|
||||
env.DB.prepare('UPDATE users SET credits = credits + ? WHERE user_id = ?')
|
||||
.bind(100, userId2),
|
||||
|
||||
// Record transaction
|
||||
env.DB.prepare('INSERT INTO transactions (from_user, to_user, amount) VALUES (?, ?, ?)')
|
||||
.bind(userId1, userId2, 100)
|
||||
]);
|
||||
```
|
||||
|
||||
**Batch Behavior:**
|
||||
- Executes statements **sequentially** (in order)
|
||||
- Each statement commits individually (auto-commit mode)
|
||||
- If one fails, **remaining statements don't execute**
|
||||
- All statements in one **network round trip** (huge performance win)
|
||||
|
||||
### Batch Performance Comparison
|
||||
|
||||
```typescript
|
||||
// ❌ BAD: 10 separate queries = 10 network round trips
|
||||
for (let i = 0; i < 10; i++) {
|
||||
await env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
|
||||
.bind(i)
|
||||
.first();
|
||||
}
|
||||
// ~500ms total latency
|
||||
|
||||
// ✅ GOOD: 1 batch query = 1 network round trip
|
||||
const userIds = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
|
||||
const queries = userIds.map(id =>
|
||||
env.DB.prepare('SELECT * FROM users WHERE user_id = ?').bind(id)
|
||||
);
|
||||
const results = await env.DB.batch(queries);
|
||||
// ~50ms total latency
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## exec() - Raw SQL
|
||||
|
||||
**AVOID IN PRODUCTION**. Only use for migrations and one-off tasks.
|
||||
|
||||
### Basic Exec
|
||||
|
||||
```typescript
|
||||
const result = await env.DB.exec('SELECT * FROM users');
|
||||
|
||||
console.log(result);
|
||||
// { count: 1, duration: 2.5 }
|
||||
```
|
||||
|
||||
**NOTE**: `exec()` does **not return data**, only count and duration!
|
||||
|
||||
### Multiple Statements
|
||||
|
||||
```typescript
|
||||
const result = await env.DB.exec(`
|
||||
DROP TABLE IF EXISTS temp_users;
|
||||
CREATE TABLE temp_users (user_id INTEGER PRIMARY KEY);
|
||||
INSERT INTO temp_users VALUES (1), (2), (3);
|
||||
`);
|
||||
|
||||
console.log(result);
|
||||
// { count: 3, duration: 5.2 }
|
||||
```
|
||||
|
||||
### ⚠️ NEVER Use exec() For:
|
||||
|
||||
```typescript
|
||||
// ❌ NEVER: SQL injection vulnerability
|
||||
const email = userInput;
|
||||
await env.DB.exec(`SELECT * FROM users WHERE email = '${email}'`);
|
||||
|
||||
// ✅ ALWAYS: Use prepared statements instead
|
||||
await env.DB.prepare('SELECT * FROM users WHERE email = ?')
|
||||
.bind(email)
|
||||
.first();
|
||||
```
|
||||
|
||||
### ✅ ONLY Use exec() For:
|
||||
|
||||
- Running migration files locally
|
||||
- One-off maintenance tasks (PRAGMA optimize)
|
||||
- Database initialization scripts
|
||||
- CLI tools (not production Workers)
|
||||
|
||||
---
|
||||
|
||||
## Common Query Patterns
|
||||
|
||||
### Existence Check
|
||||
|
||||
```typescript
|
||||
// Check if email exists
|
||||
const exists = await env.DB.prepare('SELECT 1 FROM users WHERE email = ? LIMIT 1')
|
||||
.bind(email)
|
||||
.first();
|
||||
|
||||
if (exists) {
|
||||
return c.json({ error: 'Email already registered' }, 409);
|
||||
}
|
||||
```
|
||||
|
### Get or Create

```typescript
// Try to find user
let user = await env.DB.prepare('SELECT * FROM users WHERE email = ?')
  .bind(email)
  .first<User>();

// Create if it doesn't exist
if (!user) {
  const result = await env.DB.prepare(
    'INSERT INTO users (email, username, created_at) VALUES (?, ?, ?)'
  )
    .bind(email, username, Date.now())
    .run();

  const userId = result.meta.last_row_id;

  user = await env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
    .bind(userId)
    .first<User>();
}
```
### Pagination

```typescript
const page = 1;
const limit = 20;
const offset = (page - 1) * limit;

const [countResult, dataResult] = await env.DB.batch([
  env.DB.prepare('SELECT COUNT(*) as total FROM posts WHERE published = 1'),
  env.DB.prepare(
    'SELECT * FROM posts WHERE published = 1 ORDER BY created_at DESC LIMIT ? OFFSET ?'
  ).bind(limit, offset)
]);

const total = countResult.results[0].total;
const posts = dataResult.results;

return {
  posts,
  pagination: {
    page,
    limit,
    total,
    pages: Math.ceil(total / limit)
  }
};
```
### Upsert (INSERT or UPDATE)

```typescript
// SQLite 3.24.0+ supports UPSERT
await env.DB.prepare(`
  INSERT INTO user_settings (user_id, theme, language)
  VALUES (?, ?, ?)
  ON CONFLICT(user_id) DO UPDATE SET
    theme = excluded.theme,
    language = excluded.language,
    updated_at = unixepoch()
`)
  .bind(userId, theme, language)
  .run();
```
### Bulk Upsert

```typescript
const settings = [
  { user_id: 1, theme: 'dark', language: 'en' },
  { user_id: 2, theme: 'light', language: 'es' }
];

const upserts = settings.map(s =>
  env.DB.prepare(`
    INSERT INTO user_settings (user_id, theme, language)
    VALUES (?, ?, ?)
    ON CONFLICT(user_id) DO UPDATE SET
      theme = excluded.theme,
      language = excluded.language
  `).bind(s.user_id, s.theme, s.language)
);

await env.DB.batch(upserts);
```

---
## Performance Tips

### Use SELECT Column Names (Not SELECT *)

```typescript
// ❌ Bad: Fetches all columns
const users = await env.DB.prepare('SELECT * FROM users').all();

// ✅ Good: Only fetch needed columns
const users = await env.DB.prepare('SELECT user_id, email, username FROM users').all();
```
### Always Use LIMIT

```typescript
// ❌ Bad: Could return millions of rows
const posts = await env.DB.prepare('SELECT * FROM posts').all();

// ✅ Good: Limit result set
const posts = await env.DB.prepare('SELECT * FROM posts LIMIT 100').all();
```
### Use Indexes

```sql
-- Create index for common queries
CREATE INDEX IF NOT EXISTS idx_posts_published_created
ON posts(published, created_at DESC)
WHERE published = 1;
```

```typescript
// Query will use the index
const posts = await env.DB.prepare(
  'SELECT * FROM posts WHERE published = 1 ORDER BY created_at DESC LIMIT 10'
).all();
```
### Check Index Usage

```sql
EXPLAIN QUERY PLAN SELECT * FROM posts WHERE published = 1;
-- Should see: SEARCH posts USING INDEX idx_posts_published_created
```
### Batch Instead of Loop

```typescript
// ❌ Bad: Multiple network round trips
for (const id of userIds) {
  const user = await env.DB.prepare('SELECT * FROM users WHERE user_id = ?')
    .bind(id)
    .first();
}

// ✅ Good: One network round trip
const queries = userIds.map(id =>
  env.DB.prepare('SELECT * FROM users WHERE user_id = ?').bind(id)
);
const results = await env.DB.batch(queries);
```

---
## Meta Object Reference

Every D1 query returns a `meta` object with execution details:

```typescript
{
  duration: 2.5,    // Query execution time in milliseconds
  rows_read: 100,   // Number of rows scanned
  rows_written: 1,  // Number of rows modified (INSERT/UPDATE/DELETE)
  last_row_id: 42,  // ID of last inserted row (INSERT only)
  changed: 1        // Rows affected (UPDATE/DELETE only)
}
```
### Using Meta for Debugging

```typescript
const result = await env.DB.prepare('SELECT * FROM large_table WHERE status = ?')
  .bind('active')
  .all();

console.log(`Query took ${result.meta.duration}ms`);
console.log(`Scanned ${result.meta.rows_read} rows`);
console.log(`Returned ${result.results.length} rows`);

// If rows_read is much higher than results.length, add an index!
if (result.meta.rows_read > result.results.length * 10) {
  console.warn('Query is inefficient - consider adding an index');
}
```

---
## Official Documentation

- **Workers API**: https://developers.cloudflare.com/d1/worker-api/
- **Prepared Statements**: https://developers.cloudflare.com/d1/worker-api/prepared-statements/
- **Return Object**: https://developers.cloudflare.com/d1/worker-api/return-object/