Everything you need to build production backend systems. REST APIs, authentication, rate limiting, caching, WebSockets, message queues, event-driven architecture, database patterns, and deployment strategies -- all with Node.js/Express examples.
GET -- Read resources. Idempotent, cacheable. GET /api/users/123
POST -- Create resources. Not idempotent. POST /api/users
PUT -- Replace entire resource. Idempotent. PUT /api/users/123
PATCH -- Partial update. PATCH /api/users/123
DELETE -- Remove resource. Idempotent. DELETE /api/users/123
200 OK -- success
201 Created -- resource created (POST)
204 No Content -- success, no body (DELETE)
400 Bad Request -- client sent invalid data
401 Unauthorized -- not authenticated (no/bad token)
403 Forbidden -- authenticated but not authorized
404 Not Found -- resource doesn't exist
409 Conflict -- duplicate resource (email already exists)
422 Unprocessable Entity -- validation failed
429 Too Many Requests -- rate limited
500 Internal Server Error -- your bug
502 Bad Gateway -- upstream service down
503 Service Unavailable -- overloaded/maintenance
REST
// Good: nouns, plural, hierarchical
GET /api/users
GET /api/users/123
GET /api/users/123/posts
POST /api/users/123/posts
GET /api/users/123/posts/456/comments
// Bad: verbs, actions in URL
GET /api/getUser/123 // verb in URL
POST /api/createPost // action in URL
GET /api/user/123 // singular (use plural)
JavaScript
// Minimal Express JSON API with CRUD routes for users.
// NOTE(review): relies on `db` (pg-style pool) and `bcrypt` being in scope
// elsewhere in the file — confirm before running in isolation.
import express from 'express';
const app = express();
app.use(express.json()); // parse JSON request bodies into req.body

// GET all users — select only safe columns (never expose password_hash)
app.get('/api/users', async (req, res) => {
  const users = await db.query('SELECT id, username, email FROM users');
  res.json(users.rows);
});

// GET single user — 404 when the id matches no row
app.get('/api/users/:id', async (req, res) => {
  const { rows } = await db.query('SELECT * FROM users WHERE id = $1', [req.params.id]);
  if (!rows[0]) return res.status(404).json({ error: 'User not found' });
  res.json(rows[0]);
});

// POST create user — hash the password before storing; RETURNING avoids a
// second query and keeps the hash out of the response body.
app.post('/api/users', async (req, res) => {
  const { email, username, password } = req.body;
  const hash = await bcrypt.hash(password, 10);
  const { rows } = await db.query(
    'INSERT INTO users (email, username, password_hash) VALUES ($1, $2, $3) RETURNING id, email, username',
    [email, username, hash]
  );
  res.status(201).json(rows[0]);
});

// DELETE user — 204 with empty body on success
app.delete('/api/users/:id', async (req, res) => {
  await db.query('DELETE FROM users WHERE id = $1', [req.params.id]);
  res.status(204).send();
});
Middleware functions run between receiving a request and sending a response. They form a chain -- each one can modify the request/response or stop the chain.
JavaScript
// Request flow: Client -> Middleware 1 -> Middleware 2 -> Route Handler -> Response

// Logging middleware: logs the incoming request immediately, then logs the
// status code and latency once the response has been written ('finish').
const logger = (req, res, next) => {
  console.log(`${req.method} ${req.url} - ${new Date().toISOString()}`);
  const start = Date.now();
  res.on('finish', () => {
    console.log(`${req.method} ${req.url} ${res.statusCode} - ${Date.now() - start}ms`);
  });
  next(); // pass to next middleware
};

// Auth middleware: expects "Authorization: Bearer <token>".
// NOTE(review): assumes `jwt` and the JWT_SECRET env var are set up elsewhere.
const requireAuth = (req, res, next) => {
  const token = req.headers.authorization?.split(' ')[1]; // strip the "Bearer" prefix
  if (!token) return res.status(401).json({ error: 'No token provided' });
  try {
    const payload = jwt.verify(token, process.env.JWT_SECRET);
    req.user = payload; // attach user to request for downstream handlers
    next();
  } catch {
    // jwt.verify throws on bad signature or expiry — both are a 401 here
    res.status(401).json({ error: 'Invalid token' });
  }
};

// Apply globally (middleware runs in registration order)
app.use(logger);
app.use(express.json());

// Apply to specific routes
app.get('/api/profile', requireAuth, (req, res) => {
  res.json(req.user);
});

// Error middleware (4 params -- must be last). Express recognizes the
// 4-argument signature and routes next(err) calls here.
app.use((err, req, res, next) => {
  console.error(err.stack);
  res.status(err.status || 500).json({
    error: err.message || 'Internal server error',
  });
});
JWT is a self-contained token with a payload (claims) signed by the server. The client sends it with every request. No server-side session storage needed.
header.payload.signature
Header: { "alg": "HS256", "typ": "JWT" }
Payload: { "userId": 123, "role": "admin", "iat": 1710000000, "exp": 1710003600 }
Signature: HMACSHA256(base64url(header) + "." + base64url(payload), secret) — note JWT uses base64url encoding (URL-safe, no padding), not plain base64
JavaScript
// npm install jsonwebtoken bcrypt
import jwt from 'jsonwebtoken';
import bcrypt from 'bcrypt';

const JWT_SECRET = process.env.JWT_SECRET;
const ACCESS_TTL = '15m'; // short-lived: limits damage if a token leaks
const REFRESH_TTL = '7d'; // long-lived: only used to mint new access tokens

// Register — bcrypt with 12 rounds; respond with non-sensitive columns only
app.post('/api/auth/register', async (req, res) => {
  const { email, password } = req.body;
  const hash = await bcrypt.hash(password, 12); // 12 rounds
  const { rows } = await db.query(
    'INSERT INTO users (email, password_hash) VALUES ($1, $2) RETURNING id, email',
    [email, hash]
  );
  res.status(201).json(rows[0]);
});

// Login -- return access + refresh tokens.
// The same 401 is sent for unknown email and wrong password so the
// response doesn't reveal which accounts exist.
app.post('/api/auth/login', async (req, res) => {
  const { email, password } = req.body;
  const { rows } = await db.query('SELECT * FROM users WHERE email = $1', [email]);
  const user = rows[0];
  if (!user || !(await bcrypt.compare(password, user.password_hash))) {
    return res.status(401).json({ error: 'Invalid credentials' });
  }
  const accessToken = jwt.sign({ userId: user.id, role: user.role }, JWT_SECRET, { expiresIn: ACCESS_TTL });
  const refreshToken = jwt.sign({ userId: user.id }, JWT_SECRET, { expiresIn: REFRESH_TTL });
  // Store refresh token in httpOnly cookie so page JS can never read it
  res.cookie('refreshToken', refreshToken, {
    httpOnly: true,
    secure: true, // HTTPS only
    sameSite: 'strict', // not sent on cross-site requests (CSRF mitigation)
    maxAge: 7 * 24 * 60 * 60 * 1000, // 7 days — matches REFRESH_TTL
  });
  res.json({ accessToken });
});
// Refresh token endpoint — exchanges a valid refresh cookie for a new access token.
// Fix: the original signed the new access token with only { userId }, silently
// dropping the `role` claim that login includes, so refreshed tokens failed
// requireRole checks. Re-read the role so refreshed tokens carry the same claims.
// NOTE: reading req.cookies requires cookie-parser middleware.
app.post('/api/auth/refresh', async (req, res) => {
  const token = req.cookies.refreshToken;
  if (!token) return res.status(401).json({ error: 'No refresh token' });
  try {
    const { userId } = jwt.verify(token, JWT_SECRET);
    // User may have been deleted (or role changed) since the refresh token was issued
    const { rows } = await db.query('SELECT role FROM users WHERE id = $1', [userId]);
    if (!rows[0]) return res.status(401).json({ error: 'Invalid refresh token' });
    const accessToken = jwt.sign({ userId, role: rows[0].role }, JWT_SECRET, { expiresIn: ACCESS_TTL });
    res.json({ accessToken });
  } catch {
    // jwt.verify throws on tampered/expired tokens
    res.status(401).json({ error: 'Invalid refresh token' });
  }
});
JavaScript
// Role-based authorization guard. Usage: requireRole('admin', 'editor').
// Must run after requireAuth so req.user is populated from the JWT payload.
const requireRole = (...allowedRoles) => {
  return (req, res, next) => {
    const isAllowed = allowedRoles.includes(req.user.role);
    if (isAllowed) {
      next();
      return;
    }
    res.status(403).json({ error: 'Insufficient permissions' });
  };
};
// Only admins can delete users — requireAuth populates req.user from the
// token, then requireRole checks the role claim before the handler runs.
app.delete('/api/users/:id', requireAuth, requireRole('admin'), async (req, res) => {
  await db.query('DELETE FROM users WHERE id = $1', [req.params.id]);
  res.status(204).send();
});
Access token: Short-lived (15min), sent in Authorization header, contains user claims. If stolen, damage is time-limited.
Refresh token: Long-lived (7 days), stored in httpOnly cookie (JS can't read it), used only to get new access tokens. Can be revoked server-side.
Prevents abuse by limiting how many requests a client can make in a time window.
Fixed Window: Count requests per time window (e.g., 100 per minute). Simple but bursty at window boundaries.
Sliding Window: Weighted count across current and previous window. Smoother than fixed.
Token Bucket: Tokens refill at a steady rate. Each request consumes a token. Allows controlled bursts.
Leaky Bucket: Requests enter a queue and are processed at a fixed rate. Smoothest output.
JavaScript
/**
 * Token-bucket rate limiter. Tokens refill continuously at `refillRate`
 * per second up to `capacity`; each request consumes one token, so short
 * bursts up to `capacity` are allowed while the long-run rate is bounded.
 */
class TokenBucket {
  constructor(capacity, refillRate) {
    this.capacity = capacity; // max tokens (burst size)
    this.tokens = capacity; // current tokens — bucket starts full
    this.refillRate = refillRate; // tokens per second
    this.lastRefill = Date.now();
  }

  // Try to take one token; false means the caller is rate-limited.
  consume() {
    this.refill();
    if (this.tokens < 1) {
      return false;
    }
    this.tokens -= 1;
    return true;
  }

  // Lazily top up based on wall-clock time elapsed since the last refill.
  refill() {
    const now = Date.now();
    const elapsedSeconds = (now - this.lastRefill) / 1000;
    const replenished = this.tokens + elapsedSeconds * this.refillRate;
    this.tokens = replenished > this.capacity ? this.capacity : replenished;
    this.lastRefill = now;
  }
}
// Rate limiter middleware (in-memory -- use Redis for multi-server)
// NOTE(review): the Map gains one bucket per distinct client IP and is
// never pruned — long-lived processes may want periodic cleanup.
const buckets = new Map();
const rateLimit = (maxRequests, windowSeconds) => (req, res, next) => {
  const key = req.ip; // one bucket per client IP
  if (!buckets.has(key)) {
    // capacity = allowed burst; refill rate spreads maxRequests over the window
    buckets.set(key, new TokenBucket(maxRequests, maxRequests / windowSeconds));
  }
  const bucket = buckets.get(key);
  if (!bucket.consume()) {
    res.set('Retry-After', String(windowSeconds)); // hint when to retry
    return res.status(429).json({ error: 'Too many requests' });
  }
  next();
};
app.use(rateLimit(100, 60)); // 100 requests per 60 seconds
JavaScript
// Sliding window with Redis sorted sets
import Redis from 'ioredis';
const redis = new Redis();

// Each request is recorded as a sorted-set member scored by its timestamp;
// counting members newer than (now - window) gives the rolling request count.
const slidingWindowLimit = (maxRequests, windowMs) => async (req, res, next) => {
  const key = `ratelimit:${req.ip}`;
  const now = Date.now();
  const windowStart = now - windowMs;
  // Pipeline = one round trip for all four commands
  const pipe = redis.pipeline();
  pipe.zremrangebyscore(key, 0, windowStart); // remove old entries
  pipe.zadd(key, now, `${now}-${Math.random()}`); // add current request (random suffix avoids member collisions in the same ms)
  pipe.zcard(key); // count requests in window
  pipe.pexpire(key, windowMs); // auto-cleanup
  const results = await pipe.exec();
  const count = results[2][1]; // ioredis returns [err, value] pairs; index 2 is zcard
  res.set('X-RateLimit-Limit', String(maxRequests));
  res.set('X-RateLimit-Remaining', String(Math.max(0, maxRequests - count)));
  if (count > maxRequests) {
    // NOTE(review): the rejected request was already zadd-ed, so it still
    // counts against the window — a common, usually acceptable tradeoff.
    return res.status(429).json({ error: 'Rate limit exceeded' });
  }
  next();
};
app.use(slidingWindowLimit(100, 60000)); // 100 per minute
JavaScript
// GET /api/posts?page=2&limit=20 — offset/limit pagination
app.get('/api/posts', async (req, res) => {
  const page = parseInt(req.query.page) || 1; // default to first page
  const limit = Math.min(parseInt(req.query.limit) || 20, 100); // cap page size at 100
  const offset = (page - 1) * limit;
  // Run the page query and the total count in parallel
  const [posts, countResult] = await Promise.all([
    db.query('SELECT * FROM posts ORDER BY created_at DESC LIMIT $1 OFFSET $2', [limit, offset]),
    db.query('SELECT COUNT(*) FROM posts'),
  ]);
  const total = parseInt(countResult.rows[0].count); // COUNT(*) comes back as a string
  res.json({
    data: posts.rows,
    meta: { page, limit, total, totalPages: Math.ceil(total / limit) },
  });
});
OFFSET skips rows by scanning them -- OFFSET 100000 scans 100K rows then discards them. Gets slower as pages increase. Also, if new rows are inserted while paginating, you'll see duplicates or miss items.
JavaScript
// GET /api/posts?cursor=2024-01-15T10:30:00Z&limit=20 — cursor (keyset) pagination
app.get('/api/posts', async (req, res) => {
  const limit = Math.min(parseInt(req.query.limit) || 20, 100);
  const cursor = req.query.cursor; // timestamp of last seen item
  let query, params;
  if (cursor) {
    // Keyset predicate: only rows strictly older than the cursor
    query = 'SELECT * FROM posts WHERE created_at < $1 ORDER BY created_at DESC LIMIT $2';
    params = [cursor, limit + 1]; // fetch one extra to check if more exist
  } else {
    query = 'SELECT * FROM posts ORDER BY created_at DESC LIMIT $1';
    params = [limit + 1];
  }
  const { rows } = await db.query(query, params);
  const hasMore = rows.length > limit; // the extra row signals another page exists
  const data = hasMore ? rows.slice(0, limit) : rows;
  res.json({
    data,
    meta: {
      hasMore,
      // Next cursor = created_at of the last returned item.
      // NOTE(review): ties on created_at can skip rows — a compound
      // (created_at, id) cursor is more robust; confirm timestamps are unique.
      nextCursor: hasMore ? data[data.length - 1].created_at : null,
    },
  });
});
Offset: Easy, supports jumping to page N. Slow for large offsets, inconsistent with live data.
Cursor: Fast regardless of position, consistent with live data. Can't jump to page N. Use for infinite scroll, feeds, APIs.
Most common pattern. App checks cache first, falls back to DB, then populates cache.
JavaScript
import Redis from 'ioredis';
const redis = new Redis();

// Cache-aside read: check Redis first, fall back to the DB, then populate Redis.
async function getUser(id) {
  // 1. Check cache
  const cached = await redis.get(`user:${id}`);
  if (cached) return JSON.parse(cached);
  // 2. Cache miss -- query DB
  const { rows } = await db.query('SELECT * FROM users WHERE id = $1', [id]);
  const user = rows[0];
  if (!user) return null; // misses are not cached — repeated lookups hit the DB
  // 3. Populate cache (TTL: 1 hour)
  await redis.setex(`user:${id}`, 3600, JSON.stringify(user));
  return user;
}

// Invalidate on update: delete (rather than rewrite) the cached entry so
// the next read repopulates it from the authoritative DB row.
async function updateUser(id, data) {
  await db.query('UPDATE users SET username = $1 WHERE id = $2', [data.username, id]);
  await redis.del(`user:${id}`); // invalidate cache
}
JavaScript
// Cache-Control header + ETag conditional requests
app.get('/api/posts/:id', async (req, res) => {
  const post = await getPost(req.params.id);
  // Cache for 5 minutes, stale-while-revalidate for 1 hour
  res.set('Cache-Control', 'public, max-age=300, stale-while-revalidate=3600');
  // ETag derived from last-modified time; the surrounding quotes are required syntax
  const etag = `"${post.updated_at.getTime()}"`;
  res.set('ETag', etag);
  if (req.headers['if-none-match'] === etag) {
    return res.status(304).send(); // Not Modified — client reuses its cached copy
  }
  res.json(post);
});
Cache-Aside: App manages cache. Best for read-heavy workloads.
Write-Through: Write to cache and DB simultaneously. Consistent but slower writes.
Write-Behind: Write to cache, async write to DB. Fast writes, risk of data loss.
Read-Through: Cache loads from DB automatically on miss. The application always reads through the cache, which fetches from the DB on its behalf.
WebSockets provide full-duplex communication over a single TCP connection. Unlike HTTP (request-response), both client and server can send messages at any time.
JavaScript
// npm install ws
import { WebSocketServer } from 'ws';
const wss = new WebSocketServer({ port: 8080 });

wss.on('connection', (ws) => {
  console.log('Client connected');
  ws.on('message', (data) => {
    const msg = JSON.parse(data); // NOTE(review): malformed JSON throws here — consider try/catch
    console.log('Received:', msg);
    // Broadcast to all connected clients (including the sender)
    wss.clients.forEach((client) => {
      if (client.readyState === 1) { // OPEN
        client.send(JSON.stringify(msg));
      }
    });
  });
  ws.on('close', () => console.log('Client disconnected'));
  // Greet the newly connected client
  ws.send(JSON.stringify({ type: 'welcome', message: 'Connected!' }));
});
JavaScript
// npm install socket.io
import { Server } from 'socket.io';
// NOTE(review): assumes `httpServer` (an http.Server) is created elsewhere.
const io = new Server(httpServer, {
  cors: { origin: 'http://localhost:3000' },
});

io.on('connection', (socket) => {
  console.log(`User connected: ${socket.id}`);
  // Join a room (e.g., chat room)
  socket.on('join-room', (roomId) => {
    socket.join(roomId);
    // socket.to(room) excludes the sender; io.to(room) includes everyone
    socket.to(roomId).emit('user-joined', socket.id);
  });
  // Send message to room (everyone in the room, sender included)
  socket.on('chat-message', ({ roomId, message }) => {
    io.to(roomId).emit('chat-message', {
      from: socket.id,
      message,
      timestamp: Date.now(),
    });
  });
  // Typing indicator — only others in the room need to see it
  socket.on('typing', (roomId) => {
    socket.to(roomId).emit('user-typing', socket.id);
  });
  socket.on('disconnect', () => console.log('User disconnected'));
});
WebSocket connections are stateful -- each server holds its own connections. To scale across multiple servers, use a Redis adapter: @socket.io/redis-adapter. Redis pub/sub broadcasts events across all server instances.
JavaScript
import { EventEmitter } from 'events';
const events = new EventEmitter();

// Register handlers — multiple listeners per event run in registration order.
// NOTE(review): EventEmitter does not await async listeners; a rejection in
// one of these becomes an unhandled promise rejection. Wrap handler bodies
// in try/catch (or push the work to a job queue) for anything that can fail.
events.on('user:created', async (user) => {
  await sendWelcomeEmail(user.email);
});
events.on('user:created', async (user) => {
  await createDefaultSettings(user.id);
});
events.on('order:completed', async (order) => {
  await updateInventory(order.items);
  await sendReceipt(order);
});

// Emit events from your routes — the HTTP response is sent without
// waiting for the side effects to finish.
app.post('/api/users', async (req, res) => {
  const user = await createUser(req.body);
  events.emit('user:created', user); // side effects happen async
  res.status(201).json(user);
});
CRUD: Store current state. UPDATE accounts SET balance = 900. You lose history.
Event Sourcing: Store events. AccountDebited { amount: 100 }. Current state = replay all events. You keep full audit trail.
Use event sourcing for: financial systems, audit logs, collaborative editing, systems where "why" matters as much as "what".
Command side: Handles writes. Validates, applies business logic, stores events.
Query side: Handles reads. Uses optimized read models (denormalized views).
Separate the write model (normalized, consistent) from the read model (denormalized, fast). Sync them via events. Overkill for most apps, but powerful for complex domains.
Decouple services by communicating through messages instead of direct calls. Producer sends a message to a queue, consumer processes it later. This enables async processing, retry logic, and load leveling.
JavaScript
// npm install amqplib
import amqp from 'amqplib';
// Producer: send email job to queue.
// Fix: the original opened a new AMQP connection on every call and never
// closed it, leaking connections (and channels) until the broker or process
// hit its limit. Close both once the message is handed off.
// (For high throughput, hoist the connection/channel out and reuse them.)
async function sendEmailJob(to, subject, body) {
  const conn = await amqp.connect('amqp://localhost');
  try {
    const channel = await conn.createChannel();
    const queue = 'email_queue';
    await channel.assertQueue(queue, { durable: true }); // queue survives broker restart
    channel.sendToQueue(queue,
      Buffer.from(JSON.stringify({ to, subject, body })),
      { persistent: true } // message itself survives broker restart
    );
    await channel.close(); // graceful close flushes buffered outgoing messages
    console.log('Email job queued');
  } finally {
    await conn.close(); // always release the TCP connection
  }
}
// Consumer: process email jobs one at a time, acking only on success.
async function startEmailWorker() {
  const conn = await amqp.connect('amqp://localhost');
  const channel = await conn.createChannel();
  const queue = 'email_queue';
  await channel.assertQueue(queue, { durable: true });
  channel.prefetch(1); // process one at a time (fair dispatch across workers)
  channel.consume(queue, async (msg) => {
    const job = JSON.parse(msg.content.toString());
    try {
      await sendEmail(job.to, job.subject, job.body);
      channel.ack(msg); // remove from queue
    } catch (err) {
      // NOTE(review): unconditional requeue retries a poison message forever —
      // consider a dead-letter queue or a retry counter in the message headers.
      channel.nack(msg, false, true); // requeue on failure
    }
  });
}
JavaScript
// npm install bullmq ioredis
import { Queue, Worker } from 'bullmq';

// Create queue — jobs and their state live in Redis
const emailQueue = new Queue('emails', {
  connection: { host: 'localhost', port: 6379 },
});

// Add job with per-job retry/backoff/retention options
await emailQueue.add('welcome', {
  to: 'sean@dev.com',
  subject: 'Welcome!',
}, {
  attempts: 3, // retry 3 times
  backoff: { type: 'exponential', delay: 1000 }, // 1s, 2s, 4s between attempts
  removeOnComplete: 100, // keep last 100 completed jobs
});

// Process jobs — a thrown error marks the job failed and triggers retry
const worker = new Worker('emails', async (job) => {
  console.log(`Processing ${job.name}: ${job.data.to}`);
  await sendEmail(job.data);
}, {
  connection: { host: 'localhost', port: 6379 },
  concurrency: 5, // process 5 jobs simultaneously
});

worker.on('completed', (job) => console.log(`Job ${job.id} done`));
worker.on('failed', (job, err) => console.error(`Job ${job.id} failed: ${err.message}`));
1. Email/SMS sending (don't block the API response)
2. Image/video processing
3. PDF generation
4. Webhook delivery with retries
5. Data import/export
6. Any work that can be deferred
JavaScript
// npm install node-cron
import cron from 'node-cron';

// Every day at midnight — purge expired sessions
cron.schedule('0 0 * * *', async () => {
  console.log('Running daily cleanup...');
  await db.query("DELETE FROM sessions WHERE expires_at < NOW()");
});

// Every 5 minutes — poll external service health
cron.schedule('*/5 * * * *', async () => {
  await checkHealthOfExternalServices();
});

// Cron syntax: minute hour day month weekday
// */5 * * * * = every 5 minutes
// 0 */2 * * * = every 2 hours
// 0 9 * * 1-5 = 9 AM weekdays
// 0 0 1 * * = midnight on 1st of month
JavaScript
/**
 * Run `fn`, retrying up to `maxRetries` additional times on failure with
 * exponential backoff plus up to 1s of random jitter (jitter de-correlates
 * retry storms across clients). Rethrows the last error once retries are
 * exhausted. Returns whatever `fn` resolves to.
 */
async function withRetry(fn, maxRetries = 3, baseDelay = 1000) {
  let attempt = 0;
  while (true) {
    try {
      return await fn();
    } catch (err) {
      if (attempt === maxRetries) throw err; // out of retries — surface the error
      const delay = baseDelay * 2 ** attempt + Math.random() * 1000;
      console.log(`Retry ${attempt + 1}/${maxRetries} in ${delay}ms`);
      await new Promise((resolve) => setTimeout(resolve, delay));
      attempt += 1;
    }
  }
}
// Usage: up to 5 retries, starting from a 2-second base backoff
await withRetry(() => sendWebhook(url, payload), 5, 2000);
JavaScript
// npm install zod
import { z } from 'zod';

// Declarative schema: shape + constraints for the request body
const createUserSchema = z.object({
  email: z.string().email('Invalid email'),
  username: z.string().min(3).max(30).regex(/^[a-zA-Z0-9_]+$/),
  password: z.string().min(8).max(128),
  age: z.number().int().min(13).max(150).optional(),
});

// Validation middleware — safeParse never throws; failures become a 422
const validate = (schema) => (req, res, next) => {
  const result = schema.safeParse(req.body);
  if (!result.success) {
    return res.status(422).json({
      error: 'Validation failed',
      details: result.error.issues,
    });
  }
  req.body = result.data; // use parsed/typed data from the schema
  next();
};

app.post('/api/users', validate(createUserSchema), async (req, res) => {
  // req.body is guaranteed to be valid here
  const user = await createUser(req.body);
  res.status(201).json(user);
});
JavaScript
// BAD: string concatenation = SQL injection (demonstration only — never ship this)
const query = `SELECT * FROM users WHERE email = '${req.body.email}'`;
// Attacker sends: ' OR '1'='1' --
// Result: SELECT * FROM users WHERE email = '' OR '1'='1' --'
// Returns ALL users!

// GOOD: parameterized queries — the driver sends values separately from the
// SQL text, so user input can never change the query structure.
const { rows } = await db.query(
  'SELECT * FROM users WHERE email = $1',
  [req.body.email] // safely escaped
);
1. Always use parameterized queries (never concat user input into SQL)
2. Always validate and sanitize input on the server (never trust the client)
3. Hash passwords with bcrypt/argon2 (never store plaintext)
4. Escape HTML output to prevent XSS (React does this by default)
5. Use HTTPS everywhere
6. Set security headers: helmet middleware
JavaScript
// Custom error classes

/**
 * Base class for expected ("operational") application errors.
 * The global error handler keys off `isOperational` to decide whether the
 * message is safe to return to the client.
 */
class AppError extends Error {
  constructor(message, statusCode, code) {
    super(message);
    Object.assign(this, { statusCode, code, isOperational: true });
  }
}

/** 404 — the requested resource does not exist. */
class NotFoundError extends AppError {
  constructor(resource) {
    super(`${resource} not found`, 404, 'NOT_FOUND');
  }
}

/** 409 — the request conflicts with existing state (e.g. a duplicate). */
class ConflictError extends AppError {
  constructor(message) {
    super(message, 409, 'CONFLICT');
  }
}
// Use in routes — throw typed errors and forward them to the error middleware
app.get('/api/users/:id', async (req, res, next) => {
  try {
    const user = await getUser(req.params.id);
    if (!user) throw new NotFoundError('User');
    res.json(user);
  } catch (err) {
    next(err); // Express 4 doesn't auto-catch async throws — must forward explicitly
  }
});

// Global error handler (must be last middleware)
app.use((err, req, res, next) => {
  if (err.isOperational) {
    // Expected error -- send clean response with its own status and code
    res.status(err.statusCode).json({
      error: { message: err.message, code: err.code },
    });
  } else {
    // Unexpected error -- log it and send a generic message (don't leak internals)
    console.error('UNEXPECTED ERROR:', err);
    res.status(500).json({
      error: { message: 'Internal server error', code: 'INTERNAL' },
    });
  }
});
JavaScript
// Don't create a new connection per request -- use a pool
import pg from 'pg';
// Connection details (host/user/password/database) come from PG* env vars by default.
const pool = new pg.Pool({
  max: 20, // max connections
  idleTimeoutMillis: 30000, // close idle connections after 30s
  connectionTimeoutMillis: 2000, // fail fast if no connection available
});

// Pool automatically manages connections (checkout + release per query)
const result = await pool.query('SELECT * FROM users');

// For production: use PgBouncer as external connection pooler
// PgBouncer sits between your app and PostgreSQL
// Handles thousands of app connections with fewer DB connections
Write to the primary database, read from replicas. This distributes read load across multiple servers.
JavaScript
// Simple read/write splitting
const writePool = new pg.Pool({ host: 'primary.db.internal', max: 10 });
const readPool = new pg.Pool({ host: 'replica.db.internal', max: 30 });

// Route a query to the primary (write=true) or a read replica (default).
// NOTE(review): replicas lag the primary — a read issued right after a
// write may not see it; route read-your-own-writes traffic to the primary.
async function query(sql, params, write = false) {
  const pool = write ? writePool : readPool;
  return pool.query(sql, params);
}

// Writes go to primary
await query('INSERT INTO posts (title) VALUES ($1)', [title], true);
// Reads go to replica
const posts = await query('SELECT * FROM posts ORDER BY created_at DESC LIMIT 20');
JavaScript
// Using node-pg-migrate
// npm install node-pg-migrate

// migrations/001_create_users.js — `up` applies the change, `down` reverts it
exports.up = (pgm) => {
  pgm.createTable('users', {
    id: 'id', // shorthand for serial primary key
    email: { type: 'varchar(255)', notNull: true, unique: true },
    username: { type: 'varchar(50)', notNull: true },
    password_hash: { type: 'text', notNull: true },
    created_at: { type: 'timestamp', default: pgm.func('NOW()') }, // func() emits NOW() as raw SQL, not a quoted string
  });
};

exports.down = (pgm) => {
  pgm.dropTable('users');
};

// Run: npx node-pg-migrate up
// Rollback: npx node-pg-migrate down
JavaScript
// npm install pino
import pino from 'pino';
const logger = pino({ level: process.env.LOG_LEVEL || 'info' });

// Structured logs (JSON -- queryable by log services).
// Convention: context object first, human-readable message second.
logger.info({ userId: 123, action: 'login' }, 'User logged in');
logger.error({ err, requestId: req.id }, 'Payment failed');
logger.warn({ queueDepth: 1500 }, 'Queue depth high');

// Request logging middleware — logs every request/response pair
import pinoHttp from 'pino-http';
app.use(pinoHttp({ logger }));
JavaScript
// Health endpoint: probes each dependency and returns 200 only when all pass.
app.get('/health', async (req, res) => {
  const checks = {};
  try {
    await db.query('SELECT 1'); // cheapest possible DB round trip
    checks.database = 'ok';
  } catch {
    checks.database = 'error';
  }
  try {
    await redis.ping();
    checks.redis = 'ok';
  } catch {
    checks.redis = 'error';
  }
  const healthy = Object.values(checks).every(v => v === 'ok');
  // 503 tells load balancers / orchestrators to pull this instance from rotation
  res.status(healthy ? 200 : 503).json({
    status: healthy ? 'healthy' : 'degraded',
    checks,
    uptime: process.uptime(),
  });
});
Rolling Update: Replace instances one by one. No downtime but old and new versions run simultaneously during rollout. Default in Kubernetes.
Blue-Green: Run two identical environments (blue = current, green = new). Switch traffic from blue to green instantly. Easy rollback -- just switch back.
Canary: Route a small percentage (1-5%) of traffic to the new version. Monitor errors/latency. Gradually increase if healthy. Safest for critical services.
Feature Flags: Deploy new code behind a flag. Enable for specific users/percentages without redeploying. Decouple deployment from release.
JavaScript
// Simple feature flag system
const flags = {
  newCheckoutFlow: {
    enabled: true,
    percentage: 10, // 10% of users
    allowlist: ['sean'], // always enabled for these users
  },
};

// Returns true when the named flag is on for this user.
// NOTE(review): `simpleHash` is not defined in this file — it must be a
// deterministic string -> integer hash provided elsewhere; confirm it exists.
function isEnabled(flagName, userId) {
  const flag = flags[flagName];
  if (!flag?.enabled) return false; // kill switch wins over everything
  if (flag.allowlist?.includes(userId)) return true;
  // Deterministic: same user always gets same result (stable percentage bucketing)
  const hash = simpleHash(`${flagName}:${userId}`) % 100;
  return hash < flag.percentage;
}

// Usage in route
app.get('/checkout', (req, res) => {
  if (isEnabled('newCheckoutFlow', req.user.id)) {
    return newCheckout(req, res);
  }
  oldCheckout(req, res);
});
1. HTTPS everywhere (use Let's Encrypt)
2. Environment variables for secrets (never hardcode)
3. Health check endpoint
4. Structured logging
5. Error tracking (Sentry)
6. Rate limiting on all public endpoints
7. CORS configured properly
8. Security headers (helmet)
9. Database connection pooling
10. Graceful shutdown handling
11. Automated database backups
12. CI/CD pipeline with tests