diff --git a/.env.example b/.env.example index 5fe6381..7de2835 100644 --- a/.env.example +++ b/.env.example @@ -2,3 +2,13 @@ DATABASE_URL="postgresql://postgres:postgres@localhost:5432/locations_api" DIRECT_DATABASE_URL="postgresql://postgres:postgres@localhost:5432/locations_api" PORT="8080" PAGE_SIZE="10" +REQUEST_BODY_LIMIT="16kb" +# TRUST_PROXY="loopback, linklocal, uniquelocal" +RATE_LIMIT_WINDOW_MS="60000" +RATE_LIMIT_MAX_REQUESTS="120" +RATE_LIMIT_BURST_WINDOW_MS="10000" +RATE_LIMIT_BURST_MAX_REQUESTS="30" +SEARCH_RATE_LIMIT_WINDOW_MS="60000" +SEARCH_RATE_LIMIT_MAX_REQUESTS="30" +SEARCH_RATE_LIMIT_BURST_WINDOW_MS="10000" +SEARCH_RATE_LIMIT_BURST_MAX_REQUESTS="10" diff --git a/.githooks/pre-commit b/.githooks/pre-commit new file mode 100755 index 0000000..0e5c964 --- /dev/null +++ b/.githooks/pre-commit @@ -0,0 +1,8 @@ +#!/bin/sh +set -eu + +if [ "${SKIP_GIT_HOOKS:-0}" = "1" ]; then + exit 0 +fi + +pnpm hooks:pre-commit diff --git a/.githooks/pre-push b/.githooks/pre-push new file mode 100755 index 0000000..c1b24ca --- /dev/null +++ b/.githooks/pre-push @@ -0,0 +1,8 @@ +#!/bin/sh +set -eu + +if [ "${SKIP_GIT_HOOKS:-0}" = "1" ]; then + exit 0 +fi + +pnpm hooks:pre-push diff --git a/README.md b/README.md index 94d4101..8a9d5ef 100644 --- a/README.md +++ b/README.md @@ -35,6 +35,7 @@ Compatibility-first REST API for Tanzania location data backed by PostgreSQL and - Local and test environments use a direct PostgreSQL `DATABASE_URL`. - Production can use either a direct PostgreSQL `DATABASE_URL` or a Prisma Accelerate `DATABASE_URL`. - If `DATABASE_URL` points at Prisma Accelerate, also provide `DIRECT_DATABASE_URL` so migrations can talk to Postgres directly. + - Legacy environments that already use `DIRECT_URL` are still accepted as a fallback for direct Postgres access. 4. Apply the checked-in schema and seed deterministic fixture data. 
@@ -64,6 +65,27 @@ pnpm test:ci pnpm openapi:json ``` +## Runtime Protection + +- API routes are protected by per-IP rate limits with both sustained and burst thresholds +- `/search` has a stricter limit than the rest of the API because it is the easiest expensive endpoint to abuse +- Request bodies are capped with `REQUEST_BODY_LIMIT`, even though the public API is mostly read-only +- Rate limiting keys off Express `req.ip`; if you deploy behind a trusted proxy/load balancer, set `TRUST_PROXY` so Express resolves the real client IP correctly +- All limits are configurable with environment variables: + + ```bash + REQUEST_BODY_LIMIT=16kb + TRUST_PROXY="loopback, linklocal, uniquelocal" + RATE_LIMIT_WINDOW_MS=60000 + RATE_LIMIT_MAX_REQUESTS=120 + RATE_LIMIT_BURST_WINDOW_MS=10000 + RATE_LIMIT_BURST_MAX_REQUESTS=30 + SEARCH_RATE_LIMIT_WINDOW_MS=60000 + SEARCH_RATE_LIMIT_MAX_REQUESTS=30 + SEARCH_RATE_LIMIT_BURST_WINDOW_MS=10000 + SEARCH_RATE_LIMIT_BURST_MAX_REQUESTS=10 + ``` + ## Migration Behavior - `pnpm db:migrate` is the supported entrypoint for schema changes in this repo @@ -72,6 +94,7 @@ pnpm openapi:json - Prefer `pnpm db:migrate` over calling `prisma migrate deploy` directly - `DATABASE_URL` may point at direct Postgres or Prisma Accelerate - If `DATABASE_URL` points at Prisma Accelerate, `pnpm db:migrate` still requires a direct Postgres URL in `DIRECT_DATABASE_URL` +- `DIRECT_URL` remains supported as a legacy alias for `DIRECT_DATABASE_URL` ## Testing @@ -154,6 +177,13 @@ Additional filters: - `.github/dependabot.yml` opens weekly update PRs for npm packages and GitHub Actions - `.github/workflows/ci.yml` validates every PR against Postgres on Node `22.13.0` +## Git Hooks + +- `pnpm prepare` and `pnpm hooks:install` configure `core.hooksPath` to `.githooks` +- Pre-commit runs `pnpm hooks:pre-commit` (`lint` + `typecheck`) +- Pre-push runs `pnpm hooks:pre-push`, which first builds the app, then creates a temporary Postgres database and runs `pnpm test:ci` +- 
Pre-push requires `DIRECT_DATABASE_URL` or legacy `DIRECT_URL` to be a direct PostgreSQL URL +- Pre-push refuses non-local databases by default; set `ALLOW_REMOTE_PREPUSH_DB=1` only if you intentionally want hook verification against a remote direct Postgres instance ## License This project is licensed under the CopyLeft License. See [LICENSE](./LICENSE). diff --git a/package.json b/package.json index 7d208d7..2f1c5f4 100644 --- a/package.json +++ b/package.json @@ -9,9 +9,13 @@ }, "scripts": { "dev": "tsx watch server.ts", + "prepare": "tsx scripts/install-git-hooks.ts", "generate": "prisma generate", "db:migrate": "tsx scripts/migrate.ts", "db:seed": "prisma db seed", + "hooks:install": "tsx scripts/install-git-hooks.ts", + "hooks:pre-commit": "pnpm lint && pnpm typecheck", + "hooks:pre-push": "pnpm build && tsx scripts/run-pre-push-checks.ts", "lint": "pnpm generate && eslint server.ts \"src/**/*.ts\" \"tests/**/*.ts\" \"scripts/**/*.ts\" \"prisma/**/*.ts\"", "typecheck": "pnpm generate && tsc --noEmit", "build:ci": "pnpm generate && pnpm lint && pnpm typecheck && pnpm build", diff --git a/prisma.config.ts b/prisma.config.ts index 91c27ae..e177362 100644 --- a/prisma.config.ts +++ b/prisma.config.ts @@ -10,6 +10,7 @@ export default defineConfig({ datasource: { url: process.env.DIRECT_DATABASE_URL ?? + process.env.DIRECT_URL ?? process.env.DATABASE_URL ?? 
'postgresql://postgres:postgres@localhost:5432/locations_api', }, diff --git a/scripts/install-git-hooks.ts b/scripts/install-git-hooks.ts new file mode 100644 index 0000000..1db3205 --- /dev/null +++ b/scripts/install-git-hooks.ts @@ -0,0 +1,17 @@ +import { execFileSync } from 'node:child_process'; +import { existsSync } from 'node:fs'; +import path from 'node:path'; + +const repoRoot = process.cwd(); +const gitDir = path.join(repoRoot, '.git'); + +if (!existsSync(gitDir)) { + process.exit(0); +} + +execFileSync('git', ['config', 'core.hooksPath', '.githooks'], { + cwd: repoRoot, + stdio: 'inherit', +}); + +console.log('Configured git hooks path to .githooks'); diff --git a/scripts/migrate.ts b/scripts/migrate.ts index ce5db50..ce8cda7 100644 --- a/scripts/migrate.ts +++ b/scripts/migrate.ts @@ -6,7 +6,7 @@ const pnpmCommand = process.platform === 'win32' ? 'pnpm.cmd' : 'pnpm'; const directDatabaseUrl = config.directDatabaseUrl; if (!directDatabaseUrl) { - throw new Error('db:migrate requires DIRECT_DATABASE_URL when DATABASE_URL uses Prisma Accelerate.'); + throw new Error('db:migrate requires DIRECT_DATABASE_URL or legacy DIRECT_URL when DATABASE_URL uses Prisma Accelerate.'); } function runPrisma(args: string[]) { diff --git a/scripts/run-pre-push-checks.ts b/scripts/run-pre-push-checks.ts new file mode 100644 index 0000000..9be06fb --- /dev/null +++ b/scripts/run-pre-push-checks.ts @@ -0,0 +1,119 @@ +import { execFileSync } from 'node:child_process'; +import { randomUUID } from 'node:crypto'; +import dotenv from 'dotenv'; +import { Pool } from 'pg'; + +const pnpmCommand = process.platform === 'win32' ? 'pnpm.cmd' : 'pnpm'; + +dotenv.config(); + +function resolveDirectDatabaseUrl() { + const candidate = process.env.DIRECT_DATABASE_URL ?? 
process.env.DIRECT_URL; + + if (!candidate) { + throw new Error('Set DIRECT_DATABASE_URL or legacy DIRECT_URL in your shell or .env before pushing.'); + } + + if (candidate.startsWith('prisma://') || candidate.startsWith('prisma+postgres://')) { + throw new Error('Pre-push checks require DIRECT_DATABASE_URL or DIRECT_URL to point at direct PostgreSQL, not Prisma Accelerate.'); + } + + return new URL(candidate); +} + +function isLocalDatabaseHost(hostname: string) { + return hostname === 'localhost' || hostname === '127.0.0.1' || hostname === '::1'; +} + +function tempDatabaseUrl(baseUrl: URL, databaseName: string) { + const next = new URL(baseUrl.toString()); + next.pathname = `/${databaseName}`; + + return next.toString(); +} + +function adminDatabaseUrl(baseUrl: URL) { + const next = new URL(baseUrl.toString()); + next.pathname = '/postgres'; + + return next.toString(); +} + +function quoteIdentifier(value: string) { + return `"${value.replaceAll('"', '""')}"`; +} + +function toError(error: unknown) { + if (error instanceof Error) { + return error; + } + + return new Error(String(error)); +} + +function runPnpm(args: string[], env: NodeJS.ProcessEnv) { + execFileSync(pnpmCommand, args, { + env, + stdio: 'inherit', + }); +} + +async function dropTemporaryDatabase(pool: Pool, databaseName: string) { + await pool.query( + `SELECT pg_terminate_backend(pid) + FROM pg_stat_activity + WHERE datname = $1 + AND pid <> pg_backend_pid()`, + [databaseName], + ); + await pool.query(`DROP DATABASE IF EXISTS ${quoteIdentifier(databaseName)}`); +} + +async function main() { + const directUrl = resolveDirectDatabaseUrl(); + + if (!isLocalDatabaseHost(directUrl.hostname) && process.env.ALLOW_REMOTE_PREPUSH_DB !== '1') { + throw new Error('Pre-push checks refuse to use non-local databases by default. 
Set ALLOW_REMOTE_PREPUSH_DB=1 if you really want that.'); + } + + const originalDatabase = directUrl.pathname.replace(/^\//, '') || 'locations_api'; + const tempDatabaseName = `${originalDatabase}_prepush_${randomUUID().replace(/-/g, '').slice(0, 8)}`; + const isolatedDatabaseUrl = tempDatabaseUrl(directUrl, tempDatabaseName); + const adminPool = new Pool({ + connectionString: adminDatabaseUrl(directUrl), + }); + + let primaryError: unknown; + + try { + await adminPool.query(`CREATE DATABASE ${quoteIdentifier(tempDatabaseName)}`); + + runPnpm(['test:ci'], { + ...process.env, + DATABASE_URL: isolatedDatabaseUrl, + DIRECT_DATABASE_URL: isolatedDatabaseUrl, + NODE_ENV: 'test', + }); + } catch (error) { + primaryError = error; + } + + try { + await dropTemporaryDatabase(adminPool, tempDatabaseName); + } catch (cleanupError) { + if (primaryError) { + console.error('Failed to drop temporary pre-push database after the primary failure.'); + console.error(cleanupError); + } else { + throw cleanupError; + } + } finally { + await adminPool.end(); + } + + if (primaryError) { + throw toError(primaryError); + } +} + +await main(); diff --git a/src/app.ts b/src/app.ts index 1f64960..5abbd5f 100644 --- a/src/app.ts +++ b/src/app.ts @@ -7,6 +7,7 @@ import config from './config.js'; import { checkDatabaseConnection } from './db/prisma.js'; import { setupSwagger } from './docs/swagger.js'; import { errorHandler } from './middleware/errorHandler.js'; +import { createRateLimiter } from './middleware/rateLimit.js'; import { apiCompatibilityHeaders, attachRequestContext, @@ -14,6 +15,16 @@ import { import routes from './routes.js'; const app = express(); +const apiRateLimiter = createRateLimiter({ + ...config.rateLimit, + name: 'api', +}); +const searchRateLimiter = createRateLimiter({ + ...config.searchRateLimit, + name: 'search', +}); + +app.set('trust proxy', config.trustProxy); morgan.token('request-id', (req) => (req as Request).requestId ?? 
'-'); @@ -34,8 +45,11 @@ app.disable('x-powered-by'); app.use(attachRequestContext); app.use(morgan(logFormatter)); -app.use(express.json()); -app.use(express.urlencoded({ extended: true })); +app.use(express.json({ limit: config.requestBodyLimit })); +app.use(express.urlencoded({ extended: true, limit: config.requestBodyLimit })); + +app.use(['/v1', '/api', '/openapi.json', '/api-docs'], apiRateLimiter); +app.use(['/v1/search', '/api/search'], searchRateLimiter); app.get('/health', async (_: Request, res: Response) => { const database = await checkDatabaseConnection({ logErrors: false }); diff --git a/src/config.ts b/src/config.ts index 7f3fb8e..5317b31 100644 --- a/src/config.ts +++ b/src/config.ts @@ -7,28 +7,88 @@ function isAccelerateUrl(url: string) { return url.startsWith('prisma://') || url.startsWith('prisma+postgres://'); } +function parseTrustProxy(value?: string) { + if (!value) { + return false; + } + + const trimmed = value.trim(); + + if (trimmed === 'true') { + return true; + } + + if (trimmed === 'false') { + return false; + } + + if (/^\d+$/.test(trimmed)) { + return Number(trimmed); + } + + if (trimmed.includes(',')) { + return trimmed + .split(',') + .map((entry) => entry.trim()) + .filter(Boolean); + } + + return trimmed; +} + const envSchema = z.object({ - DATABASE_URL: z.string().min(1, 'DATABASE_URL is required'), + DATABASE_URL: z.string().min(1, 'DATABASE_URL cannot be empty').optional(), DIRECT_DATABASE_URL: z.string().min(1, 'DIRECT_DATABASE_URL cannot be empty').optional(), + DIRECT_URL: z.string().min(1, 'DIRECT_URL cannot be empty').optional(), NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), PAGE_SIZE: z.coerce.number().int().positive().max(100).default(10), PORT: z.coerce.number().int().positive().default(8080), + REQUEST_BODY_LIMIT: z.string().trim().min(1).default('16kb'), + TRUST_PROXY: z.string().trim().min(1).optional(), + RATE_LIMIT_WINDOW_MS: z.coerce.number().int().positive().default(60_000), + 
RATE_LIMIT_MAX_REQUESTS: z.coerce.number().int().positive().default(120), + RATE_LIMIT_BURST_WINDOW_MS: z.coerce.number().int().positive().default(10_000), + RATE_LIMIT_BURST_MAX_REQUESTS: z.coerce.number().int().positive().default(30), + SEARCH_RATE_LIMIT_WINDOW_MS: z.coerce.number().int().positive().default(60_000), + SEARCH_RATE_LIMIT_MAX_REQUESTS: z.coerce.number().int().positive().default(30), + SEARCH_RATE_LIMIT_BURST_WINDOW_MS: z.coerce.number().int().positive().default(10_000), + SEARCH_RATE_LIMIT_BURST_MAX_REQUESTS: z.coerce.number().int().positive().default(10), }); const env = envSchema.parse(process.env); -const usesAccelerate = isAccelerateUrl(env.DATABASE_URL); -const directDatabaseUrl = env.DIRECT_DATABASE_URL ?? (usesAccelerate ? undefined : env.DATABASE_URL); +const databaseUrl = env.DATABASE_URL ?? env.DIRECT_DATABASE_URL ?? env.DIRECT_URL; + +if (!databaseUrl) { + throw new Error('DATABASE_URL is required. DIRECT_DATABASE_URL or legacy DIRECT_URL may be used as a fallback.'); +} + +const usesAccelerate = isAccelerateUrl(databaseUrl); +const directDatabaseUrl = env.DIRECT_DATABASE_URL ?? env.DIRECT_URL ?? (usesAccelerate ? 
undefined : databaseUrl); if (env.NODE_ENV !== 'production' && !directDatabaseUrl) { - throw new Error('Non-production requires a direct PostgreSQL URL via DIRECT_DATABASE_URL or DATABASE_URL.'); + throw new Error('Non-production requires a direct PostgreSQL URL via DIRECT_DATABASE_URL, DIRECT_URL, or DATABASE_URL.'); } const config = { - databaseUrl: env.DATABASE_URL, + databaseUrl, directDatabaseUrl, nodeEnv: env.NODE_ENV, pageSize: env.PAGE_SIZE, port: env.PORT, + requestBodyLimit: env.REQUEST_BODY_LIMIT, + trustProxy: parseTrustProxy(env.TRUST_PROXY), + rateLimit: { + burstMaxRequests: env.RATE_LIMIT_BURST_MAX_REQUESTS, + burstWindowMs: env.RATE_LIMIT_BURST_WINDOW_MS, + maxRequests: env.RATE_LIMIT_MAX_REQUESTS, + windowMs: env.RATE_LIMIT_WINDOW_MS, + }, + searchRateLimit: { + burstMaxRequests: env.SEARCH_RATE_LIMIT_BURST_MAX_REQUESTS, + burstWindowMs: env.SEARCH_RATE_LIMIT_BURST_WINDOW_MS, + maxRequests: env.SEARCH_RATE_LIMIT_MAX_REQUESTS, + windowMs: env.SEARCH_RATE_LIMIT_WINDOW_MS, + }, usesAccelerate, }; diff --git a/src/db/prisma.ts b/src/db/prisma.ts index f9a1a16..4420adb 100644 --- a/src/db/prisma.ts +++ b/src/db/prisma.ts @@ -40,7 +40,7 @@ function serializeError(error: unknown) { function createPool() { if (!config.directDatabaseUrl) { - throw new Error('DIRECT_DATABASE_URL is required for direct PostgreSQL connections.'); + throw new Error('DIRECT_DATABASE_URL or legacy DIRECT_URL is required for direct PostgreSQL connections.'); } return new Pool({ diff --git a/src/middleware/rateLimit.ts b/src/middleware/rateLimit.ts new file mode 100644 index 0000000..4c8b3b5 --- /dev/null +++ b/src/middleware/rateLimit.ts @@ -0,0 +1,122 @@ +import type { Request, RequestHandler, Response } from 'express'; + +const CLEANUP_INTERVAL = 200; + +interface ClientState { + burstTimestamps: number[]; + count: number; + lastSeenAt: number; + resetAt: number; +} + +interface RateLimitOptions { + burstMaxRequests?: number; + burstWindowMs?: number; + maxRequests: number; + 
name: string; + skip?: (req: Request) => boolean; + windowMs: number; +} + +function clientAddress(req: Request) { + return req.ip || req.socket.remoteAddress || 'unknown'; +} + +function cleanupStaleEntries(entries: Map<string, ClientState>, ttlMs: number, now: number) { + for (const [key, state] of entries.entries()) { + if (state.lastSeenAt + ttlMs < now) { + entries.delete(key); + } + } +} + +function setRateLimitHeaders(res: Response, options: RateLimitOptions, remaining: number, resetAt: number) { + const windowSeconds = Math.ceil(options.windowMs / 1000); + const burstPolicy = options.burstWindowMs && options.burstMaxRequests + ? `, burst=${options.burstMaxRequests}/${Math.ceil(options.burstWindowMs / 1000)}s` + : ''; + + res.setHeader('X-RateLimit-Limit', String(options.maxRequests)); + res.setHeader('X-RateLimit-Remaining', String(remaining)); + res.setHeader('X-RateLimit-Reset', String(Math.ceil(resetAt / 1000))); + res.setHeader('X-RateLimit-Policy', `${options.name}; window=${windowSeconds}s; limit=${options.maxRequests}${burstPolicy}`); +} + +export function createRateLimiter(options: RateLimitOptions): RequestHandler { + const entries = new Map<string, ClientState>(); + const ttlMs = Math.max(options.windowMs, options.burstWindowMs ?? 0) * 2; + let requestCounter = 0; + + return (req, res, next) => { + if (req.method === 'OPTIONS' || options.skip?.(req)) { + next(); + return; + } + + const now = Date.now(); + requestCounter += 1; + + if (requestCounter % CLEANUP_INTERVAL === 0) { + cleanupStaleEntries(entries, ttlMs, now); + } + + const key = clientAddress(req); + const state = entries.get(key) ?? 
{ + burstTimestamps: [], + count: 0, + lastSeenAt: now, + resetAt: now + options.windowMs, + }; + + if (now >= state.resetAt) { + state.count = 0; + state.resetAt = now + options.windowMs; + } + + state.count += 1; + state.lastSeenAt = now; + + let remaining = Math.max(0, options.maxRequests - state.count); + const windowLimited = state.count > options.maxRequests; + const windowRetryAfterMs = state.resetAt - now; + + let burstLimited = false; + let burstRetryAfterMs = 0; + + if (options.burstWindowMs && options.burstMaxRequests) { + const burstWindowStart = now - options.burstWindowMs; + state.burstTimestamps = state.burstTimestamps.filter((timestamp) => timestamp > burstWindowStart); + state.burstTimestamps.push(now); + + remaining = Math.min(remaining, Math.max(0, options.burstMaxRequests - state.burstTimestamps.length)); + burstLimited = state.burstTimestamps.length > options.burstMaxRequests; + + if (burstLimited) { + const oldestTimestamp = state.burstTimestamps[0]; + burstRetryAfterMs = oldestTimestamp + options.burstWindowMs - now; + } + } + + entries.set(key, state); + const resetAt = windowLimited || burstLimited + ? now + Math.max(windowLimited ? windowRetryAfterMs : 0, burstLimited ? burstRetryAfterMs : 0) + : state.resetAt; + + setRateLimitHeaders(res, options, remaining, resetAt); + + if (windowLimited || burstLimited) { + const retryAfterMs = Math.max(windowLimited ? windowRetryAfterMs : 0, burstLimited ? burstRetryAfterMs : 0); + const retryAfterSeconds = Math.max(1, Math.ceil(retryAfterMs / 1000)); + + res.setHeader('Retry-After', String(retryAfterSeconds)); + res.status(429).json({ + error: { + message: 'Rate limit exceeded. 
Please slow down and try again later.', + }, + }); + return; + } + + next(); + }; +} diff --git a/tests/rate-limit.test.ts b/tests/rate-limit.test.ts new file mode 100644 index 0000000..b6bc428 --- /dev/null +++ b/tests/rate-limit.test.ts @@ -0,0 +1,118 @@ +import express from 'express'; +import request from 'supertest'; +import { createRateLimiter } from '../src/middleware/rateLimit.js'; + +function createTestApp(limiter = createRateLimiter({ + burstMaxRequests: 2, + burstWindowMs: 1_000, + maxRequests: 3, + name: 'test', + windowMs: 60_000, +})) { + const app = express(); + + app.use(limiter); + app.get('/limited', (_req, res) => { + res.json({ ok: true }); + }); + app.get('/health', (_req, res) => { + res.json({ ok: true }); + }); + + return app; +} + +describe('rate limiting middleware', () => { + it('returns rate limit headers for allowed requests', async () => { + const app = createTestApp(); + const res = await request(app).get('/limited'); + + expect(res.statusCode).toBe(200); + expect(res.headers['x-ratelimit-limit']).toBe('3'); + expect(res.headers['x-ratelimit-remaining']).toBe('1'); + expect(res.headers['x-ratelimit-policy']).toContain('test'); + }); + + it('blocks bursts before the sustained limit is reached', async () => { + const app = createTestApp(createRateLimiter({ + burstMaxRequests: 2, + burstWindowMs: 60_000, + maxRequests: 10, + name: 'burst-test', + windowMs: 60_000, + })); + + await request(app).get('/limited'); + await request(app).get('/limited'); + const res = await request(app).get('/limited'); + + expect(res.statusCode).toBe(429); + expect(res.headers['retry-after']).toBeDefined(); + expect(res.body.error.message).toMatch(/Rate limit exceeded/i); + }); + + it('blocks requests after the sustained limit is reached', async () => { + const app = createTestApp(createRateLimiter({ + maxRequests: 2, + name: 'window-test', + windowMs: 60_000, + })); + + await request(app).get('/limited'); + await request(app).get('/limited'); + const res = 
await request(app).get('/limited'); + + expect(res.statusCode).toBe(429); + expect(res.headers['retry-after']).toBeDefined(); + }); + + it('supports skipping selected routes', async () => { + const app = createTestApp(createRateLimiter({ + maxRequests: 1, + name: 'skip-test', + skip: (req) => req.path === '/health', + windowMs: 60_000, + })); + + await request(app).get('/health'); + const healthRes = await request(app).get('/health'); + await request(app).get('/limited'); + const limitedRes = await request(app).get('/limited'); + + expect(healthRes.statusCode).toBe(200); + expect(limitedRes.statusCode).toBe(429); + }); + + it('does not trust spoofed forwarded headers by default', async () => { + const app = createTestApp(createRateLimiter({ + maxRequests: 1, + name: 'spoof-test', + windowMs: 60_000, + })); + + await request(app).get('/limited').set('x-forwarded-for', '203.0.113.10'); + const res = await request(app).get('/limited').set('x-forwarded-for', '198.51.100.25'); + + expect(res.statusCode).toBe(429); + }); + + it('uses forwarded client IPs when Express trust proxy is enabled', async () => { + const app = express(); + + app.set('trust proxy', true); + app.use(createRateLimiter({ + maxRequests: 1, + name: 'trusted-proxy-test', + windowMs: 60_000, + })); + app.get('/limited', (_req, res) => { + res.json({ ok: true }); + }); + + const first = await request(app).get('/limited').set('x-forwarded-for', '203.0.113.10'); + const second = await request(app).get('/limited').set('x-forwarded-for', '198.51.100.25'); + + expect(first.statusCode).toBe(200); + expect(second.statusCode).toBe(200); + }); +});