diff --git a/FEATURE_SUGGESTIONS.md b/FEATURE_SUGGESTIONS.md index 6da8229..508e787 100644 --- a/FEATURE_SUGGESTIONS.md +++ b/FEATURE_SUGGESTIONS.md @@ -2,20 +2,12 @@ ## Recommended Additions -### 1. Rate Limiting Helper +### 1. Key Rotation ```typescript -interface RateLimitConfig { - maxRequests: number - windowMs: number -} - -// Usage: -const rateLimiter = keys.createRateLimiter({ - maxRequests: 100, - windowMs: 60000, // 1 minute +// Rotate a key (create new, mark old as rotating) +const { newKey, oldRecord } = await keys.rotate(oldKeyId, { + gracePeriodMs: 86400000, // 24 hours }) - -await rateLimiter.check(apiKeyRecord) ``` ### 2. Usage Analytics @@ -31,7 +23,22 @@ await keys.trackUsage(keyId, { const stats = await keys.getUsageStats(keyId) ``` -### 2. IP Whitelisting +### 3. Webhook Events +```typescript +keys.on('key.created', async (event) => { + await sendWebhook(event.ownerId, 'key_created', event.data) +}) + +keys.on('key.used', async (event) => { + // Log to analytics +}) + +keys.on('key.expired', async (event) => { + // Notify owner +}) +``` + +### 4. IP Whitelisting ```typescript await keys.create({ ownerId: 'user_123', @@ -41,7 +48,7 @@ await keys.create({ await keys.verify(key, { ipAddress: req.ip }) ``` -### 3. Request Signing +### 5. Request Signing ```typescript // HMAC-based request signing const signature = keys.sign(request, apiKey) @@ -50,7 +57,7 @@ const signature = keys.sign(request, apiKey) const isValid = await keys.verifySignature(request, signature, keyId) ``` -### 4. Bulk Operations +### 6. Bulk Operations ```typescript // Bulk create const results = await keys.createBulk([ @@ -62,7 +69,7 @@ const results = await keys.createBulk([ await keys.revokeBulk(['key_1', 'key_2', 'key_3']) ``` -### 5. Key Templates +### 7. 
Key Templates ```typescript // Define reusable templates const template = keys.defineTemplate('readonly', { @@ -96,6 +103,7 @@ const { key } = await keys.createFromTemplate(template, { - Key tags/labels - Audit logging (opt-in) - Key rotation +- Rate limiting (opt-in) ## Implementation Notes diff --git a/README.md b/README.md index fba3047..e015dcb 100644 --- a/README.md +++ b/README.md @@ -11,6 +11,7 @@ A TypeScript library for secure API key management with cryptographic hashing, e - **Secure by Default**: SHA-256/SHA-512 hashing with optional salt and timing-safe comparison - **Smart Key Detection**: Automatically extracts keys from `Authorization`, `x-api-key`, or custom headers - **Built-in Caching**: Optional in-memory or Redis caching for validated keys +- **Rate Limiting**: Optional automatic rate limiting on verify calls with atomic counters - **Flexible Storage**: Memory, Redis, and Drizzle ORM adapters included - **Scope-based Permissions**: Fine-grained access control - **Key Management**: Enable/disable, rotate, and soft-revoke keys with audit trails @@ -29,69 +30,76 @@ bun add keypal ## Quick Start ```typescript -import { createKeys } from 'keypal' +import { createKeys } from "keypal"; const keys = createKeys({ - prefix: 'sk_', + prefix: "sk_", cache: true, -}) +}); // Create a key const { key, record } = await keys.create({ - ownerId: 'user_123', - scopes: ['read', 'write'], -}) + ownerId: "user_123", + scopes: ["read", "write"], +}); // Verify from headers -const result = await keys.verify(request.headers) +const result = await keys.verify(request.headers); if (result.valid) { - console.log('Authenticated:', result.record.metadata.ownerId) + console.log("Authenticated:", result.record.metadata.ownerId); } ``` ## Configuration ```typescript -import Redis from 'ioredis' +import Redis from "ioredis"; -const redis = new Redis() +const redis = new Redis(); const keys = createKeys({ // Key generation - prefix: 'sk_prod_', + prefix: "sk_prod_", length: 32, 
- alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', - + alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789", + // Security - algorithm: 'sha256', // or 'sha512' + algorithm: "sha256", // or 'sha512' salt: process.env.API_KEY_SALT, - + // Storage (memory by default) - storage: 'redis', // or custom Storage instance - redis, // required when storage/cache is 'redis' - + storage: "redis", // or custom Storage instance + redis, // required when storage/cache is 'redis' + // Caching - cache: true, // in-memory cache + cache: true, // in-memory cache // cache: 'redis', // Redis cache cacheTtl: 60, - + // Revocation revokedKeyTtl: 604800, // TTL for revoked keys in Redis (7 days), set to 0 to keep forever - + // Usage tracking autoTrackUsage: true, // Automatically update lastUsedAt on verify - + + // Rate limiting (opt-in, requires cache) + rateLimit: { + maxRequests: 100, + windowMs: 60_000, // 1 minute window + }, + // Audit logging (opt-in) - auditLogs: true, // Enable audit logging - auditContext: { // Default context for all audit logs (optional) - userId: 'system', - metadata: { service: 'api' } + auditLogs: true, // Enable audit logging + auditContext: { + // Default context for all audit logs (optional) + userId: "system", + metadata: { service: "api" }, }, - + // Header detection - headerNames: ['x-api-key', 'authorization'], + headerNames: ["x-api-key", "authorization"], extractBearer: true, -}) +}); ``` ## API @@ -101,66 +109,83 @@ const keys = createKeys({ ```typescript // Create const { key, record } = await keys.create({ - ownerId: 'user_123', - name: 'Production Key', - scopes: ['read', 'write'], - expiresAt: '2025-12-31', + ownerId: "user_123", + name: "Production Key", + scopes: ["read", "write"], + expiresAt: "2025-12-31", enabled: true, // optional, defaults to true -}) +}); // List -const userKeys = await keys.list('user_123') +const userKeys = await keys.list("user_123"); // Enable/Disable -await 
keys.enable(record.id) -await keys.disable(record.id) +await keys.enable(record.id); +await keys.disable(record.id); // Rotate (create new key, mark old as revoked) -const { key: newKey, record: newRecord, oldRecord } = await keys.rotate(record.id, { - name: 'Updated Key', - scopes: ['read', 'write', 'admin'], -}) +const { + key: newKey, + record: newRecord, + oldRecord, +} = await keys.rotate(record.id, { + name: "Updated Key", + scopes: ["read", "write", "admin"], +}); // Revoke (soft delete - keeps record with revokedAt timestamp) -await keys.revoke(record.id) -await keys.revokeAll('user_123') +await keys.revoke(record.id); +await keys.revokeAll("user_123"); // Update last used -await keys.updateLastUsed(record.id) +await keys.updateLastUsed(record.id); ``` ### Verifying Keys ```typescript // From headers (automatic detection) -const result = await keys.verify(request.headers) +const result = await keys.verify(request.headers); // From string -const result = await keys.verify('sk_abc123') -const result = await keys.verify('Bearer sk_abc123') +const result = await keys.verify("sk_abc123"); +const result = await keys.verify("Bearer sk_abc123"); // With options const result = await keys.verify(headers, { - headerNames: ['x-custom-key'], + headerNames: ["x-custom-key"], skipCache: true, skipTracking: true, // Skip updating lastUsedAt (useful when autoTrackUsage is enabled) -}) +}); // Check result if (result.valid) { - console.log(result.record) + console.log(result.record); + // If rate limiting is enabled, result.rateLimit will include rate limit info + if (result.rateLimit) { + console.log(`${result.rateLimit.remaining} requests remaining`); + } } else { - console.log(result.error) // 'Missing API key' | 'Invalid API key' | 'API key has expired' | 'API key is disabled' | 'API key has been revoked' + console.log(result.error); // 'Missing API key' | 'Invalid API key' | 'API key has expired' | 'API key is disabled' | 'API key has been revoked' | 'Rate limit 
exceeded' + console.log(result.errorCode); // 'MISSING_KEY' | 'INVALID_KEY' | 'EXPIRED' | 'DISABLED' | 'REVOKED' | 'RATE_LIMIT_EXCEEDED' } ``` ### Permission Checking ```typescript -if (keys.hasScope(record, 'write')) { /* ... */ } -if (keys.hasAnyScope(record, ['admin', 'moderator'])) { /* ... */ } -if (keys.hasAllScopes(record, ['read', 'write'])) { /* ... */ } -if (keys.isExpired(record)) { /* ... */ } +if (keys.hasScope(record, "write")) { + /* ... */ +} +if (keys.hasAnyScope(record, ["admin", "moderator"])) { + /* ... */ +} +if (keys.hasAllScopes(record, ["read", "write"])) { + /* ... */ +} +if (keys.isExpired(record)) { + /* ... */ +} ``` ### Usage Tracking @@ -169,13 +194,162 @@ if (keys.isExpired(record)) { /* ... */ } // Enable automatic tracking in config const keys = createKeys({ autoTrackUsage: true, // Automatically updates lastUsedAt on verify -}) +}); // Manually update (always available) -await keys.updateLastUsed(record.id) +await keys.updateLastUsed(record.id); // Skip tracking for specific requests -const result = await keys.verify(headers, { skipTracking: true }) +const result = await keys.verify(headers, { skipTracking: true }); +``` + +### Rate Limiting + +Protect your API from abuse with built-in rate limiting. Uses the same cache infrastructure (memory or Redis) for high-performance request tracking. Windows are aligned to epoch time for consistent behavior in distributed systems. + +**Note:** Cache must be enabled to use rate limiting. 
+ +#### Automatic Rate Limiting + +Enable rate limiting globally on all verify calls by adding the `rateLimit` config option: + +```typescript +const keys = createKeys({ + cache: true, // Required for rate limiting + rateLimit: { + maxRequests: 100, + windowMs: 60_000, // 1 minute window + }, +}); + +// Rate limiting happens automatically on verify() +const result = await keys.verify(headers); + +if (!result.valid) { + if (result.errorCode === "RATE_LIMIT_EXCEEDED") { + return { + error: "Too many requests", + status: 429, + resetAt: result.rateLimit.resetAt, + resetMs: result.rateLimit.resetMs, + }; + } + return { error: result.error, status: 401 }; +} + +// Rate limit info is included in successful responses +console.log({ + current: result.rateLimit.current, // Current request count + limit: result.rateLimit.limit, // Max requests allowed + remaining: result.rateLimit.remaining, // Remaining requests + resetMs: result.rateLimit.resetMs, // Time until reset (ms) + resetAt: result.rateLimit.resetAt, // ISO timestamp when window resets +}); +``` + +**Complete middleware example with rate limit headers**: + +```typescript +app.use("/api/*", async (c, next) => { + const result = await keys.verify(c.req.raw.headers); + + if (!result.valid) { + if (result.errorCode === "RATE_LIMIT_EXCEEDED") { + c.header( + "Retry-After", + Math.ceil(result.rateLimit.resetMs / 1000).toString() + ); + c.header("X-RateLimit-Limit", result.rateLimit.limit.toString()); + c.header("X-RateLimit-Remaining", "0"); + c.header("X-RateLimit-Reset", result.rateLimit.resetAt); + return c.json({ error: "Too many requests" }, 429); + } + return c.json({ error: result.error }, 401); + } + + // Set rate limit headers on successful requests + c.header("X-RateLimit-Limit", result.rateLimit.limit.toString()); + c.header("X-RateLimit-Remaining", result.rateLimit.remaining.toString()); + c.header("X-RateLimit-Reset", result.rateLimit.resetAt); + + c.set("apiKey", result.record); + await next(); +}); +``` + 
+#### Manual Rate Limiting (advanced) + +For custom rate limiting scenarios (e.g., different limits per endpoint), create rate limiters manually: + +```typescript +const keys = createKeys({ + cache: true, // Required for rate limiting +}); + +// Create custom rate limiters +const strictLimiter = keys.createRateLimiter({ + maxRequests: 10, + windowMs: 60_000, // 10 requests per minute +}); + +const normalLimiter = keys.createRateLimiter({ + maxRequests: 100, + windowMs: 60_000, // 100 requests per minute +}); + +// Use strict limiter for sensitive endpoints +app.post("/api/sensitive", async c => { + const result = await keys.verify(c.req.raw.headers); + if (!result.valid) { + return c.json({ error: result.error }, 401); + } + + const rateLimit = await strictLimiter.check(result.record); + if (!rateLimit.allowed) { + return c.json({ error: "Too many requests" }, 429); + } + // ... +}); + +// Use normal limiter for regular endpoints +app.get("/api/data", async c => { + const result = await keys.verify(c.req.raw.headers); + if (!result.valid) { + return c.json({ error: result.error }, 401); + } + + const rateLimit = await normalLimiter.check(result.record); + if (!rateLimit.allowed) { + return c.json({ error: "Too many requests" }, 429); + } + // ... 
+}); +``` + +**Dry-run checks** (check without incrementing): + +```typescript +const rateLimit = await rateLimiter.check(record, { increment: false }); +``` + +**Custom identifiers** (e.g., per-owner limits instead of per-key): + +```typescript +const rateLimit = await rateLimiter.check(record, { + identifier: record.metadata.ownerId, // Rate limit by user, not by key +}); +``` + +**Manual reset**: + +```typescript +await rateLimiter.reset(record); +``` + +**Get current count without incrementing**: + +```typescript +const count = await rateLimiter.getCurrentCount(record) ``` ### Audit Logging @@ -231,7 +405,7 @@ const deleted = await keys.deleteLogs({ // Clear logs for a specific key await keys.clearLogs('key_123') -``` +```` **Log Entry Structure:** @@ -253,10 +427,10 @@ await keys.clearLogs('key_123') ### Helper Methods ```typescript -keys.hasKey(headers) // boolean -keys.extractKey(headers) // string | null -keys.generateKey() // string -keys.hashKey(key) // string +keys.hasKey(headers); // boolean +keys.extractKey(headers); // string | null +keys.generateKey(); // string +keys.hashKey(key); // string ``` ## Storage Examples @@ -264,64 +438,64 @@ keys.hashKey(key) // string ### Memory (Default) ```typescript -const keys = createKeys({ prefix: 'sk_' }) +const keys = createKeys({ prefix: "sk_" }); ``` ### Redis ```typescript -import Redis from 'ioredis' +import Redis from "ioredis"; -const redis = new Redis() +const redis = new Redis(); const keys = createKeys({ - prefix: 'sk_', - storage: 'redis', - cache: 'redis', + prefix: "sk_", + storage: "redis", + cache: "redis", redis, -}) +}); ``` ### Drizzle ORM ```typescript -import { drizzle } from 'drizzle-orm/node-postgres' -import { Pool } from 'pg' -import { DrizzleStore } from 'keypal/drizzle' -import { apikey } from 'keypal/drizzle/schema' -import { createKeys } from 'keypal' +import { drizzle } from "drizzle-orm/node-postgres"; +import { Pool } from "pg"; +import { DrizzleStore } from "keypal/drizzle"; +import 
{ apikey } from "keypal/drizzle/schema"; +import { createKeys } from "keypal"; const pool = new Pool({ - connectionString: process.env.DATABASE_URL -}) + connectionString: process.env.DATABASE_URL, +}); -const db = drizzle(pool, { schema: { apikey } }) +const db = drizzle(pool, { schema: { apikey } }); const keys = createKeys({ - prefix: 'sk_prod_', + prefix: "sk_prod_", storage: new DrizzleStore({ db, table: apikey }), cache: true, -}) +}); ``` **Setup Database Schema:** ```typescript // src/drizzle/schema.ts -import { index, jsonb, pgTable, text, unique } from 'drizzle-orm/pg-core' +import { index, jsonb, pgTable, text, unique } from "drizzle-orm/pg-core"; export const apikey = pgTable( - 'apikey', + "apikey", { id: text().primaryKey().notNull(), - keyHash: text('key_hash').notNull(), - metadata: jsonb('metadata').notNull(), + keyHash: text("key_hash").notNull(), + metadata: jsonb("metadata").notNull(), }, - (table) => [ - index('apikey_key_hash_idx').on(table.keyHash), - unique('apikey_key_hash_unique').on(table.keyHash), + table => [ + index("apikey_key_hash_idx").on(table.keyHash), + unique("apikey_key_hash_unique").on(table.keyHash), ] -) +); ``` **Generate migrations:** @@ -340,77 +514,96 @@ bun run studio ### Custom Storage ```typescript -import { type Storage } from 'keypal' +import { type Storage } from "keypal"; const customStorage: Storage = { - save: async (record) => { /* ... */ }, - findByHash: async (keyHash) => { /* ... */ }, - findById: async (id) => { /* ... */ }, - findByOwner: async (ownerId) => { /* ... */ }, - findByTag: async (tag, ownerId) => { /* ... */ }, - findByTags: async (tags, ownerId) => { /* ... */ }, - updateMetadata: async (id, metadata) => { /* ... */ }, - delete: async (id) => { /* ... */ }, - deleteByOwner: async (ownerId) => { /* ... */ }, -} + save: async record => { + /* ... */ + }, + findByHash: async keyHash => { + /* ... */ + }, + findById: async id => { + /* ... */ + }, + findByOwner: async ownerId => { + /* ... 
*/ + }, + findByTag: async (tag, ownerId) => { + /* ... */ + }, + findByTags: async (tags, ownerId) => { + /* ... */ + }, + updateMetadata: async (id, metadata) => { + /* ... */ + }, + delete: async id => { + /* ... */ + }, + deleteByOwner: async ownerId => { + /* ... */ + }, +}; const keys = createKeys({ storage: customStorage, -}) +}); ``` ## Framework Example (Hono) ```typescript -import { Hono } from 'hono' -import { createKeys } from 'keypal' -import Redis from 'ioredis' +import { Hono } from "hono"; +import { createKeys } from "keypal"; +import Redis from "ioredis"; -const redis = new Redis() +const redis = new Redis(); const keys = createKeys({ - prefix: 'sk_', - storage: 'redis', - cache: 'redis', + prefix: "sk_", + storage: "redis", + cache: "redis", redis, -}) +}); -const app = new Hono() +const app = new Hono(); // Authentication middleware -app.use('/api/*', async (c, next) => { - const result = await keys.verify(c.req.raw.headers) - +app.use("/api/*", async (c, next) => { + const result = await keys.verify(c.req.raw.headers); + if (!result.valid) { - return c.json({ error: result.error }, 401) + return c.json({ error: result.error }, 401); } - c.set('apiKey', result.record) - keys.updateLastUsed(result.record.id).catch(console.error) - - await next() -}) + c.set("apiKey", result.record); + keys.updateLastUsed(result.record.id).catch(console.error); + + await next(); +}); // Protected route with scope check -app.get('/api/data', async (c) => { - const record = c.get('apiKey') - - if (!keys.hasScope(record, 'read')) { - return c.json({ error: 'Insufficient permissions' }, 403) +app.get("/api/data", async c => { + const record = c.get("apiKey"); + + if (!keys.hasScope(record, "read")) { + return c.json({ error: "Insufficient permissions" }, 403); } - return c.json({ data: 'sensitive data' }) -}) + return c.json({ data: "sensitive data" }); +}); ``` ## Security Best Practices 1. 
**Use a salt in production**: + ```typescript const keys = createKeys({ salt: process.env.API_KEY_SALT, - algorithm: 'sha512', - }) + algorithm: "sha512", + }); ``` 2. **Set expiration dates**: Don't create keys that never expire @@ -424,9 +617,10 @@ app.get('/api/data', async (c) => { 6. **Monitor usage**: Track `lastUsedAt` to identify unused keys 7. **Rotate keys**: Implement regular key rotation policies + ```typescript // Rotate keys periodically - const { key: newKey } = await keys.rotate(oldRecord.id) + const { key: newKey } = await keys.rotate(oldRecord.id); ``` 8. **Use soft revocation**: Revoked keys are kept with `revokedAt` timestamp for audit trails (Redis TTL: 7 days, Drizzle: forever) @@ -437,28 +631,56 @@ app.get('/api/data', async (c) => { ```typescript interface ApiKeyRecord { - id: string - keyHash: string - metadata: ApiKeyMetadata + id: string; + keyHash: string; + metadata: ApiKeyMetadata; } interface ApiKeyMetadata { - ownerId: string - name?: string - description?: string - scopes?: string[] - expiresAt: string | null - createdAt?: string - lastUsedAt?: string - enabled?: boolean - revokedAt?: string | null - rotatedTo?: string | null + ownerId: string; + name?: string; + description?: string; + scopes?: string[]; + expiresAt: string | null; + createdAt?: string; + lastUsedAt?: string; + enabled?: boolean; + revokedAt?: string | null; + rotatedTo?: string | null; } interface VerifyResult { - valid: boolean - record?: ApiKeyRecord - error?: string + valid: boolean; + record?: ApiKeyRecord; + error?: string; + errorCode?: ApiKeyErrorCode; + rateLimit?: { + current: number; + limit: number; + remaining: number; + resetMs: number; + resetAt: string; + }; +} + +interface RateLimitConfig { + maxRequests: number; + windowMs: number; + keyPrefix?: string; +} + +interface RateLimitResult { + allowed: boolean; + current: number; + limit: number; + resetMs: number; + resetAt: string; + remaining: number; +} + +interface RateLimitCheckOptions { + 
increment?: boolean; + identifier?: string; } ``` diff --git a/biome.jsonc b/biome.jsonc index 3face83..ce6228b 100644 --- a/biome.jsonc +++ b/biome.jsonc @@ -1,45 +1,45 @@ { - "$schema": "./node_modules/@biomejs/biome/configuration_schema.json", - "extends": ["ultracite"], - "files": { - "includes": ["!benchmark.ts"] - }, - "formatter": { - "enabled": true, - "indentWidth": 2, - "indentStyle": "tab" - }, - "linter": { - "rules": { - "complexity": { - "noExcessiveCognitiveComplexity": { - "level": "warn", - "options": { - "maxAllowedComplexity": 50 - } - } - } - } - }, - "overrides": [ - { - "linter": { - "rules": { - "suspicious": { - "noConsole": "off", - "noExplicitAny": "off" - }, - "style": { - "noMagicNumbers": "off" - }, - "complexity": { - "useLiteralKeys": "off" - }, - "correctness": { - "noUnusedVariables": "off" - } - } - } - } - ] + "$schema": "./node_modules/@biomejs/biome/configuration_schema.json", + "extends": ["ultracite"], + "files": { + "includes": ["!benchmark.ts"] + }, + "formatter": { + "enabled": true, + "indentWidth": 2, + "indentStyle": "tab" + }, + "linter": { + "rules": { + "complexity": { + "noExcessiveCognitiveComplexity": { + "level": "warn", + "options": { + "maxAllowedComplexity": 100 + } + } + } + } + }, + "overrides": [ + { + "linter": { + "rules": { + "suspicious": { + "noConsole": "off", + "noExplicitAny": "off" + }, + "style": { + "noMagicNumbers": "off" + }, + "complexity": { + "useLiteralKeys": "off" + }, + "correctness": { + "noUnusedVariables": "off" + } + } + } + } + ] } diff --git a/src/core/cache.test.ts b/src/core/cache.test.ts index 9877d87..35a3c35 100644 --- a/src/core/cache.test.ts +++ b/src/core/cache.test.ts @@ -1,4 +1,13 @@ -import { beforeEach, describe, expect, it, vi } from "vitest"; +import Redis from "ioredis"; +import { + afterAll, + beforeAll, + beforeEach, + describe, + expect, + it, + vi, +} from "vitest"; import { MemoryCache, RedisCache } from "./cache"; describe("MemoryCache", () => { @@ -58,47 +67,146 
@@ describe("MemoryCache", () => { expect(cache.get("key1")).toBeNull(); expect(cache.get("key2")).toBe("value2"); }); + + it("should increment a non-existent key to 1", () => { + const result = cache.incr("counter", 60); + expect(result).toBe(1); + expect(cache.get("counter")).toBe("1"); + }); + + it("should increment an existing key", () => { + cache.set("counter", "5", 60); + const result = cache.incr("counter", 60); + // biome-ignore lint/style/noMagicNumbers: 6 is expected incremented value + expect(result).toBe(6); + expect(cache.get("counter")).toBe("6"); + }); + + it("should increment multiple times", () => { + expect(cache.incr("counter", 60)).toBe(1); + expect(cache.incr("counter", 60)).toBe(2); + // biome-ignore lint/style/noMagicNumbers: 3 is expected incremented value + expect(cache.incr("counter", 60)).toBe(3); + expect(cache.get("counter")).toBe("3"); + }); + + it("should handle increment with TTL", async () => { + // biome-ignore lint/style/noMagicNumbers: 100ms + cache.incr("counter", 0.1); + expect(cache.get("counter")).toBe("1"); + + // biome-ignore lint/style/noMagicNumbers: 150ms + await new Promise((resolve) => setTimeout(resolve, 150)); + expect(cache.get("counter")).toBeNull(); + }); + + it("should not increment expired keys", async () => { + // biome-ignore lint/style/noMagicNumbers: 100ms + cache.set("counter", "5", 0.1); + + // biome-ignore lint/style/noMagicNumbers: 150ms + await new Promise((resolve) => setTimeout(resolve, 150)); + + const result = cache.incr("counter", 60); + expect(result).toBe(1); + expect(cache.get("counter")).toBe("1"); + }); }); describe("RedisCache", () => { - let mockRedisClient: any; + let redis: Redis; let cache: RedisCache; - beforeEach(() => { - mockRedisClient = { - get: vi.fn(), - setex: vi.fn(), - del: vi.fn(), - }; - cache = new RedisCache(mockRedisClient); + beforeAll(async () => { + redis = new Redis({ + host: process.env.REDIS_HOST || "localhost", + port: Number.parseInt(process.env.REDIS_PORT || 
"6379", 10), + db: 15, // Use test database + connectTimeout: 2000, + retryStrategy: () => null, // Don't retry + lazyConnect: true, + enableReadyCheck: false, + maxRetriesPerRequest: 1, + }); + + try { + await redis.connect(); + // Ping to verify connection + await redis.ping(); + } catch (error) { + console.warn( + "Redis not available. Skipping Redis tests. Start with: bun run redis:up" + ); + throw error; + } + }); + + beforeEach(async () => { + await redis.flushdb(); + cache = new RedisCache(redis); + vi.restoreAllMocks(); + }); + + afterAll(async () => { + await redis.flushdb(); + await redis.quit(); }); it("should call redis get", async () => { - mockRedisClient.get.mockResolvedValue("value1"); + await redis.set("key1", "value1"); + const spyRedisGet = vi.spyOn(redis, "get"); const result = await cache.get("key1"); expect(result).toBe("value1"); - expect(mockRedisClient.get).toHaveBeenCalledWith("key1"); + + expect(spyRedisGet).toHaveBeenCalledWith("key1"); }); it("should call redis setex with TTL", async () => { + const spyRedisSetex = vi.spyOn(redis, "setex"); + // biome-ignore lint/style/noMagicNumbers: 120 seconds await cache.set("key1", "value1", 120); // biome-ignore lint/style/noMagicNumbers: 120 seconds - expect(mockRedisClient.setex).toHaveBeenCalledWith("key1", 120, "value1"); + expect(spyRedisSetex).toHaveBeenCalledWith("key1", 120, "value1"); }); it("should call redis del", async () => { + const spyRedisDel = vi.spyOn(redis, "del"); await cache.del("key1"); - expect(mockRedisClient.del).toHaveBeenCalledWith("key1"); + expect(spyRedisDel).toHaveBeenCalledWith("key1"); }); it("should return null for non-existent keys", async () => { - mockRedisClient.get.mockResolvedValue(null); - const result = await cache.get("non-existent"); expect(result).toBeNull(); }); + + it("should call redis eval with Lua script for incr", async () => { + const spyRedisEval = vi.spyOn(redis, "eval"); + await redis.set("counter", "4"); + + // biome-ignore 
lint/style/noMagicNumbers: 120 seconds + const result = await cache.incr("counter", 120); + + // biome-ignore lint/style/noMagicNumbers: 5 as the incremented value + expect(result).toBe(5); + expect(spyRedisEval).toHaveBeenCalledWith( + expect.stringContaining("INCR"), + 1, + "counter", + // biome-ignore lint/style/noMagicNumbers: 120 seconds + 120 + ); + }); + + it("should handle incr returning number", async () => { + await redis.set("new-counter", "0"); + + const result = await cache.incr("new-counter", 60); + expect(result).toBe(1); + expect(typeof result).toBe("number"); + }); }); diff --git a/src/core/cache.ts b/src/core/cache.ts index 085dd95..c1295f7 100644 --- a/src/core/cache.ts +++ b/src/core/cache.ts @@ -4,6 +4,15 @@ export type Cache = { get(key: string): Promise | string | null; set(key: string, value: string, ttl?: number): Promise | void; del(key: string): Promise | void; + /** + * Atomically increments the value at key by 1 and returns the new value. + * If the key doesn't exist, it's set to 1. + * The TTL is reset when the key is first created, not on subsequent increments. + * @param key - The cache key + * @param ttl - Time to live in seconds + * @returns The new value after incrementing + */ + incr(key: string, ttl?: number): Promise | number; }; export class MemoryCache implements Cache { @@ -38,6 +47,30 @@ export class MemoryCache implements Cache { this.cache.delete(key); } + incr(key: string, ttl = 60): number { + const item = this.cache.get(key); + const currentValue = + item && item.expires >= Date.now() + ? 
Number.parseInt(item.value, 10) || 0 + : 0; + const newValue = currentValue + 1; + + if (currentValue === 0) { + this.cache.set(key, { + value: String(newValue), + // biome-ignore lint/style/noMagicNumbers: 1000ms to seconds + expires: Date.now() + ttl * 1000, + }); + } else if (item) { + this.cache.set(key, { + value: String(newValue), + expires: item.expires, + }); + } + + return newValue; + } + clear(): void { this.cache.clear(); } @@ -61,4 +94,19 @@ export class RedisCache implements Cache { async del(key: string): Promise { await this.client.del(key); } + + async incr(key: string, ttl = 60): Promise { + // Use Redis INCR which is atomic, then set TTL + // Using a Lua script ensures both operations are atomic + const script = ` + local count = redis.call('INCR', KEYS[1]) + if count == 1 then + redis.call('EXPIRE', KEYS[1], ARGV[1]) + end + return count + `; + + const result = await this.client.eval(script, 1, key, ttl); + return Number(result); + } } diff --git a/src/core/rate-limiter.test.ts b/src/core/rate-limiter.test.ts new file mode 100644 index 0000000..703f2c0 --- /dev/null +++ b/src/core/rate-limiter.test.ts @@ -0,0 +1,205 @@ +import Redis from "ioredis"; +import { beforeEach, describe, expect, it } from "vitest"; +import { createKeys } from "../manager"; +import type { ApiKeyRecord } from "../types/api-key-types"; + +describe("RateLimiter", () => { + let keys: ReturnType; + let apiKeyRecord: ApiKeyRecord; + + beforeEach(async () => { + keys = createKeys({ + cache: true, + cacheTtl: 60, + }); + + const { record } = await keys.create({ + ownerId: "user_123", + name: "Test Key", + }); + apiKeyRecord = record; + }); + + it("should allow requests within limit", async () => { + const rateLimiter = keys.createRateLimiter({ + maxRequests: 10, + windowMs: 60_000, + }); + + const result = await rateLimiter.check(apiKeyRecord); + + const REMAINING_REQUESTS = 10 - 1; + + expect(result.allowed).toBe(true); + expect(result.current).toBe(1); + 
expect(result.limit).toBe(10); + expect(result.remaining).toBe(REMAINING_REQUESTS); + }); + + it("should block requests exceeding limit", async () => { + const rateLimiter = keys.createRateLimiter({ + maxRequests: 5, + windowMs: 60_000, + }); + + // Make 5 requests to hit the limit + const REQUEST_COUNT = 5; + for (let i = 0; i < REQUEST_COUNT; i++) { + await rateLimiter.check(apiKeyRecord); + } + + // 6th request should be blocked + const result = await rateLimiter.check(apiKeyRecord); + + expect(result.allowed).toBe(false); + // With atomic increment, the counter increments before checking, + // so the 6th request will show current: 6 + expect(result.current).toBe(REQUEST_COUNT + 1); + expect(result.remaining).toBe(0); + }); + + it("should reset the rate limit after the window expires", async () => { + const rateLimiter = keys.createRateLimiter({ + maxRequests: 3, + windowMs: 2000, + }); + + await rateLimiter.check(apiKeyRecord); + await rateLimiter.check(apiKeyRecord); + await rateLimiter.check(apiKeyRecord); + + // Wait for the window to expire + const WAIT_TIME_MS = 2000; + await new Promise((resolve) => setTimeout(resolve, WAIT_TIME_MS)); + + await rateLimiter.check(apiKeyRecord); + const result = await rateLimiter.check(apiKeyRecord); + + expect(result.allowed).toBe(true); + expect(result.current).toBe(2); + expect(result.remaining).toBe(1); + }); + + it("should check limit without incrementing when increment is false", async () => { + const rateLimiter = keys.createRateLimiter({ + maxRequests: 3, + windowMs: 2000, + }); + + await rateLimiter.check(apiKeyRecord); + await rateLimiter.check(apiKeyRecord); + + const dryRun = await rateLimiter.check(apiKeyRecord, { increment: false }); + expect(dryRun.allowed).toBe(true); + expect(dryRun.remaining).toBe(1); + + const verify = await rateLimiter.check(apiKeyRecord, { increment: false }); + expect(verify.remaining).toBe(1); + }); + + it("should check limit with custom identifier", async () => { + const rateLimiter 
= keys.createRateLimiter({ + maxRequests: 3, + windowMs: 2000, + }); + + const result = await rateLimiter.check(apiKeyRecord, { + identifier: "custom_identifier", + }); + expect(result.allowed).toBe(true); + expect(result.remaining).toBe(2); + }); + + it("should reset the rate limit", async () => { + const rateLimiter = keys.createRateLimiter({ + maxRequests: 3, + windowMs: 2000, + }); + + const REMAINING_REQUESTS = 3; + + await rateLimiter.check(apiKeyRecord); + await rateLimiter.check(apiKeyRecord); + await rateLimiter.reset(apiKeyRecord); + + const result = await rateLimiter.check(apiKeyRecord); + expect(result.allowed).toBe(true); + expect(result.current).toBe(1); + expect(result.remaining).toBe(REMAINING_REQUESTS - 1); + }); + + it("should get current count", async () => { + const rateLimiter = keys.createRateLimiter({ + maxRequests: 3, + windowMs: 2000, + }); + + await rateLimiter.check(apiKeyRecord); + await rateLimiter.check(apiKeyRecord); + + const count = await rateLimiter.getCurrentCount(apiKeyRecord); + expect(count).toBe(2); + }); + + it("should test for multiple keys not interfering", async () => { + const { record: apiKeyRecord2 } = await keys.create({ + ownerId: "user_123", + name: "Test Key 2", + }); + + const REMAINING_REQUESTS = 3; + + const rateLimiter = keys.createRateLimiter({ + maxRequests: 3, + windowMs: 2000, + }); + + await rateLimiter.check(apiKeyRecord); + await rateLimiter.check(apiKeyRecord); + + const result = await rateLimiter.check(apiKeyRecord2); + expect(result.allowed).toBe(true); + expect(result.current).toBe(1); + expect(result.remaining).toBe(REMAINING_REQUESTS - 1); + }); + + it("should work with RedisCache", async () => { + const redis = new Redis({ + host: process.env.REDIS_HOST || "localhost", + port: Number.parseInt(process.env.REDIS_PORT || "6379", 10), + db: 15, // Use test database + connectTimeout: 2000, + retryStrategy: () => null, // Don't retry + lazyConnect: true, + enableReadyCheck: false, + maxRetriesPerRequest: 
1,
+    });
+
+    try {
+      await redis.connect();
+      await redis.ping();
+    } catch {
+      it.skip(
+        "Redis not available. Skipping Redis tests. Start with: bun run redis:up"
+      );
+      return;
+    }
+
+    const keyManager = createKeys({
+      cache: "redis",
+      redis,
+    });
+
+    const rateLimiter = keyManager.createRateLimiter({
+      maxRequests: 3,
+      windowMs: 2000,
+    });
+
+    const result = await rateLimiter.check(apiKeyRecord);
+    expect(result.allowed).toBe(true);
+    expect(result.current).toBe(1);
+    expect(result.remaining).toBe(2);
+
+    await redis.quit();
+  });
+});
diff --git a/src/core/rate-limiter.ts b/src/core/rate-limiter.ts
new file mode 100644
index 0000000..29e4e76
--- /dev/null
+++ b/src/core/rate-limiter.ts
@@ -0,0 +1,110 @@
+import type { ApiKeyRecord } from "../types/api-key-types";
+import type {
+  RateLimitCheckOptions,
+  RateLimitConfig,
+  RateLimitResult,
+} from "../types/rate-limit-types";
+import { MILLISECONDS_PER_SECOND } from "../utils/constants";
+import type { Cache } from "./cache";
+
+export class RateLimiter {
+  private readonly cache: Cache;
+  private readonly config: RateLimitConfig;
+  private readonly keyPrefix: string;
+
+  constructor(cache: Cache, config: RateLimitConfig) {
+    if (config.windowMs <= 0) {
+      throw new Error("windowMs must be a positive number");
+    }
+    if (config.maxRequests <= 0) {
+      throw new Error("maxRequests must be a positive number");
+    }
+
+    this.cache = cache;
+    this.config = config;
+    this.keyPrefix = config.keyPrefix ?? "ratelimit";
+  }
+
+  private getWindowKeyData(apiKeyRecord: ApiKeyRecord, identifier?: string) {
+    const id = identifier ?? apiKeyRecord.id;
+    const now = Date.now();
+    const windowStart =
+      Math.floor(now / this.config.windowMs) * this.config.windowMs;
+
+    const key = `${this.keyPrefix}:${id}:${windowStart}`;
+
+    return { now, windowStart, key };
+  }
+
+  async check(
+    apiKeyRecord: ApiKeyRecord,
+    options: RateLimitCheckOptions = {}
+  ): Promise<RateLimitResult> {
+    const increment = options.increment ??
true;
+
+    const { now, windowStart, key } = this.getWindowKeyData(
+      apiKeyRecord,
+      options.identifier
+    );
+
+    const resetAt = windowStart + this.config.windowMs;
+    const resetMs = resetAt - now;
+    const ttlSeconds = Math.ceil(
+      this.config.windowMs / MILLISECONDS_PER_SECOND
+    );
+
+    if (increment) {
+      // Use atomic increment to prevent race conditions
+      const newCount = await this.cache.incr(key, ttlSeconds);
+
+      if (newCount > this.config.maxRequests) {
+        return {
+          allowed: false,
+          current: newCount,
+          limit: this.config.maxRequests,
+          resetMs,
+          resetAt: new Date(resetAt).toISOString(),
+          remaining: 0,
+        };
+      }
+
+      return {
+        allowed: true,
+        current: newCount,
+        limit: this.config.maxRequests,
+        resetMs,
+        resetAt: new Date(resetAt).toISOString(),
+        remaining: this.config.maxRequests - newCount,
+      };
+    }
+
+    // When not incrementing, just check the current value
+    const currentValue = await this.cache.get(key);
+    const current = currentValue ? Number.parseInt(currentValue, 10) : 0;
+
+    return {
+      allowed: current < this.config.maxRequests,
+      current,
+      limit: this.config.maxRequests,
+      resetMs,
+      resetAt: new Date(resetAt).toISOString(),
+      remaining: Math.max(0, this.config.maxRequests - current),
+    };
+  }
+
+  async reset(apiKeyRecord: ApiKeyRecord, identifier?: string): Promise<void> {
+    const { key } = this.getWindowKeyData(apiKeyRecord, identifier);
+
+    await this.cache.del(key);
+  }
+
+  async getCurrentCount(
+    apiKeyRecord: ApiKeyRecord,
+    identifier?: string
+  ): Promise<number> {
+    const { key } = this.getWindowKeyData(apiKeyRecord, identifier);
+
+    const value = await this.cache.get(key);
+    return value ?
Number.parseInt(value, 10) : 0; + } +} diff --git a/src/index.ts b/src/index.ts index 8d3cb65..14a60c1 100644 --- a/src/index.ts +++ b/src/index.ts @@ -3,6 +3,7 @@ export { getExpirationTime, isExpired } from "./core/expiration"; export { extractKeyFromHeaders, hasApiKey } from "./core/extract-key"; +export { RateLimiter } from "./core/rate-limiter"; export { hasAllScopes, hasAnyScope, hasScope } from "./core/scopes"; export type { ApiKeyManager, VerifyOptions, VerifyResult } from "./manager"; export { createKeys } from "./manager"; @@ -25,4 +26,9 @@ export type { Permission, PermissionScope, } from "./types/permissions-types"; +export type { + RateLimitCheckOptions, + RateLimitConfig, + RateLimitResult, +} from "./types/rate-limit-types"; export type { Storage, StorageOptions } from "./types/storage-types"; diff --git a/src/manager.test.ts b/src/manager.test.ts index 10d6f7b..895b43a 100644 --- a/src/manager.test.ts +++ b/src/manager.test.ts @@ -1359,3 +1359,144 @@ describe("ApiKeyManager - Audit Logging", () => { }); }); }); + +describe("ApiKeyManager - Rate Limiting", () => { + it("should enforce rate limits on verify calls", async () => { + const keysWithRateLimit = createKeys({ + prefix: "sk_", + cache: true, + rateLimit: { + maxRequests: 3, + windowMs: 60_000, + }, + }); + + const { key } = await keysWithRateLimit.create({ ownerId: "user_1" }); + + // First 3 requests should succeed + const ALLOWED_REQUESTS = 3; + for (let i = 0; i < ALLOWED_REQUESTS; i++) { + const result = await keysWithRateLimit.verify(key); + expect(result.valid).toBe(true); + expect(result.rateLimit).toBeDefined(); + expect(result.rateLimit?.limit).toBe(ALLOWED_REQUESTS); + expect(result.rateLimit?.remaining).toBe(ALLOWED_REQUESTS - (i + 1)); + } + + // 4th request should be rate limited + const blockedResult = await keysWithRateLimit.verify(key); + expect(blockedResult.valid).toBe(false); + expect(blockedResult.errorCode).toBe("RATE_LIMIT_EXCEEDED"); + 
expect(blockedResult.error).toBe("Rate limit exceeded"); + expect(blockedResult.rateLimit).toBeDefined(); + // biome-ignore lint/style/noMagicNumbers: 4 is the blocked request count + expect(blockedResult.rateLimit?.current).toBe(4); + expect(blockedResult.rateLimit?.remaining).toBe(0); + }); + + it("should not include rate limit info when rate limiting is disabled", async () => { + const keysWithoutRateLimit = createKeys({ + prefix: "sk_", + cache: true, + }); + + const { key } = await keysWithoutRateLimit.create({ ownerId: "user_1" }); + const result = await keysWithoutRateLimit.verify(key); + + expect(result.valid).toBe(true); + expect(result.rateLimit).toBeUndefined(); + }); + + it("should throw error when rate limiting is configured without cache", () => { + expect(() => + createKeys({ + prefix: "sk_", + rateLimit: { + maxRequests: 100, + windowMs: 60_000, + }, + }) + ).toThrow("Cache is required for rate limiting"); + }); + + it("should rate limit per API key", async () => { + const keysWithRateLimit = createKeys({ + prefix: "sk_", + cache: true, + rateLimit: { + maxRequests: 2, + windowMs: 60_000, + }, + }); + + const { key: key1 } = await keysWithRateLimit.create({ + ownerId: "user_1", + }); + const { key: key2 } = await keysWithRateLimit.create({ + ownerId: "user_2", + }); + + // Use key1 twice (hit limit) + await keysWithRateLimit.verify(key1); + await keysWithRateLimit.verify(key1); + + // Third request for key1 should be blocked + const key1Result = await keysWithRateLimit.verify(key1); + expect(key1Result.valid).toBe(false); + expect(key1Result.errorCode).toBe("RATE_LIMIT_EXCEEDED"); + + // key2 should still work (separate rate limit) + const key2Result = await keysWithRateLimit.verify(key2); + expect(key2Result.valid).toBe(true); + expect(key2Result.rateLimit?.remaining).toBe(1); + }); + + it("should include rate limit info in successful responses", async () => { + const keysWithRateLimit = createKeys({ + prefix: "sk_", + cache: true, + rateLimit: { + 
maxRequests: 10, + windowMs: 60_000, + }, + }); + + const { key } = await keysWithRateLimit.create({ ownerId: "user_1" }); + const result = await keysWithRateLimit.verify(key); + + expect(result.valid).toBe(true); + expect(result.rateLimit).toBeDefined(); + expect(result.rateLimit?.current).toBe(1); + expect(result.rateLimit?.limit).toBe(10); + // biome-ignore lint/style/noMagicNumbers: 9 is the remaining requests + expect(result.rateLimit?.remaining).toBe(9); + expect(result.rateLimit?.resetMs).toBeGreaterThan(0); + expect(result.rateLimit?.resetAt).toMatch(ISO_DATE_REGEX); + }); + + it("should rate limit from cache path", async () => { + const keysWithRateLimit = createKeys({ + prefix: "sk_", + cache: true, + rateLimit: { + maxRequests: 2, + windowMs: 60_000, + }, + }); + + const { key } = await keysWithRateLimit.create({ ownerId: "user_1" }); + + // First verify (cache miss) + const result1 = await keysWithRateLimit.verify(key); + expect(result1.valid).toBe(true); + + // Second verify (cache hit) + const result2 = await keysWithRateLimit.verify(key); + expect(result2.valid).toBe(true); + + // Third verify should be rate limited (cache hit) + const result3 = await keysWithRateLimit.verify(key); + expect(result3.valid).toBe(false); + expect(result3.errorCode).toBe("RATE_LIMIT_EXCEEDED"); + }); +}); diff --git a/src/manager.ts b/src/manager.ts index e5faab6..a3d7c78 100644 --- a/src/manager.ts +++ b/src/manager.ts @@ -8,6 +8,7 @@ import { } from "./core/extract-key"; import { generateKey } from "./core/generate"; import { hashKey } from "./core/hash"; +import { RateLimiter } from "./core/rate-limiter"; import { hasAllScopesWithResources, hasAnyScopeWithResources, @@ -32,6 +33,7 @@ import { createErrorResult, } from "./types/error-types"; import type { PermissionScope } from "./types/permissions-types"; +import type { RateLimitConfig } from "./types/rate-limit-types"; import type { Storage } from "./types/storage-types"; import { logger } from "./utils/logger"; @@ 
-47,17 +49,30 @@ export type VerifyResult = { error?: string; /** Error code for programmatic handling */ errorCode?: ApiKeyErrorCode; + /** Rate limit information (only included if rate limiting is enabled) */ + rateLimit?: { + /** Current request count in the window */ + current: number; + /** Maximum number of requests allowed within the window */ + limit: number; + /** Number of requests remaining within the window */ + remaining: number; + /** Time in milliseconds until the window resets */ + resetMs: number; + /** ISO timestamp when the window resets */ + resetAt: string; + }; }; /** * Minimal record stored in cache to reduce exposure */ -interface CacheRecord { +type CacheRecord = { id: string; expiresAt: string | null; revokedAt: string | null; enabled: boolean; -} +}; /** * Options for verifying API keys @@ -107,20 +122,22 @@ export class ApiKeyManager { private readonly revokedKeyTtl: number; private readonly isRedisStorage: boolean; private readonly autoTrackUsage: boolean; + private readonly rateLimiter?: RateLimiter; private readonly auditLogsEnabled: boolean; private readonly defaultContext?: ActionContext; constructor(config: ConfigInput = {}) { + const salt = config.salt + ? hashKey(config.salt, { algorithm: "sha256" }) + : ""; - const salt = config.salt ? hashKey(config.salt, { algorithm: "sha256" }) : ""; - this.config = { prefix: config.prefix, // biome-ignore lint/style/noMagicNumbers: 32 characters default length: config.length ?? 32, algorithm: config.algorithm ?? 
"sha256", alphabet: config.alphabet, - salt + salt, }; // biome-ignore lint/style/noMagicNumbers: 7 days default (604800 seconds) @@ -168,6 +185,11 @@ export class ApiKeyManager { this.cache = config.cache; } // else: cache is false/undefined by default, no caching + + // Initialize rate limiter if configured + if (config.rateLimit) { + this.rateLimiter = this.createRateLimiter(config.rateLimit); + } } generateKey(): string { @@ -334,6 +356,47 @@ export class ApiKeyManager { return createErrorResult(ApiKeyErrorCode.REVOKED); } + if (record.metadata.enabled === false) { + return createErrorResult(ApiKeyErrorCode.DISABLED); + } + + // Check rate limit if enabled + if (this.rateLimiter) { + const rateLimitResult = await this.rateLimiter.check(record); + if (!rateLimitResult.allowed) { + return { + ...createErrorResult(ApiKeyErrorCode.RATE_LIMIT_EXCEEDED), + rateLimit: { + current: rateLimitResult.current, + limit: rateLimitResult.limit, + remaining: rateLimitResult.remaining, + resetMs: rateLimitResult.resetMs, + resetAt: rateLimitResult.resetAt, + }, + }; + } + + // Track usage if enabled + if (this.autoTrackUsage && !options.skipTracking) { + this.updateLastUsed(record.id).catch((err) => { + logger.error("Failed to track usage:", err); + }); + } + + return { + valid: true, + record, + rateLimit: { + current: rateLimitResult.current, + limit: rateLimitResult.limit, + remaining: rateLimitResult.remaining, + resetMs: rateLimitResult.resetMs, + resetAt: rateLimitResult.resetAt, + }, + }; + } + + // Track usage if enabled if (this.autoTrackUsage && !options.skipTracking) { this.updateLastUsed(record.id).catch((err) => { logger.error("Failed to track usage:", err); @@ -376,6 +439,61 @@ export class ApiKeyManager { return createErrorResult(ApiKeyErrorCode.DISABLED); } + // Check rate limit if enabled + if (this.rateLimiter) { + // Cache the record first so subsequent requests can use the cache path + if (this.cache && !options.skipCache) { + try { + const cacheRecord: 
CacheRecord = { + id: record.id, + expiresAt: record.metadata.expiresAt ?? null, + revokedAt: record.metadata.revokedAt ?? null, + enabled: record.metadata.enabled ?? true, + }; + await this.cache.set( + `apikey:${keyHash}`, + JSON.stringify(cacheRecord), + this.cacheTtl + ); + } catch (error) { + logger.error("CRITICAL: Failed to write to cache:", error); + } + } + + const rateLimitResult = await this.rateLimiter.check(record); + if (!rateLimitResult.allowed) { + return { + ...createErrorResult(ApiKeyErrorCode.RATE_LIMIT_EXCEEDED), + rateLimit: { + current: rateLimitResult.current, + limit: rateLimitResult.limit, + remaining: rateLimitResult.remaining, + resetMs: rateLimitResult.resetMs, + resetAt: rateLimitResult.resetAt, + }, + }; + } + + // Track usage if enabled + if (this.autoTrackUsage && !options.skipTracking) { + this.updateLastUsed(record.id).catch((err) => { + logger.error("Failed to track usage:", err); + }); + } + + return { + valid: true, + record, + rateLimit: { + current: rateLimitResult.current, + limit: rateLimitResult.limit, + remaining: rateLimitResult.remaining, + resetMs: rateLimitResult.resetMs, + resetAt: rateLimitResult.resetAt, + }, + }; + } + if (this.cache && !options.skipCache) { try { const cacheRecord: CacheRecord = { @@ -996,6 +1114,35 @@ export class ApiKeyManager { resource: `${resourceType}:${resourceId}`, }); } + + /** + * Create a rate limiter instance + * + * @param config - Rate limit configuration + * @returns A RateLimiter instance for checking request limits + * + * @example + * ```typescript + * const rateLimiter = keys.createRateLimiter({ + * maxRequests: 100, + * windowMs: 60000, // 1 minute + * }); + * + * const result = await rateLimiter.check(apiKeyRecord); + * if (!result.allowed) { + * throw new Error(`Rate limit exceeded. 
Reset in ${result.resetMs}ms`); + * } + * ``` + */ + createRateLimiter(config: RateLimitConfig): RateLimiter { + if (!this.cache) { + throw new Error( + "[keypal] Cache is required for rate limiting. Enable cache in ApiKeyManager config." + ); + } + + return new RateLimiter(this.cache, config); + } } /** diff --git a/src/storage/kysely.test.ts b/src/storage/kysely.test.ts index 2c2bfc5..f70bdda 100644 --- a/src/storage/kysely.test.ts +++ b/src/storage/kysely.test.ts @@ -224,9 +224,9 @@ describe("KyselyStore", () => { tags: ["test", "key", "more", "tags"], }); - const found = await store.findByTag("test"); - expect(found).toHaveLength(1); - expect(found.at(0)?.id).toBe(record.id); + const found = await store.findByTag("test"); + expect(found).toHaveLength(1); + expect(found.at(0)?.id).toBe(record.id); }); it("should find all records by multiple tags (OR logic)", async () => { @@ -258,9 +258,9 @@ describe("KyselyStore", () => { tags: ["test"], }); - const found = await store.findByTag("test", "user_123"); - expect(found).toHaveLength(1); - expect(found.at(0)?.id).toBe(record.id); + const found = await store.findByTag("test", "user_123"); + expect(found).toHaveLength(1); + expect(found.at(0)?.id).toBe(record.id); }); it("should find all records by owner and multiple tags", async () => { @@ -274,9 +274,9 @@ describe("KyselyStore", () => { tags: ["test", "key"], }); - const found = await store.findByTags(["test", "key"], "user_123"); - expect(found).toHaveLength(1); - expect(found.at(0)?.id).toBe(record.id); + const found = await store.findByTags(["test", "key"], "user_123"); + expect(found).toHaveLength(1); + expect(found.at(0)?.id).toBe(record.id); }); }); @@ -1241,4 +1241,3 @@ describe("KyselyStore", () => { }); }); }); - diff --git a/src/storage/kysely.ts b/src/storage/kysely.ts index a183ed5..1665c15 100644 --- a/src/storage/kysely.ts +++ b/src/storage/kysely.ts @@ -1,4 +1,4 @@ -import { sql, type Kysely } from "kysely"; +import { type Kysely, sql } from "kysely"; 
 import type { ApiKeyMetadata, ApiKeyRecord } from "../types/api-key-types";
 import type { Storage } from "../types/storage-types";
 
@@ -40,7 +40,10 @@ export class KyselyStore implements Storage {
   private readonly db: Kysely<ApiKeysDatabase>;
   private readonly table: keyof ApiKeysDatabase;
 
-  constructor(options: { db: Kysely<ApiKeysDatabase>; table: keyof ApiKeysDatabase }) {
+  constructor(options: {
+    db: Kysely<ApiKeysDatabase>;
+    table: keyof ApiKeysDatabase;
+  }) {
     this.db = options.db;
     this.table = options.table;
   }
@@ -73,10 +76,7 @@ export class KyselyStore implements Storage {
   }
 
   async save(record: ApiKeyRecord): Promise<void> {
-    await this.db
-      .insertInto(this.table)
-      .values(this.toRow(record))
-      .execute();
+    await this.db.insertInto(this.table).values(this.toRow(record)).execute();
   }
 
   async findByHash(keyHash: string): Promise<ApiKeyRecord | null> {
@@ -123,8 +123,8 @@
     // Build tag conditions (OR logic)
     if (tags.length > 0) {
       const lowercasedTags = tags.map((t) => t.toLowerCase());
-      const tagConditions = lowercasedTags.map((tag) =>
-        sql`metadata @> ${JSON.stringify({ tags: [tag] })}`
+      const tagConditions = lowercasedTags.map(
+        (tag) => sql`metadata @> ${JSON.stringify({ tags: [tag] })}`
       );
 
       // biome-ignore lint/suspicious/noExplicitAny: Kysely or types are complex
@@ -177,4 +177,3 @@
       .execute();
   }
 }
-
diff --git a/src/types/config-types.ts b/src/types/config-types.ts
index 4f73392..08ad6ca 100644
--- a/src/types/config-types.ts
+++ b/src/types/config-types.ts
@@ -2,6 +2,7 @@ import type Redis from "ioredis";
 import { type Static, Type } from "typebox";
 import type { Cache } from "../core/cache";
 import type { ActionContext } from "./audit-log-types";
+import type { RateLimitConfig } from "./rate-limit-types";
 import type { Storage } from "./storage-types";
 
 export const ConfigSchema = Type.Object({
@@ -119,6 +120,15 @@ export type ConfigInput = {
    */
   autoTrackUsage?: boolean;
 
+  /**
+   * Rate limit configuration for verify calls
+   * When
enabled, each verify() call will be rate-limited based on the API key
+   * Requires cache to be enabled (cache: true | 'redis' | Cache instance)
+   * @default undefined (no rate limiting)
+   * @example { maxRequests: 100, windowMs: 60000 } // 100 requests per minute
+   */
+  rateLimit?: RateLimitConfig;
+
   /**
    * Enable audit logging for key actions
    * @default false
diff --git a/src/types/error-types.ts b/src/types/error-types.ts
index b3b3584..d94a675 100644
--- a/src/types/error-types.ts
+++ b/src/types/error-types.ts
@@ -26,6 +26,9 @@ export const ApiKeyErrorCode = {
   /** Cache error occurred */
   CACHE_ERROR: "CACHE_ERROR",
 
+  /** Rate limit exceeded */
+  RATE_LIMIT_EXCEEDED: "RATE_LIMIT_EXCEEDED",
+
   /** API key is already revoked */
   ALREADY_REVOKED: "ALREADY_REVOKED",
 
@@ -63,6 +66,7 @@ export const ApiKeyErrorMessages: Record<ApiKeyErrorCode, string> = {
   [ApiKeyErrorCode.DISABLED]: "API key is disabled",
   [ApiKeyErrorCode.STORAGE_ERROR]: "Storage error occurred",
   [ApiKeyErrorCode.CACHE_ERROR]: "Cache error occurred",
+  [ApiKeyErrorCode.RATE_LIMIT_EXCEEDED]: "Rate limit exceeded",
   [ApiKeyErrorCode.ALREADY_REVOKED]: "API key is already revoked",
   [ApiKeyErrorCode.ALREADY_ENABLED]: "API key is already enabled",
   [ApiKeyErrorCode.ALREADY_DISABLED]: "API key is already disabled",
@@ -105,7 +109,11 @@ export function createApiKeyError(
 export function createErrorResult(
   code: ApiKeyErrorCode,
   details?: unknown
-): { valid: false; error: string; errorCode: ApiKeyErrorCode } {
+): {
+  valid: false;
+  error: string;
+  errorCode: ApiKeyErrorCode;
+} {
   const error = createApiKeyError(code, details);
   return {
     valid: false,
diff --git a/src/types/rate-limit-types.ts b/src/types/rate-limit-types.ts
new file mode 100644
index 0000000..7596157
--- /dev/null
+++ b/src/types/rate-limit-types.ts
@@ -0,0 +1,39 @@
+/**
+ * Configuration for the rate limiter
+ */
+export type RateLimitConfig = {
+  /** Maximum number of requests allowed within the window */
+  maxRequests: number;
+  /** Window in milliseconds */
+ 
windowMs: number; + /** Prefix for the key in the cache */ + keyPrefix?: string; +}; + +/** + * Result of checking the rate limit + */ +export type RateLimitResult = { + /** Whether the request is allowed */ + allowed: boolean; + /** Current request count in the window */ + current: number; + /** Maximum number of requests allowed within the window */ + limit: number; + /** Time in milliseconds until the window resets */ + resetMs: number; + /** ISO timestamp when the window resets */ + resetAt: string; + /** Number of requests remaining within the window */ + remaining: number; +}; + +/** + * Options for checking the rate limit + */ +export type RateLimitCheckOptions = { + /** Increment the counter (default: true). Set to false for dry-run checks */ + increment?: boolean; + /** Custom identifier (defaults to API key record ID) */ + identifier?: string; +}; diff --git a/src/utils/constants.ts b/src/utils/constants.ts new file mode 100644 index 0000000..aca1d1b --- /dev/null +++ b/src/utils/constants.ts @@ -0,0 +1 @@ +export const MILLISECONDS_PER_SECOND = 1000;