From 577dc3e766cbf24a17c54b5537391acc4580bb54 Mon Sep 17 00:00:00 2001
From: Arseniy Kamyshev
Date: Tue, 29 Jul 2025 19:01:30 +0700
Subject: [PATCH] feat: add multi-layer caching strategy from kogotochki

- Implement generic MultiLayerCache service with automatic layer population
- Add cache adapters for memory, KV store, and edge caching
- Support tag-based and pattern-based invalidation
- Include cache statistics and performance monitoring
- Add comprehensive tests and documentation
- Inspired by kogotochki's 3-layer caching implementation for top-3 auction winners
---
 docs/patterns/multi-layer-caching.md       | 241 +++++++++++
 .../__tests__/multi-layer-cache.test.ts    | 379 ++++++++++++++++++
 .../cache-adapters/edge-cache-adapter.ts   | 176 ++++++++
 src/services/cache-adapters/index.ts       |   7 +
 .../cache-adapters/kv-cache-adapter.ts     | 189 +++++++++
 .../cache-adapters/memory-cache-adapter.ts | 162 ++++++++
 src/services/multi-layer-cache.ts          | 297 ++++++++++++++
 7 files changed, 1451 insertions(+)
 create mode 100644 docs/patterns/multi-layer-caching.md
 create mode 100644 src/services/__tests__/multi-layer-cache.test.ts
 create mode 100644 src/services/cache-adapters/edge-cache-adapter.ts
 create mode 100644 src/services/cache-adapters/index.ts
 create mode 100644 src/services/cache-adapters/kv-cache-adapter.ts
 create mode 100644 src/services/cache-adapters/memory-cache-adapter.ts
 create mode 100644 src/services/multi-layer-cache.ts

diff --git a/docs/patterns/multi-layer-caching.md b/docs/patterns/multi-layer-caching.md
new file mode 100644
index 0000000..ad9de2d
--- /dev/null
+++ b/docs/patterns/multi-layer-caching.md
@@ -0,0 +1,241 @@
+# Multi-Layer Caching Pattern
+
+This document describes the multi-layer caching implementation that provides a hierarchical caching system with automatic layer population and intelligent TTL management.
+
+## Overview
+
+The multi-layer cache pattern allows you to chain multiple cache layers together, where each layer has different performance characteristics and storage capacities. When a cache miss occurs in a faster layer, the system automatically checks slower layers and populates the faster layers with found values.
+
+## Architecture
+
+```
+┌─────────────┐     ┌─────────────┐     ┌─────────────┐
+│  L1: Edge   │ --> │   L2: KV    │ --> │ L3: Database│
+│  (fastest)  │     │   (fast)    │     │  (slowest)  │
+└─────────────┘     └─────────────┘     └─────────────┘
+       ↑                   ↑                   │
+       └───────────────────┴───────────────────┘
+              Auto-population on cache hit
+```
+
+## Usage Example
+
+```typescript
+import { MultiLayerCache } from '@/services/multi-layer-cache';
+import { EdgeCacheAdapter } from '@/services/cache-adapters/edge-cache-adapter';
+import { KVCacheAdapter } from '@/services/cache-adapters/kv-cache-adapter';
+import { MemoryCacheAdapter } from '@/services/cache-adapters/memory-cache-adapter';
+
+// Configure cache layers
+const cache = new MultiLayerCache({
+  layers: [
+    new EdgeCacheAdapter({ baseUrl: 'https://cache.example.com' }),
+    new KVCacheAdapter(kvNamespace, { prefix: 'app:' }),
+    new MemoryCacheAdapter(),
+  ],
+  defaultTTL: 300, // 5 minutes
+  populateUpperLayers: true, // Auto-populate faster layers
+  logger: logger,
+});
+
+// Basic usage
+const value = await cache.get('user:123');
+
+// Cache-aside pattern
+const user = await cache.getOrSet(
+  'user:123',
+  async () => {
+    return await database.getUser(123);
+  },
+  { ttl: 3600, tags: ['users'] },
+);
+
+// Invalidation
+await cache.delete('user:123');
+await cache.invalidatePattern(/^user:.*/);
+```
+
+## Features
+
+### 1. Automatic Layer Population
+
+When a value is found in a lower layer, it's automatically copied to all upper layers for faster subsequent access:
+
+```typescript
+// First call - checks all layers, finds in database (L3)
+const data = await cache.get('key'); // L1 miss, L2 miss, L3 hit
+
+// Second call - found in edge cache (L1)
+const data2 = await cache.get('key'); // L1 hit
+```
+
+### 2. Tag-Based Invalidation
+
+```typescript
+// Set with tags
+await cache.set('product:123', product, {
+  tags: ['products', 'category:electronics'],
+});
+
+// Invalidate all electronics
+await cache.invalidateByTags(['category:electronics']);
+```
+
+### 3. Pattern-Based Invalidation
+
+```typescript
+// Invalidate all user cache entries
+await cache.invalidatePattern(/^user:.*/);
+
+// String patterns are compiled with `new RegExp()`, so use regex syntax here too
+await cache.invalidatePattern('^session:.*:2024-.*');
+```
+
+### 4. Cache Statistics
+
+```typescript
+const stats = cache.getStats();
+console.log({
+  hitRate: (stats.hits / (stats.hits + stats.misses)) * 100,
+  layerEfficiency: stats.layerHits,
+});
+```
+
+### 5. Cache Warmup
+
+```typescript
+await cache.warmUp([
+  {
+    key: 'config:app',
+    factory: () => loadAppConfig(),
+    options: { ttl: 86400 },
+  },
+  {
+    key: 'categories:all',
+    factory: () => loadCategories(),
+    options: { ttl: 3600, tags: ['categories'] },
+  },
+]);
+```
+
+## Cache Adapters
+
+### EdgeCacheAdapter
+
+Uses Cloudflare's Cache API for ultra-fast edge caching:
+
+- Fastest response times
+- Limited storage
+- Best for frequently accessed data
+- Automatic geographic distribution
+
+It also exposes `cacheResponse` and `getCachedResponse` for caching whole `Response` objects; see the sketch after the performance notes below.
+
+### KVCacheAdapter
+
+Uses Cloudflare KV or similar key-value stores:
+
+- Fast global reads
+- Larger storage capacity
+- Best for semi-static data
+- Supports metadata and tags
+
+### MemoryCacheAdapter
+
+In-memory caching for a single instance:
+
+- Fastest possible access
+- Limited by instance memory
+- Lost on restart
+- Best for computation results
+
+## TTL Strategies
+
+### Dynamic TTL Based on Layer
+
+```typescript
+class SmartCache extends MultiLayerCache {
+  protected populateUpperLayersAsync<T>(
+    missedLayers: Array<{ layer: CacheLayer; index: number }>,
+    key: string,
+    value: T,
+  ): void {
+    missedLayers.forEach(({ layer, index }) => {
+      // Shorter TTL for upper layers
+      const ttl = this.defaultTTL * (1 - index * 0.2);
+      layer.set(key, value, { ttl });
+    });
+  }
+}
+```
+
+For this override to compile, `populateUpperLayersAsync` and `defaultTTL` must be declared `protected` (they are `private` in the current implementation).
+
+### Time-Based TTL
+
+```typescript
+function calculateTTL(targetTime: Date): number {
+  const now = new Date();
+  const ttl = Math.floor((targetTime.getTime() - now.getTime()) / 1000);
+  return Math.max(60, Math.min(86400, ttl)); // 1 min to 24 hours
+}
+```
+
+A usage sketch for `calculateTTL` follows the performance notes below.
+
+## Performance Considerations
+
+1. **Layer Ordering**: Place fastest layers first
+2. **TTL Management**: Use shorter TTLs for upper layers
+3. **Selective Population**: Only populate upper layers for frequently accessed data
+4. **Batch Operations**: Use warmup for predictable access patterns
+5. **Error Handling**: Failed layers don't block cache operations
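+
+Guideline 2 and the `calculateTTL` helper combine naturally for deadline-bound data such as auction results (the use case this pattern came from). A minimal sketch; the key shape and `loadAuctionResults` loader are illustrative, not part of the service:
+
+```typescript
+// Cache an auction snapshot until the auction closes rather than for a fixed TTL.
+// When upper layers later miss, populateUpperLayersAsync refreshes them with its
+// own, shorter TTLs derived from defaultTTL.
+async function getAuctionResults(cache: MultiLayerCache, auctionId: string, endsAt: Date) {
+  return cache.getOrSet(
+    `auction:${auctionId}:results`,
+    () => loadAuctionResults(auctionId), // illustrative data loader
+    { ttl: calculateTTL(endsAt), tags: ['auctions'] },
+  );
+}
+```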
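+
+`EdgeCacheAdapter` can also cache whole `Response` objects via `cacheResponse` and `getCachedResponse`. A minimal request-level sketch for a Worker `fetch` handler; the 300-second TTL and the origin `fetch` are placeholders:
+
+```typescript
+async function handleRequest(request: Request, edgeCache: EdgeCacheAdapter): Promise<Response> {
+  // Serve straight from the edge cache when possible
+  const cached = await edgeCache.getCachedResponse(request);
+  if (cached) return cached;
+
+  // Otherwise produce the response and cache a clone so the body can still be returned
+  const response = await fetch(request);
+  await edgeCache.cacheResponse(request, response.clone(), { ttl: 300 });
+  return response;
+}
+```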
+
+## Integration with Wireframe
+
+The multi-layer cache integrates seamlessly with Wireframe's architecture:
+
+```typescript
+import { PlatformContext } from '@/core/platform-context';
+import { MultiLayerCache } from '@/services/multi-layer-cache';
+import { EdgeCacheAdapter } from '@/services/cache-adapters/edge-cache-adapter';
+import { KVCacheAdapter } from '@/services/cache-adapters/kv-cache-adapter';
+
+export class CachedDataService {
+  private cache: MultiLayerCache;
+
+  constructor(private ctx: PlatformContext) {
+    this.cache = new MultiLayerCache({
+      layers: [
+        new EdgeCacheAdapter({ logger: ctx.logger }),
+        new KVCacheAdapter(ctx.platform.getKeyValueStore(), {
+          prefix: `${ctx.config.name}:cache:`,
+        }),
+      ],
+      logger: ctx.logger,
+    });
+  }
+
+  async getData<T>(key: string): Promise<T> {
+    return this.cache.getOrSet<T>(key, async () => {
+      return await this.fetchFromSource(key);
+    });
+  }
+}
+```
+
+## Best Practices
+
+1. **Cache Key Design**: Use hierarchical keys (e.g., `type:id:version`)
+2. **Invalidation Strategy**: Prefer tags over individual key deletion
+3. **Monitoring**: Track cache hit rates and adjust layer configuration
+4. **Graceful Degradation**: Always handle cache failures gracefully
+5. **Security**: Don't cache sensitive data in shared layers
+
+## Migration from Single-Layer Cache
+
+```typescript
+// Before
+const cache = new KVCache(kv);
+const data = await cache.get(key);
+
+// After
+const cache = new MultiLayerCache({
+  layers: [new KVCacheAdapter(kv)],
+});
+const data = await cache.get(key);
+```
+
+The multi-layer cache is backward compatible with single-layer usage while providing room for growth.
diff --git a/src/services/__tests__/multi-layer-cache.test.ts b/src/services/__tests__/multi-layer-cache.test.ts
new file mode 100644
index 0000000..3050a8c
--- /dev/null
+++ b/src/services/__tests__/multi-layer-cache.test.ts
@@ -0,0 +1,379 @@
+import { describe, it, expect, beforeEach, vi } from 'vitest';
+
+import { MultiLayerCache, type CacheLayer } from '../multi-layer-cache';
+import { MemoryCacheAdapter } from '../cache-adapters/memory-cache-adapter';
+
+// Mock cache layer for testing
+class MockCacheLayer implements CacheLayer {
+  name: string;
+  private store = new Map();
+  private getCalls = 0;
+  private setCalls = 0;
+
+  constructor(name: string) {
+    this.name = name;
+  }
+
+  async get<T>(key: string): Promise<T | null> {
+    this.getCalls++;
+    return this.store.get(key) || null;
+  }
+
+  async set<T>(key: string, value: T): Promise<void> {
+    this.setCalls++;
+    this.store.set(key, value);
+  }
+
+  async delete(key: string): Promise<void> {
+    this.store.delete(key);
+  }
+
+  async has(key: string): Promise<boolean> {
+    return this.store.has(key);
+  }
+
+  getGetCalls(): number {
+    return this.getCalls;
+  }
+
+  getSetCalls(): number {
+    return this.setCalls;
+  }
+
+  clear(): void {
+    this.store.clear();
+    this.getCalls = 0;
+    this.setCalls = 0;
+  }
+}
+
+describe('MultiLayerCache', () => {
+  let l1: MockCacheLayer;
+  let l2: MockCacheLayer;
+  let l3: MockCacheLayer;
+  let cache: MultiLayerCache;
+
+  beforeEach(() => {
+    l1 = new MockCacheLayer('L1');
+    l2 = new MockCacheLayer('L2');
+    l3 = new MockCacheLayer('L3');
+
+    cache = new MultiLayerCache({
+      layers: [l1, l2, l3],
+      defaultTTL: 300,
+    });
+  });
+
+  describe('constructor', () => {
+    it('should throw error if no layers provided', () => {
+      expect(() => new MultiLayerCache({ layers: [] })).toThrow(
+        'At least one cache layer is required',
+      );
+    });
+  });
+
+  describe('get', () => {
+    it('should return value from first layer if present', async () => {
+      await l1.set('key1', 'value1');
+
+      const result = await cache.get('key1');
+
+      
expect(result).toBe('value1'); + expect(l1.getGetCalls()).toBe(1); + expect(l2.getGetCalls()).toBe(0); + expect(l3.getGetCalls()).toBe(0); + }); + + it('should check subsequent layers if not in first', async () => { + await l2.set('key1', 'value2'); + + const result = await cache.get('key1'); + + expect(result).toBe('value2'); + expect(l1.getGetCalls()).toBe(1); + expect(l2.getGetCalls()).toBe(1); + expect(l3.getGetCalls()).toBe(0); + }); + + it('should populate upper layers when found in lower layer', async () => { + await l3.set('key1', 'value3'); + + const result = await cache.get('key1'); + + expect(result).toBe('value3'); + + // Wait for async population + await new Promise((resolve) => setTimeout(resolve, 10)); + + // Check that upper layers were populated + expect(await l1.get('key1')).toBe('value3'); + expect(await l2.get('key1')).toBe('value3'); + }); + + it('should return null if not found in any layer', async () => { + const result = await cache.get('nonexistent'); + + expect(result).toBeNull(); + expect(l1.getGetCalls()).toBe(1); + expect(l2.getGetCalls()).toBe(1); + expect(l3.getGetCalls()).toBe(1); + }); + + it('should handle layer errors gracefully', async () => { + const errorLayer = { + name: 'error', + get: vi.fn().mockRejectedValue(new Error('Layer error')), + set: vi.fn(), + delete: vi.fn(), + }; + + const cacheWithError = new MultiLayerCache({ + layers: [errorLayer, l2], + }); + + await l2.set('key1', 'value2'); + const result = await cacheWithError.get('key1'); + + expect(result).toBe('value2'); + expect(errorLayer.get).toHaveBeenCalled(); + }); + }); + + describe('set', () => { + it('should set value in all layers', async () => { + await cache.set('key1', 'value1'); + + expect(await l1.get('key1')).toBe('value1'); + expect(await l2.get('key1')).toBe('value1'); + expect(await l3.get('key1')).toBe('value1'); + }); + + it('should handle layer errors gracefully', async () => { + const errorLayer = { + name: 'error', + get: vi.fn(), + set: vi.fn().mockRejectedValue(new Error('Set error')), + delete: vi.fn(), + }; + + const cacheWithError = new MultiLayerCache({ + layers: [errorLayer, l2], + }); + + await cacheWithError.set('key1', 'value1'); + + // Should still set in working layer + expect(await l2.get('key1')).toBe('value1'); + }); + }); + + describe('delete', () => { + it('should delete from all layers', async () => { + await l1.set('key1', 'value1'); + await l2.set('key1', 'value1'); + await l3.set('key1', 'value1'); + + await cache.delete('key1'); + + expect(await l1.get('key1')).toBeNull(); + expect(await l2.get('key1')).toBeNull(); + expect(await l3.get('key1')).toBeNull(); + }); + }); + + describe('getOrSet', () => { + it('should return cached value if exists', async () => { + await l1.set('key1', 'cached'); + const factory = vi.fn().mockResolvedValue('generated'); + + const result = await cache.getOrSet('key1', factory); + + expect(result).toBe('cached'); + expect(factory).not.toHaveBeenCalled(); + }); + + it('should generate and cache value if not exists', async () => { + const factory = vi.fn().mockResolvedValue('generated'); + + const result = await cache.getOrSet('key1', factory); + + expect(result).toBe('generated'); + expect(factory).toHaveBeenCalledTimes(1); + expect(await l1.get('key1')).toBe('generated'); + }); + }); + + describe('warmUp', () => { + it('should warm up cache with multiple items', async () => { + const items = [ + { key: 'item1', factory: () => Promise.resolve('value1') }, + { key: 'item2', factory: () => Promise.resolve('value2') }, + { 
key: 'item3', factory: () => Promise.resolve('value3') }, + ]; + + await cache.warmUp(items); + + expect(await cache.get('item1')).toBe('value1'); + expect(await cache.get('item2')).toBe('value2'); + expect(await cache.get('item3')).toBe('value3'); + }); + + it('should handle warmup errors gracefully', async () => { + const items = [ + { key: 'item1', factory: () => Promise.resolve('value1') }, + { key: 'item2', factory: () => Promise.reject(new Error('Factory error')) }, + { key: 'item3', factory: () => Promise.resolve('value3') }, + ]; + + await cache.warmUp(items); + + expect(await cache.get('item1')).toBe('value1'); + expect(await cache.get('item2')).toBeNull(); + expect(await cache.get('item3')).toBe('value3'); + }); + }); + + describe('has', () => { + it('should return true if key exists in any layer', async () => { + await l2.set('key1', 'value'); + + const result = await cache.has('key1'); + + expect(result).toBe(true); + }); + + it('should return false if key does not exist', async () => { + const result = await cache.has('nonexistent'); + + expect(result).toBe(false); + }); + + it('should use get as fallback if has is not implemented', async () => { + const layerWithoutHas = { + name: 'no-has', + get: vi.fn().mockResolvedValue('value'), + set: vi.fn(), + delete: vi.fn(), + }; + + const cacheWithoutHas = new MultiLayerCache({ + layers: [layerWithoutHas], + }); + + const result = await cacheWithoutHas.has('key1'); + + expect(result).toBe(true); + expect(layerWithoutHas.get).toHaveBeenCalledWith('key1'); + }); + }); + + describe('statistics', () => { + it('should track cache statistics', async () => { + await l1.set('hit1', 'value1'); + await l2.set('hit2', 'value2'); + + await cache.get('hit1'); // L1 hit + await cache.get('hit2'); // L2 hit + await cache.get('miss'); // Miss + await cache.set('new', 'value'); + await cache.delete('hit1'); + + const stats = cache.getStats(); + + expect(stats.hits).toBe(2); + expect(stats.misses).toBe(1); + expect(stats.sets).toBe(1); + expect(stats.deletes).toBe(1); + expect(stats.layerHits['L1']).toBe(1); + expect(stats.layerHits['L2']).toBe(1); + expect(stats.layerHits['L3']).toBe(0); + }); + + it('should reset statistics', async () => { + await cache.get('key'); + await cache.set('key', 'value'); + + cache.resetStats(); + const stats = cache.getStats(); + + expect(stats.hits).toBe(0); + expect(stats.misses).toBe(0); + expect(stats.sets).toBe(0); + expect(stats.deletes).toBe(0); + }); + }); + + describe('MemoryCacheAdapter', () => { + let memCache: MemoryCacheAdapter; + + beforeEach(() => { + memCache = new MemoryCacheAdapter(); + }); + + it('should handle TTL expiration', async () => { + // Set with 1ms TTL + await memCache.set('key1', 'value1', { ttl: 0.001 }); + + // Wait for expiration + await new Promise((resolve) => setTimeout(resolve, 5)); + + const result = await memCache.get('key1'); + expect(result).toBeNull(); + }); + + it('should support tag-based invalidation', async () => { + await memCache.set('key1', 'value1', { tags: ['tag1', 'tag2'] }); + await memCache.set('key2', 'value2', { tags: ['tag1'] }); + await memCache.set('key3', 'value3', { tags: ['tag2'] }); + await memCache.set('key4', 'value4'); + + const count = await memCache.invalidateByTags(['tag1']); + + expect(count).toBe(2); + expect(await memCache.get('key1')).toBeNull(); + expect(await memCache.get('key2')).toBeNull(); + expect(await memCache.get('key3')).toBe('value3'); + expect(await memCache.get('key4')).toBe('value4'); + }); + + it('should prune expired entries', 
async () => { + await memCache.set('expired1', 'value1', { ttl: 0.001 }); + await memCache.set('expired2', 'value2', { ttl: 0.001 }); + await memCache.set('valid', 'value3', { ttl: 3600 }); + + await new Promise((resolve) => setTimeout(resolve, 5)); + + const pruned = await memCache.prune(); + + expect(pruned).toBe(2); + expect(memCache.size()).toBe(1); + expect(await memCache.get('valid')).toBe('value3'); + }); + }); + + describe('invalidatePattern', () => { + it('should invalidate entries matching pattern', async () => { + const layerWithPattern = { + name: 'pattern-layer', + get: vi.fn(), + set: vi.fn(), + delete: vi.fn(), + invalidatePattern: vi.fn().mockResolvedValue(3), + }; + + const cacheWithPattern = new MultiLayerCache({ + layers: [layerWithPattern], + }); + + const count = await cacheWithPattern.invalidatePattern(/user:.*/); + + expect(count).toBe(3); + expect(layerWithPattern.invalidatePattern).toHaveBeenCalledWith(/user:.*/); + }); + + it('should handle layers without pattern support', async () => { + const count = await cache.invalidatePattern(/test/); + expect(count).toBe(0); + }); + }); +}); diff --git a/src/services/cache-adapters/edge-cache-adapter.ts b/src/services/cache-adapters/edge-cache-adapter.ts new file mode 100644 index 0000000..ae97a3c --- /dev/null +++ b/src/services/cache-adapters/edge-cache-adapter.ts @@ -0,0 +1,176 @@ +/** + * Edge cache adapter using Cloudflare Cache API for multi-layer caching + */ + +import type { CacheLayer, CacheOptions } from '../multi-layer-cache'; +import type { ILogger } from '../../core/interfaces/logger'; + +export interface EdgeCacheAdapterConfig { + baseUrl?: string; + logger?: ILogger; +} + +export class EdgeCacheAdapter implements CacheLayer { + name = 'edge'; + private cacheApi: Cache; + private baseUrl: string; + private logger?: ILogger; + + constructor(config?: EdgeCacheAdapterConfig) { + // Check if running in Cloudflare Workers environment + if (typeof caches === 'undefined') { + throw new Error('Edge cache is only available in Cloudflare Workers environment'); + } + + this.cacheApi = caches.default; + this.baseUrl = config?.baseUrl || 'https://cache.internal'; + this.logger = config?.logger; + } + + private getCacheKey(key: string): string { + return `${this.baseUrl}/${key}`; + } + + async get(key: string): Promise { + try { + const cacheKey = this.getCacheKey(key); + const cached = await this.cacheApi.match(cacheKey); + + if (!cached) { + return null; + } + + // Check if expired + const expires = cached.headers.get('expires'); + if (expires && new Date(expires) < new Date()) { + await this.delete(key); + return null; + } + + const data = await cached.json(); + return data as T; + } catch (error) { + this.logger?.error('Edge cache get error', { error, key }); + return null; + } + } + + async set(key: string, value: T, options?: CacheOptions): Promise { + try { + const cacheKey = this.getCacheKey(key); + const ttl = options?.ttl || 300; // Default 5 minutes + + const headers: HeadersInit = { + 'Content-Type': 'application/json', + 'Cache-Control': `public, max-age=${ttl}, s-maxage=${ttl}`, + Expires: new Date(Date.now() + ttl * 1000).toISOString(), + }; + + // Add tags if provided + if (options?.tags && options.tags.length > 0) { + headers['X-Cache-Tags'] = options.tags.join(','); + } + + // Add metadata if provided + if (options?.metadata) { + headers['X-Cache-Metadata'] = JSON.stringify(options.metadata); + } + + const response = new Response(JSON.stringify(value), { headers }); + + await this.cacheApi.put(cacheKey, 
response); + } catch (error) { + this.logger?.error('Edge cache set error', { error, key }); + } + } + + async delete(key: string): Promise { + try { + const cacheKey = this.getCacheKey(key); + await this.cacheApi.delete(cacheKey); + } catch (error) { + this.logger?.error('Edge cache delete error', { error, key }); + } + } + + async has(key: string): Promise { + try { + const cacheKey = this.getCacheKey(key); + const cached = await this.cacheApi.match(cacheKey); + + if (!cached) { + return false; + } + + // Check if expired + const expires = cached.headers.get('expires'); + if (expires && new Date(expires) < new Date()) { + await this.delete(key); + return false; + } + + return true; + } catch (error) { + this.logger?.error('Edge cache has error', { error, key }); + return false; + } + } + + /** + * Cache a Response object directly + */ + async cacheResponse(request: Request, response: Response, options?: CacheOptions): Promise { + try { + const ttl = options?.ttl || 300; + + // Clone response to avoid consuming it + const responseToCache = new Response(response.body, response); + + // Add cache headers + responseToCache.headers.set('Cache-Control', `public, max-age=${ttl}, s-maxage=${ttl}`); + responseToCache.headers.set('Expires', new Date(Date.now() + ttl * 1000).toISOString()); + + if (options?.tags) { + responseToCache.headers.set('X-Cache-Tags', options.tags.join(',')); + } + + await this.cacheApi.put(request, responseToCache); + } catch (error) { + this.logger?.error('Response cache error', { error, url: request.url }); + } + } + + /** + * Get cached Response + */ + async getCachedResponse(request: Request): Promise { + try { + const cached = await this.cacheApi.match(request); + return cached || null; + } catch (error) { + this.logger?.error('Response cache get error', { error, url: request.url }); + return null; + } + } + + /** + * Warm up cache with multiple entries + */ + async warmUp( + entries: Array<{ + key: string; + factory: () => Promise; + options?: CacheOptions; + }>, + ): Promise { + const promises = entries.map(({ key, factory, options }) => + factory() + .then((value) => this.set(key, value, options)) + .catch((error) => { + this.logger?.error('Cache warmup failed', { error, key }); + }), + ); + + await Promise.all(promises); + } +} diff --git a/src/services/cache-adapters/index.ts b/src/services/cache-adapters/index.ts new file mode 100644 index 0000000..bc56fa1 --- /dev/null +++ b/src/services/cache-adapters/index.ts @@ -0,0 +1,7 @@ +export { EdgeCacheAdapter } from './edge-cache-adapter'; +export type { EdgeCacheAdapterConfig } from './edge-cache-adapter'; + +export { KVCacheAdapter } from './kv-cache-adapter'; +export type { KVCacheAdapterConfig } from './kv-cache-adapter'; + +export { MemoryCacheAdapter } from './memory-cache-adapter'; diff --git a/src/services/cache-adapters/kv-cache-adapter.ts b/src/services/cache-adapters/kv-cache-adapter.ts new file mode 100644 index 0000000..019a746 --- /dev/null +++ b/src/services/cache-adapters/kv-cache-adapter.ts @@ -0,0 +1,189 @@ +/** + * Key-Value store cache adapter for multi-layer caching + */ + +import type { IKeyValueStore } from '../../core/interfaces/storage'; +import type { CacheLayer, CacheOptions } from '../multi-layer-cache'; + +export interface KVCacheAdapterConfig { + prefix?: string; + defaultTTL?: number; +} + +export class KVCacheAdapter implements CacheLayer { + name = 'kv'; + private kv: IKeyValueStore; + private prefix: string; + private defaultTTL: number; + + constructor(kv: IKeyValueStore, config?: 
KVCacheAdapterConfig) { + this.kv = kv; + this.prefix = config?.prefix || 'cache:'; + this.defaultTTL = config?.defaultTTL || 300; + } + + private getKey(key: string): string { + return `${this.prefix}${key}`; + } + + async get(key: string): Promise { + try { + const fullKey = this.getKey(key); + const data = await this.kv.get(fullKey); + + if (!data) { + return null; + } + + // Parse stored data + const entry = JSON.parse(data); + + // Check expiration + if (entry.expires && Date.now() > entry.expires) { + await this.delete(key); + return null; + } + + return entry.value as T; + } catch { + // Invalid data or parsing error + return null; + } + } + + async set(key: string, value: T, options?: CacheOptions): Promise { + const ttl = options?.ttl || this.defaultTTL; + const expires = Date.now() + ttl * 1000; + + const entry = { + value, + expires, + tags: options?.tags, + metadata: options?.metadata, + }; + + const fullKey = this.getKey(key); + await this.kv.put(fullKey, JSON.stringify(entry), { expirationTtl: ttl }); + + // Store tag index if tags are provided + if (options?.tags && options.tags.length > 0) { + await this.updateTagIndex(key, options.tags); + } + } + + async delete(key: string): Promise { + const fullKey = this.getKey(key); + + // Get entry to clean up tags + const data = await this.kv.get(fullKey); + if (data) { + try { + const entry = JSON.parse(data); + if (entry.tags) { + await this.removeFromTagIndex(key, entry.tags); + } + } catch { + // Ignore parsing errors + } + } + + await this.kv.delete(fullKey); + } + + async has(key: string): Promise { + const value = await this.get(key); + return value !== null; + } + + /** + * Invalidate entries by tags + */ + async invalidateByTags(tags: string[]): Promise { + let count = 0; + + for (const tag of tags) { + const tagKey = `${this.prefix}tag:${tag}`; + const keysData = await this.kv.get(tagKey); + + if (keysData) { + try { + const keys = JSON.parse(keysData) as string[]; + for (const key of keys) { + await this.delete(key); + count++; + } + } catch { + // Ignore parsing errors + } + } + } + + return count; + } + + /** + * Invalidate entries by pattern + */ + async invalidatePattern(pattern: string | RegExp): Promise { + // Get all keys with prefix + const keys = await this.kv.list({ prefix: this.prefix }); + let count = 0; + + const regex = pattern instanceof RegExp ? 
pattern : new RegExp(pattern); + + for (const keyInfo of keys.keys) { + // Remove prefix to get original key + const originalKey = keyInfo.name.substring(this.prefix.length); + + if (regex.test(originalKey)) { + await this.delete(originalKey); + count++; + } + } + + return count; + } + + private async updateTagIndex(key: string, tags: string[]): Promise { + for (const tag of tags) { + const tagKey = `${this.prefix}tag:${tag}`; + const existingData = await this.kv.get(tagKey); + + let keys: string[] = []; + if (existingData) { + try { + keys = JSON.parse(existingData); + } catch { + keys = []; + } + } + + if (!keys.includes(key)) { + keys.push(key); + await this.kv.put(tagKey, JSON.stringify(keys), { expirationTtl: 86400 }); // 24 hours + } + } + } + + private async removeFromTagIndex(key: string, tags: string[]): Promise { + for (const tag of tags) { + const tagKey = `${this.prefix}tag:${tag}`; + const existingData = await this.kv.get(tagKey); + + if (existingData) { + try { + let keys = JSON.parse(existingData) as string[]; + keys = keys.filter((k) => k !== key); + + if (keys.length > 0) { + await this.kv.put(tagKey, JSON.stringify(keys), { expirationTtl: 86400 }); + } else { + await this.kv.delete(tagKey); + } + } catch { + // Ignore parsing errors + } + } + } + } +} diff --git a/src/services/cache-adapters/memory-cache-adapter.ts b/src/services/cache-adapters/memory-cache-adapter.ts new file mode 100644 index 0000000..3ef335d --- /dev/null +++ b/src/services/cache-adapters/memory-cache-adapter.ts @@ -0,0 +1,162 @@ +/** + * In-memory cache adapter for multi-layer caching + */ + +import type { CacheLayer, CacheOptions } from '../multi-layer-cache'; + +interface CacheEntry { + value: T; + expires: number; + tags?: string[]; +} + +export class MemoryCacheAdapter implements CacheLayer { + name = 'memory'; + private cache = new Map>(); + private tagIndex = new Map>(); + + async get(key: string): Promise { + const entry = this.cache.get(key); + + if (!entry) { + return null; + } + + // Check expiration + if (Date.now() > entry.expires) { + await this.delete(key); + return null; + } + + return entry.value; + } + + async set(key: string, value: T, options?: CacheOptions): Promise { + const ttl = options?.ttl || 300; + const expires = Date.now() + ttl * 1000; + + // Remove from old tags if exists + const existingEntry = this.cache.get(key); + if (existingEntry?.tags) { + this.removeFromTags(key, existingEntry.tags); + } + + // Set new entry + const entry: CacheEntry = { + value, + expires, + tags: options?.tags, + }; + + this.cache.set(key, entry); + + // Update tag index + if (options?.tags) { + this.addToTags(key, options.tags); + } + } + + async delete(key: string): Promise { + const entry = this.cache.get(key); + if (entry) { + if (entry.tags) { + this.removeFromTags(key, entry.tags); + } + this.cache.delete(key); + } + } + + async has(key: string): Promise { + const entry = this.cache.get(key); + if (!entry) return false; + + // Check expiration + if (Date.now() > entry.expires) { + await this.delete(key); + return false; + } + + return true; + } + + /** + * Clear all entries + */ + async clear(): Promise { + this.cache.clear(); + this.tagIndex.clear(); + } + + /** + * Get cache size + */ + size(): number { + return this.cache.size; + } + + /** + * Invalidate entries by tags + */ + async invalidateByTags(tags: string[]): Promise { + const keysToDelete = new Set(); + + for (const tag of tags) { + const keys = this.tagIndex.get(tag); + if (keys) { + for (const key of keys) { + 
keysToDelete.add(key); + } + } + } + + const count = keysToDelete.size; + for (const key of keysToDelete) { + await this.delete(key); + } + + return count; + } + + /** + * Prune expired entries + */ + async prune(): Promise { + const now = Date.now(); + const keysToDelete: string[] = []; + + for (const [key, entry] of this.cache.entries()) { + if (now > entry.expires) { + keysToDelete.push(key); + } + } + + for (const key of keysToDelete) { + await this.delete(key); + } + + return keysToDelete.length; + } + + private addToTags(key: string, tags: string[]): void { + for (const tag of tags) { + let keys = this.tagIndex.get(tag); + if (!keys) { + keys = new Set(); + this.tagIndex.set(tag, keys); + } + keys.add(key); + } + } + + private removeFromTags(key: string, tags: string[]): void { + for (const tag of tags) { + const keys = this.tagIndex.get(tag); + if (keys) { + keys.delete(key); + if (keys.size === 0) { + this.tagIndex.delete(tag); + } + } + } + } +} diff --git a/src/services/multi-layer-cache.ts b/src/services/multi-layer-cache.ts new file mode 100644 index 0000000..479e18e --- /dev/null +++ b/src/services/multi-layer-cache.ts @@ -0,0 +1,297 @@ +/** + * Multi-layer caching service with automatic layer population + * Provides a hierarchy of caches: L1 (fastest) -> L2 -> L3 (slowest) + */ + +import type { ILogger } from '../core/interfaces/logger'; + +export interface CacheLayer { + name: string; + get(key: string): Promise; + set(key: string, value: T, options?: CacheOptions): Promise; + delete(key: string): Promise; + has?(key: string): Promise; +} + +export interface CacheOptions { + ttl?: number; // Time to live in seconds + tags?: string[]; // Cache tags for bulk invalidation + metadata?: Record; +} + +export interface MultiLayerCacheConfig { + layers: CacheLayer[]; + defaultTTL?: number; + populateUpperLayers?: boolean; + logger?: ILogger; +} + +export interface CacheStats { + hits: number; + misses: number; + sets: number; + deletes: number; + layerHits: Record; +} + +export class MultiLayerCache { + private layers: CacheLayer[]; + private defaultTTL: number; + private populateUpperLayers: boolean; + private logger?: ILogger; + private stats: CacheStats; + + constructor(config: MultiLayerCacheConfig) { + if (config.layers.length === 0) { + throw new Error('At least one cache layer is required'); + } + + this.layers = config.layers as CacheLayer[]; + this.defaultTTL = config.defaultTTL || 300; // 5 minutes default + this.populateUpperLayers = config.populateUpperLayers ?? 
true; + this.logger = config.logger; + + this.stats = { + hits: 0, + misses: 0, + sets: 0, + deletes: 0, + layerHits: {}, + }; + + // Initialize layer hit counters + for (const layer of this.layers) { + this.stats.layerHits[layer.name] = 0; + } + } + + /** + * Get value from cache, checking each layer in order + */ + async get(key: string): Promise { + const missedLayers: Array<{ layer: CacheLayer; index: number }> = []; + + for (let i = 0; i < this.layers.length; i++) { + const layer = this.layers[i]; + if (!layer) continue; + + try { + const value = await layer.get(key); + + if (value !== null) { + this.stats.hits++; + const layerName = layer.name; + const hitCount = this.stats.layerHits[layerName]; + if (hitCount !== undefined) { + this.stats.layerHits[layerName] = hitCount + 1; + } + this.logger?.debug('Cache hit', { key, layer: layer.name }); + + // Populate upper layers if enabled + if (this.populateUpperLayers && missedLayers.length > 0) { + this.populateUpperLayersAsync(missedLayers, key, value); + } + + return value; + } + + missedLayers.push({ layer, index: i }); + } catch (error) { + this.logger?.error('Cache layer error', { error, layer: layer.name, key }); + } + } + + this.stats.misses++; + this.logger?.debug('Cache miss', { key }); + return null; + } + + /** + * Set value in all cache layers + */ + async set(key: string, value: T, options?: CacheOptions): Promise { + const ttl = options?.ttl || this.defaultTTL; + const promises: Promise[] = []; + + for (const layer of this.layers) { + promises.push( + layer.set(key, value, { ...options, ttl }).catch((error) => { + this.logger?.error('Failed to set in cache layer', { + error, + layer: layer.name, + key, + }); + }), + ); + } + + await Promise.all(promises); + this.stats.sets++; + this.logger?.debug('Cache set', { key, ttl }); + } + + /** + * Delete value from all cache layers + */ + async delete(key: string): Promise { + const promises: Promise[] = []; + + for (const layer of this.layers) { + promises.push( + layer.delete(key).catch((error) => { + this.logger?.error('Failed to delete from cache layer', { + error, + layer: layer.name, + key, + }); + }), + ); + } + + await Promise.all(promises); + this.stats.deletes++; + this.logger?.debug('Cache delete', { key }); + } + + /** + * Get or set with cache-aside pattern + */ + async getOrSet(key: string, factory: () => Promise, options?: CacheOptions): Promise { + // Try to get from cache + const cached = await this.get(key); + if (cached !== null) { + return cached; + } + + // Generate value + const value = await factory(); + + // Cache it + await this.set(key, value, options); + + return value; + } + + /** + * Warm up cache with predefined values + */ + async warmUp( + items: Array<{ + key: string; + factory: () => Promise; + options?: CacheOptions; + }>, + ): Promise { + this.logger?.info('Warming up cache', { count: items.length }); + + const promises = items.map(({ key, factory, options }) => + this.getOrSet(key, factory, options).catch((error) => { + this.logger?.error('Cache warmup failed', { error, key }); + }), + ); + + await Promise.all(promises); + } + + /** + * Check if key exists in any layer + */ + async has(key: string): Promise { + for (const layer of this.layers) { + try { + if (layer.has) { + const exists = await layer.has(key); + if (exists) return true; + } else { + // Fallback to get if has is not implemented + const value = await layer.get(key); + if (value !== null) return true; + } + } catch (error) { + this.logger?.error('Cache layer error during has check', 
{ + error, + layer: layer.name, + key, + }); + } + } + + return false; + } + + /** + * Get cache statistics + */ + getStats(): CacheStats { + return { ...this.stats }; + } + + /** + * Reset cache statistics + */ + resetStats(): void { + this.stats.hits = 0; + this.stats.misses = 0; + this.stats.sets = 0; + this.stats.deletes = 0; + + for (const layer of this.layers) { + this.stats.layerHits[layer.name] = 0; + } + } + + /** + * Populate upper layers asynchronously + */ + private populateUpperLayersAsync( + missedLayers: Array<{ layer: CacheLayer; index: number }>, + key: string, + value: T, + ): void { + // Calculate TTL based on layer position + const baseTTL = this.defaultTTL; + + Promise.all( + missedLayers.map(({ layer, index }) => { + // Reduce TTL for upper layers + const ttl = Math.max(60, Math.floor(baseTTL * (1 - index * 0.2))); + + return layer.set(key, value, { ttl }).catch((error) => { + this.logger?.error('Failed to populate upper layer', { + error, + layer: layer.name, + key, + }); + }); + }), + ).catch(() => { + // Already logged individual errors + }); + } + + /** + * Invalidate by pattern (requires pattern support in layers) + */ + async invalidatePattern(pattern: string | RegExp): Promise { + let totalInvalidated = 0; + + for (const layer of this.layers) { + try { + // Check if layer supports pattern invalidation + if ('invalidatePattern' in layer && typeof layer.invalidatePattern === 'function') { + // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Pattern invalidation is an optional extension + const count = await (layer as any).invalidatePattern(pattern); + totalInvalidated += count; + } + } catch (error) { + this.logger?.error('Pattern invalidation error', { + error, + layer: layer.name, + pattern: pattern.toString(), + }); + } + } + + return totalInvalidated; + } +}