/**
* AgentKits — Response Cache Module
*
* In-memory LRU cache for LLM responses. Keys by hashing prompt+model+params.
* Configurable TTL and max size.
*
* Usage:
* import { createResponseCache } from 'agentkits/response-cache';
* const cache = createResponseCache({ maxSize: 500, ttlMs: 600_000 });
* cache.set({ prompt: '...', model: 'gpt-4o' }, 'response');
* const hit = cache.get({ prompt: '...', model: 'gpt-4o' });
*/
import { createHash } from 'crypto';
// ── Types ──────────────────────────────────────────────────────────
/** Options accepted by {@link createResponseCache}. */
export interface ResponseCacheConfig {
/** Max entries in cache (default: 1000) */
maxSize?: number;
/** TTL in milliseconds (default: 10 minutes) */
ttlMs?: number;
}
/**
 * Identifies one LLM request for caching. Every field — including any
 * extra fields admitted by the index signature — participates in the
 * cache hash computed by hashKey.
 */
export interface CacheKey {
/** Plain-text prompt (single-turn style requests). */
prompt?: string;
/** Chat-style message list (multi-turn style requests). */
messages?: Array<{ role: string; content: string }>;
/** Model identifier, e.g. 'gpt-4o'. The only required field. */
model: string;
temperature?: number;
maxTokens?: number;
/** Any additional request parameter that should distinguish cache entries. */
[key: string]: unknown;
}
/** Public cache handle returned by {@link createResponseCache}. */
export interface ResponseCache {
/** Cached response, or undefined on miss or expiry. Refreshes the entry's LRU position. */
get(key: CacheKey): string | undefined;
/** Stores a response, purging expired and least-recently-used entries as needed. */
set(key: CacheKey, response: string): void;
/** True if a live (unexpired) entry exists. Does not refresh LRU position or touch hit/miss counters. */
has(key: CacheKey): boolean;
/** Removes the entry for this key; returns true if one existed. */
delete(key: CacheKey): boolean;
/** Empties the cache. Stats counters are not reset. */
clear(): void;
/** Current entry count, including expired entries not yet lazily purged. */
readonly size: number;
/** Cumulative hit / miss / eviction counters. */
readonly stats: { hits: number; misses: number; evictions: number };
}
/** Internal record stored per cached response. */
interface CacheEntry {
/** The cached response text. */
value: string;
/** Epoch milliseconds after which the entry is considered expired. */
expiresAt: number;
/** The entry's own map key (16-hex-char sha256 prefix), kept for convenience. */
hash: string;
}
// ── Helpers ────────────────────────────────────────────────────────
/**
 * Deterministically serialize a value with object keys sorted at every
 * depth, so logically-equal keys produce identical strings regardless of
 * property order. `undefined` object properties are skipped and
 * `undefined` array elements become null, matching JSON.stringify.
 */
function stableStringify(value: unknown): string {
  if (value === null || typeof value !== 'object') {
    return JSON.stringify(value) ?? 'null';
  }
  if (Array.isArray(value)) {
    return `[${value.map((v) => (v === undefined ? 'null' : stableStringify(v))).join(',')}]`;
  }
  const obj = value as Record<string, unknown>;
  const parts = Object.keys(obj)
    .sort()
    .filter((k) => obj[k] !== undefined)
    .map((k) => `${JSON.stringify(k)}:${stableStringify(obj[k])}`);
  return `{${parts.join(',')}}`;
}
/**
 * Hash a cache key to a 16-hex-char string (sha256 prefix).
 *
 * Fix: the previous `JSON.stringify(key, Object.keys(key).sort())` passed
 * the TOP-LEVEL keys as a replacer array, which filters properties at
 * EVERY nesting level. Nested properties such as `messages[i].role` and
 * `messages[i].content` were stripped from the serialization, so requests
 * with different conversations collided to the same hash. A recursive
 * key-sorted serialization keeps all nested data while remaining
 * independent of property insertion order.
 */
function hashKey(key: CacheKey): string {
  return createHash('sha256').update(stableStringify(key)).digest('hex').slice(0, 16);
}
// ── Factory ────────────────────────────────────────────────────────
/**
 * Create an in-memory LRU cache for LLM responses.
 *
 * Entries are keyed by hashKey(key) and expire after `ttlMs`. Expiry is
 * lazy: expired entries are dropped when touched by get/has or swept
 * during set. Before each insert, expired entries are purged and then
 * least-recently-used entries are evicted until the cache has room.
 *
 * @param config - Optional `maxSize` (default 1000, clamped to at least 1)
 *   and `ttlMs` (default 10 minutes).
 * @returns A {@link ResponseCache} closure over private state.
 */
export function createResponseCache(config: ResponseCacheConfig = {}): ResponseCache {
  // Clamp: with maxSize <= 0, evictLRU would empty the map, hit the
  // `oldest === undefined` break, and set() would still insert — silently
  // growing past the configured limit on every write.
  const maxSize = Math.max(1, config.maxSize ?? 1000);
  const ttlMs = config.ttlMs ?? 600_000; // 10 min
  // Map preserves insertion order — the first key is always the
  // least-recently-used entry, which is what evictLRU relies on.
  const map = new Map<string, CacheEntry>();
  let hits = 0;
  let misses = 0;
  let evictions = 0;
  /** Drop every entry whose TTL has elapsed; each counts as an eviction. */
  function evictExpired() {
    const now = Date.now();
    for (const [k, v] of map) {
      if (v.expiresAt <= now) {
        map.delete(k);
        evictions++;
      }
    }
  }
  /** Evict least-recently-used entries until one insert will fit. */
  function evictLRU() {
    while (map.size >= maxSize) {
      const oldest = map.keys().next().value;
      if (oldest !== undefined) {
        map.delete(oldest);
        evictions++;
      } else break;
    }
  }
  return {
    get(key: CacheKey): string | undefined {
      const h = hashKey(key);
      const entry = map.get(h);
      if (!entry) {
        misses++;
        return undefined;
      }
      if (entry.expiresAt <= Date.now()) {
        // Lazy expiry on read counts as both a miss and an eviction.
        map.delete(h);
        misses++;
        evictions++;
        return undefined;
      }
      // Re-insert to move the entry to the end (most recently used).
      map.delete(h);
      map.set(h, entry);
      hits++;
      return entry.value;
    },
    set(key: CacheKey, response: string) {
      const h = hashKey(key);
      // Delete first so re-setting an existing key refreshes its LRU
      // position and is not counted against capacity by evictLRU.
      map.delete(h);
      evictExpired();
      evictLRU();
      map.set(h, { value: response, expiresAt: Date.now() + ttlMs, hash: h });
    },
    has(key: CacheKey): boolean {
      const h = hashKey(key);
      const entry = map.get(h);
      if (!entry) return false;
      if (entry.expiresAt <= Date.now()) {
        map.delete(h);
        // Fix: get() counted lazy expiry as an eviction but has() did not,
        // so stats.evictions undercounted. Keep the two paths consistent.
        evictions++;
        return false;
      }
      return true;
    },
    delete(key: CacheKey): boolean {
      return map.delete(hashKey(key));
    },
    clear() {
      map.clear();
    },
    get size() {
      return map.size;
    },
    get stats() {
      return { hits, misses, evictions };
    },
  };
}
|