Files
telemetry-client-js/src/prediction-cache.ts
Jason Woltje 493bc72601
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
chore(#1): apply Prettier formatting to all source and test files
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-14 22:48:08 -06:00

60 lines
1.4 KiB
TypeScript

import { PredictionQuery, PredictionResponse } from "./types/predictions.js";
/** A single cached prediction together with its absolute expiry deadline. */
interface CacheEntry {
// The cached prediction response handed back to callers on a cache hit.
response: PredictionResponse;
// Absolute epoch time in milliseconds (Date.now()-based) after which the entry is stale.
expiresAt: number;
}
/**
 * TTL-based in-memory cache for prediction responses.
 *
 * Expiry is lazy: a stale entry is only evicted when a later `get` for the
 * same key observes that its deadline has passed. Consequently `size` may
 * count entries that are already past their TTL.
 */
export class PredictionCache {
  private readonly cache = new Map<string, CacheEntry>();
  private readonly ttlMs: number;

  /** @param ttlMs - How long, in milliseconds, a stored response stays valid. */
  constructor(ttlMs: number) {
    this.ttlMs = ttlMs;
  }

  /** Build a deterministic cache key from a prediction query. */
  private buildKey(q: PredictionQuery): string {
    const { task_type, model, provider, complexity } = q;
    return `${task_type}:${model}:${provider}:${complexity}`;
  }

  /**
   * Look up a cached prediction.
   *
   * @returns The cached response, or `null` when the key is absent or the
   *          entry's TTL has elapsed (expired entries are deleted here).
   */
  get(query: PredictionQuery): PredictionResponse | null {
    const key = this.buildKey(query);
    const hit = this.cache.get(key);

    // Fresh hit: still within its TTL window.
    if (hit && Date.now() <= hit.expiresAt) {
      return hit.response;
    }

    // Stale hit: evict it now so it no longer occupies the map.
    if (hit) {
      this.cache.delete(key);
    }
    return null;
  }

  /** Store a prediction response, stamping it with a fresh TTL deadline. */
  set(query: PredictionQuery, response: PredictionResponse): void {
    const entry: CacheEntry = {
      response,
      expiresAt: Date.now() + this.ttlMs,
    };
    this.cache.set(this.buildKey(query), entry);
  }

  /** Drop every cached prediction. */
  clear(): void {
    this.cache.clear();
  }

  /** Number of entries currently in the map (expired ones included until evicted). */
  get size(): number {
    return this.cache.size;
  }
}