feat: TypeScript telemetry client SDK v0.1.0
Standalone npm package (@mosaicstack/telemetry-client) for reporting task-completion telemetry and querying predictions from the Mosaic Stack Telemetry server. - TelemetryClient with setInterval-based background flush - EventQueue (bounded FIFO array) - BatchSubmitter with native fetch, exponential backoff, Retry-After - PredictionCache (Map + TTL) - EventBuilder with auto-generated event_id/timestamp - Zero runtime dependencies (Node 18+ native APIs) - 43 tests, 86% branch coverage Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
318
tests/client.test.ts
Normal file
318
tests/client.test.ts
Normal file
@@ -0,0 +1,318 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { TelemetryClient } from '../src/client.js';
|
||||
import { TelemetryConfig } from '../src/config.js';
|
||||
import {
|
||||
TaskCompletionEvent,
|
||||
TaskType,
|
||||
Complexity,
|
||||
Harness,
|
||||
Provider,
|
||||
Outcome,
|
||||
} from '../src/types/events.js';
|
||||
import { PredictionQuery, PredictionResponse } from '../src/types/predictions.js';
|
||||
|
||||
function makeConfig(overrides: Partial<TelemetryConfig> = {}): TelemetryConfig {
|
||||
return {
|
||||
serverUrl: 'https://tel.example.com',
|
||||
apiKey: 'a'.repeat(64),
|
||||
instanceId: 'test-instance',
|
||||
submitIntervalMs: 60_000,
|
||||
maxQueueSize: 100,
|
||||
batchSize: 10,
|
||||
requestTimeoutMs: 5000,
|
||||
dryRun: true, // Use dryRun by default in tests
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function makeEvent(id = 'evt-1'): TaskCompletionEvent {
|
||||
return {
|
||||
instance_id: 'test-instance',
|
||||
event_id: id,
|
||||
schema_version: '1.0',
|
||||
timestamp: new Date().toISOString(),
|
||||
task_duration_ms: 5000,
|
||||
task_type: TaskType.IMPLEMENTATION,
|
||||
complexity: Complexity.MEDIUM,
|
||||
harness: Harness.CLAUDE_CODE,
|
||||
model: 'claude-3-opus',
|
||||
provider: Provider.ANTHROPIC,
|
||||
estimated_input_tokens: 1000,
|
||||
estimated_output_tokens: 500,
|
||||
actual_input_tokens: 1100,
|
||||
actual_output_tokens: 550,
|
||||
estimated_cost_usd_micros: 50000,
|
||||
actual_cost_usd_micros: 55000,
|
||||
quality_gate_passed: true,
|
||||
quality_gates_run: [],
|
||||
quality_gates_failed: [],
|
||||
context_compactions: 0,
|
||||
context_rotations: 0,
|
||||
context_utilization_final: 0.5,
|
||||
outcome: Outcome.SUCCESS,
|
||||
retry_count: 0,
|
||||
};
|
||||
}
|
||||
|
||||
function makeQuery(): PredictionQuery {
|
||||
return {
|
||||
task_type: TaskType.IMPLEMENTATION,
|
||||
model: 'claude-3-opus',
|
||||
provider: Provider.ANTHROPIC,
|
||||
complexity: Complexity.MEDIUM,
|
||||
};
|
||||
}
|
||||
|
||||
function makePredictionResponse(): PredictionResponse {
|
||||
return {
|
||||
prediction: {
|
||||
input_tokens: { p10: 500, p25: 750, median: 1000, p75: 1500, p90: 2000 },
|
||||
output_tokens: { p10: 200, p25: 350, median: 500, p75: 750, p90: 1000 },
|
||||
cost_usd_micros: { median: 50000 },
|
||||
duration_ms: { median: 30000 },
|
||||
correction_factors: { input: 1.1, output: 1.05 },
|
||||
quality: { gate_pass_rate: 0.85, success_rate: 0.9 },
|
||||
},
|
||||
metadata: {
|
||||
sample_size: 100,
|
||||
fallback_level: 0,
|
||||
confidence: 'high',
|
||||
last_updated: new Date().toISOString(),
|
||||
cache_hit: false,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Tests for TelemetryClient: start/stop lifecycle, event queueing,
 * prediction refresh/caching, and the interval-driven background flush.
 * Global fetch is stubbed and timers are faked, so no real I/O occurs.
 */
describe('TelemetryClient', () => {
  let fetchSpy: ReturnType<typeof vi.fn>;

  beforeEach(() => {
    // Fake timers let tests step over submit intervals instantly.
    vi.useFakeTimers();
    fetchSpy = vi.fn();
    vi.stubGlobal('fetch', fetchSpy);
  });

  afterEach(() => {
    vi.useRealTimers();
    vi.unstubAllGlobals();
  });

  describe('start/stop lifecycle', () => {
    it('should start and stop cleanly', async () => {
      const client = new TelemetryClient(makeConfig());

      expect(client.isRunning).toBe(false);
      client.start();
      expect(client.isRunning).toBe(true);

      await client.stop();
      expect(client.isRunning).toBe(false);
    });

    it('should be idempotent on start', () => {
      const client = new TelemetryClient(makeConfig());
      client.start();
      client.start(); // Should not throw or create double intervals
      expect(client.isRunning).toBe(true);
    });

    it('should be idempotent on stop', async () => {
      // stop() before start() and repeated stop() must both be no-ops.
      const client = new TelemetryClient(makeConfig());
      await client.stop();
      await client.stop(); // Should not throw
      expect(client.isRunning).toBe(false);
    });

    it('should flush events on stop', async () => {
      const client = new TelemetryClient(makeConfig());
      client.start();

      client.track(makeEvent('e1'));
      client.track(makeEvent('e2'));
      expect(client.queueSize).toBe(2);

      await client.stop();
      // In dryRun mode, flush succeeds and queue should be empty
      expect(client.queueSize).toBe(0);
    });
  });

  describe('track()', () => {
    it('should queue events', () => {
      const client = new TelemetryClient(makeConfig());
      client.track(makeEvent('e1'));
      client.track(makeEvent('e2'));
      expect(client.queueSize).toBe(2);
    });

    it('should silently drop events when disabled', () => {
      // enabled: false must discard, not queue, without surfacing errors.
      const client = new TelemetryClient(makeConfig({ enabled: false }));
      client.track(makeEvent());
      expect(client.queueSize).toBe(0);
    });

    it('should never throw even on internal error', () => {
      const errorFn = vi.fn();
      const client = new TelemetryClient(
        makeConfig({ onError: errorFn, maxQueueSize: 0 }),
      );

      // This should not throw. maxQueueSize of 0 could cause issues
      // but track() is designed to catch everything.
      expect(() => client.track(makeEvent())).not.toThrow();
    });
  });

  describe('predictions', () => {
    it('should return null for uncached prediction', () => {
      const client = new TelemetryClient(makeConfig());
      const result = client.getPrediction(makeQuery());
      expect(result).toBeNull();
    });

    it('should return cached prediction after refresh', async () => {
      const predictionResponse = makePredictionResponse();
      fetchSpy.mockResolvedValueOnce({
        ok: true,
        status: 200,
        json: () =>
          Promise.resolve({
            results: [predictionResponse],
          }),
      });

      // dryRun: false so the refresh actually hits the (stubbed) network.
      const client = new TelemetryClient(makeConfig({ dryRun: false }));
      const query = makeQuery();

      await client.refreshPredictions([query]);

      const result = client.getPrediction(query);
      expect(result).toEqual(predictionResponse);
    });

    it('should handle refresh error gracefully', async () => {
      fetchSpy.mockRejectedValueOnce(new Error('Network error'));

      const errorFn = vi.fn();
      const client = new TelemetryClient(
        makeConfig({ dryRun: false, onError: errorFn }),
      );

      // Should not throw
      await client.refreshPredictions([makeQuery()]);
      expect(errorFn).toHaveBeenCalledWith(expect.any(Error));
    });

    it('should handle non-ok HTTP response on refresh', async () => {
      fetchSpy.mockResolvedValueOnce({
        ok: false,
        status: 500,
        statusText: 'Internal Server Error',
      });

      const errorFn = vi.fn();
      const client = new TelemetryClient(
        makeConfig({ dryRun: false, onError: errorFn }),
      );

      // A 5xx must surface through onError, never as a thrown exception.
      await client.refreshPredictions([makeQuery()]);
      expect(errorFn).toHaveBeenCalledWith(expect.any(Error));
    });
  });

  describe('background flush', () => {
    it('should trigger flush on interval', async () => {
      const client = new TelemetryClient(
        makeConfig({ submitIntervalMs: 10_000 }),
      );
      client.start();

      client.track(makeEvent('e1'));
      expect(client.queueSize).toBe(1);

      // Advance past submit interval
      await vi.advanceTimersByTimeAsync(11_000);

      // In dryRun mode, events should be flushed
      expect(client.queueSize).toBe(0);

      await client.stop();
    });
  });

  describe('flush error handling', () => {
    it('should re-enqueue events on submit failure', async () => {
      // Use non-dryRun mode to actually hit the submitter
      fetchSpy.mockResolvedValueOnce({
        ok: false,
        status: 500,
        statusText: 'Internal Server Error',
      });

      const errorFn = vi.fn();
      const client = new TelemetryClient(
        makeConfig({ dryRun: false, maxRetries: 0, onError: errorFn }),
      );

      client.track(makeEvent('e1'));
      expect(client.queueSize).toBe(1);

      // Start and trigger flush
      client.start();
      await vi.advanceTimersByTimeAsync(70_000);

      // Events should be re-enqueued after failure
      expect(client.queueSize).toBeGreaterThan(0);

      await client.stop();
    });

    it('should handle onError callback that throws', async () => {
      const throwingErrorFn = () => {
        throw new Error('Error handler broke');
      };
      const client = new TelemetryClient(
        makeConfig({ onError: throwingErrorFn, enabled: false }),
      );

      // This should not throw even though onError throws
      // Force an error path by calling track when disabled (no error),
      // but we can test via refreshPredictions
      fetchSpy.mockRejectedValueOnce(new Error('fail'));
      await expect(client.refreshPredictions([makeQuery()])).resolves.not.toThrow();
    });
  });

  describe('event builder', () => {
    it('should expose an event builder', () => {
      const client = new TelemetryClient(makeConfig());
      expect(client.eventBuilder).toBeDefined();

      const event = client.eventBuilder.build({
        task_duration_ms: 1000,
        task_type: TaskType.TESTING,
        complexity: Complexity.LOW,
        harness: Harness.AIDER,
        model: 'gpt-4',
        provider: Provider.OPENAI,
        estimated_input_tokens: 100,
        estimated_output_tokens: 50,
        actual_input_tokens: 100,
        actual_output_tokens: 50,
        estimated_cost_usd_micros: 1000,
        actual_cost_usd_micros: 1000,
        quality_gate_passed: true,
        quality_gates_run: [],
        quality_gates_failed: [],
        context_compactions: 0,
        context_rotations: 0,
        context_utilization_final: 0.3,
        outcome: Outcome.SUCCESS,
        retry_count: 0,
      });

      // The builder must stamp identity fields from the client's config.
      expect(event.instance_id).toBe('test-instance');
      expect(event.schema_version).toBe('1.0');
    });
  });
});
|
||||
219
tests/event-builder.test.ts
Normal file
219
tests/event-builder.test.ts
Normal file
@@ -0,0 +1,219 @@
|
||||
import { describe, it, expect, vi, afterEach } from 'vitest';
|
||||
import { EventBuilder } from '../src/event-builder.js';
|
||||
import { ResolvedConfig } from '../src/config.js';
|
||||
import {
|
||||
TaskType,
|
||||
Complexity,
|
||||
Harness,
|
||||
Provider,
|
||||
Outcome,
|
||||
QualityGate,
|
||||
RepoSizeCategory,
|
||||
} from '../src/types/events.js';
|
||||
|
||||
function makeConfig(): ResolvedConfig {
|
||||
return {
|
||||
serverUrl: 'https://tel.example.com',
|
||||
apiKey: 'a'.repeat(64),
|
||||
instanceId: 'my-instance-uuid',
|
||||
enabled: true,
|
||||
submitIntervalMs: 300_000,
|
||||
maxQueueSize: 1000,
|
||||
batchSize: 100,
|
||||
requestTimeoutMs: 10_000,
|
||||
predictionCacheTtlMs: 21_600_000,
|
||||
dryRun: false,
|
||||
maxRetries: 3,
|
||||
onError: () => {},
|
||||
};
|
||||
}
|
||||
|
||||
describe('EventBuilder', () => {
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it('should build a complete TaskCompletionEvent', () => {
|
||||
const builder = new EventBuilder(makeConfig());
|
||||
const event = builder.build({
|
||||
task_duration_ms: 15000,
|
||||
task_type: TaskType.IMPLEMENTATION,
|
||||
complexity: Complexity.HIGH,
|
||||
harness: Harness.CLAUDE_CODE,
|
||||
model: 'claude-3-opus',
|
||||
provider: Provider.ANTHROPIC,
|
||||
estimated_input_tokens: 2000,
|
||||
estimated_output_tokens: 1000,
|
||||
actual_input_tokens: 2200,
|
||||
actual_output_tokens: 1100,
|
||||
estimated_cost_usd_micros: 100000,
|
||||
actual_cost_usd_micros: 110000,
|
||||
quality_gate_passed: true,
|
||||
quality_gates_run: [QualityGate.BUILD, QualityGate.TEST, QualityGate.LINT],
|
||||
quality_gates_failed: [],
|
||||
context_compactions: 2,
|
||||
context_rotations: 1,
|
||||
context_utilization_final: 0.75,
|
||||
outcome: Outcome.SUCCESS,
|
||||
retry_count: 0,
|
||||
language: 'typescript',
|
||||
repo_size_category: RepoSizeCategory.MEDIUM,
|
||||
});
|
||||
|
||||
expect(event.task_type).toBe(TaskType.IMPLEMENTATION);
|
||||
expect(event.complexity).toBe(Complexity.HIGH);
|
||||
expect(event.model).toBe('claude-3-opus');
|
||||
expect(event.quality_gates_run).toEqual([
|
||||
QualityGate.BUILD,
|
||||
QualityGate.TEST,
|
||||
QualityGate.LINT,
|
||||
]);
|
||||
expect(event.language).toBe('typescript');
|
||||
expect(event.repo_size_category).toBe(RepoSizeCategory.MEDIUM);
|
||||
});
|
||||
|
||||
it('should auto-generate event_id as UUID', () => {
|
||||
const builder = new EventBuilder(makeConfig());
|
||||
const event = builder.build({
|
||||
task_duration_ms: 1000,
|
||||
task_type: TaskType.TESTING,
|
||||
complexity: Complexity.LOW,
|
||||
harness: Harness.AIDER,
|
||||
model: 'gpt-4',
|
||||
provider: Provider.OPENAI,
|
||||
estimated_input_tokens: 100,
|
||||
estimated_output_tokens: 50,
|
||||
actual_input_tokens: 100,
|
||||
actual_output_tokens: 50,
|
||||
estimated_cost_usd_micros: 1000,
|
||||
actual_cost_usd_micros: 1000,
|
||||
quality_gate_passed: true,
|
||||
quality_gates_run: [],
|
||||
quality_gates_failed: [],
|
||||
context_compactions: 0,
|
||||
context_rotations: 0,
|
||||
context_utilization_final: 0.3,
|
||||
outcome: Outcome.SUCCESS,
|
||||
retry_count: 0,
|
||||
});
|
||||
|
||||
// UUID format: 8-4-4-4-12 hex chars
|
||||
expect(event.event_id).toMatch(
|
||||
/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/,
|
||||
);
|
||||
|
||||
// Each event should get a unique ID
|
||||
const event2 = builder.build({
|
||||
task_duration_ms: 1000,
|
||||
task_type: TaskType.TESTING,
|
||||
complexity: Complexity.LOW,
|
||||
harness: Harness.AIDER,
|
||||
model: 'gpt-4',
|
||||
provider: Provider.OPENAI,
|
||||
estimated_input_tokens: 100,
|
||||
estimated_output_tokens: 50,
|
||||
actual_input_tokens: 100,
|
||||
actual_output_tokens: 50,
|
||||
estimated_cost_usd_micros: 1000,
|
||||
actual_cost_usd_micros: 1000,
|
||||
quality_gate_passed: true,
|
||||
quality_gates_run: [],
|
||||
quality_gates_failed: [],
|
||||
context_compactions: 0,
|
||||
context_rotations: 0,
|
||||
context_utilization_final: 0.3,
|
||||
outcome: Outcome.SUCCESS,
|
||||
retry_count: 0,
|
||||
});
|
||||
|
||||
expect(event.event_id).not.toBe(event2.event_id);
|
||||
});
|
||||
|
||||
it('should auto-set timestamp to ISO 8601', () => {
|
||||
const now = new Date('2026-02-07T10:00:00.000Z');
|
||||
vi.setSystemTime(now);
|
||||
|
||||
const builder = new EventBuilder(makeConfig());
|
||||
const event = builder.build({
|
||||
task_duration_ms: 1000,
|
||||
task_type: TaskType.DEBUGGING,
|
||||
complexity: Complexity.MEDIUM,
|
||||
harness: Harness.OPENCODE,
|
||||
model: 'claude-3-sonnet',
|
||||
provider: Provider.ANTHROPIC,
|
||||
estimated_input_tokens: 500,
|
||||
estimated_output_tokens: 200,
|
||||
actual_input_tokens: 500,
|
||||
actual_output_tokens: 200,
|
||||
estimated_cost_usd_micros: 5000,
|
||||
actual_cost_usd_micros: 5000,
|
||||
quality_gate_passed: false,
|
||||
quality_gates_run: [QualityGate.TEST],
|
||||
quality_gates_failed: [QualityGate.TEST],
|
||||
context_compactions: 0,
|
||||
context_rotations: 0,
|
||||
context_utilization_final: 0.4,
|
||||
outcome: Outcome.FAILURE,
|
||||
retry_count: 1,
|
||||
});
|
||||
|
||||
expect(event.timestamp).toBe('2026-02-07T10:00:00.000Z');
|
||||
});
|
||||
|
||||
it('should set instance_id from config', () => {
|
||||
const config = makeConfig();
|
||||
const builder = new EventBuilder(config);
|
||||
const event = builder.build({
|
||||
task_duration_ms: 1000,
|
||||
task_type: TaskType.PLANNING,
|
||||
complexity: Complexity.LOW,
|
||||
harness: Harness.UNKNOWN,
|
||||
model: 'test-model',
|
||||
provider: Provider.UNKNOWN,
|
||||
estimated_input_tokens: 0,
|
||||
estimated_output_tokens: 0,
|
||||
actual_input_tokens: 0,
|
||||
actual_output_tokens: 0,
|
||||
estimated_cost_usd_micros: 0,
|
||||
actual_cost_usd_micros: 0,
|
||||
quality_gate_passed: true,
|
||||
quality_gates_run: [],
|
||||
quality_gates_failed: [],
|
||||
context_compactions: 0,
|
||||
context_rotations: 0,
|
||||
context_utilization_final: 0,
|
||||
outcome: Outcome.SUCCESS,
|
||||
retry_count: 0,
|
||||
});
|
||||
|
||||
expect(event.instance_id).toBe('my-instance-uuid');
|
||||
});
|
||||
|
||||
it('should set schema_version to 1.0', () => {
|
||||
const builder = new EventBuilder(makeConfig());
|
||||
const event = builder.build({
|
||||
task_duration_ms: 1000,
|
||||
task_type: TaskType.REFACTORING,
|
||||
complexity: Complexity.CRITICAL,
|
||||
harness: Harness.KILO_CODE,
|
||||
model: 'gemini-pro',
|
||||
provider: Provider.GOOGLE,
|
||||
estimated_input_tokens: 3000,
|
||||
estimated_output_tokens: 2000,
|
||||
actual_input_tokens: 3000,
|
||||
actual_output_tokens: 2000,
|
||||
estimated_cost_usd_micros: 80000,
|
||||
actual_cost_usd_micros: 80000,
|
||||
quality_gate_passed: true,
|
||||
quality_gates_run: [QualityGate.TYPECHECK],
|
||||
quality_gates_failed: [],
|
||||
context_compactions: 5,
|
||||
context_rotations: 2,
|
||||
context_utilization_final: 0.95,
|
||||
outcome: Outcome.SUCCESS,
|
||||
retry_count: 0,
|
||||
});
|
||||
|
||||
expect(event.schema_version).toBe('1.0');
|
||||
});
|
||||
});
|
||||
126
tests/prediction-cache.test.ts
Normal file
126
tests/prediction-cache.test.ts
Normal file
@@ -0,0 +1,126 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { PredictionCache } from '../src/prediction-cache.js';
|
||||
import { PredictionQuery, PredictionResponse } from '../src/types/predictions.js';
|
||||
import { TaskType, Complexity, Provider } from '../src/types/events.js';
|
||||
|
||||
function makeQuery(overrides: Partial<PredictionQuery> = {}): PredictionQuery {
|
||||
return {
|
||||
task_type: TaskType.IMPLEMENTATION,
|
||||
model: 'claude-3-opus',
|
||||
provider: Provider.ANTHROPIC,
|
||||
complexity: Complexity.MEDIUM,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function makeResponse(sampleSize = 100): PredictionResponse {
|
||||
return {
|
||||
prediction: {
|
||||
input_tokens: { p10: 500, p25: 750, median: 1000, p75: 1500, p90: 2000 },
|
||||
output_tokens: { p10: 200, p25: 350, median: 500, p75: 750, p90: 1000 },
|
||||
cost_usd_micros: { median: 50000 },
|
||||
duration_ms: { median: 30000 },
|
||||
correction_factors: { input: 1.1, output: 1.05 },
|
||||
quality: { gate_pass_rate: 0.85, success_rate: 0.9 },
|
||||
},
|
||||
metadata: {
|
||||
sample_size: sampleSize,
|
||||
fallback_level: 0,
|
||||
confidence: 'high',
|
||||
last_updated: new Date().toISOString(),
|
||||
cache_hit: false,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
describe('PredictionCache', () => {
|
||||
beforeEach(() => {
|
||||
vi.useFakeTimers();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it('should return null for cache miss', () => {
|
||||
const cache = new PredictionCache(60_000);
|
||||
const result = cache.get(makeQuery());
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should return cached prediction on hit', () => {
|
||||
const cache = new PredictionCache(60_000);
|
||||
const query = makeQuery();
|
||||
const response = makeResponse();
|
||||
|
||||
cache.set(query, response);
|
||||
const result = cache.get(query);
|
||||
|
||||
expect(result).toEqual(response);
|
||||
});
|
||||
|
||||
it('should return null when entry has expired', () => {
|
||||
const cache = new PredictionCache(60_000); // 60s TTL
|
||||
const query = makeQuery();
|
||||
const response = makeResponse();
|
||||
|
||||
cache.set(query, response);
|
||||
expect(cache.get(query)).toEqual(response);
|
||||
|
||||
// Advance time past TTL
|
||||
vi.advanceTimersByTime(61_000);
|
||||
|
||||
expect(cache.get(query)).toBeNull();
|
||||
});
|
||||
|
||||
it('should differentiate queries by all fields', () => {
|
||||
const cache = new PredictionCache(60_000);
|
||||
|
||||
const query1 = makeQuery({ task_type: TaskType.IMPLEMENTATION });
|
||||
const query2 = makeQuery({ task_type: TaskType.DEBUGGING });
|
||||
const response1 = makeResponse(100);
|
||||
const response2 = makeResponse(200);
|
||||
|
||||
cache.set(query1, response1);
|
||||
cache.set(query2, response2);
|
||||
|
||||
expect(cache.get(query1)?.metadata.sample_size).toBe(100);
|
||||
expect(cache.get(query2)?.metadata.sample_size).toBe(200);
|
||||
});
|
||||
|
||||
it('should clear all entries', () => {
|
||||
const cache = new PredictionCache(60_000);
|
||||
cache.set(makeQuery(), makeResponse());
|
||||
cache.set(makeQuery({ task_type: TaskType.TESTING }), makeResponse());
|
||||
|
||||
expect(cache.size).toBe(2);
|
||||
cache.clear();
|
||||
expect(cache.size).toBe(0);
|
||||
expect(cache.get(makeQuery())).toBeNull();
|
||||
});
|
||||
|
||||
it('should overwrite existing entry with same query', () => {
|
||||
const cache = new PredictionCache(60_000);
|
||||
const query = makeQuery();
|
||||
|
||||
cache.set(query, makeResponse(100));
|
||||
cache.set(query, makeResponse(200));
|
||||
|
||||
expect(cache.size).toBe(1);
|
||||
expect(cache.get(query)?.metadata.sample_size).toBe(200);
|
||||
});
|
||||
|
||||
it('should clean expired entry on get', () => {
|
||||
const cache = new PredictionCache(60_000);
|
||||
const query = makeQuery();
|
||||
|
||||
cache.set(query, makeResponse());
|
||||
expect(cache.size).toBe(1);
|
||||
|
||||
vi.advanceTimersByTime(61_000);
|
||||
|
||||
// get() should clean up
|
||||
cache.get(query);
|
||||
expect(cache.size).toBe(0);
|
||||
});
|
||||
});
|
||||
150
tests/queue.test.ts
Normal file
150
tests/queue.test.ts
Normal file
@@ -0,0 +1,150 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { EventQueue } from '../src/queue.js';
|
||||
import {
|
||||
TaskType,
|
||||
Complexity,
|
||||
Harness,
|
||||
Provider,
|
||||
Outcome,
|
||||
TaskCompletionEvent,
|
||||
} from '../src/types/events.js';
|
||||
|
||||
function makeEvent(id: string): TaskCompletionEvent {
|
||||
return {
|
||||
instance_id: 'test-instance',
|
||||
event_id: id,
|
||||
schema_version: '1.0',
|
||||
timestamp: new Date().toISOString(),
|
||||
task_duration_ms: 1000,
|
||||
task_type: TaskType.IMPLEMENTATION,
|
||||
complexity: Complexity.MEDIUM,
|
||||
harness: Harness.CLAUDE_CODE,
|
||||
model: 'claude-3-opus',
|
||||
provider: Provider.ANTHROPIC,
|
||||
estimated_input_tokens: 1000,
|
||||
estimated_output_tokens: 500,
|
||||
actual_input_tokens: 1100,
|
||||
actual_output_tokens: 550,
|
||||
estimated_cost_usd_micros: 50000,
|
||||
actual_cost_usd_micros: 55000,
|
||||
quality_gate_passed: true,
|
||||
quality_gates_run: [],
|
||||
quality_gates_failed: [],
|
||||
context_compactions: 0,
|
||||
context_rotations: 0,
|
||||
context_utilization_final: 0.5,
|
||||
outcome: Outcome.SUCCESS,
|
||||
retry_count: 0,
|
||||
};
|
||||
}
|
||||
|
||||
describe('EventQueue', () => {
|
||||
it('should enqueue and drain events', () => {
|
||||
const queue = new EventQueue(10);
|
||||
const event = makeEvent('e1');
|
||||
|
||||
queue.enqueue(event);
|
||||
expect(queue.size).toBe(1);
|
||||
expect(queue.isEmpty).toBe(false);
|
||||
|
||||
const drained = queue.drain(10);
|
||||
expect(drained).toHaveLength(1);
|
||||
expect(drained[0].event_id).toBe('e1');
|
||||
expect(queue.isEmpty).toBe(true);
|
||||
});
|
||||
|
||||
it('should respect maxSize with FIFO eviction', () => {
|
||||
const queue = new EventQueue(3);
|
||||
|
||||
queue.enqueue(makeEvent('e1'));
|
||||
queue.enqueue(makeEvent('e2'));
|
||||
queue.enqueue(makeEvent('e3'));
|
||||
expect(queue.size).toBe(3);
|
||||
|
||||
// Adding a 4th should evict the oldest (e1)
|
||||
queue.enqueue(makeEvent('e4'));
|
||||
expect(queue.size).toBe(3);
|
||||
|
||||
const drained = queue.drain(10);
|
||||
expect(drained.map((e) => e.event_id)).toEqual(['e2', 'e3', 'e4']);
|
||||
});
|
||||
|
||||
it('should drain up to maxItems', () => {
|
||||
const queue = new EventQueue(10);
|
||||
queue.enqueue(makeEvent('e1'));
|
||||
queue.enqueue(makeEvent('e2'));
|
||||
queue.enqueue(makeEvent('e3'));
|
||||
|
||||
const drained = queue.drain(2);
|
||||
expect(drained).toHaveLength(2);
|
||||
expect(drained.map((e) => e.event_id)).toEqual(['e1', 'e2']);
|
||||
expect(queue.size).toBe(1);
|
||||
});
|
||||
|
||||
it('should remove drained items from the queue', () => {
|
||||
const queue = new EventQueue(10);
|
||||
queue.enqueue(makeEvent('e1'));
|
||||
queue.enqueue(makeEvent('e2'));
|
||||
|
||||
queue.drain(1);
|
||||
expect(queue.size).toBe(1);
|
||||
|
||||
const remaining = queue.drain(10);
|
||||
expect(remaining[0].event_id).toBe('e2');
|
||||
});
|
||||
|
||||
it('should report isEmpty correctly', () => {
|
||||
const queue = new EventQueue(5);
|
||||
expect(queue.isEmpty).toBe(true);
|
||||
|
||||
queue.enqueue(makeEvent('e1'));
|
||||
expect(queue.isEmpty).toBe(false);
|
||||
|
||||
queue.drain(1);
|
||||
expect(queue.isEmpty).toBe(true);
|
||||
});
|
||||
|
||||
it('should report size correctly', () => {
|
||||
const queue = new EventQueue(10);
|
||||
expect(queue.size).toBe(0);
|
||||
|
||||
queue.enqueue(makeEvent('e1'));
|
||||
expect(queue.size).toBe(1);
|
||||
|
||||
queue.enqueue(makeEvent('e2'));
|
||||
expect(queue.size).toBe(2);
|
||||
|
||||
queue.drain(1);
|
||||
expect(queue.size).toBe(1);
|
||||
});
|
||||
|
||||
it('should return empty array when draining empty queue', () => {
|
||||
const queue = new EventQueue(5);
|
||||
const drained = queue.drain(10);
|
||||
expect(drained).toEqual([]);
|
||||
});
|
||||
|
||||
it('should prepend events to the front of the queue', () => {
|
||||
const queue = new EventQueue(10);
|
||||
queue.enqueue(makeEvent('e3'));
|
||||
|
||||
queue.prepend([makeEvent('e1'), makeEvent('e2')]);
|
||||
expect(queue.size).toBe(3);
|
||||
|
||||
const drained = queue.drain(10);
|
||||
expect(drained.map((e) => e.event_id)).toEqual(['e1', 'e2', 'e3']);
|
||||
});
|
||||
|
||||
it('should respect maxSize when prepending', () => {
|
||||
const queue = new EventQueue(3);
|
||||
queue.enqueue(makeEvent('e3'));
|
||||
queue.enqueue(makeEvent('e4'));
|
||||
|
||||
// Only 1 slot available, so only first event should be prepended
|
||||
queue.prepend([makeEvent('e1'), makeEvent('e2')]);
|
||||
expect(queue.size).toBe(3);
|
||||
|
||||
const drained = queue.drain(10);
|
||||
expect(drained.map((e) => e.event_id)).toEqual(['e1', 'e3', 'e4']);
|
||||
});
|
||||
});
|
||||
216
tests/submitter.test.ts
Normal file
216
tests/submitter.test.ts
Normal file
@@ -0,0 +1,216 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { BatchSubmitter } from '../src/submitter.js';
|
||||
import { ResolvedConfig } from '../src/config.js';
|
||||
import {
|
||||
TaskCompletionEvent,
|
||||
TaskType,
|
||||
Complexity,
|
||||
Harness,
|
||||
Provider,
|
||||
Outcome,
|
||||
} from '../src/types/events.js';
|
||||
|
||||
function makeConfig(overrides: Partial<ResolvedConfig> = {}): ResolvedConfig {
|
||||
return {
|
||||
serverUrl: 'https://tel.example.com',
|
||||
apiKey: 'a'.repeat(64),
|
||||
instanceId: 'test-instance-id',
|
||||
enabled: true,
|
||||
submitIntervalMs: 300_000,
|
||||
maxQueueSize: 1000,
|
||||
batchSize: 100,
|
||||
requestTimeoutMs: 10_000,
|
||||
predictionCacheTtlMs: 21_600_000,
|
||||
dryRun: false,
|
||||
maxRetries: 3,
|
||||
onError: () => {},
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function makeEvent(id = 'evt-1'): TaskCompletionEvent {
|
||||
return {
|
||||
instance_id: 'test-instance-id',
|
||||
event_id: id,
|
||||
schema_version: '1.0',
|
||||
timestamp: new Date().toISOString(),
|
||||
task_duration_ms: 5000,
|
||||
task_type: TaskType.IMPLEMENTATION,
|
||||
complexity: Complexity.MEDIUM,
|
||||
harness: Harness.CLAUDE_CODE,
|
||||
model: 'claude-3-opus',
|
||||
provider: Provider.ANTHROPIC,
|
||||
estimated_input_tokens: 1000,
|
||||
estimated_output_tokens: 500,
|
||||
actual_input_tokens: 1100,
|
||||
actual_output_tokens: 550,
|
||||
estimated_cost_usd_micros: 50000,
|
||||
actual_cost_usd_micros: 55000,
|
||||
quality_gate_passed: true,
|
||||
quality_gates_run: [],
|
||||
quality_gates_failed: [],
|
||||
context_compactions: 0,
|
||||
context_rotations: 0,
|
||||
context_utilization_final: 0.5,
|
||||
outcome: Outcome.SUCCESS,
|
||||
retry_count: 0,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Tests for BatchSubmitter: successful POSTs, Retry-After handling on 429,
 * non-retryable 4xx failures, exponential-backoff retries on network error,
 * dryRun short-circuiting, and AbortController-based request timeouts.
 * fetch is stubbed and timers are faked, so backoff waits complete instantly.
 */
describe('BatchSubmitter', () => {
  let fetchSpy: ReturnType<typeof vi.fn>;

  beforeEach(() => {
    vi.useFakeTimers();
    fetchSpy = vi.fn();
    vi.stubGlobal('fetch', fetchSpy);
  });

  afterEach(() => {
    vi.useRealTimers();
    vi.unstubAllGlobals();
  });

  it('should submit a batch successfully', async () => {
    const responseBody = {
      accepted: 1,
      rejected: 0,
      results: [{ event_id: 'evt-1', status: 'accepted' }],
    };
    fetchSpy.mockResolvedValueOnce({
      ok: true,
      status: 202,
      json: () => Promise.resolve(responseBody),
    });

    const submitter = new BatchSubmitter(makeConfig());
    const result = await submitter.submit([makeEvent()]);

    expect(result.success).toBe(true);
    expect(result.response).toEqual(responseBody);
    expect(fetchSpy).toHaveBeenCalledTimes(1);

    // Verify the outgoing request: endpoint, method, and bearer auth header.
    const [url, options] = fetchSpy.mock.calls[0];
    expect(url).toBe('https://tel.example.com/v1/events/batch');
    expect(options.method).toBe('POST');
    expect(options.headers['Authorization']).toBe(`Bearer ${'a'.repeat(64)}`);
  });

  it('should handle 429 with Retry-After header', async () => {
    // Minimal Headers-like object: only .get() is exercised by the submitter.
    const headers = new Map([['Retry-After', '1']]);
    fetchSpy.mockResolvedValueOnce({
      ok: false,
      status: 429,
      headers: { get: (name: string) => headers.get(name) ?? null },
    });

    // After retry, succeed
    const responseBody = {
      accepted: 1,
      rejected: 0,
      results: [{ event_id: 'evt-1', status: 'accepted' }],
    };
    fetchSpy.mockResolvedValueOnce({
      ok: true,
      status: 202,
      json: () => Promise.resolve(responseBody),
    });

    const submitter = new BatchSubmitter(makeConfig({ maxRetries: 1 }));

    // Run submit in background and advance timers
    const submitPromise = submitter.submit([makeEvent()]);

    // Advance enough to cover Retry-After (1s) + backoff with jitter (~1-1.5s)
    await vi.advanceTimersByTimeAsync(10_000);

    const result = await submitPromise;
    expect(result.success).toBe(true);
    expect(fetchSpy).toHaveBeenCalledTimes(2);
  });

  it('should handle 403 error', async () => {
    fetchSpy.mockResolvedValueOnce({
      ok: false,
      status: 403,
      statusText: 'Forbidden',
    });

    const submitter = new BatchSubmitter(makeConfig({ maxRetries: 0 }));
    const result = await submitter.submit([makeEvent()]);

    // Failure is reported via the result, and the error carries status details.
    expect(result.success).toBe(false);
    expect(result.error?.message).toContain('Forbidden');
    expect(result.error?.message).toContain('403');
  });

  it('should retry on network error with backoff', async () => {
    // First attempt rejects at the network layer; second succeeds.
    fetchSpy.mockRejectedValueOnce(new Error('Network error'));
    fetchSpy.mockResolvedValueOnce({
      ok: true,
      status: 202,
      json: () =>
        Promise.resolve({
          accepted: 1,
          rejected: 0,
          results: [{ event_id: 'evt-1', status: 'accepted' }],
        }),
    });

    const submitter = new BatchSubmitter(makeConfig({ maxRetries: 1 }));
    const submitPromise = submitter.submit([makeEvent()]);

    // Advance past backoff delay
    await vi.advanceTimersByTimeAsync(5000);

    const result = await submitPromise;
    expect(result.success).toBe(true);
    expect(fetchSpy).toHaveBeenCalledTimes(2);
  });

  it('should fail after max retries exhausted', async () => {
    // Every attempt fails; mockRejectedValue (no Once) covers all retries.
    fetchSpy.mockRejectedValue(new Error('Network error'));

    const submitter = new BatchSubmitter(makeConfig({ maxRetries: 2 }));
    const submitPromise = submitter.submit([makeEvent()]);

    // Advance timers to allow all retries
    await vi.advanceTimersByTimeAsync(120_000);

    const result = await submitPromise;
    expect(result.success).toBe(false);
    expect(result.error?.message).toBe('Network error');
  });

  it('should not call fetch in dryRun mode', async () => {
    const submitter = new BatchSubmitter(makeConfig({ dryRun: true }));
    const result = await submitter.submit([makeEvent('evt-1'), makeEvent('evt-2')]);

    // dryRun synthesizes a success response counting all events as accepted.
    expect(result.success).toBe(true);
    expect(result.response?.accepted).toBe(2);
    expect(result.response?.rejected).toBe(0);
    expect(fetchSpy).not.toHaveBeenCalled();
  });

  it('should handle request timeout via AbortController', async () => {
    // Simulate a hung request: the promise settles only when the submitter's
    // timeout fires and aborts via the signal it passed to fetch.
    fetchSpy.mockImplementation(
      (_url: string, options: { signal: AbortSignal }) =>
        new Promise((_resolve, reject) => {
          options.signal.addEventListener('abort', () => {
            reject(new DOMException('The operation was aborted.', 'AbortError'));
          });
        }),
    );

    const submitter = new BatchSubmitter(
      makeConfig({ requestTimeoutMs: 1000, maxRetries: 0 }),
    );
    const submitPromise = submitter.submit([makeEvent()]);

    await vi.advanceTimersByTimeAsync(2000);

    const result = await submitPromise;
    expect(result.success).toBe(false);
    expect(result.error?.message).toContain('aborted');
  });
});
|
||||
Reference in New Issue
Block a user