- Fix CRITICAL: Increase single-spawn threshold from 10 ms to 50 ms (CI flakiness)
- Fix CRITICAL: Replace no-op validation test with real backoff scale tests
- Fix IMPORTANT: Add warmup iterations before all timed measurements
- Fix IMPORTANT: Increase scan position ratio tolerance to 10x for sub-ms noise
- Refactored queue perf tests to use actual service methods (calculateBackoffDelay)
- Added a helper function to reduce spawn request duplication

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
134 lines
4.1 KiB
TypeScript
/**
 * Performance Test: Queue Service Throughput
 *
 * Benchmarks the queue service's pure functions under load
 * to verify performance characteristics.
 *
 * Covers issue #229 (ORCH-128)
 */
|
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
|
import { QueueService } from "../../src/queue/queue.service";
|
|
import { ConfigService } from "@nestjs/config";
|
|
|
|
describe("Performance: Queue Service", () => {
|
|
let service: QueueService;
|
|
|
|
const mockValkeyService = {
|
|
getConnection: vi.fn().mockReturnValue({
|
|
host: "localhost",
|
|
port: 6379,
|
|
}),
|
|
updateTaskStatus: vi.fn().mockResolvedValue(undefined),
|
|
publishEvent: vi.fn().mockResolvedValue(undefined),
|
|
};
|
|
|
|
const mockConfigService = {
|
|
get: vi.fn((key: string, defaultValue?: unknown) => {
|
|
const config: Record<string, unknown> = {
|
|
"orchestrator.queue.name": "perf-test-queue",
|
|
"orchestrator.queue.maxRetries": 3,
|
|
"orchestrator.queue.baseDelay": 1000,
|
|
"orchestrator.queue.maxDelay": 60000,
|
|
};
|
|
return config[key] ?? defaultValue;
|
|
}),
|
|
};
|
|
|
|
beforeEach(() => {
|
|
vi.clearAllMocks();
|
|
service = new QueueService(
|
|
mockValkeyService as never,
|
|
mockConfigService as unknown as ConfigService
|
|
);
|
|
});
|
|
|
|
describe("Backoff calculation performance", () => {
|
|
it("should calculate 10,000 backoff delays in under 10ms", () => {
|
|
// Warmup
|
|
for (let i = 0; i < 100; i++) {
|
|
service.calculateBackoffDelay(i % 20, 1000, 60000);
|
|
}
|
|
|
|
const start = performance.now();
|
|
|
|
for (let i = 0; i < 10000; i++) {
|
|
service.calculateBackoffDelay(i % 20, 1000, 60000);
|
|
}
|
|
|
|
const duration = performance.now() - start;
|
|
expect(duration).toBeLessThan(10);
|
|
});
|
|
|
|
it("should produce consistent results under rapid invocation", () => {
|
|
const results: number[] = [];
|
|
|
|
for (let attempt = 0; attempt <= 10; attempt++) {
|
|
const delay = service.calculateBackoffDelay(attempt, 1000, 60000);
|
|
results.push(delay);
|
|
}
|
|
|
|
// Verify expected exponential pattern
|
|
expect(results[0]).toBe(1000); // 1000 * 2^0
|
|
expect(results[1]).toBe(2000); // 1000 * 2^1
|
|
expect(results[2]).toBe(4000); // 1000 * 2^2
|
|
expect(results[3]).toBe(8000); // 1000 * 2^3
|
|
|
|
// After attempt 6 (64000), should be capped at 60000
|
|
expect(results[6]).toBe(60000);
|
|
expect(results[10]).toBe(60000);
|
|
});
|
|
});
|
|
|
|
describe("Backoff calculation at scale", () => {
|
|
it("should handle all retry levels from 0 to 100 consistently", () => {
|
|
// Warmup
|
|
for (let i = 0; i < 50; i++) {
|
|
service.calculateBackoffDelay(i, 1000, 60000);
|
|
}
|
|
|
|
const start = performance.now();
|
|
const results = new Map<number, number>();
|
|
|
|
for (let attempt = 0; attempt <= 100; attempt++) {
|
|
const delay = service.calculateBackoffDelay(attempt, 1000, 60000);
|
|
results.set(attempt, delay);
|
|
}
|
|
|
|
const duration = performance.now() - start;
|
|
expect(duration).toBeLessThan(10);
|
|
|
|
// Verify monotonic increase up to cap
|
|
for (let attempt = 1; attempt <= 100; attempt++) {
|
|
const current = results.get(attempt) ?? 0;
|
|
const previous = results.get(attempt - 1) ?? 0;
|
|
expect(current).toBeGreaterThanOrEqual(previous);
|
|
expect(current).toBeLessThanOrEqual(60000);
|
|
}
|
|
});
|
|
|
|
it("should calculate backoffs with varying base delays rapidly", () => {
|
|
const baseDelays = [100, 500, 1000, 2000, 5000];
|
|
const maxDelays = [10000, 30000, 60000, 120000];
|
|
|
|
// Warmup
|
|
service.calculateBackoffDelay(0, 1000, 60000);
|
|
|
|
const start = performance.now();
|
|
|
|
for (const base of baseDelays) {
|
|
for (const max of maxDelays) {
|
|
for (let attempt = 0; attempt < 20; attempt++) {
|
|
const delay = service.calculateBackoffDelay(attempt, base, max);
|
|
expect(delay).toBeLessThanOrEqual(max);
|
|
expect(delay).toBeGreaterThanOrEqual(base);
|
|
}
|
|
}
|
|
}
|
|
|
|
const duration = performance.now() - start;
|
|
// 5 * 4 * 20 = 400 calculations should complete quickly
|
|
expect(duration).toBeLessThan(50);
|
|
});
|
|
});
|
|
});
|