fix(#229): Remediate code review findings for performance tests
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/pr/woodpecker Pipeline failed

- Fix CRITICAL: Increase single-spawn threshold from 10ms to 50ms (CI flakiness)
- Fix CRITICAL: Replace no-op validation test with real backoff scale tests
- Fix IMPORTANT: Add warmup iterations before all timed measurements
- Fix IMPORTANT: Increase scan position ratio tolerance to 10x for sub-ms noise
- Refactor queue perf tests to use actual service methods (calculateBackoffDelay)
- Add helper function to reduce spawn request duplication

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Jason Woltje
2026-02-05 13:23:19 -06:00
parent b93f4c59ce
commit 0796cbc744
3 changed files with 97 additions and 78 deletions

View File

@@ -1,8 +1,8 @@
/**
 * Performance Test: Queue Service Throughput
 *
 * Benchmarks the queue service's pure functions under load
 * to verify performance characteristics.
 *
 * Covers issue #229 (ORCH-128)
 */
@@ -44,6 +44,11 @@ describe("Performance: Queue Service", () => {
describe("Backoff calculation performance", () => {
it("should calculate 10,000 backoff delays in under 10ms", () => {
// Warmup
for (let i = 0; i < 100; i++) {
service.calculateBackoffDelay(i % 20, 1000, 60000);
}
const start = performance.now();
for (let i = 0; i < 10000; i++) {
@@ -74,26 +79,55 @@ describe("Performance: Queue Service", () => {
});
});
// Replaces the former no-op "Validation performance" suite (it only re-asserted
// the shape of fixture data) with real scale tests against the service's
// actual calculateBackoffDelay() method.
describe("Backoff calculation at scale", () => {
  it("should handle all retry levels from 0 to 100 consistently", () => {
    // Warmup so the JIT has optimized the hot path before we start timing.
    for (let i = 0; i < 50; i++) {
      service.calculateBackoffDelay(i, 1000, 60000);
    }

    const start = performance.now();
    const results = new Map<number, number>();
    for (let attempt = 0; attempt <= 100; attempt++) {
      const delay = service.calculateBackoffDelay(attempt, 1000, 60000);
      results.set(attempt, delay);
    }
    const duration = performance.now() - start;

    expect(duration).toBeLessThan(10);

    // Verify monotonic increase up to the 60s cap: each attempt's delay is
    // >= the previous attempt's and never exceeds maxDelay.
    for (let attempt = 1; attempt <= 100; attempt++) {
      const current = results.get(attempt) ?? 0;
      const previous = results.get(attempt - 1) ?? 0;
      expect(current).toBeGreaterThanOrEqual(previous);
      expect(current).toBeLessThanOrEqual(60000);
    }
  });

  it("should calculate backoffs with varying base delays rapidly", () => {
    const baseDelays = [100, 500, 1000, 2000, 5000];
    const maxDelays = [10000, 30000, 60000, 120000];

    // Warmup before the timed section.
    service.calculateBackoffDelay(0, 1000, 60000);

    const start = performance.now();
    for (const base of baseDelays) {
      for (const max of maxDelays) {
        for (let attempt = 0; attempt < 20; attempt++) {
          // Every delay must stay within the [base, max] envelope.
          const delay = service.calculateBackoffDelay(attempt, base, max);
          expect(delay).toBeLessThanOrEqual(max);
          expect(delay).toBeGreaterThanOrEqual(base);
        }
      }
    }
    const duration = performance.now() - start;

    // 5 * 4 * 20 = 400 calculations should complete quickly
    expect(duration).toBeLessThan(50);
  });
});
});