Files
stack/apps/api/src/chat-proxy/chat-proxy.service.spec.ts
Jason Woltje e85fb11f03
All checks were successful
ci/woodpecker/push/ci Pipeline was successful
test(ms22-p2): add unit tests for agent services (#687)
Co-authored-by: Jason Woltje <jason@diversecanvas.com>
Co-committed-by: Jason Woltje <jason@diversecanvas.com>
2026-03-05 03:40:35 +00:00

251 lines
8.3 KiB
TypeScript

import {
ServiceUnavailableException,
NotFoundException,
BadGatewayException,
} from "@nestjs/common";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { ChatProxyService } from "./chat-proxy.service";
describe("ChatProxyService", () => {
  const userId = "user-123";

  // Shared mock collaborators; every nested describe below asserts against these.
  const config = {
    get: vi.fn(),
  };
  const containerLifecycle = {
    ensureRunning: vi.fn(),
    touch: vi.fn(),
  };
  const prisma = {
    userAgentConfig: {
      findUnique: vi.fn(),
    },
    userAgent: {
      findUnique: vi.fn(),
    },
  };

  let service: ChatProxyService;
  let fetchMock: ReturnType<typeof vi.fn>;

  beforeEach(() => {
    // Stub the global fetch so outbound HTTP calls can be inspected.
    fetchMock = vi.fn();
    vi.stubGlobal("fetch", fetchMock);
    service = new ChatProxyService(
      prisma as never,
      containerLifecycle as never,
      config as never
    );
  });

  afterEach(() => {
    vi.clearAllMocks();
    vi.unstubAllGlobals();
  });
describe("getContainerUrl", () => {
it("calls ensureRunning and touch for the user", async () => {
containerLifecycle.ensureRunning.mockResolvedValue({
url: "http://mosaic-user-user-123:19000",
token: "gateway-token",
});
containerLifecycle.touch.mockResolvedValue(undefined);
const url = await service.getContainerUrl(userId);
expect(url).toBe("http://mosaic-user-user-123:19000");
expect(containerLifecycle.ensureRunning).toHaveBeenCalledWith(userId);
expect(containerLifecycle.touch).toHaveBeenCalledWith(userId);
});
});
describe("proxyChat", () => {
it("forwards the request to the user's OpenClaw container", async () => {
containerLifecycle.ensureRunning.mockResolvedValue({
url: "http://mosaic-user-user-123:19000",
token: "gateway-token",
});
containerLifecycle.touch.mockResolvedValue(undefined);
fetchMock.mockResolvedValue(new Response("event: token\ndata: hello\n\n"));
const messages = [{ role: "user", content: "Hello from Mosaic" }];
const response = await service.proxyChat(userId, messages);
expect(response).toBeInstanceOf(Response);
expect(fetchMock).toHaveBeenCalledWith(
"http://mosaic-user-user-123:19000/v1/chat/completions",
expect.objectContaining({
method: "POST",
headers: {
Authorization: "Bearer gateway-token",
"Content-Type": "application/json",
},
})
);
const [, request] = fetchMock.mock.calls[0] as [string, RequestInit];
const parsedBody = JSON.parse(String(request.body));
expect(parsedBody).toEqual({
messages,
model: "openclaw:default",
stream: true,
});
});
it("throws ServiceUnavailableException on connection refused errors", async () => {
containerLifecycle.ensureRunning.mockResolvedValue({
url: "http://mosaic-user-user-123:19000",
token: "gateway-token",
});
containerLifecycle.touch.mockResolvedValue(undefined);
fetchMock.mockRejectedValue(new Error("connect ECONNREFUSED 127.0.0.1:19000"));
await expect(service.proxyChat(userId, [])).rejects.toBeInstanceOf(
ServiceUnavailableException
);
});
it("throws ServiceUnavailableException on timeout errors", async () => {
containerLifecycle.ensureRunning.mockResolvedValue({
url: "http://mosaic-user-user-123:19000",
token: "gateway-token",
});
containerLifecycle.touch.mockResolvedValue(undefined);
fetchMock.mockRejectedValue(new Error("The operation was aborted due to timeout"));
await expect(service.proxyChat(userId, [])).rejects.toBeInstanceOf(
ServiceUnavailableException
);
});
});
describe("proxyChat with agent routing", () => {
it("includes agent config when agentName is specified", async () => {
const mockAgent = {
name: "jarvis",
displayName: "Jarvis",
personality: "Capable, direct, proactive.",
primaryModel: "opus",
isActive: true,
};
containerLifecycle.ensureRunning.mockResolvedValue({
url: "http://mosaic-user-user-123:19000",
token: "gateway-token",
});
containerLifecycle.touch.mockResolvedValue(undefined);
prisma.userAgent.findUnique.mockResolvedValue(mockAgent);
fetchMock.mockResolvedValue(new Response("event: token\ndata: hello\n\n"));
const messages = [{ role: "user", content: "Hello Jarvis" }];
await service.proxyChat(userId, messages, undefined, "jarvis");
const [, request] = fetchMock.mock.calls[0] as [string, RequestInit];
const parsedBody = JSON.parse(String(request.body));
expect(parsedBody).toEqual({
messages,
model: "opus",
stream: true,
agent: "jarvis",
agent_personality: "Capable, direct, proactive.",
});
});
it("throws NotFoundException when agent not found", async () => {
containerLifecycle.ensureRunning.mockResolvedValue({
url: "http://mosaic-user-user-123:19000",
token: "gateway-token",
});
containerLifecycle.touch.mockResolvedValue(undefined);
prisma.userAgent.findUnique.mockResolvedValue(null);
const messages = [{ role: "user", content: "Hello" }];
await expect(service.proxyChat(userId, messages, undefined, "nonexistent")).rejects.toThrow(
NotFoundException
);
});
it("throws NotFoundException when agent is not active", async () => {
containerLifecycle.ensureRunning.mockResolvedValue({
url: "http://mosaic-user-user-123:19000",
token: "gateway-token",
});
containerLifecycle.touch.mockResolvedValue(undefined);
prisma.userAgent.findUnique.mockResolvedValue({
name: "inactive-agent",
displayName: "Inactive",
personality: "...",
primaryModel: null,
isActive: false,
});
const messages = [{ role: "user", content: "Hello" }];
await expect(
service.proxyChat(userId, messages, undefined, "inactive-agent")
).rejects.toThrow(NotFoundException);
});
it("falls back to default model when agent has no primaryModel", async () => {
const mockAgent = {
name: "jarvis",
displayName: "Jarvis",
personality: "Capable, direct, proactive.",
primaryModel: null,
isActive: true,
};
containerLifecycle.ensureRunning.mockResolvedValue({
url: "http://mosaic-user-user-123:19000",
token: "gateway-token",
});
containerLifecycle.touch.mockResolvedValue(undefined);
prisma.userAgent.findUnique.mockResolvedValue(mockAgent);
prisma.userAgentConfig.findUnique.mockResolvedValue(null);
fetchMock.mockResolvedValue(new Response("event: token\ndata: hello\n\n"));
const messages = [{ role: "user", content: "Hello" }];
await service.proxyChat(userId, messages, undefined, "jarvis");
const [, request] = fetchMock.mock.calls[0] as [string, RequestInit];
const parsedBody = JSON.parse(String(request.body));
expect(parsedBody.model).toBe("openclaw:default");
});
});
describe("proxyGuestChat", () => {
it("uses environment variables for guest LLM configuration", async () => {
config.get.mockImplementation((key: string) => {
if (key === "GUEST_LLM_URL") return "http://10.1.1.42:11434/v1";
if (key === "GUEST_LLM_MODEL") return "llama3.2";
return undefined;
});
fetchMock.mockResolvedValue(new Response("event: token\ndata: hello\n\n"));
const messages = [{ role: "user", content: "Hello" }];
await service.proxyGuestChat(messages);
expect(fetchMock).toHaveBeenCalledWith(
"http://10.1.1.42:11434/v1/chat/completions",
expect.objectContaining({
method: "POST",
headers: {
"Content-Type": "application/json",
},
})
);
const [, request] = fetchMock.mock.calls[0] as [string, RequestInit];
const parsedBody = JSON.parse(String(request.body));
expect(parsedBody.model).toBe("llama3.2");
});
it("throws BadGatewayException on guest LLM errors", async () => {
config.get.mockReturnValue(undefined);
fetchMock.mockResolvedValue(new Response("Internal Server Error", { status: 500 }));
const messages = [{ role: "user", content: "Hello" }];
await expect(service.proxyGuestChat(messages)).rejects.toThrow(BadGatewayException);
});
});
});