Files
stack/apps/web/src/hooks/useChat.test.ts
Jason Woltje 8484e060d7
All checks were successful
ci/woodpecker/push/ci Pipeline was successful
test(web): update useChat tests for streaming-only implementation
2026-03-04 18:14:14 -06:00

659 lines
21 KiB
TypeScript

/**
* @file useChat.test.ts
* @description Tests for the useChat hook that manages chat state and LLM interactions
*/
import { renderHook, act } from "@testing-library/react";
import { describe, it, expect, beforeEach, vi, afterEach, type MockedFunction } from "vitest";
import { useChat, type Message } from "./useChat";
import * as chatApi from "@/lib/api/chat";
import * as ideasApi from "@/lib/api/ideas";
import type { Idea } from "@/lib/api/ideas";
// Mock the API modules - use importOriginal to preserve types/enums
// NOTE: vi.mock calls are hoisted above imports by the Vitest transform,
// so these factories must not reference file-local variables.
vi.mock("@/lib/api/chat", () => ({
  sendChatMessage: vi.fn(),
  streamChatMessage: vi.fn(),
}));
vi.mock("@/lib/api/ideas", async (importOriginal) => {
  // eslint-disable-next-line @typescript-eslint/consistent-type-imports
  const actual = await importOriginal<typeof import("@/lib/api/ideas")>();
  // Spread the real module so non-mocked exports (types, helpers) stay intact;
  // only the three persistence functions are replaced with spies.
  return {
    ...actual,
    createConversation: vi.fn(),
    updateConversation: vi.fn(),
    getIdea: vi.fn(),
  };
});
// Typed handles to the mocked functions so tests get the MockedFunction API
// (mockImplementation, mock.calls, ...) without repeating casts at each use.
const mockSendChatMessage = chatApi.sendChatMessage as MockedFunction<
  typeof chatApi.sendChatMessage
>;
const mockStreamChatMessage = chatApi.streamChatMessage as MockedFunction<
  typeof chatApi.streamChatMessage
>;
const mockCreateConversation = ideasApi.createConversation as MockedFunction<
  typeof ideasApi.createConversation
>;
const mockGetIdea = ideasApi.getIdea as MockedFunction<typeof ideasApi.getIdea>;
/**
 * Builds a minimal Idea fixture representing a persisted chat conversation.
 *
 * Only the fields the hook reads are meaningful here; the literal is
 * asserted to Idea because the project type may declare extra properties
 * that are irrelevant to these tests.
 *
 * @param id - Idea/conversation identifier
 * @param title - Conversation title
 * @param content - Serialized message payload (JSON string or empty)
 */
function createMockIdea(id: string, title: string, content: string): Idea {
  const fixture = {
    id,
    workspaceId: "workspace-1",
    title,
    content,
    status: "CAPTURED",
    priority: "medium",
    tags: ["chat"],
    metadata: { conversationType: "chat" },
    creatorId: "user-1",
    createdAt: new Date().toISOString(),
    updatedAt: new Date().toISOString(),
  };
  return fixture as Idea;
}
/**
 * Stubs streamChatMessage so that every call fails immediately.
 *
 * The error callback is invoked synchronously (no microtask hop), so the
 * hook observes the rejection on its very first await. The hook has no
 * non-streaming fallback, so this drives the pure error path.
 *
 * @param error - Error delivered to the stream's onError callback
 */
function makeStreamFail(error: Error = new Error("Streaming not available")): void {
  mockStreamChatMessage.mockImplementation(
    (
      _req,
      _chunk,
      _done,
      fail: (err: Error) => void,
      _abort?: AbortSignal
    ): void => {
      // Synchronous failure: the wrapping Promise rejects without delay.
      fail(error);
    }
  );
}
/**
 * Stubs streamChatMessage so that it emits the given tokens in order,
 * yielding one microtask before each token, then signals completion.
 *
 * Honors the AbortSignal: once aborted, no further chunk or completion
 * callbacks fire. The aborted flag is closed over by both the listener
 * and the async pump, sharing cancellation state across the async boundary.
 *
 * @param tokens - Chunks to deliver, in order, via onChunk
 */
function makeStreamSucceed(tokens: string[]): void {
  mockStreamChatMessage.mockImplementation(
    (
      _req,
      emit: (chunk: string) => void,
      done: () => void,
      _fail: (err: Error) => void,
      abort?: AbortSignal
    ): void => {
      let aborted = false;
      abort?.addEventListener("abort", () => {
        aborted = true;
      });
      const pump = async (): Promise<void> => {
        for (const token of tokens) {
          if (aborted) return;
          // One microtask per token so state updates can interleave.
          await Promise.resolve();
          emit(token);
        }
        if (!aborted) {
          done();
        }
      };
      void pump();
    }
  );
}
describe("useChat", () => {
  beforeEach(() => {
    vi.clearAllMocks();
    // Default: streaming fails, so tests exercise the error path unless a
    // test overrides this with makeStreamSucceed or a custom implementation.
    // (There is no non-streaming fallback in the current implementation.)
    makeStreamFail();
  });
  afterEach(() => {
    vi.restoreAllMocks();
  });
  describe("initial state", () => {
    it("should initialize with welcome message", () => {
      const { result } = renderHook(() => useChat());
      expect(result.current.messages).toHaveLength(1);
      expect(result.current.messages[0]?.role).toBe("assistant");
      expect(result.current.messages[0]?.id).toBe("welcome");
      expect(result.current.isLoading).toBe(false);
      expect(result.current.isStreaming).toBe(false);
      expect(result.current.error).toBeNull();
      expect(result.current.conversationId).toBeNull();
    });
    it("should expose abortStream function", () => {
      const { result } = renderHook(() => useChat());
      expect(typeof result.current.abortStream).toBe("function");
    });
  });
  describe("sendMessage (streaming failure path)", () => {
    it("should not send empty messages", async () => {
      const { result } = renderHook(() => useChat());
      await act(async () => {
        // Empty and whitespace-only inputs must be rejected before any API call.
        await result.current.sendMessage("");
        await result.current.sendMessage(" ");
      });
      expect(mockSendChatMessage).not.toHaveBeenCalled();
      expect(mockStreamChatMessage).not.toHaveBeenCalled();
      expect(result.current.messages).toHaveLength(1); // only welcome
    });
    it("should handle streaming errors gracefully", async () => {
      // Silence the hook's console.warn for the expected failure.
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      makeStreamFail(new Error("Streaming not available"));
      const { result } = renderHook(() => useChat());
      await act(async () => {
        await result.current.sendMessage("Hello");
      });
      // Streaming fails, no fallback, placeholder is removed
      expect(result.current.error).toContain("Chat error:");
      expect(result.current.messages).toHaveLength(2); // welcome + user (no assistant)
    });
  });
  describe("streaming path", () => {
    it("should stream tokens into assistant message", async () => {
      const tokens = ["Hello", " world", "!"];
      makeStreamSucceed(tokens);
      mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));
      const { result } = renderHook(() => useChat());
      await act(async () => {
        await result.current.sendMessage("Hi");
      });
      // All tokens should be concatenated into a single assistant message.
      expect(result.current.messages).toHaveLength(3);
      expect(result.current.messages[2]?.role).toBe("assistant");
      expect(result.current.messages[2]?.content).toBe("Hello world!");
    });
    it("should set isStreaming true during streaming then false when done", async () => {
      // Capture the hook's callbacks so the test controls stream timing manually.
      let capturedOnChunk: ((chunk: string) => void) | undefined;
      let capturedOnComplete: (() => void) | undefined;
      mockStreamChatMessage.mockImplementation(
        (
          _request,
          onChunk: (chunk: string) => void,
          onComplete: () => void,
          _onError: (err: Error) => void
        ): void => {
          capturedOnChunk = onChunk;
          capturedOnComplete = onComplete;
        }
      );
      mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));
      const { result } = renderHook(() => useChat());
      let sendDone = false;
      // Fire-and-forget: sendMessage stays pending until the stream completes.
      act(() => {
        void result.current.sendMessage("Hello").then(() => {
          sendDone = true;
        });
      });
      // Send first token (triggers streaming state)
      await act(async () => {
        capturedOnChunk?.("Hello");
        await Promise.resolve();
      });
      expect(result.current.isStreaming).toBe(true);
      // Complete the stream
      await act(async () => {
        capturedOnComplete?.();
        // Multiple microtask hops let the hook's post-completion work
        // (persistence, state resets) settle before asserting.
        // NOTE(review): hop count matches current implementation timing.
        await Promise.resolve();
        await Promise.resolve();
        await Promise.resolve();
      });
      expect(result.current.isStreaming).toBe(false);
      expect(sendDone).toBe(true);
    });
    it("should keep partial content on abort", async () => {
      let capturedOnChunk: ((chunk: string) => void) | undefined;
      mockStreamChatMessage.mockImplementation(
        (
          _request,
          onChunk: (chunk: string) => void,
          _onComplete: () => void,
          _onError: (err: Error) => void,
          signal?: AbortSignal
        ): void => {
          capturedOnChunk = onChunk;
          if (signal) {
            signal.addEventListener("abort", () => {
              // Stream aborted
            });
          }
        }
      );
      const { result } = renderHook(() => useChat());
      act(() => {
        void result.current.sendMessage("Hello");
      });
      await act(async () => {
        capturedOnChunk?.("Partial");
        capturedOnChunk?.(" content");
        await Promise.resolve();
      });
      await act(async () => {
        result.current.abortStream();
        await Promise.resolve();
      });
      // Aborting must stop streaming but preserve tokens received so far.
      expect(result.current.isStreaming).toBe(false);
      const assistantMsg = result.current.messages.find(
        (m) => m.role === "assistant" && m.id !== "welcome"
      );
      expect(assistantMsg?.content).toBe("Partial content");
    });
    it("should not send while streaming", async () => {
      let capturedOnChunk: ((chunk: string) => void) | undefined;
      mockStreamChatMessage.mockImplementation(
        (
          _request,
          onChunk: (chunk: string) => void,
          _onComplete: () => void,
          _onError: (err: Error) => void
        ): void => {
          capturedOnChunk = onChunk;
        }
      );
      const { result } = renderHook(() => useChat());
      act(() => {
        void result.current.sendMessage("First");
      });
      await act(async () => {
        capturedOnChunk?.("token");
        await Promise.resolve();
      });
      expect(result.current.isStreaming).toBe(true);
      // A second send while a stream is active must be a no-op.
      await act(async () => {
        await result.current.sendMessage("Second");
      });
      // Only one stream call
      expect(mockStreamChatMessage).toHaveBeenCalledTimes(1);
    });
  });
  describe("rapid sends - stale closure prevention", () => {
    it("should not lose messages on rapid sequential sends", async () => {
      // Use streaming success path for deterministic behavior
      makeStreamSucceed(["Response 1"]);
      mockCreateConversation.mockResolvedValue(createMockIdea("conv-1", "Test", ""));
      const { result } = renderHook(() => useChat());
      await act(async () => {
        await result.current.sendMessage("Message 1");
      });
      expect(result.current.messages).toHaveLength(3); // welcome + user1 + assistant1
      makeStreamSucceed(["Response 2"]);
      await act(async () => {
        await result.current.sendMessage("Message 2");
      });
      expect(result.current.messages).toHaveLength(5); // welcome + user1 + assistant1 + user2 + assistant2
      const userMessages = result.current.messages.filter((m) => m.role === "user");
      expect(userMessages).toHaveLength(2);
      expect(userMessages[0]?.content).toBe("Message 1");
      expect(userMessages[1]?.content).toBe("Message 2");
    });
    it("should use functional updates for all message state changes", async () => {
      mockCreateConversation.mockResolvedValue(createMockIdea("conv-1", "Test", ""));
      const { result } = renderHook(() => useChat());
      // Track message counts after each send; a stale-closure bug would
      // show up as a count that fails to grow by 2 (user + assistant).
      const messageCounts: number[] = [];
      makeStreamSucceed(["R1"]);
      await act(async () => {
        await result.current.sendMessage("Test 1");
      });
      messageCounts.push(result.current.messages.length);
      makeStreamSucceed(["R2"]);
      await act(async () => {
        await result.current.sendMessage("Test 2");
      });
      messageCounts.push(result.current.messages.length);
      makeStreamSucceed(["R3"]);
      await act(async () => {
        await result.current.sendMessage("Test 3");
      });
      messageCounts.push(result.current.messages.length);
      expect(messageCounts).toEqual([3, 5, 7]);
      expect(result.current.messages).toHaveLength(7);
      const userMessages = result.current.messages.filter((m) => m.role === "user");
      expect(userMessages).toHaveLength(3);
    });
    it("should maintain correct message order with ref-based state tracking", async () => {
      mockCreateConversation.mockResolvedValue(createMockIdea("conv-1", "Test", ""));
      const { result } = renderHook(() => useChat());
      makeStreamSucceed(["First response"]);
      await act(async () => {
        await result.current.sendMessage("Query 1");
      });
      makeStreamSucceed(["Second response"]);
      await act(async () => {
        await result.current.sendMessage("Query 2");
      });
      makeStreamSucceed(["Third response"]);
      await act(async () => {
        await result.current.sendMessage("Query 3");
      });
      // Messages must strictly alternate user/assistant after the welcome.
      const messages = result.current.messages;
      expect(messages[0]?.id).toBe("welcome");
      expect(messages[1]?.content).toBe("Query 1");
      expect(messages[2]?.content).toBe("First response");
      expect(messages[3]?.content).toBe("Query 2");
      expect(messages[4]?.content).toBe("Second response");
      expect(messages[5]?.content).toBe("Query 3");
      expect(messages[6]?.content).toBe("Third response");
    });
  });
  describe("loadConversation", () => {
    it("should load conversation from backend", async () => {
      // Conversations are persisted as a JSON-serialized Message[] in
      // the Idea's content field.
      const savedMessages: Message[] = [
        {
          id: "msg-1",
          role: "user",
          content: "Saved message",
          createdAt: new Date().toISOString(),
        },
        {
          id: "msg-2",
          role: "assistant",
          content: "Saved response",
          createdAt: new Date().toISOString(),
        },
      ];
      mockGetIdea.mockResolvedValueOnce(
        createMockIdea("conv-123", "My Conversation", JSON.stringify(savedMessages))
      );
      const { result } = renderHook(() => useChat());
      await act(async () => {
        await result.current.loadConversation("conv-123");
      });
      expect(result.current.messages).toHaveLength(2);
      expect(result.current.messages[0]?.content).toBe("Saved message");
      expect(result.current.conversationId).toBe("conv-123");
      expect(result.current.conversationTitle).toBe("My Conversation");
    });
    it("should fall back to welcome message when stored JSON is corrupted", async () => {
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      mockGetIdea.mockResolvedValueOnce(
        createMockIdea("conv-bad", "Corrupted", "not valid json {{{")
      );
      const { result } = renderHook(() => useChat());
      await act(async () => {
        await result.current.loadConversation("conv-bad");
      });
      expect(result.current.messages).toHaveLength(1);
      expect(result.current.messages[0]?.id).toBe("welcome");
    });
    it("should fall back to welcome message when stored data has wrong shape", async () => {
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      mockGetIdea.mockResolvedValueOnce(
        createMockIdea("conv-bad", "Wrong Shape", JSON.stringify({ not: "an array" }))
      );
      const { result } = renderHook(() => useChat());
      await act(async () => {
        await result.current.loadConversation("conv-bad");
      });
      expect(result.current.messages).toHaveLength(1);
      expect(result.current.messages[0]?.id).toBe("welcome");
    });
    it("should fall back to welcome message when messages have invalid roles", async () => {
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      // Role outside the "user"/"assistant" union must be rejected wholesale.
      const badMessages = [
        { id: "msg-1", role: "hacker", content: "Bad", createdAt: "2026-01-01" },
      ];
      mockGetIdea.mockResolvedValueOnce(
        createMockIdea("conv-bad", "Bad Roles", JSON.stringify(badMessages))
      );
      const { result } = renderHook(() => useChat());
      await act(async () => {
        await result.current.loadConversation("conv-bad");
      });
      expect(result.current.messages).toHaveLength(1);
      expect(result.current.messages[0]?.id).toBe("welcome");
    });
    it("should set sanitized error and call onError when getIdea rejects", async () => {
      const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
      mockGetIdea.mockRejectedValueOnce(new Error("Not found"));
      const onError = vi.fn();
      const { result } = renderHook(() => useChat({ onError }));
      await act(async () => {
        await result.current.loadConversation("conv-missing");
      });
      // User-visible error is sanitized; raw details go to structured logging.
      expect(result.current.error).toBe("Unable to load conversation. Please try again.");
      expect(onError).toHaveBeenCalledWith(expect.any(Error));
      expect(consoleSpy).toHaveBeenCalledWith(
        "Failed to load conversation",
        expect.objectContaining({
          errorType: "LOAD_ERROR",
          ideaId: "conv-missing",
          timestamp: expect.any(String) as string,
        })
      );
      expect(result.current.isLoading).toBe(false);
    });
    it("should not re-throw when getIdea rejects", async () => {
      vi.spyOn(console, "error").mockImplementation(() => undefined);
      mockGetIdea.mockRejectedValueOnce(new Error("Server error"));
      const { result } = renderHook(() => useChat());
      await act(async () => {
        await expect(result.current.loadConversation("conv-err")).resolves.toBeUndefined();
      });
      expect(result.current.error).toBe("Unable to load conversation. Please try again.");
    });
  });
  describe("startNewConversation", () => {
    it("should reset to initial state", async () => {
      makeStreamSucceed(["Response"]);
      mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));
      const { result } = renderHook(() => useChat());
      await act(async () => {
        await result.current.sendMessage("Hello");
      });
      expect(result.current.messages.length).toBeGreaterThan(1);
      act(() => {
        result.current.startNewConversation();
      });
      expect(result.current.messages).toHaveLength(1);
      expect(result.current.messages[0]?.id).toBe("welcome");
      expect(result.current.conversationId).toBeNull();
      expect(result.current.conversationTitle).toBeNull();
    });
  });
  describe("clearError", () => {
    it("should clear error state", async () => {
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      makeStreamFail(new Error("Test error"));
      const { result } = renderHook(() => useChat());
      await act(async () => {
        await result.current.sendMessage("Hello");
      });
      expect(result.current.error).toContain("Chat error:");
      act(() => {
        result.current.clearError();
      });
      expect(result.current.error).toBeNull();
    });
  });
  // Note: "error context logging" tests removed - the detailed logging with LLM_ERROR type
  // was removed in commit 44da50d when guest fallback mode was removed.
  // The implementation now uses simple console.warn for streaming failures.
  describe("LLM vs persistence error separation", () => {
    it("should show streaming error when stream fails", async () => {
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      makeStreamFail(new Error("Streaming not available"));
      const { result } = renderHook(() => useChat());
      await act(async () => {
        await result.current.sendMessage("Hello");
      });
      // Streaming fails, placeholder is removed, error is set
      expect(result.current.error).toContain("Chat error:");
      expect(result.current.messages).toHaveLength(2); // welcome + user (no assistant)
    });
    it("should keep assistant message visible when save fails (streaming path)", async () => {
      vi.spyOn(console, "error").mockImplementation(() => undefined);
      makeStreamSucceed(["Great answer!"]);
      mockCreateConversation.mockRejectedValueOnce(new Error("Database connection lost"));
      const { result } = renderHook(() => useChat());
      await act(async () => {
        await result.current.sendMessage("Hello");
      });
      // A persistence failure must not discard the successful LLM response.
      expect(result.current.messages).toHaveLength(3); // welcome + user + assistant
      expect(result.current.messages[2]?.content).toBe("Great answer!");
      expect(result.current.error).toContain("Message sent but failed to save");
    });
    it("should log with PERSISTENCE_ERROR type when save fails", async () => {
      const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      makeStreamSucceed(["Response"]);
      mockCreateConversation.mockRejectedValueOnce(new Error("DB error"));
      const { result } = renderHook(() => useChat());
      await act(async () => {
        await result.current.sendMessage("Test");
      });
      expect(consoleSpy).toHaveBeenCalledWith(
        "Failed to save conversation",
        expect.objectContaining({
          errorType: "PERSISTENCE_ERROR",
        })
      );
      // Persistence failures must never be misclassified as LLM errors.
      const llmErrorCalls = consoleSpy.mock.calls.filter((call) => {
        const ctx: unknown = call[1];
        return (
          typeof ctx === "object" &&
          ctx !== null &&
          "errorType" in ctx &&
          (ctx as { errorType: string }).errorType === "LLM_ERROR"
        );
      });
      expect(llmErrorCalls).toHaveLength(0);
    });
    it("should handle non-Error throws from persistence layer", async () => {
      vi.spyOn(console, "error").mockImplementation(() => undefined);
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      makeStreamSucceed(["OK"]);
      // Rejecting with a bare string verifies the hook normalizes
      // non-Error rejections before invoking onError.
      mockCreateConversation.mockRejectedValueOnce("DB string error");
      const onError = vi.fn();
      const { result } = renderHook(() => useChat({ onError }));
      await act(async () => {
        await result.current.sendMessage("Hello");
      });
      expect(result.current.messages[2]?.content).toBe("OK");
      expect(result.current.error).toBe("Message sent but failed to save. Please try again.");
      expect(onError).toHaveBeenCalledWith(expect.any(Error));
    });
  });
});