Some checks failed
ci/woodpecker/push/web Pipeline failed
Co-authored-by: Jason Woltje <jason@diversecanvas.com> Co-committed-by: Jason Woltje <jason@diversecanvas.com>
866 lines
29 KiB
TypeScript
866 lines
29 KiB
TypeScript
/**
 * @file useChat.test.ts
 * @description Tests for the useChat hook that manages chat state and LLM interactions
 */
|
|
|
|
import { renderHook, act } from "@testing-library/react";
|
|
import { describe, it, expect, beforeEach, vi, afterEach, type MockedFunction } from "vitest";
|
|
import { useChat, type Message } from "./useChat";
|
|
import * as chatApi from "@/lib/api/chat";
|
|
import * as ideasApi from "@/lib/api/ideas";
|
|
import type { Idea } from "@/lib/api/ideas";
|
|
import type { ChatResponse } from "@/lib/api/chat";
|
|
|
|
// Mock the API modules - use importOriginal to preserve types/enums
// NOTE: vi.mock calls are hoisted by vitest to the top of the module, so the
// factories must be self-contained (no references to surrounding variables).
vi.mock("@/lib/api/chat", () => ({
  sendChatMessage: vi.fn(),
  streamChatMessage: vi.fn(),
}));

// Partial mock: keep everything real from the ideas module, but stub the
// three persistence functions the hook calls.
vi.mock("@/lib/api/ideas", async (importOriginal) => {
  // eslint-disable-next-line @typescript-eslint/consistent-type-imports
  const actual = await importOriginal<typeof import("@/lib/api/ideas")>();
  return {
    ...actual,
    createConversation: vi.fn(),
    updateConversation: vi.fn(),
    getIdea: vi.fn(),
  };
});
|
|
|
|
const mockSendChatMessage = chatApi.sendChatMessage as MockedFunction<
|
|
typeof chatApi.sendChatMessage
|
|
>;
|
|
const mockStreamChatMessage = chatApi.streamChatMessage as MockedFunction<
|
|
typeof chatApi.streamChatMessage
|
|
>;
|
|
const mockCreateConversation = ideasApi.createConversation as MockedFunction<
|
|
typeof ideasApi.createConversation
|
|
>;
|
|
const mockUpdateConversation = ideasApi.updateConversation as MockedFunction<
|
|
typeof ideasApi.updateConversation
|
|
>;
|
|
const mockGetIdea = ideasApi.getIdea as MockedFunction<typeof ideasApi.getIdea>;
|
|
|
|
/**
|
|
* Creates a mock ChatResponse
|
|
*/
|
|
function createMockChatResponse(content: string, model = "llama3.2"): ChatResponse {
|
|
return {
|
|
message: { role: "assistant" as const, content },
|
|
model,
|
|
done: true,
|
|
promptEvalCount: 10,
|
|
evalCount: 5,
|
|
};
|
|
}
|
|
|
|
/**
|
|
* Creates a mock Idea
|
|
*/
|
|
function createMockIdea(id: string, title: string, content: string): Idea {
|
|
return {
|
|
id,
|
|
workspaceId: "workspace-1",
|
|
title,
|
|
content,
|
|
status: "CAPTURED",
|
|
priority: "medium",
|
|
tags: ["chat"],
|
|
metadata: { conversationType: "chat" },
|
|
creatorId: "user-1",
|
|
createdAt: new Date().toISOString(),
|
|
updatedAt: new Date().toISOString(),
|
|
} as Idea;
|
|
}
|
|
|
|
/**
|
|
* Configure streamChatMessage to immediately fail,
|
|
* triggering the fallback to sendChatMessage.
|
|
*/
|
|
function makeStreamFail(): void {
|
|
mockStreamChatMessage.mockImplementation(
|
|
(
|
|
_request,
|
|
_onChunk,
|
|
_onComplete,
|
|
onError: (err: Error) => void,
|
|
_signal?: AbortSignal
|
|
): void => {
|
|
// Call synchronously so the Promise rejects immediately
|
|
onError(new Error("Streaming not available"));
|
|
}
|
|
);
|
|
}
|
|
|
|
/**
|
|
* Configure streamChatMessage to succeed with given tokens.
|
|
* Uses a ref-style object to share cancellation state across the async boundary.
|
|
*/
|
|
function makeStreamSucceed(tokens: string[]): void {
|
|
mockStreamChatMessage.mockImplementation(
|
|
(
|
|
_request,
|
|
onChunk: (chunk: string) => void,
|
|
onComplete: () => void,
|
|
_onError: (err: Error) => void,
|
|
signal?: AbortSignal
|
|
): void => {
|
|
const state = { cancelled: false };
|
|
signal?.addEventListener("abort", () => {
|
|
state.cancelled = true;
|
|
});
|
|
const run = async (): Promise<void> => {
|
|
for (const token of tokens) {
|
|
if (state.cancelled) return;
|
|
await Promise.resolve();
|
|
onChunk(token);
|
|
}
|
|
if (!state.cancelled) {
|
|
onComplete();
|
|
}
|
|
};
|
|
void run();
|
|
}
|
|
);
|
|
}
|
|
|
|
describe("useChat", () => {
  beforeEach(() => {
    // Reset call history on all mocks between tests.
    vi.clearAllMocks();
    // Default: streaming fails so tests exercise the fallback path
    makeStreamFail();
  });

  afterEach(() => {
    // Undo any vi.spyOn(console, …) installed inside individual tests.
    vi.restoreAllMocks();
  });
|
|
|
|
describe("initial state", () => {
|
|
it("should initialize with welcome message", () => {
|
|
const { result } = renderHook(() => useChat());
|
|
|
|
expect(result.current.messages).toHaveLength(1);
|
|
expect(result.current.messages[0]?.role).toBe("assistant");
|
|
expect(result.current.messages[0]?.id).toBe("welcome");
|
|
expect(result.current.isLoading).toBe(false);
|
|
expect(result.current.isStreaming).toBe(false);
|
|
expect(result.current.error).toBeNull();
|
|
expect(result.current.conversationId).toBeNull();
|
|
});
|
|
|
|
it("should expose abortStream function", () => {
|
|
const { result } = renderHook(() => useChat());
|
|
expect(typeof result.current.abortStream).toBe("function");
|
|
});
|
|
});
|
|
|
|
  describe("sendMessage (fallback path when streaming fails)", () => {
    it("should add user message and assistant response via fallback", async () => {
      // Streaming is configured to fail in beforeEach, so the hook should
      // fall back to the non-streaming sendChatMessage API.
      mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Hello there!"));
      mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));

      const { result } = renderHook(() => useChat());

      await act(async () => {
        await result.current.sendMessage("Hello");
      });

      expect(result.current.messages).toHaveLength(3); // welcome + user + assistant
      expect(result.current.messages[1]?.role).toBe("user");
      expect(result.current.messages[1]?.content).toBe("Hello");
      expect(result.current.messages[2]?.role).toBe("assistant");
      expect(result.current.messages[2]?.content).toBe("Hello there!");
    });

    it("should not send empty messages", async () => {
      const { result } = renderHook(() => useChat());

      // Both empty and whitespace-only input must be rejected before any
      // API (streaming or fallback) is invoked.
      await act(async () => {
        await result.current.sendMessage("");
        await result.current.sendMessage(" ");
      });

      expect(mockSendChatMessage).not.toHaveBeenCalled();
      expect(mockStreamChatMessage).not.toHaveBeenCalled();
      expect(result.current.messages).toHaveLength(1); // only welcome
    });

    it("should handle API errors gracefully", async () => {
      // Silence the expected console noise from the forced failure.
      vi.spyOn(console, "error").mockImplementation(() => undefined);
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      mockSendChatMessage.mockRejectedValueOnce(new Error("API Error"));

      const onError = vi.fn();
      const { result } = renderHook(() => useChat({ onError }));

      await act(async () => {
        await result.current.sendMessage("Hello");
      });

      // User sees a sanitized error string, never the raw API error.
      expect(result.current.error).toBe("Unable to send message. Please try again.");
      expect(onError).toHaveBeenCalledWith(expect.any(Error));
      expect(result.current.messages).toHaveLength(3);
      expect(result.current.messages[2]?.content).toBe("Something went wrong. Please try again.");
    });
  });
|
|
|
|
  describe("streaming path", () => {
    it("should stream tokens into assistant message", async () => {
      const tokens = ["Hello", " world", "!"];
      makeStreamSucceed(tokens);
      mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));

      const { result } = renderHook(() => useChat());

      await act(async () => {
        await result.current.sendMessage("Hi");
      });

      // Tokens are concatenated into a single assistant message.
      expect(result.current.messages).toHaveLength(3);
      expect(result.current.messages[2]?.role).toBe("assistant");
      expect(result.current.messages[2]?.content).toBe("Hello world!");
    });

    it("should set isStreaming true during streaming then false when done", async () => {
      // Capture the hook's stream callbacks so the test can drive the
      // stream manually instead of letting makeStreamSucceed run it.
      let capturedOnChunk: ((chunk: string) => void) | undefined;
      let capturedOnComplete: (() => void) | undefined;

      mockStreamChatMessage.mockImplementation(
        (
          _request,
          onChunk: (chunk: string) => void,
          onComplete: () => void,
          _onError: (err: Error) => void
        ): void => {
          capturedOnChunk = onChunk;
          capturedOnComplete = onComplete;
        }
      );

      mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));

      const { result } = renderHook(() => useChat());

      // Fire-and-forget so the test can observe the mid-stream state.
      let sendDone = false;
      act(() => {
        void result.current.sendMessage("Hello").then(() => {
          sendDone = true;
        });
      });

      // Send first token (triggers streaming state)
      await act(async () => {
        capturedOnChunk?.("Hello");
        await Promise.resolve();
      });

      expect(result.current.isStreaming).toBe(true);

      // Complete the stream
      await act(async () => {
        capturedOnComplete?.();
        // Several microtask turns let the hook's post-stream bookkeeping
        // (save + state updates) settle before asserting.
        await Promise.resolve();
        await Promise.resolve();
        await Promise.resolve();
      });

      expect(result.current.isStreaming).toBe(false);
      expect(sendDone).toBe(true);
    });

    it("should keep partial content on abort", async () => {
      let capturedOnChunk: ((chunk: string) => void) | undefined;

      // Never complete the stream; the test aborts it mid-flight.
      mockStreamChatMessage.mockImplementation(
        (
          _request,
          onChunk: (chunk: string) => void,
          _onComplete: () => void,
          _onError: (err: Error) => void,
          signal?: AbortSignal
        ): void => {
          capturedOnChunk = onChunk;
          if (signal) {
            signal.addEventListener("abort", () => {
              // Stream aborted
            });
          }
        }
      );

      const { result } = renderHook(() => useChat());

      act(() => {
        void result.current.sendMessage("Hello");
      });

      await act(async () => {
        capturedOnChunk?.("Partial");
        capturedOnChunk?.(" content");
        await Promise.resolve();
      });

      await act(async () => {
        result.current.abortStream();
        await Promise.resolve();
      });

      // Aborting must stop streaming but keep what was already received.
      expect(result.current.isStreaming).toBe(false);
      const assistantMsg = result.current.messages.find(
        (m) => m.role === "assistant" && m.id !== "welcome"
      );
      expect(assistantMsg?.content).toBe("Partial content");
    });

    it("should not send while streaming", async () => {
      let capturedOnChunk: ((chunk: string) => void) | undefined;

      mockStreamChatMessage.mockImplementation(
        (
          _request,
          onChunk: (chunk: string) => void,
          _onComplete: () => void,
          _onError: (err: Error) => void
        ): void => {
          capturedOnChunk = onChunk;
        }
      );

      const { result } = renderHook(() => useChat());

      act(() => {
        void result.current.sendMessage("First");
      });

      await act(async () => {
        capturedOnChunk?.("token");
        await Promise.resolve();
      });

      expect(result.current.isStreaming).toBe(true);

      // A second send while the first stream is open must be a no-op.
      await act(async () => {
        await result.current.sendMessage("Second");
      });

      // Only one stream call
      expect(mockStreamChatMessage).toHaveBeenCalledTimes(1);
    });
  });
|
|
|
|
  describe("rapid sends - stale closure prevention", () => {
    it("should not lose messages on rapid sequential sends", async () => {
      // Use streaming success path for deterministic behavior
      makeStreamSucceed(["Response 1"]);

      mockCreateConversation.mockResolvedValue(createMockIdea("conv-1", "Test", ""));

      const { result } = renderHook(() => useChat());

      await act(async () => {
        await result.current.sendMessage("Message 1");
      });

      expect(result.current.messages).toHaveLength(3); // welcome + user1 + assistant1

      // Swap in a fresh stream response for the second send.
      makeStreamSucceed(["Response 2"]);

      await act(async () => {
        await result.current.sendMessage("Message 2");
      });

      expect(result.current.messages).toHaveLength(5); // welcome + user1 + assistant1 + user2 + assistant2

      const userMessages = result.current.messages.filter((m) => m.role === "user");
      expect(userMessages).toHaveLength(2);
      expect(userMessages[0]?.content).toBe("Message 1");
      expect(userMessages[1]?.content).toBe("Message 2");
    });

    it("should use functional updates for all message state changes", async () => {
      mockCreateConversation.mockResolvedValue(createMockIdea("conv-1", "Test", ""));

      const { result } = renderHook(() => useChat());

      // Record the message count after each send; a stale-closure bug would
      // show up as a count that fails to grow by 2 (user + assistant).
      const messageCounts: number[] = [];

      makeStreamSucceed(["R1"]);
      await act(async () => {
        await result.current.sendMessage("Test 1");
      });
      messageCounts.push(result.current.messages.length);

      makeStreamSucceed(["R2"]);
      await act(async () => {
        await result.current.sendMessage("Test 2");
      });
      messageCounts.push(result.current.messages.length);

      makeStreamSucceed(["R3"]);
      await act(async () => {
        await result.current.sendMessage("Test 3");
      });
      messageCounts.push(result.current.messages.length);

      expect(messageCounts).toEqual([3, 5, 7]);
      expect(result.current.messages).toHaveLength(7);
      const userMessages = result.current.messages.filter((m) => m.role === "user");
      expect(userMessages).toHaveLength(3);
    });

    it("should maintain correct message order with ref-based state tracking", async () => {
      mockCreateConversation.mockResolvedValue(createMockIdea("conv-1", "Test", ""));

      const { result } = renderHook(() => useChat());

      makeStreamSucceed(["First response"]);
      await act(async () => {
        await result.current.sendMessage("Query 1");
      });

      makeStreamSucceed(["Second response"]);
      await act(async () => {
        await result.current.sendMessage("Query 2");
      });

      makeStreamSucceed(["Third response"]);
      await act(async () => {
        await result.current.sendMessage("Query 3");
      });

      // Strict interleaving: welcome, then user/assistant pairs in send order.
      const messages = result.current.messages;
      expect(messages[0]?.id).toBe("welcome");
      expect(messages[1]?.content).toBe("Query 1");
      expect(messages[2]?.content).toBe("First response");
      expect(messages[3]?.content).toBe("Query 2");
      expect(messages[4]?.content).toBe("Second response");
      expect(messages[5]?.content).toBe("Query 3");
      expect(messages[6]?.content).toBe("Third response");
    });
  });
|
|
|
|
  describe("loadConversation", () => {
    it("should load conversation from backend", async () => {
      // Conversations are stored as a JSON array of Message in idea.content.
      const savedMessages: Message[] = [
        {
          id: "msg-1",
          role: "user",
          content: "Saved message",
          createdAt: new Date().toISOString(),
        },
        {
          id: "msg-2",
          role: "assistant",
          content: "Saved response",
          createdAt: new Date().toISOString(),
        },
      ];

      mockGetIdea.mockResolvedValueOnce(
        createMockIdea("conv-123", "My Conversation", JSON.stringify(savedMessages))
      );

      const { result } = renderHook(() => useChat());

      await act(async () => {
        await result.current.loadConversation("conv-123");
      });

      expect(result.current.messages).toHaveLength(2);
      expect(result.current.messages[0]?.content).toBe("Saved message");
      expect(result.current.conversationId).toBe("conv-123");
      expect(result.current.conversationTitle).toBe("My Conversation");
    });

    it("should fall back to welcome message when stored JSON is corrupted", async () => {
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      mockGetIdea.mockResolvedValueOnce(
        createMockIdea("conv-bad", "Corrupted", "not valid json {{{")
      );

      const { result } = renderHook(() => useChat());

      await act(async () => {
        await result.current.loadConversation("conv-bad");
      });

      // Unparseable content resets to the initial welcome state.
      expect(result.current.messages).toHaveLength(1);
      expect(result.current.messages[0]?.id).toBe("welcome");
    });

    it("should fall back to welcome message when stored data has wrong shape", async () => {
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      // Valid JSON, but not an array of messages.
      mockGetIdea.mockResolvedValueOnce(
        createMockIdea("conv-bad", "Wrong Shape", JSON.stringify({ not: "an array" }))
      );

      const { result } = renderHook(() => useChat());

      await act(async () => {
        await result.current.loadConversation("conv-bad");
      });

      expect(result.current.messages).toHaveLength(1);
      expect(result.current.messages[0]?.id).toBe("welcome");
    });

    it("should fall back to welcome message when messages have invalid roles", async () => {
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      // Structurally valid array, but the role is outside user/assistant —
      // the hook must validate message shape, not just array-ness.
      const badMessages = [
        { id: "msg-1", role: "hacker", content: "Bad", createdAt: "2026-01-01" },
      ];
      mockGetIdea.mockResolvedValueOnce(
        createMockIdea("conv-bad", "Bad Roles", JSON.stringify(badMessages))
      );

      const { result } = renderHook(() => useChat());

      await act(async () => {
        await result.current.loadConversation("conv-bad");
      });

      expect(result.current.messages).toHaveLength(1);
      expect(result.current.messages[0]?.id).toBe("welcome");
    });

    it("should set sanitized error and call onError when getIdea rejects", async () => {
      const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
      mockGetIdea.mockRejectedValueOnce(new Error("Not found"));

      const onError = vi.fn();
      const { result } = renderHook(() => useChat({ onError }));

      await act(async () => {
        await result.current.loadConversation("conv-missing");
      });

      // User-facing error is sanitized; the structured context goes to the
      // console for diagnostics.
      expect(result.current.error).toBe("Unable to load conversation. Please try again.");
      expect(onError).toHaveBeenCalledWith(expect.any(Error));
      expect(consoleSpy).toHaveBeenCalledWith(
        "Failed to load conversation",
        expect.objectContaining({
          errorType: "LOAD_ERROR",
          ideaId: "conv-missing",
          timestamp: expect.any(String) as string,
        })
      );
      expect(result.current.isLoading).toBe(false);
    });

    it("should not re-throw when getIdea rejects", async () => {
      vi.spyOn(console, "error").mockImplementation(() => undefined);
      mockGetIdea.mockRejectedValueOnce(new Error("Server error"));

      const { result } = renderHook(() => useChat());

      // loadConversation must swallow the rejection and resolve normally.
      await act(async () => {
        await expect(result.current.loadConversation("conv-err")).resolves.toBeUndefined();
      });

      expect(result.current.error).toBe("Unable to load conversation. Please try again.");
    });
  });
|
|
|
|
describe("startNewConversation", () => {
|
|
it("should reset to initial state", async () => {
|
|
makeStreamSucceed(["Response"]);
|
|
mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));
|
|
|
|
const { result } = renderHook(() => useChat());
|
|
|
|
await act(async () => {
|
|
await result.current.sendMessage("Hello");
|
|
});
|
|
|
|
expect(result.current.messages.length).toBeGreaterThan(1);
|
|
|
|
act(() => {
|
|
result.current.startNewConversation();
|
|
});
|
|
|
|
expect(result.current.messages).toHaveLength(1);
|
|
expect(result.current.messages[0]?.id).toBe("welcome");
|
|
expect(result.current.conversationId).toBeNull();
|
|
expect(result.current.conversationTitle).toBeNull();
|
|
});
|
|
});
|
|
|
|
describe("clearError", () => {
|
|
it("should clear error state", async () => {
|
|
vi.spyOn(console, "error").mockImplementation(() => undefined);
|
|
vi.spyOn(console, "warn").mockImplementation(() => undefined);
|
|
mockSendChatMessage.mockRejectedValueOnce(new Error("Test error"));
|
|
|
|
const { result } = renderHook(() => useChat());
|
|
|
|
await act(async () => {
|
|
await result.current.sendMessage("Hello");
|
|
});
|
|
|
|
expect(result.current.error).toBe("Unable to send message. Please try again.");
|
|
|
|
act(() => {
|
|
result.current.clearError();
|
|
});
|
|
|
|
expect(result.current.error).toBeNull();
|
|
});
|
|
});
|
|
|
|
  describe("error context logging", () => {
    it("should log comprehensive error context when sendMessage fails", async () => {
      const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      mockSendChatMessage.mockRejectedValueOnce(new Error("LLM timeout"));

      const { result } = renderHook(() => useChat({ model: "llama3.2" }));

      await act(async () => {
        await result.current.sendMessage("Hello world");
      });

      // The structured log must carry enough context to diagnose the failure
      // without exposing anything to the user.
      expect(consoleSpy).toHaveBeenCalledWith(
        "Failed to send chat message",
        expect.objectContaining({
          errorType: "LLM_ERROR",
          messageLength: 11,
          messagePreview: "Hello world",
          model: "llama3.2",
          timestamp: expect.any(String) as string,
        })
      );
    });

    it("should truncate long message previews to 50 characters", async () => {
      const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      mockSendChatMessage.mockRejectedValueOnce(new Error("Failed"));

      const longMessage = "A".repeat(100);
      const { result } = renderHook(() => useChat());

      await act(async () => {
        await result.current.sendMessage(longMessage);
      });

      // Preview is capped at 50 chars while the full length is reported.
      expect(consoleSpy).toHaveBeenCalledWith(
        "Failed to send chat message",
        expect.objectContaining({
          messagePreview: "A".repeat(50),
          messageLength: 100,
        })
      );
    });

    it("should include message count in error context", async () => {
      const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
      vi.spyOn(console, "warn").mockImplementation(() => undefined);

      // First successful message via streaming
      makeStreamSucceed(["OK"]);
      mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));

      const { result } = renderHook(() => useChat());

      await act(async () => {
        await result.current.sendMessage("First");
      });

      // Second message: streaming fails, fallback fails
      makeStreamFail();
      mockSendChatMessage.mockRejectedValueOnce(new Error("Fail"));

      await act(async () => {
        await result.current.sendMessage("Second");
      });

      expect(consoleSpy).toHaveBeenCalledWith(
        "Failed to send chat message",
        expect.objectContaining({
          messageCount: expect.any(Number) as number,
        })
      );
    });
  });
|
|
|
|
  describe("LLM vs persistence error separation", () => {
    it("should show LLM error and add error message to chat when API fails", async () => {
      vi.spyOn(console, "error").mockImplementation(() => undefined);
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      mockSendChatMessage.mockRejectedValueOnce(new Error("Model not available"));

      const { result } = renderHook(() => useChat());

      await act(async () => {
        await result.current.sendMessage("Hello");
      });

      expect(result.current.error).toBe("Unable to send message. Please try again.");
      expect(result.current.messages).toHaveLength(3);
      expect(result.current.messages[2]?.content).toBe("Something went wrong. Please try again.");
    });

    it("should keep assistant message visible when save fails (streaming path)", async () => {
      vi.spyOn(console, "error").mockImplementation(() => undefined);
      // LLM succeeds, persistence fails: the reply must NOT be discarded.
      makeStreamSucceed(["Great answer!"]);
      mockCreateConversation.mockRejectedValueOnce(new Error("Database connection lost"));

      const { result } = renderHook(() => useChat());

      await act(async () => {
        await result.current.sendMessage("Hello");
      });

      expect(result.current.messages).toHaveLength(3); // welcome + user + assistant
      expect(result.current.messages[2]?.content).toBe("Great answer!");
      expect(result.current.error).toContain("Message sent but failed to save");
    });

    it("should keep assistant message visible when save fails (fallback path)", async () => {
      vi.spyOn(console, "error").mockImplementation(() => undefined);
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      // Same scenario via the non-streaming fallback.
      mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Great answer!"));
      mockCreateConversation.mockRejectedValueOnce(new Error("Database connection lost"));

      const { result } = renderHook(() => useChat());

      await act(async () => {
        await result.current.sendMessage("Hello");
      });

      expect(result.current.messages).toHaveLength(3);
      expect(result.current.messages[2]?.content).toBe("Great answer!");
      expect(result.current.error).toContain("Message sent but failed to save");
    });

    it("should log with PERSISTENCE_ERROR type when save fails", async () => {
      const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Response"));
      mockCreateConversation.mockRejectedValueOnce(new Error("DB error"));

      const { result } = renderHook(() => useChat());

      await act(async () => {
        await result.current.sendMessage("Test");
      });

      expect(consoleSpy).toHaveBeenCalledWith(
        "Failed to save conversation",
        expect.objectContaining({
          errorType: "PERSISTENCE_ERROR",
        })
      );

      // A save failure must NOT be misclassified as an LLM failure.
      const llmErrorCalls = consoleSpy.mock.calls.filter((call) => {
        const ctx: unknown = call[1];
        return (
          typeof ctx === "object" &&
          ctx !== null &&
          "errorType" in ctx &&
          (ctx as { errorType: string }).errorType === "LLM_ERROR"
        );
      });
      expect(llmErrorCalls).toHaveLength(0);
    });

    it("should use different user-facing messages for LLM vs save errors", async () => {
      vi.spyOn(console, "error").mockImplementation(() => undefined);
      vi.spyOn(console, "warn").mockImplementation(() => undefined);

      // LLM error path (streaming fails + fallback fails)
      mockSendChatMessage.mockRejectedValueOnce(new Error("Timeout"));
      const { result: result1 } = renderHook(() => useChat());

      await act(async () => {
        await result1.current.sendMessage("Test");
      });

      const llmError = result1.current.error;

      // Save error path (streaming succeeds, save fails)
      makeStreamSucceed(["OK"]);
      mockCreateConversation.mockRejectedValueOnce(new Error("DB down"));
      const { result: result2 } = renderHook(() => useChat());

      await act(async () => {
        await result2.current.sendMessage("Test");
      });

      const saveError = result2.current.error;

      expect(llmError).toBe("Unable to send message. Please try again.");
      expect(saveError).toContain("Message sent but failed to save");
      expect(llmError).not.toEqual(saveError);
    });

    it("should handle non-Error throws from LLM API", async () => {
      vi.spyOn(console, "error").mockImplementation(() => undefined);
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      // Reject with a plain string; the hook must normalize it to an Error.
      mockSendChatMessage.mockRejectedValueOnce("string error");

      const onError = vi.fn();
      const { result } = renderHook(() => useChat({ onError }));

      await act(async () => {
        await result.current.sendMessage("Hello");
      });

      expect(result.current.error).toBe("Unable to send message. Please try again.");
      expect(onError).toHaveBeenCalledWith(expect.any(Error));
      expect(result.current.messages[2]?.content).toBe("Something went wrong. Please try again.");
    });

    it("should handle non-Error throws from persistence layer", async () => {
      vi.spyOn(console, "error").mockImplementation(() => undefined);
      vi.spyOn(console, "warn").mockImplementation(() => undefined);
      makeStreamSucceed(["OK"]);
      mockCreateConversation.mockRejectedValueOnce("DB string error");

      const onError = vi.fn();
      const { result } = renderHook(() => useChat({ onError }));

      await act(async () => {
        await result.current.sendMessage("Hello");
      });

      expect(result.current.messages[2]?.content).toBe("OK");
      expect(result.current.error).toBe("Message sent but failed to save. Please try again.");
      expect(onError).toHaveBeenCalledWith(expect.any(Error));
    });

    it("should handle updateConversation failure for existing conversations", async () => {
      vi.spyOn(console, "error").mockImplementation(() => undefined);
      vi.spyOn(console, "warn").mockImplementation(() => undefined);

      // First message via fallback
      mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("First response"));
      mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));

      const { result } = renderHook(() => useChat());

      await act(async () => {
        await result.current.sendMessage("First");
      });

      expect(result.current.conversationId).toBe("conv-1");

      // Second message via fallback, updateConversation fails
      makeStreamFail();
      mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Second response"));
      mockUpdateConversation.mockRejectedValueOnce(new Error("Connection reset"));

      await act(async () => {
        await result.current.sendMessage("Second");
      });

      // The reply stays visible even though the update was not persisted.
      const assistantMessages = result.current.messages.filter(
        (m) => m.role === "assistant" && m.id !== "welcome"
      );
      expect(assistantMessages[assistantMessages.length - 1]?.content).toBe("Second response");
      expect(result.current.error).toBe("Message sent but failed to save. Please try again.");
    });
  });
});
|