feat(web): Integrate M4-LLM error handling improvements

Port high-value features from the work/m4-llm branch into develop's
security-hardened codebase:

- Separate LLM vs persistence error handling in useChat (shows
  assistant response even when save fails)
- Add structured error context logging with errorType, messagePreview,
  messageCount fields for debugging
- Enforce state invariant in useChatOverlay: cannot be minimized when
  closed
- Add onStorageError callback with user-friendly messages and
  per-error-type deduplication
- Add error logging to Chat imperative handle methods
- Create Chat.test.tsx with loadConversation failure mode tests

Skipped from work/m4-llm (superseded by develop):
- AbortSignal timeout (develop has centralized client timeout)
- Custom toast system (duplicates @mosaic/ui)
- ErrorBoundary (develop has its own)
- WebSocket typed events (develop's ref-based pattern is superior)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
Jason Woltje committed 2026-02-06 20:04:53 -06:00
parent ac796072d8 · commit 893a139087
8 changed files with 598 additions and 17 deletions
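The core behavioral change is the LLM-vs-persistence split called out in the first bullet. In outline it works as follows (a condensed sketch with illustrative stand-in names; the hook's actual code is in the useChat diff below):

// Sketch of the two-level error split; every identifier here is an
// illustrative stand-in, not the hook's real internals.
type Deps = {
  sendChatMessage: (content: string) => Promise<string>;
  saveConversation: (reply: string) => Promise<void>;
  showAssistantMessage: (reply: string) => void;
  setError: (message: string) => void;
};

async function sendAndPersist(content: string, deps: Deps): Promise<void> {
  try {
    const reply = await deps.sendChatMessage(content); // LLM call
    deps.showAssistantMessage(reply); // shown even if the save below fails
    try {
      await deps.saveConversation(reply); // persistence errors stay in this inner scope
    } catch (saveErr) {
      const msg = saveErr instanceof Error ? saveErr.message : "Unknown persistence error";
      deps.setError(`Message sent but failed to save: ${msg}`);
    }
  } catch (err) {
    // Only reachable for LLM failures; save errors never propagate here.
    deps.setError(err instanceof Error ? err.message : "Failed to send message");
  }
}

Because the save runs in its own try/catch, a database failure can no longer discard an assistant response the user already received.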


@@ -420,4 +420,175 @@ describe("useChat", () => {
       expect(result.current.error).toBeNull();
     });
   });
+
+  describe("error context logging", () => {
+    it("should log comprehensive error context when sendMessage fails", async () => {
+      const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
+      mockSendChatMessage.mockRejectedValueOnce(new Error("LLM timeout"));
+
+      const { result } = renderHook(() => useChat({ model: "llama3.2" }));
+
+      await act(async () => {
+        await result.current.sendMessage("Hello world");
+      });
+
+      expect(consoleSpy).toHaveBeenCalledWith(
+        "Failed to send chat message",
+        expect.objectContaining({
+          errorType: "LLM_ERROR",
+          messageLength: 11,
+          messagePreview: "Hello world",
+          model: "llama3.2",
+          timestamp: expect.any(String) as string,
+        })
+      );
+    });
+
+    it("should truncate long message previews to 50 characters", async () => {
+      const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
+      mockSendChatMessage.mockRejectedValueOnce(new Error("Failed"));
+
+      const longMessage = "A".repeat(100);
+      const { result } = renderHook(() => useChat());
+
+      await act(async () => {
+        await result.current.sendMessage(longMessage);
+      });
+
+      expect(consoleSpy).toHaveBeenCalledWith(
+        "Failed to send chat message",
+        expect.objectContaining({
+          messagePreview: "A".repeat(50),
+          messageLength: 100,
+        })
+      );
+    });
+
+    it("should include message count in error context", async () => {
+      const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
+
+      // First successful message
+      mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("OK"));
+      mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));
+
+      const { result } = renderHook(() => useChat());
+
+      await act(async () => {
+        await result.current.sendMessage("First");
+      });
+
+      // Second message fails
+      mockSendChatMessage.mockRejectedValueOnce(new Error("Fail"));
+
+      await act(async () => {
+        await result.current.sendMessage("Second");
+      });
+
+      // messageCount should reflect messages including the new user message
+      expect(consoleSpy).toHaveBeenCalledWith(
+        "Failed to send chat message",
+        expect.objectContaining({
+          messageCount: expect.any(Number) as number,
+        })
+      );
+    });
+  });
+
+  describe("LLM vs persistence error separation", () => {
+    it("should show LLM error and add error message to chat when API fails", async () => {
+      vi.spyOn(console, "error").mockImplementation(() => undefined);
+      mockSendChatMessage.mockRejectedValueOnce(new Error("Model not available"));
+
+      const { result } = renderHook(() => useChat());
+
+      await act(async () => {
+        await result.current.sendMessage("Hello");
+      });
+
+      expect(result.current.error).toBe("Model not available");
+      // Should have welcome + user + error message
+      expect(result.current.messages).toHaveLength(3);
+      expect(result.current.messages[2]?.content).toContain("Error: Model not available");
+    });
+
+    it("should keep assistant message visible when save fails", async () => {
+      vi.spyOn(console, "error").mockImplementation(() => undefined);
+      mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Great answer!"));
+      mockCreateConversation.mockRejectedValueOnce(new Error("Database connection lost"));
+
+      const { result } = renderHook(() => useChat());
+
+      await act(async () => {
+        await result.current.sendMessage("Hello");
+      });
+
+      // Assistant message should still be visible
+      expect(result.current.messages).toHaveLength(3); // welcome + user + assistant
+      expect(result.current.messages[2]?.content).toBe("Great answer!");
+      // Error should indicate persistence failure
+      expect(result.current.error).toContain("Message sent but failed to save");
+    });
+
+    it("should log with PERSISTENCE_ERROR type when save fails", async () => {
+      const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
+      mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Response"));
+      mockCreateConversation.mockRejectedValueOnce(new Error("DB error"));
+
+      const { result } = renderHook(() => useChat());
+
+      await act(async () => {
+        await result.current.sendMessage("Test");
+      });
+
+      expect(consoleSpy).toHaveBeenCalledWith(
+        "Failed to save conversation",
+        expect.objectContaining({
+          errorType: "PERSISTENCE_ERROR",
+        })
+      );
+
+      // Should NOT have logged as LLM_ERROR
+      const llmErrorCalls = consoleSpy.mock.calls.filter((call) => {
+        const ctx: unknown = call[1];
+        return (
+          typeof ctx === "object" &&
+          ctx !== null &&
+          "errorType" in ctx &&
+          (ctx as { errorType: string }).errorType === "LLM_ERROR"
+        );
+      });
+      expect(llmErrorCalls).toHaveLength(0);
+    });
+
+    it("should use different user-facing messages for LLM vs save errors", async () => {
+      vi.spyOn(console, "error").mockImplementation(() => undefined);
+
+      // Test LLM error message
+      mockSendChatMessage.mockRejectedValueOnce(new Error("Timeout"));
+      const { result: result1 } = renderHook(() => useChat());
+
+      await act(async () => {
+        await result1.current.sendMessage("Test");
+      });
+      const llmError = result1.current.error;
+
+      // Test save error message
+      mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("OK"));
+      mockCreateConversation.mockRejectedValueOnce(new Error("DB down"));
+      const { result: result2 } = renderHook(() => useChat());
+
+      await act(async () => {
+        await result2.current.sendMessage("Test");
+      });
+      const saveError = result2.current.error;
+
+      // They should be different
+      expect(llmError).toBe("Timeout");
+      expect(saveError).toContain("Message sent but failed to save");
+      expect(llmError).not.toEqual(saveError);
+    });
+  });
 });
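Read together, the assertions above pin down the shape of the logged context. As a reference only (the hook logs a plain object literal and does not export such a type), it could be written as:

// Inferred from the tests above and the useChat diff below; not an
// actual exported type in the codebase.
interface ChatErrorContext {
  error: unknown; // the thrown value itself
  errorType: "LLM_ERROR" | "PERSISTENCE_ERROR"; // which stage failed
  conversationId: string | null; // current conversation, if any (type assumed)
  messageLength?: number; // full length of the user message
  messagePreview?: string; // first 50 characters at most
  model?: string; // e.g. "llama3.2"
  messageCount?: number; // includes the just-added user message
  timestamp?: string; // ISO string from new Date().toISOString()
}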


@@ -208,12 +208,33 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
         ? generateTitle(content)
         : (conversationTitle ?? "Chat Conversation");

-      // Save conversation
-      await saveConversation(finalMessages, title);
+      // Save conversation (separate error handling from LLM errors)
+      try {
+        await saveConversation(finalMessages, title);
+      } catch (saveErr) {
+        const saveErrorMsg =
+          saveErr instanceof Error ? saveErr.message : "Unknown persistence error";
+        setError(`Message sent but failed to save: ${saveErrorMsg}`);
+        console.error("Failed to save conversation", {
+          error: saveErr,
+          errorType: "PERSISTENCE_ERROR",
+          conversationId,
+        });
+      }
     } catch (err) {
       const errorMsg = err instanceof Error ? err.message : "Failed to send message";
       setError(errorMsg);
       onError?.(err instanceof Error ? err : new Error(errorMsg));
+
+      console.error("Failed to send chat message", {
+        error: err,
+        errorType: "LLM_ERROR",
+        conversationId,
+        messageLength: content.length,
+        messagePreview: content.substring(0, 50),
+        model,
+        messageCount: messagesRef.current.length,
+        timestamp: new Date().toISOString(),
+      });

       // Add error message to chat
       const errorMessage: Message = {

@@ -260,7 +260,7 @@ describe("useChatOverlay", () => {
       expect(result.current.isOpen).toBe(false);
     });

-    it("should not change minimized state when toggling", () => {
+    it("should reset minimized state when toggling closed", () => {
       const { result } = renderHook(() => useChatOverlay());

       act(() => {
@@ -274,8 +274,24 @@ describe("useChatOverlay", () => {
         result.current.toggle();
       });

-      // Should close but keep minimized state for next open
+      // Should close and reset minimized (invariant: cannot be minimized when closed)
       expect(result.current.isOpen).toBe(false);
+      expect(result.current.isMinimized).toBe(false);
     });
+
+    it("should preserve minimized state when toggling open", () => {
+      const { result } = renderHook(() => useChatOverlay());
+
+      // Start closed
+      expect(result.current.isOpen).toBe(false);
+
+      act(() => {
+        result.current.toggle();
+      });
+
+      // Should open and not be minimized
+      expect(result.current.isOpen).toBe(true);
+      expect(result.current.isMinimized).toBe(false);
+    });
   });
@@ -305,4 +321,122 @@ describe("useChatOverlay", () => {
       expect(result.current.isMinimized).toBe(false);
     });
   });
+
+  describe("state invariant enforcement", () => {
+    it("should reject invalid localStorage state: closed AND minimized", () => {
+      vi.spyOn(console, "warn").mockImplementation(() => undefined);
+
+      // This violates the invariant: cannot be minimized when closed
+      localStorageMock.setItem(
+        "chatOverlayState",
+        JSON.stringify({ isOpen: false, isMinimized: true })
+      );
+
+      const { result } = renderHook(() => useChatOverlay());
+
+      // Should fall back to defaults since invariant is violated
+      expect(result.current.isOpen).toBe(false);
+      expect(result.current.isMinimized).toBe(false);
+    });
+
+    it("should accept valid state: open and minimized", () => {
+      localStorageMock.setItem(
+        "chatOverlayState",
+        JSON.stringify({ isOpen: true, isMinimized: true })
+      );
+
+      const { result } = renderHook(() => useChatOverlay());
+
+      expect(result.current.isOpen).toBe(true);
+      expect(result.current.isMinimized).toBe(true);
+    });
+
+    it("should reset isMinimized when closing via close()", () => {
+      const { result } = renderHook(() => useChatOverlay());
+
+      act(() => {
+        result.current.open();
+        result.current.minimize();
+      });
+      expect(result.current.isMinimized).toBe(true);
+
+      act(() => {
+        result.current.close();
+      });
+
+      expect(result.current.isOpen).toBe(false);
+      expect(result.current.isMinimized).toBe(false);
+    });
+  });
+
+  describe("storage error handling", () => {
+    it("should call onStorageError when localStorage save fails", () => {
+      const onStorageError = vi.fn();
+      const quotaError = new DOMException("Storage full", "QuotaExceededError");
+
+      const { result } = renderHook(() => useChatOverlay({ onStorageError }));
+
+      // Make setItem throw
+      vi.spyOn(window.localStorage, "setItem").mockImplementation(() => {
+        throw quotaError;
+      });
+      vi.spyOn(console, "warn").mockImplementation(() => undefined);
+
+      act(() => {
+        result.current.open();
+      });
+
+      expect(onStorageError).toHaveBeenCalledWith(
+        "Storage is full. Chat state may not persist across page refreshes."
+      );
+    });
+
+    it("should show appropriate message for SecurityError", () => {
+      const onStorageError = vi.fn();
+      const securityError = new DOMException("Blocked", "SecurityError");
+
+      const { result } = renderHook(() => useChatOverlay({ onStorageError }));
+
+      vi.spyOn(window.localStorage, "setItem").mockImplementation(() => {
+        throw securityError;
+      });
+      vi.spyOn(console, "warn").mockImplementation(() => undefined);
+
+      act(() => {
+        result.current.open();
+      });
+
+      expect(onStorageError).toHaveBeenCalledWith(
+        "Storage is unavailable in this browser mode. Chat state will not persist."
+      );
+    });
+
+    it("should only notify once per error type", () => {
+      const onStorageError = vi.fn();
+      const quotaError = new DOMException("Storage full", "QuotaExceededError");
+
+      // Set up spy BEFORE rendering so all saves (including initial) throw
+      vi.spyOn(window.localStorage, "setItem").mockImplementation(() => {
+        throw quotaError;
+      });
+      vi.spyOn(console, "warn").mockImplementation(() => undefined);
+
+      const { result } = renderHook(() => useChatOverlay({ onStorageError }));
+      // Initial render triggers a save which throws → first notification
+
+      // Multiple state changes that trigger more saves
+      act(() => {
+        result.current.open();
+      });
+      act(() => {
+        result.current.minimize();
+      });
+      act(() => {
+        result.current.expand();
+      });
+
+      // Should only have been called once for QuotaExceededError despite multiple failures
+      expect(onStorageError).toHaveBeenCalledTimes(1);
+    });
+  });
 });
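One edge the suite leaves implicit: deduplication is keyed on the DOMException name, so a different error type should still notify exactly once more. A hypothetical follow-on test (not part of this commit) could cover it:

// Hypothetical addition: per-type dedup means a new error *type*
// still triggers one fresh notification.
it("should notify again for a different error type", () => {
  const onStorageError = vi.fn();
  const setItemSpy = vi.spyOn(window.localStorage, "setItem").mockImplementation(() => {
    throw new DOMException("Storage full", "QuotaExceededError");
  });
  vi.spyOn(console, "warn").mockImplementation(() => undefined);

  const { result } = renderHook(() => useChatOverlay({ onStorageError }));

  act(() => {
    result.current.open(); // QuotaExceededError → notification #1
  });

  setItemSpy.mockImplementation(() => {
    throw new DOMException("Blocked", "SecurityError");
  });

  act(() => {
    result.current.minimize(); // SecurityError → notification #2, then deduped
  });

  expect(onStorageError).toHaveBeenCalledTimes(2);
});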


@@ -3,7 +3,7 @@
  * @description Hook for managing the global chat overlay state
  */

-import { useState, useEffect, useCallback } from "react";
+import { useState, useEffect, useCallback, useRef } from "react";

 import { safeJsonParse, isChatOverlayState } from "@/lib/utils/safe-json";

 interface ChatOverlayState {
@@ -11,6 +11,10 @@ interface ChatOverlayState {
   isMinimized: boolean;
 }

+export interface UseChatOverlayOptions {
+  onStorageError?: (message: string) => void;
+}
+
 interface UseChatOverlayResult extends ChatOverlayState {
   open: () => void;
   close: () => void;
@@ -27,6 +31,23 @@ const DEFAULT_STATE: ChatOverlayState = {
   isMinimized: false,
 };

+/**
+ * Get a user-friendly error message for localStorage failures
+ */
+function getStorageErrorMessage(error: unknown): string {
+  if (error instanceof DOMException) {
+    switch (error.name) {
+      case "QuotaExceededError":
+        return "Storage is full. Chat state may not persist across page refreshes.";
+      case "SecurityError":
+        return "Storage is unavailable in this browser mode. Chat state will not persist.";
+      case "InvalidStateError":
+        return "Storage is disabled. Chat state will not persist across page refreshes.";
+    }
+  }
+  return "Unable to save chat state. It may not persist across page refreshes.";
+}
+
 /**
  * Load state from localStorage with runtime type validation
  */
@@ -48,9 +69,13 @@ function loadState(): ChatOverlayState {
 }

 /**
- * Save state to localStorage
+ * Save state to localStorage, notifying on error (once per error type)
  */
-function saveState(state: ChatOverlayState): void {
+function saveState(
+  state: ChatOverlayState,
+  onStorageError: ((message: string) => void) | undefined,
+  notifiedErrors: Set<string>
+): void {
   if (typeof window === "undefined") {
     return;
   }
@@ -58,20 +83,31 @@
   try {
     window.localStorage.setItem(STORAGE_KEY, JSON.stringify(state));
   } catch (error) {
+    const errorName = error instanceof DOMException ? error.name : "UnknownError";
     console.warn("Failed to save chat overlay state to localStorage:", error);
+
+    if (onStorageError && !notifiedErrors.has(errorName)) {
+      notifiedErrors.add(errorName);
+      onStorageError(getStorageErrorMessage(error));
+    }
   }
 }

 /**
  * Custom hook for managing chat overlay state
- * Persists state to localStorage for consistency across page refreshes
+ * Persists state to localStorage for consistency across page refreshes.
+ * Enforces invariant: cannot be minimized when closed.
  */
-export function useChatOverlay(): UseChatOverlayResult {
+export function useChatOverlay(options: UseChatOverlayOptions = {}): UseChatOverlayResult {
+  const { onStorageError } = options;
   const [state, setState] = useState<ChatOverlayState>(loadState);
+  const notifiedErrorsRef = useRef<Set<string>>(new Set());
+  const onStorageErrorRef = useRef(onStorageError);
+  onStorageErrorRef.current = onStorageError;

   // Persist state changes to localStorage
   useEffect(() => {
-    saveState(state);
+    saveState(state, onStorageErrorRef.current, notifiedErrorsRef.current);
   }, [state]);

   const open = useCallback(() => {
@@ -79,7 +115,7 @@ export function useChatOverlay(): UseChatOverlayResult {
   }, []);

   const close = useCallback(() => {
-    setState((prev) => ({ ...prev, isOpen: false }));
+    setState({ isOpen: false, isMinimized: false });
   }, []);

   const minimize = useCallback(() => {
@@ -91,7 +127,10 @@ export function useChatOverlay(): UseChatOverlayResult {
   }, []);

   const toggle = useCallback(() => {
-    setState((prev) => ({ ...prev, isOpen: !prev.isOpen }));
+    setState((prev) => {
+      const newIsOpen = !prev.isOpen;
+      return { isOpen: newIsOpen, isMinimized: newIsOpen ? prev.isMinimized : false };
+    });
   }, []);

   const toggleMinimize = useCallback(() => {
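For completeness, a consumer would surface the new callback through the app's existing notification layer (the commit message points at @mosaic/ui for toasts). A hypothetical wiring, with the import path and the showToast prop assumed:

// Hypothetical consumer; the hook's import path and the showToast prop
// are assumptions, not part of this commit.
import { useCallback } from "react";
import { useChatOverlay } from "@/hooks/useChatOverlay";

export function ChatOverlayContainer({ showToast }: { showToast: (message: string) => void }) {
  const handleStorageError = useCallback(
    (message: string) => showToast(message), // already user-friendly and deduped per error type
    [showToast]
  );

  const overlay = useChatOverlay({ onStorageError: handleStorageError });

  // The real overlay UI would render from overlay.isOpen / overlay.isMinimized;
  // a placeholder toggle stands in here.
  return <button onClick={overlay.toggle}>{overlay.isOpen ? "Close chat" : "Open chat"}</button>;
}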