feat(web): implement SSE chat streaming with real-time token rendering (#516)
Some checks failed
ci/woodpecker/push/web Pipeline failed

Co-authored-by: Jason Woltje <jason@diversecanvas.com>
Co-committed-by: Jason Woltje <jason@diversecanvas.com>
This commit was merged in pull request #516.
This commit is contained in:
2026-02-26 02:39:43 +00:00
committed by jason.woltje
parent 6290fc3d53
commit 7de0e734b0
8 changed files with 797 additions and 297 deletions

View File

@@ -765,6 +765,28 @@ body::before {
animation: scaleIn 0.1s ease-out; animation: scaleIn 0.1s ease-out;
} }
/* Streaming cursor for real-time token rendering */
/* Blink cycle for the streaming caret: fully opaque for the first half of
   each cycle, fully transparent for the second half. Combined with a
   step-end timing function this produces a hard terminal-style blink
   (instant on/off) rather than a fade. */
@keyframes streaming-cursor-blink {
  0%,
  100% {
    opacity: 1;
  }
  50% {
    opacity: 0;
  }
}
/* Thin blinking caret rendered inline after the last streamed token to
   signal that the assistant is still producing output. */
.streaming-cursor {
  display: inline-block;
  /* Narrow bar sized to the surrounding text line. */
  width: 2px;
  height: 1em;
  /* Accent color so the caret reads as active/"typing". */
  background-color: rgb(var(--accent-primary));
  border-radius: 1px;
  /* step-end gives a hard on/off blink once per second. */
  animation: streaming-cursor-blink 1s step-end infinite;
  vertical-align: text-bottom;
  margin-left: 1px;
}
/* ----------------------------------------------------------------------------- /* -----------------------------------------------------------------------------
Dashboard Layout — Responsive Grids Dashboard Layout — Responsive Grids
----------------------------------------------------------------------------- */ ----------------------------------------------------------------------------- */

View File

@@ -64,10 +64,12 @@ function createMockUseChatReturn(
}, },
], ],
isLoading: false, isLoading: false,
isStreaming: false,
error: null, error: null,
conversationId: null, conversationId: null,
conversationTitle: null, conversationTitle: null,
sendMessage: vi.fn().mockResolvedValue(undefined), sendMessage: vi.fn().mockResolvedValue(undefined),
abortStream: vi.fn(),
loadConversation: vi.fn().mockResolvedValue(undefined), loadConversation: vi.fn().mockResolvedValue(undefined),
startNewConversation: vi.fn(), startNewConversation: vi.fn(),
setMessages: vi.fn(), setMessages: vi.fn(),

View File

@@ -59,14 +59,15 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
const { user, isLoading: authLoading } = useAuth(); const { user, isLoading: authLoading } = useAuth();
// Use the chat hook for state management
const { const {
messages, messages,
isLoading: isChatLoading, isLoading: isChatLoading,
isStreaming,
error, error,
conversationId, conversationId,
conversationTitle, conversationTitle,
sendMessage, sendMessage,
abortStream,
loadConversation, loadConversation,
startNewConversation, startNewConversation,
clearError, clearError,
@@ -75,15 +76,7 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
...(initialProjectId !== undefined && { projectId: initialProjectId }), ...(initialProjectId !== undefined && { projectId: initialProjectId }),
}); });
// Connect to WebSocket for real-time updates (when we have a user) const { isConnected: isWsConnected } = useWebSocket(user?.id ?? "", "", {});
const { isConnected: isWsConnected } = useWebSocket(
user?.id ?? "", // Use user ID as workspace ID for now
"", // Token not needed since we use cookies
{
// Future: Add handlers for chat-related events
// onChatMessage: (msg) => { ... }
}
);
const messagesEndRef = useRef<HTMLDivElement>(null); const messagesEndRef = useRef<HTMLDivElement>(null);
const inputRef = useRef<HTMLTextAreaElement>(null); const inputRef = useRef<HTMLTextAreaElement>(null);
@@ -91,7 +84,10 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
const quipTimerRef = useRef<NodeJS.Timeout | null>(null); const quipTimerRef = useRef<NodeJS.Timeout | null>(null);
const quipIntervalRef = useRef<NodeJS.Timeout | null>(null); const quipIntervalRef = useRef<NodeJS.Timeout | null>(null);
// Expose methods to parent via ref // Identify the streaming message (last assistant message while streaming)
const streamingMessageId =
isStreaming && messages.length > 0 ? messages[messages.length - 1]?.id : undefined;
useImperativeHandle(ref, () => ({ useImperativeHandle(ref, () => ({
loadConversation: async (cId: string): Promise<void> => { loadConversation: async (cId: string): Promise<void> => {
await loadConversation(cId); await loadConversation(cId);
@@ -110,7 +106,6 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
scrollToBottom(); scrollToBottom();
}, [messages, scrollToBottom]); }, [messages, scrollToBottom]);
// Notify parent of conversation changes
useEffect(() => { useEffect(() => {
if (conversationId && conversationTitle) { if (conversationId && conversationTitle) {
onConversationChange?.(conversationId, { onConversationChange?.(conversationId, {
@@ -125,7 +120,6 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
} }
}, [conversationId, conversationTitle, initialProjectId, onConversationChange]); }, [conversationId, conversationTitle, initialProjectId, onConversationChange]);
// Global keyboard shortcut: Ctrl+/ to focus input
useEffect(() => { useEffect(() => {
const handleKeyDown = (e: KeyboardEvent): void => { const handleKeyDown = (e: KeyboardEvent): void => {
if ((e.ctrlKey || e.metaKey) && e.key === "/") { if ((e.ctrlKey || e.metaKey) && e.key === "/") {
@@ -139,20 +133,17 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
}; };
}, []); }, []);
// Show loading quips // Show loading quips only during non-streaming load (initial fetch wait)
useEffect(() => { useEffect(() => {
if (isChatLoading) { if (isChatLoading && !isStreaming) {
// Show first quip after 3 seconds
quipTimerRef.current = setTimeout(() => { quipTimerRef.current = setTimeout(() => {
setLoadingQuip(WAITING_QUIPS[Math.floor(Math.random() * WAITING_QUIPS.length)] ?? null); setLoadingQuip(WAITING_QUIPS[Math.floor(Math.random() * WAITING_QUIPS.length)] ?? null);
}, 3000); }, 3000);
// Change quip every 5 seconds
quipIntervalRef.current = setInterval(() => { quipIntervalRef.current = setInterval(() => {
setLoadingQuip(WAITING_QUIPS[Math.floor(Math.random() * WAITING_QUIPS.length)] ?? null); setLoadingQuip(WAITING_QUIPS[Math.floor(Math.random() * WAITING_QUIPS.length)] ?? null);
}, 5000); }, 5000);
} else { } else {
// Clear timers when loading stops
if (quipTimerRef.current) { if (quipTimerRef.current) {
clearTimeout(quipTimerRef.current); clearTimeout(quipTimerRef.current);
quipTimerRef.current = null; quipTimerRef.current = null;
@@ -168,7 +159,7 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
if (quipTimerRef.current) clearTimeout(quipTimerRef.current); if (quipTimerRef.current) clearTimeout(quipTimerRef.current);
if (quipIntervalRef.current) clearInterval(quipIntervalRef.current); if (quipIntervalRef.current) clearInterval(quipIntervalRef.current);
}; };
}, [isChatLoading]); }, [isChatLoading, isStreaming]);
const handleSendMessage = useCallback( const handleSendMessage = useCallback(
async (content: string) => { async (content: string) => {
@@ -177,7 +168,6 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
[sendMessage] [sendMessage]
); );
// Show loading state while auth is loading
if (authLoading) { if (authLoading) {
return ( return (
<div <div
@@ -227,6 +217,8 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
<MessageList <MessageList
messages={messages as (Message & { thinking?: string })[]} messages={messages as (Message & { thinking?: string })[]}
isLoading={isChatLoading} isLoading={isChatLoading}
isStreaming={isStreaming}
{...(streamingMessageId != null ? { streamingMessageId } : {})}
loadingQuip={loadingQuip} loadingQuip={loadingQuip}
/> />
<div ref={messagesEndRef} /> <div ref={messagesEndRef} />
@@ -294,6 +286,8 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
onSend={handleSendMessage} onSend={handleSendMessage}
disabled={isChatLoading || !user} disabled={isChatLoading || !user}
inputRef={inputRef} inputRef={inputRef}
isStreaming={isStreaming}
onStopStreaming={abortStream}
/> />
</div> </div>
</div> </div>

View File

@@ -7,13 +7,20 @@ interface ChatInputProps {
onSend: (message: string) => void; onSend: (message: string) => void;
disabled?: boolean; disabled?: boolean;
inputRef?: RefObject<HTMLTextAreaElement | null>; inputRef?: RefObject<HTMLTextAreaElement | null>;
isStreaming?: boolean;
onStopStreaming?: () => void;
} }
export function ChatInput({ onSend, disabled, inputRef }: ChatInputProps): React.JSX.Element { export function ChatInput({
onSend,
disabled,
inputRef,
isStreaming = false,
onStopStreaming,
}: ChatInputProps): React.JSX.Element {
const [message, setMessage] = useState(""); const [message, setMessage] = useState("");
const [version, setVersion] = useState<string | null>(null); const [version, setVersion] = useState<string | null>(null);
// Fetch version from static version.json (generated at build time)
useEffect(() => { useEffect(() => {
interface VersionData { interface VersionData {
version?: string; version?: string;
@@ -24,7 +31,6 @@ export function ChatInput({ onSend, disabled, inputRef }: ChatInputProps): React
.then((res) => res.json() as Promise<VersionData>) .then((res) => res.json() as Promise<VersionData>)
.then((data) => { .then((data) => {
if (data.version) { if (data.version) {
// Format as "version+commit" for full build identification
const fullVersion = data.commit ? `${data.version}+${data.commit}` : data.version; const fullVersion = data.commit ? `${data.version}+${data.commit}` : data.version;
setVersion(fullVersion); setVersion(fullVersion);
} }
@@ -35,20 +41,22 @@ export function ChatInput({ onSend, disabled, inputRef }: ChatInputProps): React
}, []); }, []);
const handleSubmit = useCallback(() => { const handleSubmit = useCallback(() => {
if (message.trim() && !disabled) { if (message.trim() && !disabled && !isStreaming) {
onSend(message); onSend(message);
setMessage(""); setMessage("");
} }
}, [message, onSend, disabled]); }, [message, onSend, disabled, isStreaming]);
const handleStop = useCallback(() => {
onStopStreaming?.();
}, [onStopStreaming]);
const handleKeyDown = useCallback( const handleKeyDown = useCallback(
(e: KeyboardEvent<HTMLTextAreaElement>) => { (e: KeyboardEvent<HTMLTextAreaElement>) => {
// Enter to send (without Shift)
if (e.key === "Enter" && !e.shiftKey) { if (e.key === "Enter" && !e.shiftKey) {
e.preventDefault(); e.preventDefault();
handleSubmit(); handleSubmit();
} }
// Ctrl/Cmd + Enter to send (alternative)
if (e.key === "Enter" && (e.ctrlKey || e.metaKey)) { if (e.key === "Enter" && (e.ctrlKey || e.metaKey)) {
e.preventDefault(); e.preventDefault();
handleSubmit(); handleSubmit();
@@ -61,6 +69,7 @@ export function ChatInput({ onSend, disabled, inputRef }: ChatInputProps): React
const maxCharacters = 4000; const maxCharacters = 4000;
const isNearLimit = characterCount > maxCharacters * 0.9; const isNearLimit = characterCount > maxCharacters * 0.9;
const isOverLimit = characterCount > maxCharacters; const isOverLimit = characterCount > maxCharacters;
const isInputDisabled = disabled ?? false;
return ( return (
<div className="space-y-3"> <div className="space-y-3">
@@ -69,7 +78,10 @@ export function ChatInput({ onSend, disabled, inputRef }: ChatInputProps): React
className="relative rounded-lg border transition-all duration-150" className="relative rounded-lg border transition-all duration-150"
style={{ style={{
backgroundColor: "rgb(var(--surface-0))", backgroundColor: "rgb(var(--surface-0))",
borderColor: disabled ? "rgb(var(--border-default))" : "rgb(var(--border-strong))", borderColor:
isInputDisabled || isStreaming
? "rgb(var(--border-default))"
: "rgb(var(--border-strong))",
}} }}
> >
<textarea <textarea
@@ -79,8 +91,8 @@ export function ChatInput({ onSend, disabled, inputRef }: ChatInputProps): React
setMessage(e.target.value); setMessage(e.target.value);
}} }}
onKeyDown={handleKeyDown} onKeyDown={handleKeyDown}
placeholder="Type a message..." placeholder={isStreaming ? "AI is responding..." : "Type a message..."}
disabled={disabled} disabled={isInputDisabled || isStreaming}
rows={1} rows={1}
className="block w-full resize-none bg-transparent px-4 py-3 pr-24 text-sm outline-none placeholder:text-[rgb(var(--text-muted))] disabled:opacity-50" className="block w-full resize-none bg-transparent px-4 py-3 pr-24 text-sm outline-none placeholder:text-[rgb(var(--text-muted))] disabled:opacity-50"
style={{ style={{
@@ -97,14 +109,32 @@ export function ChatInput({ onSend, disabled, inputRef }: ChatInputProps): React
aria-describedby="input-help" aria-describedby="input-help"
/> />
{/* Send Button */} {/* Send / Stop Button */}
<div className="absolute bottom-2 right-2 flex items-center gap-2"> <div className="absolute bottom-2 right-2 flex items-center gap-2">
{isStreaming ? (
<button
onClick={handleStop}
className="btn-sm rounded-md flex items-center gap-1.5"
style={{
backgroundColor: "rgb(var(--semantic-error))",
color: "white",
padding: "0.25rem 0.75rem",
}}
aria-label="Stop generating"
title="Stop generating"
>
<svg className="h-4 w-4" fill="currentColor" viewBox="0 0 24 24" aria-hidden="true">
<rect x="6" y="6" width="12" height="12" rx="1" />
</svg>
<span className="hidden sm:inline text-sm font-medium">Stop</span>
</button>
) : (
<button <button
onClick={handleSubmit} onClick={handleSubmit}
disabled={(disabled ?? !message.trim()) || isOverLimit} disabled={isInputDisabled || !message.trim() || isOverLimit}
className="btn-primary btn-sm rounded-md" className="btn-primary btn-sm rounded-md"
style={{ style={{
opacity: disabled || !message.trim() || isOverLimit ? 0.5 : 1, opacity: isInputDisabled || !message.trim() || isOverLimit ? 0.5 : 1,
}} }}
aria-label="Send message" aria-label="Send message"
> >
@@ -119,6 +149,7 @@ export function ChatInput({ onSend, disabled, inputRef }: ChatInputProps): React
</svg> </svg>
<span className="hidden sm:inline">Send</span> <span className="hidden sm:inline">Send</span>
</button> </button>
)}
</div> </div>
</div> </div>
@@ -128,7 +159,6 @@ export function ChatInput({ onSend, disabled, inputRef }: ChatInputProps): React
style={{ color: "rgb(var(--text-muted))" }} style={{ color: "rgb(var(--text-muted))" }}
id="input-help" id="input-help"
> >
{/* Keyboard Shortcuts */}
<div className="hidden items-center gap-4 sm:flex"> <div className="hidden items-center gap-4 sm:flex">
<div className="flex items-center gap-1.5"> <div className="flex items-center gap-1.5">
<span className="kbd">Enter</span> <span className="kbd">Enter</span>
@@ -142,10 +172,8 @@ export function ChatInput({ onSend, disabled, inputRef }: ChatInputProps): React
</div> </div>
</div> </div>
{/* Mobile hint */}
<div className="sm:hidden">Tap send or press Enter</div> <div className="sm:hidden">Tap send or press Enter</div>
{/* Character Count */}
<div <div
className="flex items-center gap-2" className="flex items-center gap-2"
style={{ style={{

View File

@@ -1,11 +1,13 @@
"use client"; "use client";
import { useCallback, useState } from "react"; import { useCallback, useEffect, useRef, useState } from "react";
import type { Message } from "@/hooks/useChat"; import type { Message } from "@/hooks/useChat";
interface MessageListProps { interface MessageListProps {
messages: Message[]; messages: Message[];
isLoading: boolean; isLoading: boolean;
isStreaming?: boolean;
streamingMessageId?: string;
loadingQuip?: string | null; loadingQuip?: string | null;
} }
@@ -14,7 +16,6 @@ interface MessageListProps {
* Extracts <thinking>...</thinking> or <think>...</think> blocks. * Extracts <thinking>...</thinking> or <think>...</think> blocks.
*/ */
function parseThinking(content: string): { thinking: string | null; response: string } { function parseThinking(content: string): { thinking: string | null; response: string } {
// Match <thinking>...</thinking> or <think>...</think> blocks
const thinkingRegex = /<(?:thinking|think)>([\s\S]*?)<\/(?:thinking|think)>/gi; const thinkingRegex = /<(?:thinking|think)>([\s\S]*?)<\/(?:thinking|think)>/gi;
const matches = content.match(thinkingRegex); const matches = content.match(thinkingRegex);
@@ -22,14 +23,12 @@ function parseThinking(content: string): { thinking: string | null; response: st
return { thinking: null, response: content }; return { thinking: null, response: content };
} }
// Extract thinking content
let thinking = ""; let thinking = "";
for (const match of matches) { for (const match of matches) {
const innerContent = match.replace(/<\/?(?:thinking|think)>/gi, ""); const innerContent = match.replace(/<\/?(?:thinking|think)>/gi, "");
thinking += innerContent.trim() + "\n"; thinking += innerContent.trim() + "\n";
} }
// Remove thinking blocks from response
const response = content.replace(thinkingRegex, "").trim(); const response = content.replace(thinkingRegex, "").trim();
const trimmedThinking = thinking.trim(); const trimmedThinking = thinking.trim();
@@ -42,25 +41,47 @@ function parseThinking(content: string): { thinking: string | null; response: st
export function MessageList({ export function MessageList({
messages, messages,
isLoading, isLoading,
isStreaming = false,
streamingMessageId,
loadingQuip, loadingQuip,
}: MessageListProps): React.JSX.Element { }: MessageListProps): React.JSX.Element {
const bottomRef = useRef<HTMLDivElement>(null);
// Auto-scroll to bottom when messages change or streaming tokens arrive
useEffect(() => {
if (isStreaming || isLoading) {
bottomRef.current?.scrollIntoView({ behavior: "smooth" });
}
}, [messages, isStreaming, isLoading]);
return ( return (
<div className="space-y-6" role="log" aria-label="Chat messages"> <div className="space-y-6" role="log" aria-label="Chat messages">
{messages.map((message) => ( {messages.map((message) => (
<MessageBubble key={message.id} message={message} /> <MessageBubble
key={message.id}
message={message}
isStreaming={isStreaming && message.id === streamingMessageId}
/>
))} ))}
{isLoading && <LoadingIndicator {...(loadingQuip != null && { quip: loadingQuip })} />} {isLoading && !isStreaming && (
<LoadingIndicator {...(loadingQuip != null && { quip: loadingQuip })} />
)}
<div ref={bottomRef} />
</div> </div>
); );
} }
function MessageBubble({ message }: { message: Message }): React.JSX.Element { interface MessageBubbleProps {
message: Message;
isStreaming?: boolean;
}
function MessageBubble({ message, isStreaming = false }: MessageBubbleProps): React.JSX.Element {
const isUser = message.role === "user"; const isUser = message.role === "user";
const [copied, setCopied] = useState(false); const [copied, setCopied] = useState(false);
const [thinkingExpanded, setThinkingExpanded] = useState(false); const [thinkingExpanded, setThinkingExpanded] = useState(false);
// Parse thinking from content (or use pre-parsed thinking field)
const { thinking, response } = message.thinking const { thinking, response } = message.thinking
? { thinking: message.thinking, response: message.content } ? { thinking: message.thinking, response: message.content }
: parseThinking(message.content); : parseThinking(message.content);
@@ -73,7 +94,6 @@ function MessageBubble({ message }: { message: Message }): React.JSX.Element {
setCopied(false); setCopied(false);
}, 2000); }, 2000);
} catch (err) { } catch (err) {
// Silently fail - clipboard copy is non-critical
void err; void err;
} }
}, [response]); }, [response]);
@@ -106,8 +126,21 @@ function MessageBubble({ message }: { message: Message }): React.JSX.Element {
<span className="font-medium" style={{ color: "rgb(var(--text-secondary))" }}> <span className="font-medium" style={{ color: "rgb(var(--text-secondary))" }}>
{isUser ? "You" : "AI Assistant"} {isUser ? "You" : "AI Assistant"}
</span> </span>
{/* Streaming indicator in header */}
{!isUser && isStreaming && (
<span
className="px-1.5 py-0.5 rounded text-[10px] font-medium"
style={{
backgroundColor: "rgb(var(--accent-primary) / 0.15)",
color: "rgb(var(--accent-primary))",
}}
aria-label="Streaming"
>
streaming
</span>
)}
{/* Model indicator for assistant messages */} {/* Model indicator for assistant messages */}
{!isUser && message.model && ( {!isUser && message.model && !isStreaming && (
<span <span
className="px-1.5 py-0.5 rounded text-[10px] font-medium" className="px-1.5 py-0.5 rounded text-[10px] font-medium"
style={{ style={{
@@ -200,9 +233,19 @@ function MessageBubble({ message }: { message: Message }): React.JSX.Element {
border: isUser ? "none" : "1px solid rgb(var(--border-default))", border: isUser ? "none" : "1px solid rgb(var(--border-default))",
}} }}
> >
<p className="whitespace-pre-wrap text-sm leading-relaxed">{response}</p> <p className="whitespace-pre-wrap text-sm leading-relaxed">
{response}
{/* Blinking cursor during streaming */}
{isStreaming && !isUser && (
<span
className="streaming-cursor inline-block ml-0.5 align-middle"
aria-hidden="true"
/>
)}
</p>
{/* Copy Button - appears on hover */} {/* Copy Button - hidden while streaming */}
{!isStreaming && (
<button <button
onClick={handleCopy} onClick={handleCopy}
className="absolute -right-2 -top-2 rounded-md border p-1.5 opacity-0 transition-all group-hover:opacity-100 focus:opacity-100" className="absolute -right-2 -top-2 rounded-md border p-1.5 opacity-0 transition-all group-hover:opacity-100 focus:opacity-100"
@@ -237,6 +280,7 @@ function MessageBubble({ message }: { message: Message }): React.JSX.Element {
</svg> </svg>
)} )}
</button> </button>
)}
</div> </div>
</div> </div>
</div> </div>

View File

@@ -14,6 +14,7 @@ import type { ChatResponse } from "@/lib/api/chat";
// Mock the API modules - use importOriginal to preserve types/enums // Mock the API modules - use importOriginal to preserve types/enums
vi.mock("@/lib/api/chat", () => ({ vi.mock("@/lib/api/chat", () => ({
sendChatMessage: vi.fn(), sendChatMessage: vi.fn(),
streamChatMessage: vi.fn(),
})); }));
vi.mock("@/lib/api/ideas", async (importOriginal) => { vi.mock("@/lib/api/ideas", async (importOriginal) => {
@@ -30,6 +31,9 @@ vi.mock("@/lib/api/ideas", async (importOriginal) => {
const mockSendChatMessage = chatApi.sendChatMessage as MockedFunction< const mockSendChatMessage = chatApi.sendChatMessage as MockedFunction<
typeof chatApi.sendChatMessage typeof chatApi.sendChatMessage
>; >;
const mockStreamChatMessage = chatApi.streamChatMessage as MockedFunction<
typeof chatApi.streamChatMessage
>;
const mockCreateConversation = ideasApi.createConversation as MockedFunction< const mockCreateConversation = ideasApi.createConversation as MockedFunction<
typeof ideasApi.createConversation typeof ideasApi.createConversation
>; >;
@@ -70,9 +74,62 @@ function createMockIdea(id: string, title: string, content: string): Idea {
} as Idea; } as Idea;
} }
/**
 * Stub streamChatMessage so it reports failure right away, driving the
 * hook under test onto its non-streaming sendChatMessage fallback path.
 */
function makeStreamFail(): void {
  mockStreamChatMessage.mockImplementation(
    (
      _request,
      _onChunk,
      _onComplete,
      reportError: (err: Error) => void,
      _signal?: AbortSignal
    ): void => {
      // Fire the error callback synchronously — no pending microtask —
      // so the caller observes an immediately rejected stream.
      reportError(new Error("Streaming not available"));
    }
  );
}
/**
 * Stub streamChatMessage to deliver the given tokens one microtask apart
 * and then signal completion. Aborting via the provided AbortSignal stops
 * further token delivery and suppresses the completion callback.
 */
function makeStreamSucceed(tokens: string[]): void {
  mockStreamChatMessage.mockImplementation(
    (
      _request,
      onChunk: (chunk: string) => void,
      onComplete: () => void,
      _onError: (err: Error) => void,
      signal?: AbortSignal
    ): void => {
      // Captured by both the abort listener and the async emitter below.
      let cancelled = false;
      signal?.addEventListener("abort", () => {
        cancelled = true;
      });
      // Fire-and-forget emitter: yields to the microtask queue before each
      // token so cancellation can interleave between chunks.
      void (async (): Promise<void> => {
        for (const token of tokens) {
          if (cancelled) return;
          await Promise.resolve();
          onChunk(token);
        }
        if (!cancelled) {
          onComplete();
        }
      })();
    }
  );
}
describe("useChat", () => { describe("useChat", () => {
beforeEach(() => { beforeEach(() => {
vi.clearAllMocks(); vi.clearAllMocks();
// Default: streaming fails so tests exercise the fallback path
makeStreamFail();
}); });
afterEach(() => { afterEach(() => {
@@ -87,13 +144,19 @@ describe("useChat", () => {
expect(result.current.messages[0]?.role).toBe("assistant"); expect(result.current.messages[0]?.role).toBe("assistant");
expect(result.current.messages[0]?.id).toBe("welcome"); expect(result.current.messages[0]?.id).toBe("welcome");
expect(result.current.isLoading).toBe(false); expect(result.current.isLoading).toBe(false);
expect(result.current.isStreaming).toBe(false);
expect(result.current.error).toBeNull(); expect(result.current.error).toBeNull();
expect(result.current.conversationId).toBeNull(); expect(result.current.conversationId).toBeNull();
}); });
it("should expose abortStream function", () => {
const { result } = renderHook(() => useChat());
expect(typeof result.current.abortStream).toBe("function");
});
}); });
describe("sendMessage", () => { describe("sendMessage (fallback path when streaming fails)", () => {
it("should add user message and assistant response", async () => { it("should add user message and assistant response via fallback", async () => {
mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Hello there!")); mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Hello there!"));
mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", "")); mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));
@@ -119,47 +182,13 @@ describe("useChat", () => {
}); });
expect(mockSendChatMessage).not.toHaveBeenCalled(); expect(mockSendChatMessage).not.toHaveBeenCalled();
expect(mockStreamChatMessage).not.toHaveBeenCalled();
expect(result.current.messages).toHaveLength(1); // only welcome expect(result.current.messages).toHaveLength(1); // only welcome
}); });
it("should not send while loading", async () => {
let resolveFirst: ((value: ChatResponse) => void) | undefined;
const firstPromise = new Promise<ChatResponse>((resolve) => {
resolveFirst = resolve;
});
mockSendChatMessage.mockReturnValueOnce(firstPromise);
const { result } = renderHook(() => useChat());
// Start first message
act(() => {
void result.current.sendMessage("First");
});
expect(result.current.isLoading).toBe(true);
// Try to send second while loading
await act(async () => {
await result.current.sendMessage("Second");
});
// Should only have one call
expect(mockSendChatMessage).toHaveBeenCalledTimes(1);
// Cleanup - resolve the pending promise
mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));
await act(async () => {
if (resolveFirst) {
resolveFirst(createMockChatResponse("Response"));
}
// Allow promise to settle
await Promise.resolve();
});
});
it("should handle API errors gracefully", async () => { it("should handle API errors gracefully", async () => {
vi.spyOn(console, "error").mockImplementation(() => undefined); vi.spyOn(console, "error").mockImplementation(() => undefined);
vi.spyOn(console, "warn").mockImplementation(() => undefined);
mockSendChatMessage.mockRejectedValueOnce(new Error("API Error")); mockSendChatMessage.mockRejectedValueOnce(new Error("API Error"));
const onError = vi.fn(); const onError = vi.fn();
@@ -171,46 +200,178 @@ describe("useChat", () => {
expect(result.current.error).toBe("Unable to send message. Please try again."); expect(result.current.error).toBe("Unable to send message. Please try again.");
expect(onError).toHaveBeenCalledWith(expect.any(Error)); expect(onError).toHaveBeenCalledWith(expect.any(Error));
// Should have welcome + user + error message
expect(result.current.messages).toHaveLength(3); expect(result.current.messages).toHaveLength(3);
expect(result.current.messages[2]?.content).toBe("Something went wrong. Please try again."); expect(result.current.messages[2]?.content).toBe("Something went wrong. Please try again.");
}); });
}); });
// Tests for the SSE streaming path of useChat: token accumulation into a
// single assistant message, the isStreaming lifecycle, abort behavior, and
// the guard that blocks sends while a stream is in flight.
describe("streaming path", () => {
  it("should stream tokens into assistant message", async () => {
    const tokens = ["Hello", " world", "!"];
    makeStreamSucceed(tokens);
    mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));
    const { result } = renderHook(() => useChat());
    await act(async () => {
      await result.current.sendMessage("Hi");
    });
    // welcome + user + assistant
    expect(result.current.messages).toHaveLength(3);
    expect(result.current.messages[2]?.role).toBe("assistant");
    // Tokens arrive in order and are concatenated into one message body.
    expect(result.current.messages[2]?.content).toBe("Hello world!");
  });
  it("should set isStreaming true during streaming then false when done", async () => {
    // Capture the hook's callbacks so the test can drive the stream manually.
    let capturedOnChunk: ((chunk: string) => void) | undefined;
    let capturedOnComplete: (() => void) | undefined;
    mockStreamChatMessage.mockImplementation(
      (
        _request,
        onChunk: (chunk: string) => void,
        onComplete: () => void,
        _onError: (err: Error) => void
      ): void => {
        capturedOnChunk = onChunk;
        capturedOnComplete = onComplete;
      }
    );
    mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));
    const { result } = renderHook(() => useChat());
    let sendDone = false;
    act(() => {
      void result.current.sendMessage("Hello").then(() => {
        sendDone = true;
      });
    });
    // Send first token (triggers streaming state)
    await act(async () => {
      capturedOnChunk?.("Hello");
      await Promise.resolve();
    });
    expect(result.current.isStreaming).toBe(true);
    // Complete the stream
    await act(async () => {
      capturedOnComplete?.();
      // Drain several microtask turns so the hook's post-completion
      // state updates (and the sendMessage promise) settle.
      await Promise.resolve();
      await Promise.resolve();
      await Promise.resolve();
    });
    expect(result.current.isStreaming).toBe(false);
    expect(sendDone).toBe(true);
  });
  it("should keep partial content on abort", async () => {
    let capturedOnChunk: ((chunk: string) => void) | undefined;
    mockStreamChatMessage.mockImplementation(
      (
        _request,
        onChunk: (chunk: string) => void,
        _onComplete: () => void,
        _onError: (err: Error) => void,
        signal?: AbortSignal
      ): void => {
        capturedOnChunk = onChunk;
        if (signal) {
          signal.addEventListener("abort", () => {
            // Stream aborted
          });
        }
      }
    );
    const { result } = renderHook(() => useChat());
    act(() => {
      void result.current.sendMessage("Hello");
    });
    // Deliver two partial chunks, then abort mid-stream.
    await act(async () => {
      capturedOnChunk?.("Partial");
      capturedOnChunk?.(" content");
      await Promise.resolve();
    });
    await act(async () => {
      result.current.abortStream();
      await Promise.resolve();
    });
    expect(result.current.isStreaming).toBe(false);
    // The partially streamed text must survive the abort.
    const assistantMsg = result.current.messages.find(
      (m) => m.role === "assistant" && m.id !== "welcome"
    );
    expect(assistantMsg?.content).toBe("Partial content");
  });
  it("should not send while streaming", async () => {
    let capturedOnChunk: ((chunk: string) => void) | undefined;
    mockStreamChatMessage.mockImplementation(
      (
        _request,
        onChunk: (chunk: string) => void,
        _onComplete: () => void,
        _onError: (err: Error) => void
      ): void => {
        capturedOnChunk = onChunk;
      }
    );
    const { result } = renderHook(() => useChat());
    act(() => {
      void result.current.sendMessage("First");
    });
    await act(async () => {
      capturedOnChunk?.("token");
      await Promise.resolve();
    });
    expect(result.current.isStreaming).toBe(true);
    // A second send while streaming should be rejected by the hook's guard.
    await act(async () => {
      await result.current.sendMessage("Second");
    });
    // Only one stream call
    expect(mockStreamChatMessage).toHaveBeenCalledTimes(1);
  });
});
describe("rapid sends - stale closure prevention", () => { describe("rapid sends - stale closure prevention", () => {
it("should not lose messages on rapid sequential sends", async () => { it("should not lose messages on rapid sequential sends", async () => {
// This test verifies that functional state updates prevent message loss // Use streaming success path for deterministic behavior
// when multiple messages are sent in quick succession makeStreamSucceed(["Response 1"]);
let callCount = 0;
mockSendChatMessage.mockImplementation(async (): Promise<ChatResponse> => {
callCount++;
// Small delay to simulate network
await Promise.resolve();
return createMockChatResponse(`Response ${String(callCount)}`);
});
mockCreateConversation.mockResolvedValue(createMockIdea("conv-1", "Test", "")); mockCreateConversation.mockResolvedValue(createMockIdea("conv-1", "Test", ""));
const { result } = renderHook(() => useChat()); const { result } = renderHook(() => useChat());
// Send first message
await act(async () => { await act(async () => {
await result.current.sendMessage("Message 1"); await result.current.sendMessage("Message 1");
}); });
// Verify first message cycle complete
expect(result.current.messages).toHaveLength(3); // welcome + user1 + assistant1 expect(result.current.messages).toHaveLength(3); // welcome + user1 + assistant1
// Send second message makeStreamSucceed(["Response 2"]);
await act(async () => { await act(async () => {
await result.current.sendMessage("Message 2"); await result.current.sendMessage("Message 2");
}); });
// Verify all messages are present (no data loss)
expect(result.current.messages).toHaveLength(5); // welcome + user1 + assistant1 + user2 + assistant2 expect(result.current.messages).toHaveLength(5); // welcome + user1 + assistant1 + user2 + assistant2
// Verify message order and content
const userMessages = result.current.messages.filter((m) => m.role === "user"); const userMessages = result.current.messages.filter((m) => m.role === "user");
expect(userMessages).toHaveLength(2); expect(userMessages).toHaveLength(2);
expect(userMessages[0]?.content).toBe("Message 1"); expect(userMessages[0]?.content).toBe("Message 1");
@@ -218,69 +379,56 @@ describe("useChat", () => {
}); });
it("should use functional updates for all message state changes", async () => { it("should use functional updates for all message state changes", async () => {
// This test verifies that the implementation uses functional updates
// by checking that messages accumulate correctly
mockSendChatMessage.mockResolvedValue(createMockChatResponse("Response"));
mockCreateConversation.mockResolvedValue(createMockIdea("conv-1", "Test", "")); mockCreateConversation.mockResolvedValue(createMockIdea("conv-1", "Test", ""));
const { result } = renderHook(() => useChat()); const { result } = renderHook(() => useChat());
// Track message count after each operation
const messageCounts: number[] = []; const messageCounts: number[] = [];
makeStreamSucceed(["R1"]);
await act(async () => { await act(async () => {
await result.current.sendMessage("Test 1"); await result.current.sendMessage("Test 1");
}); });
messageCounts.push(result.current.messages.length); messageCounts.push(result.current.messages.length);
makeStreamSucceed(["R2"]);
await act(async () => { await act(async () => {
await result.current.sendMessage("Test 2"); await result.current.sendMessage("Test 2");
}); });
messageCounts.push(result.current.messages.length); messageCounts.push(result.current.messages.length);
makeStreamSucceed(["R3"]);
await act(async () => { await act(async () => {
await result.current.sendMessage("Test 3"); await result.current.sendMessage("Test 3");
}); });
messageCounts.push(result.current.messages.length); messageCounts.push(result.current.messages.length);
// Should accumulate: 3, 5, 7 (welcome + pairs of user/assistant)
expect(messageCounts).toEqual([3, 5, 7]); expect(messageCounts).toEqual([3, 5, 7]);
// Verify final state has all messages
expect(result.current.messages).toHaveLength(7); expect(result.current.messages).toHaveLength(7);
const userMessages = result.current.messages.filter((m) => m.role === "user"); const userMessages = result.current.messages.filter((m) => m.role === "user");
expect(userMessages).toHaveLength(3); expect(userMessages).toHaveLength(3);
}); });
it("should maintain correct message order with ref-based state tracking", async () => { it("should maintain correct message order with ref-based state tracking", async () => {
// This test verifies that messagesRef is properly synchronized
const responses = ["First response", "Second response", "Third response"];
let responseIndex = 0;
mockSendChatMessage.mockImplementation((): Promise<ChatResponse> => {
const response = responses[responseIndex++];
return Promise.resolve(createMockChatResponse(response ?? ""));
});
mockCreateConversation.mockResolvedValue(createMockIdea("conv-1", "Test", "")); mockCreateConversation.mockResolvedValue(createMockIdea("conv-1", "Test", ""));
const { result } = renderHook(() => useChat()); const { result } = renderHook(() => useChat());
makeStreamSucceed(["First response"]);
await act(async () => { await act(async () => {
await result.current.sendMessage("Query 1"); await result.current.sendMessage("Query 1");
}); });
makeStreamSucceed(["Second response"]);
await act(async () => { await act(async () => {
await result.current.sendMessage("Query 2"); await result.current.sendMessage("Query 2");
}); });
makeStreamSucceed(["Third response"]);
await act(async () => { await act(async () => {
await result.current.sendMessage("Query 3"); await result.current.sendMessage("Query 3");
}); });
// Verify messages are in correct order
const messages = result.current.messages; const messages = result.current.messages;
expect(messages[0]?.id).toBe("welcome"); expect(messages[0]?.id).toBe("welcome");
expect(messages[1]?.content).toBe("Query 1"); expect(messages[1]?.content).toBe("Query 1");
@@ -337,14 +485,12 @@ describe("useChat", () => {
await result.current.loadConversation("conv-bad"); await result.current.loadConversation("conv-bad");
}); });
// Should fall back to welcome message
expect(result.current.messages).toHaveLength(1); expect(result.current.messages).toHaveLength(1);
expect(result.current.messages[0]?.id).toBe("welcome"); expect(result.current.messages[0]?.id).toBe("welcome");
}); });
it("should fall back to welcome message when stored data has wrong shape", async () => { it("should fall back to welcome message when stored data has wrong shape", async () => {
vi.spyOn(console, "warn").mockImplementation(() => undefined); vi.spyOn(console, "warn").mockImplementation(() => undefined);
// Valid JSON but wrong shape (object instead of array, missing required fields)
mockGetIdea.mockResolvedValueOnce( mockGetIdea.mockResolvedValueOnce(
createMockIdea("conv-bad", "Wrong Shape", JSON.stringify({ not: "an array" })) createMockIdea("conv-bad", "Wrong Shape", JSON.stringify({ not: "an array" }))
); );
@@ -408,7 +554,6 @@ describe("useChat", () => {
const { result } = renderHook(() => useChat()); const { result } = renderHook(() => useChat());
// Should resolve without throwing - errors are handled internally
await act(async () => { await act(async () => {
await expect(result.current.loadConversation("conv-err")).resolves.toBeUndefined(); await expect(result.current.loadConversation("conv-err")).resolves.toBeUndefined();
}); });
@@ -419,19 +564,17 @@ describe("useChat", () => {
describe("startNewConversation", () => { describe("startNewConversation", () => {
it("should reset to initial state", async () => { it("should reset to initial state", async () => {
mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Response")); makeStreamSucceed(["Response"]);
mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", "")); mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));
const { result } = renderHook(() => useChat()); const { result } = renderHook(() => useChat());
// Send a message to have some state
await act(async () => { await act(async () => {
await result.current.sendMessage("Hello"); await result.current.sendMessage("Hello");
}); });
expect(result.current.messages.length).toBeGreaterThan(1); expect(result.current.messages.length).toBeGreaterThan(1);
// Start new conversation
act(() => { act(() => {
result.current.startNewConversation(); result.current.startNewConversation();
}); });
@@ -446,6 +589,7 @@ describe("useChat", () => {
describe("clearError", () => { describe("clearError", () => {
it("should clear error state", async () => { it("should clear error state", async () => {
vi.spyOn(console, "error").mockImplementation(() => undefined); vi.spyOn(console, "error").mockImplementation(() => undefined);
vi.spyOn(console, "warn").mockImplementation(() => undefined);
mockSendChatMessage.mockRejectedValueOnce(new Error("Test error")); mockSendChatMessage.mockRejectedValueOnce(new Error("Test error"));
const { result } = renderHook(() => useChat()); const { result } = renderHook(() => useChat());
@@ -467,6 +611,7 @@ describe("useChat", () => {
describe("error context logging", () => { describe("error context logging", () => {
it("should log comprehensive error context when sendMessage fails", async () => { it("should log comprehensive error context when sendMessage fails", async () => {
const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined); const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
vi.spyOn(console, "warn").mockImplementation(() => undefined);
mockSendChatMessage.mockRejectedValueOnce(new Error("LLM timeout")); mockSendChatMessage.mockRejectedValueOnce(new Error("LLM timeout"));
const { result } = renderHook(() => useChat({ model: "llama3.2" })); const { result } = renderHook(() => useChat({ model: "llama3.2" }));
@@ -489,6 +634,7 @@ describe("useChat", () => {
it("should truncate long message previews to 50 characters", async () => { it("should truncate long message previews to 50 characters", async () => {
const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined); const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
vi.spyOn(console, "warn").mockImplementation(() => undefined);
mockSendChatMessage.mockRejectedValueOnce(new Error("Failed")); mockSendChatMessage.mockRejectedValueOnce(new Error("Failed"));
const longMessage = "A".repeat(100); const longMessage = "A".repeat(100);
@@ -509,9 +655,10 @@ describe("useChat", () => {
it("should include message count in error context", async () => { it("should include message count in error context", async () => {
const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined); const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
vi.spyOn(console, "warn").mockImplementation(() => undefined);
// First successful message // First successful message via streaming
mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("OK")); makeStreamSucceed(["OK"]);
mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", "")); mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));
const { result } = renderHook(() => useChat()); const { result } = renderHook(() => useChat());
@@ -520,14 +667,14 @@ describe("useChat", () => {
await result.current.sendMessage("First"); await result.current.sendMessage("First");
}); });
// Second message fails // Second message: streaming fails, fallback fails
makeStreamFail();
mockSendChatMessage.mockRejectedValueOnce(new Error("Fail")); mockSendChatMessage.mockRejectedValueOnce(new Error("Fail"));
await act(async () => { await act(async () => {
await result.current.sendMessage("Second"); await result.current.sendMessage("Second");
}); });
// messageCount should reflect messages including the new user message
expect(consoleSpy).toHaveBeenCalledWith( expect(consoleSpy).toHaveBeenCalledWith(
"Failed to send chat message", "Failed to send chat message",
expect.objectContaining({ expect.objectContaining({
@@ -540,6 +687,7 @@ describe("useChat", () => {
describe("LLM vs persistence error separation", () => { describe("LLM vs persistence error separation", () => {
it("should show LLM error and add error message to chat when API fails", async () => { it("should show LLM error and add error message to chat when API fails", async () => {
vi.spyOn(console, "error").mockImplementation(() => undefined); vi.spyOn(console, "error").mockImplementation(() => undefined);
vi.spyOn(console, "warn").mockImplementation(() => undefined);
mockSendChatMessage.mockRejectedValueOnce(new Error("Model not available")); mockSendChatMessage.mockRejectedValueOnce(new Error("Model not available"));
const { result } = renderHook(() => useChat()); const { result } = renderHook(() => useChat());
@@ -549,13 +697,29 @@ describe("useChat", () => {
}); });
expect(result.current.error).toBe("Unable to send message. Please try again."); expect(result.current.error).toBe("Unable to send message. Please try again.");
// Should have welcome + user + error message
expect(result.current.messages).toHaveLength(3); expect(result.current.messages).toHaveLength(3);
expect(result.current.messages[2]?.content).toBe("Something went wrong. Please try again."); expect(result.current.messages[2]?.content).toBe("Something went wrong. Please try again.");
}); });
it("should keep assistant message visible when save fails", async () => { it("should keep assistant message visible when save fails (streaming path)", async () => {
vi.spyOn(console, "error").mockImplementation(() => undefined); vi.spyOn(console, "error").mockImplementation(() => undefined);
makeStreamSucceed(["Great answer!"]);
mockCreateConversation.mockRejectedValueOnce(new Error("Database connection lost"));
const { result } = renderHook(() => useChat());
await act(async () => {
await result.current.sendMessage("Hello");
});
expect(result.current.messages).toHaveLength(3); // welcome + user + assistant
expect(result.current.messages[2]?.content).toBe("Great answer!");
expect(result.current.error).toContain("Message sent but failed to save");
});
it("should keep assistant message visible when save fails (fallback path)", async () => {
vi.spyOn(console, "error").mockImplementation(() => undefined);
vi.spyOn(console, "warn").mockImplementation(() => undefined);
mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Great answer!")); mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Great answer!"));
mockCreateConversation.mockRejectedValueOnce(new Error("Database connection lost")); mockCreateConversation.mockRejectedValueOnce(new Error("Database connection lost"));
@@ -565,16 +729,14 @@ describe("useChat", () => {
await result.current.sendMessage("Hello"); await result.current.sendMessage("Hello");
}); });
// Assistant message should still be visible expect(result.current.messages).toHaveLength(3);
expect(result.current.messages).toHaveLength(3); // welcome + user + assistant
expect(result.current.messages[2]?.content).toBe("Great answer!"); expect(result.current.messages[2]?.content).toBe("Great answer!");
// Error should indicate persistence failure
expect(result.current.error).toContain("Message sent but failed to save"); expect(result.current.error).toContain("Message sent but failed to save");
}); });
it("should log with PERSISTENCE_ERROR type when save fails", async () => { it("should log with PERSISTENCE_ERROR type when save fails", async () => {
const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined); const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => undefined);
vi.spyOn(console, "warn").mockImplementation(() => undefined);
mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Response")); mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Response"));
mockCreateConversation.mockRejectedValueOnce(new Error("DB error")); mockCreateConversation.mockRejectedValueOnce(new Error("DB error"));
@@ -591,7 +753,6 @@ describe("useChat", () => {
}) })
); );
// Should NOT have logged as LLM_ERROR
const llmErrorCalls = consoleSpy.mock.calls.filter((call) => { const llmErrorCalls = consoleSpy.mock.calls.filter((call) => {
const ctx: unknown = call[1]; const ctx: unknown = call[1];
return ( return (
@@ -606,8 +767,9 @@ describe("useChat", () => {
it("should use different user-facing messages for LLM vs save errors", async () => { it("should use different user-facing messages for LLM vs save errors", async () => {
vi.spyOn(console, "error").mockImplementation(() => undefined); vi.spyOn(console, "error").mockImplementation(() => undefined);
vi.spyOn(console, "warn").mockImplementation(() => undefined);
// Test LLM error message // LLM error path (streaming fails + fallback fails)
mockSendChatMessage.mockRejectedValueOnce(new Error("Timeout")); mockSendChatMessage.mockRejectedValueOnce(new Error("Timeout"));
const { result: result1 } = renderHook(() => useChat()); const { result: result1 } = renderHook(() => useChat());
@@ -617,8 +779,8 @@ describe("useChat", () => {
const llmError = result1.current.error; const llmError = result1.current.error;
// Test save error message // Save error path (streaming succeeds, save fails)
mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("OK")); makeStreamSucceed(["OK"]);
mockCreateConversation.mockRejectedValueOnce(new Error("DB down")); mockCreateConversation.mockRejectedValueOnce(new Error("DB down"));
const { result: result2 } = renderHook(() => useChat()); const { result: result2 } = renderHook(() => useChat());
@@ -628,7 +790,6 @@ describe("useChat", () => {
const saveError = result2.current.error; const saveError = result2.current.error;
// They should be different
expect(llmError).toBe("Unable to send message. Please try again."); expect(llmError).toBe("Unable to send message. Please try again.");
expect(saveError).toContain("Message sent but failed to save"); expect(saveError).toContain("Message sent but failed to save");
expect(llmError).not.toEqual(saveError); expect(llmError).not.toEqual(saveError);
@@ -636,6 +797,7 @@ describe("useChat", () => {
it("should handle non-Error throws from LLM API", async () => { it("should handle non-Error throws from LLM API", async () => {
vi.spyOn(console, "error").mockImplementation(() => undefined); vi.spyOn(console, "error").mockImplementation(() => undefined);
vi.spyOn(console, "warn").mockImplementation(() => undefined);
mockSendChatMessage.mockRejectedValueOnce("string error"); mockSendChatMessage.mockRejectedValueOnce("string error");
const onError = vi.fn(); const onError = vi.fn();
@@ -652,7 +814,8 @@ describe("useChat", () => {
it("should handle non-Error throws from persistence layer", async () => { it("should handle non-Error throws from persistence layer", async () => {
vi.spyOn(console, "error").mockImplementation(() => undefined); vi.spyOn(console, "error").mockImplementation(() => undefined);
mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("OK")); vi.spyOn(console, "warn").mockImplementation(() => undefined);
makeStreamSucceed(["OK"]);
mockCreateConversation.mockRejectedValueOnce("DB string error"); mockCreateConversation.mockRejectedValueOnce("DB string error");
const onError = vi.fn(); const onError = vi.fn();
@@ -662,7 +825,6 @@ describe("useChat", () => {
await result.current.sendMessage("Hello"); await result.current.sendMessage("Hello");
}); });
// Assistant message should still be visible
expect(result.current.messages[2]?.content).toBe("OK"); expect(result.current.messages[2]?.content).toBe("OK");
expect(result.current.error).toBe("Message sent but failed to save. Please try again."); expect(result.current.error).toBe("Message sent but failed to save. Please try again.");
expect(onError).toHaveBeenCalledWith(expect.any(Error)); expect(onError).toHaveBeenCalledWith(expect.any(Error));
@@ -670,8 +832,9 @@ describe("useChat", () => {
it("should handle updateConversation failure for existing conversations", async () => { it("should handle updateConversation failure for existing conversations", async () => {
vi.spyOn(console, "error").mockImplementation(() => undefined); vi.spyOn(console, "error").mockImplementation(() => undefined);
vi.spyOn(console, "warn").mockImplementation(() => undefined);
// First message succeeds and creates conversation // First message via fallback
mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("First response")); mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("First response"));
mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", "")); mockCreateConversation.mockResolvedValueOnce(createMockIdea("conv-1", "Test", ""));
@@ -683,7 +846,8 @@ describe("useChat", () => {
expect(result.current.conversationId).toBe("conv-1"); expect(result.current.conversationId).toBe("conv-1");
// Second message succeeds but updateConversation fails // Second message via fallback, updateConversation fails
makeStreamFail();
mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Second response")); mockSendChatMessage.mockResolvedValueOnce(createMockChatResponse("Second response"));
mockUpdateConversation.mockRejectedValueOnce(new Error("Connection reset")); mockUpdateConversation.mockRejectedValueOnce(new Error("Connection reset"));
@@ -691,8 +855,10 @@ describe("useChat", () => {
await result.current.sendMessage("Second"); await result.current.sendMessage("Second");
}); });
// Assistant message should still be visible const assistantMessages = result.current.messages.filter(
expect(result.current.messages[4]?.content).toBe("Second response"); (m) => m.role === "assistant" && m.id !== "welcome"
);
expect(assistantMessages[assistantMessages.length - 1]?.content).toBe("Second response");
expect(result.current.error).toBe("Message sent but failed to save. Please try again."); expect(result.current.error).toBe("Message sent but failed to save. Please try again.");
}); });
}); });

View File

@@ -4,7 +4,11 @@
*/ */
import { useState, useCallback, useRef } from "react"; import { useState, useCallback, useRef } from "react";
import { sendChatMessage, type ChatMessage as ApiChatMessage } from "@/lib/api/chat"; import {
sendChatMessage,
streamChatMessage,
type ChatMessage as ApiChatMessage,
} from "@/lib/api/chat";
import { createConversation, updateConversation, getIdea, type Idea } from "@/lib/api/ideas"; import { createConversation, updateConversation, getIdea, type Idea } from "@/lib/api/ideas";
import { safeJsonParse, isMessageArray } from "@/lib/utils/safe-json"; import { safeJsonParse, isMessageArray } from "@/lib/utils/safe-json";
@@ -33,10 +37,12 @@ export interface UseChatOptions {
export interface UseChatReturn { export interface UseChatReturn {
messages: Message[]; messages: Message[];
isLoading: boolean; isLoading: boolean;
isStreaming: boolean;
error: string | null; error: string | null;
conversationId: string | null; conversationId: string | null;
conversationTitle: string | null; conversationTitle: string | null;
sendMessage: (content: string) => Promise<void>; sendMessage: (content: string) => Promise<void>;
abortStream: () => void;
loadConversation: (ideaId: string) => Promise<void>; loadConversation: (ideaId: string) => Promise<void>;
startNewConversation: (projectId?: string | null) => void; startNewConversation: (projectId?: string | null) => void;
setMessages: React.Dispatch<React.SetStateAction<Message[]>>; setMessages: React.Dispatch<React.SetStateAction<Message[]>>;
@@ -66,6 +72,7 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
const [messages, setMessages] = useState<Message[]>([WELCOME_MESSAGE]); const [messages, setMessages] = useState<Message[]>([WELCOME_MESSAGE]);
const [isLoading, setIsLoading] = useState(false); const [isLoading, setIsLoading] = useState(false);
const [isStreaming, setIsStreaming] = useState(false);
const [error, setError] = useState<string | null>(null); const [error, setError] = useState<string | null>(null);
const [conversationId, setConversationId] = useState<string | null>(null); const [conversationId, setConversationId] = useState<string | null>(null);
const [conversationTitle, setConversationTitle] = useState<string | null>(null); const [conversationTitle, setConversationTitle] = useState<string | null>(null);
@@ -78,6 +85,16 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
const messagesRef = useRef<Message[]>(messages); const messagesRef = useRef<Message[]>(messages);
messagesRef.current = messages; messagesRef.current = messages;
// AbortController ref for the active stream
const abortControllerRef = useRef<AbortController | null>(null);
// Track conversation state in refs to avoid stale closures in streaming callbacks
const conversationIdRef = useRef<string | null>(conversationId);
conversationIdRef.current = conversationId;
const conversationTitleRef = useRef<string | null>(conversationTitle);
conversationTitleRef.current = conversationTitle;
/** /**
* Convert our Message format to API ChatMessage format * Convert our Message format to API ChatMessage format
*/ */
@@ -119,44 +136,57 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
}, []); }, []);
/** /**
* Save conversation to backend * Save conversation to backend.
* Uses refs for conversation state to avoid stale closures in streaming callbacks.
*/ */
const saveConversation = useCallback( const saveConversation = useCallback(
async (msgs: Message[], title: string): Promise<string> => { async (msgs: Message[], title: string): Promise<string> => {
const content = serializeMessages(msgs); const content = serializeMessages(msgs);
const currentConvId = conversationIdRef.current;
if (conversationId) { if (currentConvId) {
// Update existing conversation await updateConversation(currentConvId, content, title);
await updateConversation(conversationId, content, title); return currentConvId;
return conversationId;
} else { } else {
// Create new conversation
const idea = await createConversation(title, content, projectIdRef.current ?? undefined); const idea = await createConversation(title, content, projectIdRef.current ?? undefined);
setConversationId(idea.id); setConversationId(idea.id);
setConversationTitle(title); setConversationTitle(title);
conversationIdRef.current = idea.id;
conversationTitleRef.current = title;
return idea.id; return idea.id;
} }
}, },
[conversationId, serializeMessages] [serializeMessages]
); );
/** /**
* Send a message to the LLM and save the conversation * Abort an active stream
*/
const abortStream = useCallback((): void => {
if (abortControllerRef.current) {
abortControllerRef.current.abort();
abortControllerRef.current = null;
}
setIsStreaming(false);
setIsLoading(false);
}, []);
/**
* Send a message to the LLM using streaming, with fallback to non-streaming
*/ */
const sendMessage = useCallback( const sendMessage = useCallback(
async (content: string): Promise<void> => { async (content: string): Promise<void> => {
if (!content.trim() || isLoading) { if (!content.trim() || isLoading || isStreaming) {
return; return;
} }
const userMessage: Message = { const userMessage: Message = {
id: `user-${Date.now().toString()}`, id: `user-${Date.now().toString()}-${Math.random().toString(36).slice(2, 8)}`,
role: "user", role: "user",
content: content.trim(), content: content.trim(),
createdAt: new Date().toISOString(), createdAt: new Date().toISOString(),
}; };
// Add user message immediately using functional update
setMessages((prev) => { setMessages((prev) => {
const updated = [...prev, userMessage]; const updated = [...prev, userMessage];
messagesRef.current = updated; messagesRef.current = updated;
@@ -165,8 +195,15 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
setIsLoading(true); setIsLoading(true);
setError(null); setError(null);
try { const assistantMessageId = `assistant-${Date.now().toString()}-${Math.random().toString(36).slice(2, 8)}`;
// Prepare API request - use ref to get current messages (prevents stale closure) const placeholderMessage: Message = {
id: assistantMessageId,
role: "assistant",
content: "",
createdAt: new Date().toISOString(),
model,
};
const currentMessages = messagesRef.current; const currentMessages = messagesRef.current;
const apiMessages = convertToApiMessages(currentMessages); const apiMessages = convertToApiMessages(currentMessages);
@@ -178,10 +215,84 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
...(systemPrompt !== undefined && { systemPrompt }), ...(systemPrompt !== undefined && { systemPrompt }),
}; };
// Call LLM API const controller = new AbortController();
abortControllerRef.current = controller;
let streamingSucceeded = false;
try {
await new Promise<void>((resolve, reject) => {
let hasReceivedData = false;
streamChatMessage(
request,
(chunk: string) => {
if (!hasReceivedData) {
hasReceivedData = true;
setIsLoading(false);
setIsStreaming(true);
setMessages((prev) => {
const updated = [...prev, { ...placeholderMessage }];
messagesRef.current = updated;
return updated;
});
}
setMessages((prev) => {
const updated = prev.map((msg) =>
msg.id === assistantMessageId ? { ...msg, content: msg.content + chunk } : msg
);
messagesRef.current = updated;
return updated;
});
},
() => {
streamingSucceeded = true;
setIsStreaming(false);
abortControllerRef.current = null;
resolve();
},
(err: Error) => {
reject(err);
},
controller.signal
);
});
} catch (err: unknown) {
if (controller.signal.aborted) {
setIsStreaming(false);
setIsLoading(false);
abortControllerRef.current = null;
// Remove placeholder if no content was received
setMessages((prev) => {
const assistantMsg = prev.find((m) => m.id === assistantMessageId);
if (assistantMsg?.content === "") {
const updated = prev.filter((m) => m.id !== assistantMessageId);
messagesRef.current = updated;
return updated;
}
messagesRef.current = prev;
return prev;
});
return;
}
// Streaming failed — fall back to non-streaming
console.warn("Streaming failed, falling back to non-streaming", {
error: err instanceof Error ? err : new Error(String(err)),
});
setMessages((prev) => {
const withoutPlaceholder = prev.filter((m) => m.id !== assistantMessageId);
messagesRef.current = withoutPlaceholder;
return withoutPlaceholder;
});
setIsStreaming(false);
try {
const response = await sendChatMessage(request); const response = await sendChatMessage(request);
// Create assistant message
const assistantMessage: Message = { const assistantMessage: Message = {
id: `assistant-${Date.now().toString()}`, id: `assistant-${Date.now().toString()}`,
role: "assistant", role: "assistant",
@@ -193,22 +304,59 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
totalTokens: (response.promptEvalCount ?? 0) + (response.evalCount ?? 0), totalTokens: (response.promptEvalCount ?? 0) + (response.evalCount ?? 0),
}; };
// Add assistant message using functional update
let finalMessages: Message[] = [];
setMessages((prev) => { setMessages((prev) => {
finalMessages = [...prev, assistantMessage]; const updated = [...prev, assistantMessage];
messagesRef.current = finalMessages; messagesRef.current = updated;
return finalMessages; return updated;
}); });
// Generate title from first user message if this is a new conversation streamingSucceeded = true;
} catch (fallbackErr: unknown) {
const errorMsg =
fallbackErr instanceof Error ? fallbackErr.message : "Failed to send message";
setError("Unable to send message. Please try again.");
onError?.(fallbackErr instanceof Error ? fallbackErr : new Error(errorMsg));
console.error("Failed to send chat message", {
error: fallbackErr,
errorType: "LLM_ERROR",
conversationId: conversationIdRef.current,
messageLength: content.length,
messagePreview: content.substring(0, 50),
model,
messageCount: messagesRef.current.length,
timestamp: new Date().toISOString(),
});
const errorMessage: Message = {
id: `error-${String(Date.now())}`,
role: "assistant",
content: "Something went wrong. Please try again.",
createdAt: new Date().toISOString(),
};
setMessages((prev) => {
const updated = [...prev, errorMessage];
messagesRef.current = updated;
return updated;
});
setIsLoading(false);
return;
}
}
setIsLoading(false);
if (!streamingSucceeded) {
return;
}
const finalMessages = messagesRef.current;
const isFirstMessage = const isFirstMessage =
!conversationId && finalMessages.filter((m) => m.role === "user").length === 1; !conversationIdRef.current && finalMessages.filter((m) => m.role === "user").length === 1;
const title = isFirstMessage const title = isFirstMessage
? generateTitle(content) ? generateTitle(content)
: (conversationTitle ?? "Chat Conversation"); : (conversationTitleRef.current ?? "Chat Conversation");
// Save conversation (separate error handling from LLM errors)
try { try {
await saveConversation(finalMessages, title); await saveConversation(finalMessages, title);
} catch (saveErr) { } catch (saveErr) {
@@ -219,41 +367,14 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
console.error("Failed to save conversation", { console.error("Failed to save conversation", {
error: saveErr, error: saveErr,
errorType: "PERSISTENCE_ERROR", errorType: "PERSISTENCE_ERROR",
conversationId, conversationId: conversationIdRef.current,
detail: saveErrorMsg, detail: saveErrorMsg,
}); });
} }
} catch (err) {
const errorMsg = err instanceof Error ? err.message : "Failed to send message";
setError("Unable to send message. Please try again.");
onError?.(err instanceof Error ? err : new Error(errorMsg));
console.error("Failed to send chat message", {
error: err,
errorType: "LLM_ERROR",
conversationId,
messageLength: content.length,
messagePreview: content.substring(0, 50),
model,
messageCount: messagesRef.current.length,
timestamp: new Date().toISOString(),
});
// Add error message to chat
const errorMessage: Message = {
id: `error-${String(Date.now())}`,
role: "assistant",
content: "Something went wrong. Please try again.",
createdAt: new Date().toISOString(),
};
setMessages((prev) => [...prev, errorMessage]);
} finally {
setIsLoading(false);
}
}, },
[ [
isLoading, isLoading,
conversationId, isStreaming,
conversationTitle,
model, model,
temperature, temperature,
maxTokens, maxTokens,
@@ -280,6 +401,8 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
setMessages(msgs); setMessages(msgs);
setConversationId(idea.id); setConversationId(idea.id);
setConversationTitle(idea.title ?? null); setConversationTitle(idea.title ?? null);
conversationIdRef.current = idea.id;
conversationTitleRef.current = idea.title ?? null;
} catch (err) { } catch (err) {
const errorMsg = err instanceof Error ? err.message : "Failed to load conversation"; const errorMsg = err instanceof Error ? err.message : "Failed to load conversation";
setError("Unable to load conversation. Please try again."); setError("Unable to load conversation. Please try again.");
@@ -305,6 +428,8 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
setConversationId(null); setConversationId(null);
setConversationTitle(null); setConversationTitle(null);
setError(null); setError(null);
conversationIdRef.current = null;
conversationTitleRef.current = null;
projectIdRef.current = newProjectId ?? null; projectIdRef.current = newProjectId ?? null;
}, []); }, []);
@@ -318,10 +443,12 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
return { return {
messages, messages,
isLoading, isLoading,
isStreaming,
error, error,
conversationId, conversationId,
conversationTitle, conversationTitle,
sendMessage, sendMessage,
abortStream,
loadConversation, loadConversation,
startNewConversation, startNewConversation,
setMessages, setMessages,

View File

@@ -3,7 +3,8 @@
* Handles LLM chat interactions via /api/llm/chat * Handles LLM chat interactions via /api/llm/chat
*/ */
import { apiPost } from "./client"; import { apiPost, fetchCsrfToken, getCsrfToken } from "./client";
import { API_BASE_URL } from "../config";
export interface ChatMessage { export interface ChatMessage {
role: "system" | "user" | "assistant"; role: "system" | "user" | "assistant";
@@ -31,6 +32,19 @@ export interface ChatResponse {
evalCount?: number; evalCount?: number;
} }
/**
* Parsed SSE data chunk from the LLM stream
*/
interface SseChunk {
error?: string;
message?: {
role: string;
content: string;
};
model?: string;
done?: boolean;
}
/** /**
* Send a chat message to the LLM * Send a chat message to the LLM
*/ */
@@ -39,19 +53,122 @@ export async function sendChatMessage(request: ChatRequest): Promise<ChatRespons
} }
/** /**
* Stream a chat message from the LLM (not implemented yet) * Get or refresh the CSRF token for streaming requests.
* TODO: Implement streaming support */
async function ensureCsrfTokenForStream(): Promise<string> {
const existing = getCsrfToken();
if (existing) {
return existing;
}
return fetchCsrfToken();
}
/**
* Stream a chat message from the LLM using SSE over fetch.
*
* The backend accepts stream: true in the request body and responds with
* Server-Sent Events:
* data: {"message":{"content":"token"},...}\n\n for each token
* data: [DONE]\n\n when the stream is complete
* data: {"error":"message"}\n\n on error
*
* @param request - Chat request (stream field will be forced to true)
* @param onChunk - Called with each token string as it arrives
* @param onComplete - Called when the stream finishes successfully
* @param onError - Called if the stream encounters an error
* @param signal - Optional AbortSignal for cancellation
*/ */
export function streamChatMessage( export function streamChatMessage(
request: ChatRequest, request: ChatRequest,
onChunk: (chunk: string) => void, onChunk: (chunk: string) => void,
onComplete: () => void, onComplete: () => void,
onError: (error: Error) => void onError: (error: Error) => void,
signal?: AbortSignal
): void { ): void {
// Streaming implementation would go here void (async (): Promise<void> => {
void request; try {
void onChunk; const csrfToken = await ensureCsrfTokenForStream();
void onComplete;
void onError; const response = await fetch(`${API_BASE_URL}/api/llm/chat`, {
throw new Error("Streaming not implemented yet"); method: "POST",
headers: {
"Content-Type": "application/json",
"X-CSRF-Token": csrfToken,
},
credentials: "include",
body: JSON.stringify({ ...request, stream: true }),
signal: signal ?? null,
});
if (!response.ok) {
const errorText = await response.text().catch(() => response.statusText);
throw new Error(`Stream request failed: ${errorText}`);
}
if (!response.body) {
throw new Error("Response body is not readable");
}
const reader = response.body.getReader();
const decoder = new TextDecoder("utf-8");
let buffer = "";
let readerDone = false;
while (!readerDone) {
const { done, value } = await reader.read();
readerDone = done;
if (done) {
break;
}
buffer += decoder.decode(value, { stream: true });
// SSE messages are separated by double newlines
const parts = buffer.split("\n\n");
// Keep the last (potentially incomplete) part
buffer = parts.pop() ?? "";
for (const part of parts) {
const trimmed = part.trim();
if (!trimmed) continue;
for (const line of trimmed.split("\n")) {
if (!line.startsWith("data: ")) continue;
const data = line.slice("data: ".length).trim();
if (data === "[DONE]") {
onComplete();
return;
}
try {
const parsed = JSON.parse(data) as SseChunk;
if (parsed.error) {
throw new Error(parsed.error);
}
if (parsed.message?.content) {
onChunk(parsed.message.content);
}
} catch (parseErr) {
if (parseErr instanceof SyntaxError) {
continue;
}
throw parseErr;
}
}
}
}
// Natural end of stream without [DONE]
onComplete();
} catch (err: unknown) {
if (err instanceof DOMException && err.name === "AbortError") {
return;
}
onError(err instanceof Error ? err : new Error(String(err)));
}
})();
} }