- Make loadConversation fully self-contained like sendMessage (handle errors internally via state, onError callback, and structured logging)
- Remove duplicate try/catch + log from Chat.tsx imperative handle
- Replace re-throw tests with delegation and no-throw tests
- Add hook-level loadConversation error-path tests (getIdea rejection)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
331 lines · 9.8 KiB · TypeScript
/**
 * useChat hook
 *
 * Manages chat state, LLM interactions, and conversation persistence.
 */
|
|
|
|
import { useState, useCallback, useRef } from "react";
|
|
import { sendChatMessage, type ChatMessage as ApiChatMessage } from "@/lib/api/chat";
|
|
import { createConversation, updateConversation, getIdea, type Idea } from "@/lib/api/ideas";
|
|
import { safeJsonParse, isMessageArray } from "@/lib/utils/safe-json";
|
|
|
|
/** A single chat message as stored and rendered by the UI. */
export interface Message {
  // Unique id; synthetic ids in this file: "welcome", `user-<ts>`, `assistant-<ts>`, `error-<ts>`.
  id: string;
  role: "user" | "assistant" | "system";
  content: string;
  // Optional model "thinking"/reasoning text, when the provider supplies it.
  thinking?: string;
  // ISO-8601 timestamp of when the message was created client-side.
  createdAt: string;
  // Metadata echoed from the LLM response (populated on assistant messages).
  model?: string;
  provider?: string;
  promptTokens?: number;
  completionTokens?: number;
  totalTokens?: number;
}
|
|
|
|
/** Configuration accepted by the useChat hook. */
export interface UseChatOptions {
  // LLM model name; defaults to DEFAULT_MODEL when omitted.
  model?: string;
  temperature?: number;
  maxTokens?: number;
  systemPrompt?: string;
  // Project new conversations are created under (tracked via a ref to avoid stale closures).
  projectId?: string | null;
  // Invoked for every internal failure (LLM, persistence, load) in addition to error state.
  onError?: (error: Error) => void;
}
|
|
|
|
/** Value returned by the useChat hook. */
export interface UseChatReturn {
  // Current transcript, including the synthetic welcome message.
  messages: Message[];
  // True while an LLM request or conversation load is in flight.
  isLoading: boolean;
  // User-facing error text, or null when there is no active error.
  error: string | null;
  // Backend id of the persisted conversation; null until the first save.
  conversationId: string | null;
  conversationTitle: string | null;
  // Both async functions handle their own errors internally and never throw.
  sendMessage: (content: string) => Promise<void>;
  loadConversation: (ideaId: string) => Promise<void>;
  startNewConversation: (projectId?: string | null) => void;
  // Direct state-setter escape hatch for callers that manipulate the transcript themselves.
  setMessages: React.Dispatch<React.SetStateAction<Message[]>>;
  clearError: () => void;
}
|
|
|
|
// Fallback model used when the caller does not specify one.
const DEFAULT_MODEL = "llama3.2";

// Initial greeting shown at the start of every conversation.
// NOTE(review): createdAt is evaluated once at module load, so every reset
// conversation reuses the same timestamp — confirm this is acceptable.
const WELCOME_MESSAGE: Message = {
  id: "welcome",
  role: "assistant",
  content: "Hello! I'm your AI assistant. How can I help you today?",
  createdAt: new Date().toISOString(),
};
|
|
|
|
/**
|
|
* Hook for managing chat conversations
|
|
*/
|
|
export function useChat(options: UseChatOptions = {}): UseChatReturn {
|
|
const {
|
|
model = DEFAULT_MODEL,
|
|
temperature,
|
|
maxTokens,
|
|
systemPrompt,
|
|
projectId,
|
|
onError,
|
|
} = options;
|
|
|
|
const [messages, setMessages] = useState<Message[]>([WELCOME_MESSAGE]);
|
|
const [isLoading, setIsLoading] = useState(false);
|
|
const [error, setError] = useState<string | null>(null);
|
|
const [conversationId, setConversationId] = useState<string | null>(null);
|
|
const [conversationTitle, setConversationTitle] = useState<string | null>(null);
|
|
|
|
// Track project ID in ref to prevent stale closures
|
|
const projectIdRef = useRef<string | null>(projectId ?? null);
|
|
projectIdRef.current = projectId ?? null;
|
|
|
|
// Track messages in ref to prevent stale closures during rapid sends
|
|
const messagesRef = useRef<Message[]>(messages);
|
|
messagesRef.current = messages;
|
|
|
|
/**
|
|
* Convert our Message format to API ChatMessage format
|
|
*/
|
|
const convertToApiMessages = useCallback((msgs: Message[]): ApiChatMessage[] => {
|
|
return msgs
|
|
.filter((msg) => msg.role !== "system" || msg.id !== "welcome")
|
|
.map((msg) => ({
|
|
role: msg.role,
|
|
content: msg.content,
|
|
}));
|
|
}, []);
|
|
|
|
/**
|
|
* Generate a conversation title from the first user message
|
|
*/
|
|
const generateTitle = useCallback((firstMessage: string): string => {
|
|
const maxLength = 60;
|
|
const trimmed = firstMessage.trim();
|
|
|
|
if (trimmed.length <= maxLength) {
|
|
return trimmed;
|
|
}
|
|
|
|
return trimmed.substring(0, maxLength - 3) + "...";
|
|
}, []);
|
|
|
|
/**
|
|
* Serialize messages to JSON for storage
|
|
*/
|
|
const serializeMessages = useCallback((msgs: Message[]): string => {
|
|
return JSON.stringify(msgs, null, 2);
|
|
}, []);
|
|
|
|
/**
|
|
* Deserialize messages from JSON with runtime type validation
|
|
*/
|
|
const deserializeMessages = useCallback((json: string): Message[] => {
|
|
return safeJsonParse(json, isMessageArray, [WELCOME_MESSAGE]);
|
|
}, []);
|
|
|
|
/**
|
|
* Save conversation to backend
|
|
*/
|
|
const saveConversation = useCallback(
|
|
async (msgs: Message[], title: string): Promise<string> => {
|
|
const content = serializeMessages(msgs);
|
|
|
|
if (conversationId) {
|
|
// Update existing conversation
|
|
await updateConversation(conversationId, content, title);
|
|
return conversationId;
|
|
} else {
|
|
// Create new conversation
|
|
const idea = await createConversation(title, content, projectIdRef.current ?? undefined);
|
|
setConversationId(idea.id);
|
|
setConversationTitle(title);
|
|
return idea.id;
|
|
}
|
|
},
|
|
[conversationId, serializeMessages]
|
|
);
|
|
|
|
/**
|
|
* Send a message to the LLM and save the conversation
|
|
*/
|
|
const sendMessage = useCallback(
|
|
async (content: string): Promise<void> => {
|
|
if (!content.trim() || isLoading) {
|
|
return;
|
|
}
|
|
|
|
const userMessage: Message = {
|
|
id: `user-${Date.now().toString()}`,
|
|
role: "user",
|
|
content: content.trim(),
|
|
createdAt: new Date().toISOString(),
|
|
};
|
|
|
|
// Add user message immediately using functional update
|
|
setMessages((prev) => {
|
|
const updated = [...prev, userMessage];
|
|
messagesRef.current = updated;
|
|
return updated;
|
|
});
|
|
setIsLoading(true);
|
|
setError(null);
|
|
|
|
try {
|
|
// Prepare API request - use ref to get current messages (prevents stale closure)
|
|
const currentMessages = messagesRef.current;
|
|
const apiMessages = convertToApiMessages(currentMessages);
|
|
|
|
const request = {
|
|
model,
|
|
messages: apiMessages,
|
|
...(temperature !== undefined && { temperature }),
|
|
...(maxTokens !== undefined && { maxTokens }),
|
|
...(systemPrompt !== undefined && { systemPrompt }),
|
|
};
|
|
|
|
// Call LLM API
|
|
const response = await sendChatMessage(request);
|
|
|
|
// Create assistant message
|
|
const assistantMessage: Message = {
|
|
id: `assistant-${Date.now().toString()}`,
|
|
role: "assistant",
|
|
content: response.message.content,
|
|
createdAt: new Date().toISOString(),
|
|
model: response.model,
|
|
promptTokens: response.promptEvalCount ?? 0,
|
|
completionTokens: response.evalCount ?? 0,
|
|
totalTokens: (response.promptEvalCount ?? 0) + (response.evalCount ?? 0),
|
|
};
|
|
|
|
// Add assistant message using functional update
|
|
let finalMessages: Message[] = [];
|
|
setMessages((prev) => {
|
|
finalMessages = [...prev, assistantMessage];
|
|
messagesRef.current = finalMessages;
|
|
return finalMessages;
|
|
});
|
|
|
|
// Generate title from first user message if this is a new conversation
|
|
const isFirstMessage =
|
|
!conversationId && finalMessages.filter((m) => m.role === "user").length === 1;
|
|
const title = isFirstMessage
|
|
? generateTitle(content)
|
|
: (conversationTitle ?? "Chat Conversation");
|
|
|
|
// Save conversation (separate error handling from LLM errors)
|
|
try {
|
|
await saveConversation(finalMessages, title);
|
|
} catch (saveErr) {
|
|
const saveErrorMsg =
|
|
saveErr instanceof Error ? saveErr.message : "Unknown persistence error";
|
|
setError("Message sent but failed to save. Please try again.");
|
|
onError?.(saveErr instanceof Error ? saveErr : new Error(saveErrorMsg));
|
|
console.error("Failed to save conversation", {
|
|
error: saveErr,
|
|
errorType: "PERSISTENCE_ERROR",
|
|
conversationId,
|
|
detail: saveErrorMsg,
|
|
});
|
|
}
|
|
} catch (err) {
|
|
const errorMsg = err instanceof Error ? err.message : "Failed to send message";
|
|
setError("Unable to send message. Please try again.");
|
|
onError?.(err instanceof Error ? err : new Error(errorMsg));
|
|
console.error("Failed to send chat message", {
|
|
error: err,
|
|
errorType: "LLM_ERROR",
|
|
conversationId,
|
|
messageLength: content.length,
|
|
messagePreview: content.substring(0, 50),
|
|
model,
|
|
messageCount: messagesRef.current.length,
|
|
timestamp: new Date().toISOString(),
|
|
});
|
|
|
|
// Add error message to chat
|
|
const errorMessage: Message = {
|
|
id: `error-${String(Date.now())}`,
|
|
role: "assistant",
|
|
content: "Something went wrong. Please try again.",
|
|
createdAt: new Date().toISOString(),
|
|
};
|
|
setMessages((prev) => [...prev, errorMessage]);
|
|
} finally {
|
|
setIsLoading(false);
|
|
}
|
|
},
|
|
[
|
|
isLoading,
|
|
conversationId,
|
|
conversationTitle,
|
|
model,
|
|
temperature,
|
|
maxTokens,
|
|
systemPrompt,
|
|
onError,
|
|
convertToApiMessages,
|
|
generateTitle,
|
|
saveConversation,
|
|
]
|
|
);
|
|
|
|
/**
|
|
* Load an existing conversation from the backend
|
|
*/
|
|
const loadConversation = useCallback(
|
|
async (ideaId: string): Promise<void> => {
|
|
try {
|
|
setIsLoading(true);
|
|
setError(null);
|
|
|
|
const idea: Idea = await getIdea(ideaId);
|
|
const msgs = deserializeMessages(idea.content);
|
|
|
|
setMessages(msgs);
|
|
setConversationId(idea.id);
|
|
setConversationTitle(idea.title ?? null);
|
|
} catch (err) {
|
|
const errorMsg = err instanceof Error ? err.message : "Failed to load conversation";
|
|
setError("Unable to load conversation. Please try again.");
|
|
onError?.(err instanceof Error ? err : new Error(errorMsg));
|
|
console.error("Failed to load conversation", {
|
|
error: err,
|
|
errorType: "LOAD_ERROR",
|
|
ideaId,
|
|
timestamp: new Date().toISOString(),
|
|
});
|
|
} finally {
|
|
setIsLoading(false);
|
|
}
|
|
},
|
|
[deserializeMessages, onError]
|
|
);
|
|
|
|
/**
|
|
* Start a new conversation
|
|
*/
|
|
const startNewConversation = useCallback((newProjectId?: string | null): void => {
|
|
setMessages([WELCOME_MESSAGE]);
|
|
setConversationId(null);
|
|
setConversationTitle(null);
|
|
setError(null);
|
|
projectIdRef.current = newProjectId ?? null;
|
|
}, []);
|
|
|
|
/**
|
|
* Clear error message
|
|
*/
|
|
const clearError = useCallback((): void => {
|
|
setError(null);
|
|
}, []);
|
|
|
|
return {
|
|
messages,
|
|
isLoading,
|
|
error,
|
|
conversationId,
|
|
conversationTitle,
|
|
sendMessage,
|
|
loadConversation,
|
|
startNewConversation,
|
|
setMessages,
|
|
clearError,
|
|
};
|
|
}
|