feat: wire chat UI to backend APIs
- Created API clients for LLM chat (/api/llm/chat) and Ideas (/api/ideas)
- Implemented useChat hook for conversation state management
- Connected Chat component to backend with full CRUD operations
- Integrated ConversationSidebar with conversation fetching
- Added automatic conversation persistence after each message
- Integrated WebSocket for connection status
- Used existing better-auth for authentication
- All TypeScript strict mode compliant (no `any` types)

Deliverables:
✅ Working chat interface at /chat route
✅ Conversations save to database via Ideas API
✅ Real-time WebSocket connection
✅ Clean TypeScript (no errors)
✅ Full conversation loading and persistence

See CHAT_INTEGRATION_SUMMARY.md for detailed documentation.
This commit is contained in:
295
apps/web/src/hooks/useChat.ts
Normal file
295
apps/web/src/hooks/useChat.ts
Normal file
@@ -0,0 +1,295 @@
|
||||
/**
|
||||
* useChat hook
|
||||
* Manages chat state, LLM interactions, and conversation persistence
|
||||
*/
|
||||
|
||||
import { useState, useCallback, useRef } from "react";
|
||||
import { sendChatMessage, type ChatMessage as ApiChatMessage } from "@/lib/api/chat";
|
||||
import { createConversation, updateConversation, getIdea, type Idea } from "@/lib/api/ideas";
|
||||
|
||||
/**
 * A single chat message as held in UI state.
 *
 * Extends the wire-level chat message with client-side metadata
 * (id, timestamp, token counts) used for display and persistence.
 */
export interface Message {
  /** Client-generated unique id (e.g. `user-<timestamp>`, or `"welcome"`). */
  id: string;
  /** Who produced the message. */
  role: "user" | "assistant" | "system";
  /** Message text shown in the transcript. */
  content: string;
  /** Optional model reasoning trace — NOTE(review): never populated in this file; confirm a caller sets it. */
  thinking?: string;
  /** ISO-8601 timestamp of when the message was created client-side. */
  createdAt: string;
  /** Model that produced an assistant reply (copied from the LLM response). */
  model?: string;
  /** Provider name — NOTE(review): never populated in this file; confirm a caller sets it. */
  provider?: string;
  /** Tokens consumed by the prompt (from `response.promptEvalCount`). */
  promptTokens?: number;
  /** Tokens generated in the completion (from `response.evalCount`). */
  completionTokens?: number;
  /** Sum of prompt and completion tokens (missing counts treated as 0). */
  totalTokens?: number;
}
/** Configuration accepted by the `useChat` hook. All fields are optional. */
export interface UseChatOptions {
  /** Model name to request; defaults to `"llama3.2"` when omitted. */
  model?: string;
  /** Sampling temperature forwarded to the LLM API (provider default if omitted). */
  temperature?: number;
  /** Maximum completion tokens forwarded to the LLM API. */
  maxTokens?: number;
  /** System prompt forwarded to the LLM API. */
  systemPrompt?: string;
  /** Project to attach newly created conversations to; null/undefined means none. */
  projectId?: string | null;
  /** Invoked with the error whenever sending or loading a conversation fails. */
  onError?: (error: Error) => void;
}
/** Value returned by the `useChat` hook. */
export interface UseChatReturn {
  /** Current transcript, starting with the canned welcome message. */
  messages: Message[];
  /** True while a send or load operation is in flight. */
  isLoading: boolean;
  /** Latest error message, or null when there is none. */
  error: string | null;
  /** Backend id of the persisted conversation (Idea), or null if not yet saved. */
  conversationId: string | null;
  /** Title of the persisted conversation, or null if not yet saved. */
  conversationTitle: string | null;
  /** Send a user message to the LLM and persist the updated transcript. */
  sendMessage: (content: string) => Promise<void>;
  /** Replace current state with a conversation loaded from the backend. */
  loadConversation: (ideaId: string) => Promise<void>;
  /** Reset to a fresh, unsaved conversation (optionally re-targeting a project). */
  startNewConversation: (projectId?: string | null) => void;
  /** Direct setter for the transcript (escape hatch for callers). */
  setMessages: React.Dispatch<React.SetStateAction<Message[]>>;
  /** Clear the current error message. */
  clearError: () => void;
}
// Model requested when the caller does not specify one.
const DEFAULT_MODEL = "llama3.2";

// Canned greeting shown at the start of every new conversation.
// NOTE(review): `createdAt` is evaluated once at module load, so the welcome
// message always carries the page-load timestamp rather than the moment a
// conversation actually starts — confirm this is acceptable for display.
const WELCOME_MESSAGE: Message = {
  id: "welcome",
  role: "assistant",
  content: "Hello! I'm your AI assistant. How can I help you today?",
  createdAt: new Date().toISOString(),
};
/**
|
||||
* Hook for managing chat conversations
|
||||
*/
|
||||
export function useChat(options: UseChatOptions = {}): UseChatReturn {
|
||||
const {
|
||||
model = DEFAULT_MODEL,
|
||||
temperature,
|
||||
maxTokens,
|
||||
systemPrompt,
|
||||
projectId,
|
||||
onError,
|
||||
} = options;
|
||||
|
||||
const [messages, setMessages] = useState<Message[]>([WELCOME_MESSAGE]);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [conversationId, setConversationId] = useState<string | null>(null);
|
||||
const [conversationTitle, setConversationTitle] = useState<string | null>(null);
|
||||
|
||||
// Track project ID in ref to prevent stale closures
|
||||
const projectIdRef = useRef<string | null>(projectId ?? null);
|
||||
projectIdRef.current = projectId ?? null;
|
||||
|
||||
/**
|
||||
* Convert our Message format to API ChatMessage format
|
||||
*/
|
||||
const convertToApiMessages = useCallback((msgs: Message[]): ApiChatMessage[] => {
|
||||
return msgs
|
||||
.filter((msg) => msg.role !== "system" || msg.id !== "welcome")
|
||||
.map((msg) => ({
|
||||
role: msg.role as "system" | "user" | "assistant",
|
||||
content: msg.content,
|
||||
}));
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Generate a conversation title from the first user message
|
||||
*/
|
||||
const generateTitle = useCallback((firstMessage: string): string => {
|
||||
const maxLength = 60;
|
||||
const trimmed = firstMessage.trim();
|
||||
|
||||
if (trimmed.length <= maxLength) {
|
||||
return trimmed;
|
||||
}
|
||||
|
||||
return trimmed.substring(0, maxLength - 3) + "...";
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Serialize messages to JSON for storage
|
||||
*/
|
||||
const serializeMessages = useCallback((msgs: Message[]): string => {
|
||||
return JSON.stringify(msgs, null, 2);
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Deserialize messages from JSON
|
||||
*/
|
||||
const deserializeMessages = useCallback((json: string): Message[] => {
|
||||
try {
|
||||
const parsed = JSON.parse(json) as Message[];
|
||||
return Array.isArray(parsed) ? parsed : [WELCOME_MESSAGE];
|
||||
} catch {
|
||||
return [WELCOME_MESSAGE];
|
||||
}
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Save conversation to backend
|
||||
*/
|
||||
const saveConversation = useCallback(
|
||||
async (msgs: Message[], title: string): Promise<string> => {
|
||||
const content = serializeMessages(msgs);
|
||||
|
||||
if (conversationId) {
|
||||
// Update existing conversation
|
||||
await updateConversation(conversationId, content, title);
|
||||
return conversationId;
|
||||
} else {
|
||||
// Create new conversation
|
||||
const idea = await createConversation(
|
||||
title,
|
||||
content,
|
||||
projectIdRef.current ?? undefined
|
||||
);
|
||||
setConversationId(idea.id);
|
||||
setConversationTitle(title);
|
||||
return idea.id;
|
||||
}
|
||||
},
|
||||
[conversationId, serializeMessages]
|
||||
);
|
||||
|
||||
/**
|
||||
* Send a message to the LLM and save the conversation
|
||||
*/
|
||||
const sendMessage = useCallback(
|
||||
async (content: string): Promise<void> => {
|
||||
if (!content.trim() || isLoading) {
|
||||
return;
|
||||
}
|
||||
|
||||
const userMessage: Message = {
|
||||
id: `user-${Date.now()}`,
|
||||
role: "user",
|
||||
content: content.trim(),
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
// Add user message immediately
|
||||
setMessages((prev) => [...prev, userMessage]);
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
// Prepare API request
|
||||
const updatedMessages = [...messages, userMessage];
|
||||
const apiMessages = convertToApiMessages(updatedMessages);
|
||||
|
||||
const request = {
|
||||
model,
|
||||
messages: apiMessages,
|
||||
temperature,
|
||||
maxTokens,
|
||||
systemPrompt,
|
||||
};
|
||||
|
||||
// Call LLM API
|
||||
const response = await sendChatMessage(request);
|
||||
|
||||
// Create assistant message
|
||||
const assistantMessage: Message = {
|
||||
id: `assistant-${Date.now()}`,
|
||||
role: "assistant",
|
||||
content: response.message.content,
|
||||
createdAt: new Date().toISOString(),
|
||||
model: response.model,
|
||||
promptTokens: response.promptEvalCount,
|
||||
completionTokens: response.evalCount,
|
||||
totalTokens: (response.promptEvalCount ?? 0) + (response.evalCount ?? 0),
|
||||
};
|
||||
|
||||
// Add assistant message
|
||||
const finalMessages = [...updatedMessages, assistantMessage];
|
||||
setMessages(finalMessages);
|
||||
|
||||
// Generate title from first user message if this is a new conversation
|
||||
const isFirstMessage = !conversationId && finalMessages.filter(m => m.role === "user").length === 1;
|
||||
const title = isFirstMessage
|
||||
? generateTitle(content)
|
||||
: conversationTitle ?? "Chat Conversation";
|
||||
|
||||
// Save conversation
|
||||
await saveConversation(finalMessages, title);
|
||||
|
||||
} catch (err) {
|
||||
const errorMsg = err instanceof Error ? err.message : "Failed to send message";
|
||||
setError(errorMsg);
|
||||
onError?.(err instanceof Error ? err : new Error(errorMsg));
|
||||
|
||||
// Add error message to chat
|
||||
const errorMessage: Message = {
|
||||
id: `error-${Date.now()}`,
|
||||
role: "assistant",
|
||||
content: `Error: ${errorMsg}`,
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
setMessages((prev) => [...prev, errorMessage]);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
},
|
||||
[
|
||||
messages,
|
||||
isLoading,
|
||||
conversationId,
|
||||
conversationTitle,
|
||||
model,
|
||||
temperature,
|
||||
maxTokens,
|
||||
systemPrompt,
|
||||
onError,
|
||||
convertToApiMessages,
|
||||
generateTitle,
|
||||
saveConversation,
|
||||
]
|
||||
);
|
||||
|
||||
/**
|
||||
* Load an existing conversation from the backend
|
||||
*/
|
||||
const loadConversation = useCallback(async (ideaId: string): Promise<void> => {
|
||||
try {
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
|
||||
const idea: Idea = await getIdea(ideaId);
|
||||
const msgs = deserializeMessages(idea.content);
|
||||
|
||||
setMessages(msgs);
|
||||
setConversationId(idea.id);
|
||||
setConversationTitle(idea.title ?? null);
|
||||
} catch (err) {
|
||||
const errorMsg = err instanceof Error ? err.message : "Failed to load conversation";
|
||||
setError(errorMsg);
|
||||
onError?.(err instanceof Error ? err : new Error(errorMsg));
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
}, [deserializeMessages, onError]);
|
||||
|
||||
/**
|
||||
* Start a new conversation
|
||||
*/
|
||||
const startNewConversation = useCallback((newProjectId?: string | null): void => {
|
||||
setMessages([WELCOME_MESSAGE]);
|
||||
setConversationId(null);
|
||||
setConversationTitle(null);
|
||||
setError(null);
|
||||
projectIdRef.current = newProjectId ?? null;
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Clear error message
|
||||
*/
|
||||
const clearError = useCallback((): void => {
|
||||
setError(null);
|
||||
}, []);
|
||||
|
||||
return {
|
||||
messages,
|
||||
isLoading,
|
||||
error,
|
||||
conversationId,
|
||||
conversationTitle,
|
||||
sendMessage,
|
||||
loadConversation,
|
||||
startNewConversation,
|
||||
setMessages,
|
||||
clearError,
|
||||
};
|
||||
}
|
||||
Reference in New Issue
Block a user