feat: wire chat UI to backend APIs
- Created API clients for LLM chat (/api/llm/chat) and Ideas (/api/ideas)
- Implemented useChat hook for conversation state management
- Connected Chat component to backend with full CRUD operations
- Integrated ConversationSidebar with conversation fetching
- Added automatic conversation persistence after each message
- Integrated WebSocket for connection status
- Used existing better-auth for authentication
- All TypeScript strict-mode compliant (no `any` types)

Deliverables:
✅ Working chat interface at /chat route
✅ Conversations save to database via Ideas API
✅ Real-time WebSocket connection
✅ Clean TypeScript (no errors)
✅ Full conversation loading and persistence

See CHAT_INTEGRATION_SUMMARY.md for detailed documentation.
This commit is contained in:
@@ -1,82 +1,15 @@
|
||||
"use client";
|
||||
|
||||
import { useCallback, useEffect, useRef, useState, useMemo, forwardRef, useImperativeHandle } from "react";
|
||||
// NOTE: These hooks will need to be created or adapted (see issue #TBD)
|
||||
// import { useAuth } from "@/lib/hooks/useAuth";
|
||||
// import { useProjects } from "@/lib/hooks/useProjects";
|
||||
// import { useConversations } from "@/lib/hooks/useConversations";
|
||||
// import { useApi } from "@/lib/hooks/useApi";
|
||||
import { useCallback, useEffect, useRef, useImperativeHandle, forwardRef, useState } from "react";
|
||||
import { useAuth } from "@/lib/auth/auth-context";
|
||||
import { useChat } from "@/hooks/useChat";
|
||||
import { useWebSocket } from "@/hooks/useWebSocket";
|
||||
import { MessageList } from "./MessageList";
|
||||
import { ChatInput } from "./ChatInput";
|
||||
// NOTE: Import types need to be created (see issue #TBD)
|
||||
// import type { ConversationDetail } from "@/lib/hooks/useConversations";
|
||||
// import { handleSessionExpired, isSessionExpiring } from "@/lib/api";
|
||||
// import type { LLMModel, DefaultModel } from "@/lib/api";
|
||||
|
||||
// Placeholder types until the actual types are created
|
||||
type ConversationDetail = Record<string, unknown>;
|
||||
type LLMModel = { id: string; name: string; provider?: string };
|
||||
type DefaultModel = { model: string; provider?: string };
|
||||
|
||||
export interface Message {
|
||||
id: string;
|
||||
role: "user" | "assistant" | "system";
|
||||
content: string;
|
||||
thinking?: string; // Chain of thought reasoning from thinking models
|
||||
createdAt: string;
|
||||
model?: string; // LLM model used for this response
|
||||
provider?: string; // LLM provider (ollama, claude, etc.)
|
||||
// Token usage info
|
||||
promptTokens?: number;
|
||||
completionTokens?: number;
|
||||
totalTokens?: number;
|
||||
}
|
||||
|
||||
const API_URL = process.env.NEXT_PUBLIC_API_URL || "http://localhost:8000";
|
||||
|
||||
// Friendly waiting messages (shown after a few seconds of loading)
|
||||
const WAITING_QUIPS = [
|
||||
"The AI is warming up... give it a moment.",
|
||||
"Loading the neural pathways...",
|
||||
"Waking up the LLM. It's not a morning model.",
|
||||
"Brewing some thoughts...",
|
||||
"The AI is stretching its parameters...",
|
||||
"Summoning intelligence from the void...",
|
||||
"Teaching electrons to think...",
|
||||
"Consulting the silicon oracle...",
|
||||
"The hamsters are spinning up the GPU...",
|
||||
"Defragmenting the neural networks...",
|
||||
];
|
||||
|
||||
// Error messages for actual timeouts
|
||||
const TIMEOUT_QUIPS = [
|
||||
"The AI got lost in thought. Literally. Try again?",
|
||||
"That took too long, even by AI standards. Give it another go?",
|
||||
"The model wandered off. Let's try to find it again.",
|
||||
"Response timed out. The AI may have fallen asleep. Retry?",
|
||||
"The LLM took an unexpected vacation. One more attempt?",
|
||||
];
|
||||
|
||||
// Error messages for connection failures
|
||||
const CONNECTION_QUIPS = [
|
||||
"I seem to have misplaced the server. Check your connection?",
|
||||
"The server and I are having communication issues. It's not you, it's us.",
|
||||
"Connection lost. Either the internet is down, or the server is playing hide and seek.",
|
||||
"Unable to reach the mothership. The tubes appear to be clogged.",
|
||||
"The server isn't responding. Perhaps it's giving us the silent treatment.",
|
||||
];
|
||||
|
||||
/**
 * Picks a uniformly random entry from `quips`.
 *
 * Accepts any readonly string list (backward compatible with mutable arrays).
 * Returns `undefined` when the list is empty — callers already guard with
 * `?? null`, so the explicit return type simply surfaces that case under
 * `noUncheckedIndexedAccess`.
 */
const getRandomQuip = (quips: readonly string[]): string | undefined =>
  quips[Math.floor(Math.random() * quips.length)];
|
||||
|
||||
const WELCOME_MESSAGE: Message = {
|
||||
id: "welcome",
|
||||
role: "assistant",
|
||||
content: "Hello. I'm your AI assistant. How can I help you today?",
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
import type { Message } from "@/hooks/useChat";
|
||||
|
||||
export interface ChatRef {
|
||||
loadConversation: (conversation: ConversationDetail) => void;
|
||||
loadConversation: (conversationId: string) => Promise<void>;
|
||||
startNewConversation: (projectId?: string | null) => void;
|
||||
getCurrentConversationId: () => string | null;
|
||||
}
|
||||
@@ -96,68 +29,72 @@ interface ChatProps {
|
||||
onInitialProjectHandled?: () => void;
|
||||
}
|
||||
|
||||
const WAITING_QUIPS = [
|
||||
"The AI is warming up... give it a moment.",
|
||||
"Loading the neural pathways...",
|
||||
"Waking up the LLM. It's not a morning model.",
|
||||
"Brewing some thoughts...",
|
||||
"The AI is stretching its parameters...",
|
||||
"Summoning intelligence from the void...",
|
||||
"Teaching electrons to think...",
|
||||
"Consulting the silicon oracle...",
|
||||
"The hamsters are spinning up the GPU...",
|
||||
"Defragmenting the neural networks...",
|
||||
];
|
||||
|
||||
export const Chat = forwardRef<ChatRef, ChatProps>(function Chat({
|
||||
onConversationChange,
|
||||
onProjectChange: _onProjectChange,
|
||||
initialProjectId,
|
||||
onInitialProjectHandled,
|
||||
onInitialProjectHandled: _onInitialProjectHandled,
|
||||
}, ref) {
|
||||
void _onProjectChange; // Kept for potential future use
|
||||
void _onProjectChange;
|
||||
void _onInitialProjectHandled;
|
||||
|
||||
// NOTE: Replace with actual hooks once they're created (see issue #TBD)
|
||||
const accessToken = null;
|
||||
const isLoading = false;
|
||||
const authLoading = false;
|
||||
const authError = null;
|
||||
const projects: Array<{ id: string; name: string }> = [];
|
||||
// const { accessToken, isLoading: authLoading, error: authError } = useAuth();
|
||||
// const { projects } = useProjects();
|
||||
// const { updateConversationProject } = useConversations();
|
||||
// const api = useApi();
|
||||
const { user, isLoading: authLoading } = useAuth();
|
||||
|
||||
const [messages, setMessages] = useState<Message[]>([WELCOME_MESSAGE]);
|
||||
const [isChatLoading, setIsChatLoading] = useState(false);
|
||||
const [loadingQuip, setLoadingQuip] = useState<string | null>(null);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [conversationId, setConversationId] = useState<string | null>(null);
|
||||
const [conversationTitle, setConversationTitle] = useState<string | null>(null);
|
||||
const [conversationProjectId, setConversationProjectId] = useState<string | null>(null);
|
||||
const [pendingProjectId, setPendingProjectId] = useState<string | null>(null);
|
||||
const [showProjectMenu, setShowProjectMenu] = useState(false);
|
||||
const [showModelMenu, setShowModelMenu] = useState(false);
|
||||
const [showFooterProjectMenu, setShowFooterProjectMenu] = useState(false);
|
||||
const [showFooterModelMenu, setShowFooterModelMenu] = useState(false);
|
||||
const [isMovingProject, setIsMovingProject] = useState(false);
|
||||
const [availableModels, setAvailableModels] = useState<LLMModel[]>([]);
|
||||
const [defaultModel, setDefaultModel] = useState<DefaultModel | null>(null);
|
||||
const [selectedModel, setSelectedModel] = useState<LLMModel | null>(null);
|
||||
const [modelLoadError, setModelLoadError] = useState<string | null>(null);
|
||||
const [isLoadingModels, setIsLoadingModels] = useState(false);
|
||||
const [useReasoning, setUseReasoning] = useState(false); // Toggle for reasoning/thinking mode
|
||||
// Use the chat hook for state management
|
||||
const {
|
||||
messages,
|
||||
isLoading: isChatLoading,
|
||||
error,
|
||||
conversationId,
|
||||
conversationTitle,
|
||||
sendMessage,
|
||||
loadConversation,
|
||||
startNewConversation,
|
||||
clearError,
|
||||
} = useChat({
|
||||
model: "llama3.2",
|
||||
projectId: initialProjectId,
|
||||
onError: (err) => {
|
||||
console.error("Chat error:", err);
|
||||
},
|
||||
});
|
||||
|
||||
// Connect to WebSocket for real-time updates (when we have a user)
|
||||
const { isConnected: isWsConnected } = useWebSocket(
|
||||
user?.id ?? "", // Use user ID as workspace ID for now
|
||||
"", // Token not needed since we use cookies
|
||||
{
|
||||
// Future: Add handlers for chat-related events
|
||||
// onChatMessage: (msg) => { ... }
|
||||
}
|
||||
);
|
||||
|
||||
const messagesEndRef = useRef<HTMLDivElement>(null);
|
||||
const inputRef = useRef<HTMLTextAreaElement>(null);
|
||||
const projectMenuRef = useRef<HTMLDivElement>(null);
|
||||
const modelMenuRef = useRef<HTMLDivElement>(null);
|
||||
const footerProjectMenuRef = useRef<HTMLDivElement>(null);
|
||||
const footerModelMenuRef = useRef<HTMLDivElement>(null);
|
||||
// Track conversation ID in ref to prevent stale closure issues
|
||||
const conversationIdRef = useRef<string | null>(conversationId);
|
||||
const [loadingQuip, setLoadingQuip] = useState<string | null>(null);
|
||||
const quipTimerRef = useRef<NodeJS.Timeout | null>(null);
|
||||
const quipIntervalRef = useRef<NodeJS.Timeout | null>(null);
|
||||
|
||||
// Expose methods to parent via ref
|
||||
useImperativeHandle(ref, () => ({
|
||||
loadConversation: (conversation: ConversationDetail) => {
|
||||
// NOTE: Implement once ConversationDetail type is available (see issue #TBD)
|
||||
void conversation; // Placeholder until implemented
|
||||
loadConversation: async (conversationId: string) => {
|
||||
await loadConversation(conversationId);
|
||||
},
|
||||
startNewConversation: (projectId?: string | null) => {
|
||||
setConversationId(null);
|
||||
setConversationTitle(null);
|
||||
setConversationProjectId(null);
|
||||
setMessages([WELCOME_MESSAGE]);
|
||||
setError(null);
|
||||
setPendingProjectId(projectId || null);
|
||||
setShowProjectMenu(false);
|
||||
onConversationChange?.(null);
|
||||
startNewConversation(projectId);
|
||||
},
|
||||
getCurrentConversationId: () => conversationId,
|
||||
}));
|
||||
@@ -170,17 +107,20 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat({
|
||||
scrollToBottom();
|
||||
}, [messages, scrollToBottom]);
|
||||
|
||||
// Keep conversationIdRef in sync with state to prevent stale closures
|
||||
// Notify parent of conversation changes
|
||||
useEffect(() => {
|
||||
conversationIdRef.current = conversationId;
|
||||
}, [conversationId]);
|
||||
|
||||
// Handle auth errors
|
||||
useEffect(() => {
|
||||
if (authError === "RefreshAccessTokenError") {
|
||||
setError("Your session has expired. Please sign in again.");
|
||||
if (conversationId && conversationTitle) {
|
||||
onConversationChange?.(conversationId, {
|
||||
id: conversationId,
|
||||
title: conversationTitle,
|
||||
project_id: initialProjectId ?? null,
|
||||
created_at: new Date().toISOString(),
|
||||
updated_at: new Date().toISOString(),
|
||||
});
|
||||
} else {
|
||||
onConversationChange?.(null);
|
||||
}
|
||||
}, [authError]);
|
||||
}, [conversationId, conversationTitle, initialProjectId, onConversationChange]);
|
||||
|
||||
// Global keyboard shortcut: Ctrl+/ to focus input
|
||||
useEffect(() => {
|
||||
@@ -194,95 +134,43 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat({
|
||||
return () => document.removeEventListener("keydown", handleKeyDown);
|
||||
}, []);
|
||||
|
||||
// TODO: Implement click outside handlers for menus
|
||||
|
||||
const sendMessage = useCallback(
|
||||
async (content: string) => {
|
||||
if (!content.trim() || isChatLoading) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Add user message immediately
|
||||
const userMessage: Message = {
|
||||
id: `user-${Date.now()}`,
|
||||
role: "user",
|
||||
content: content.trim(),
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
setMessages((prev) => [...prev, userMessage]);
|
||||
setIsChatLoading(true);
|
||||
setLoadingQuip(null);
|
||||
setError(null);
|
||||
|
||||
// Show a witty loading message after 3 seconds
|
||||
const quipTimerId = setTimeout(() => {
|
||||
setLoadingQuip(getRandomQuip(WAITING_QUIPS) ?? null);
|
||||
// Show loading quips
|
||||
useEffect(() => {
|
||||
if (isChatLoading) {
|
||||
// Show first quip after 3 seconds
|
||||
quipTimerRef.current = setTimeout(() => {
|
||||
setLoadingQuip(WAITING_QUIPS[Math.floor(Math.random() * WAITING_QUIPS.length)] ?? null);
|
||||
}, 3000);
|
||||
|
||||
// Change quip every 5 seconds if still waiting
|
||||
const quipIntervalId = setInterval(() => {
|
||||
setLoadingQuip(getRandomQuip(WAITING_QUIPS) ?? null);
|
||||
// Change quip every 5 seconds
|
||||
quipIntervalRef.current = setInterval(() => {
|
||||
setLoadingQuip(WAITING_QUIPS[Math.floor(Math.random() * WAITING_QUIPS.length)] ?? null);
|
||||
}, 5000);
|
||||
|
||||
try {
|
||||
// NOTE: Implement actual API call to /api/brain/query (see issue #TBD)
|
||||
const requestBody: {
|
||||
message: string;
|
||||
conversation_id: string | null;
|
||||
project_id?: string;
|
||||
provider_instance_id?: string;
|
||||
provider?: string;
|
||||
model?: string;
|
||||
use_reasoning?: boolean;
|
||||
} = {
|
||||
message: content.trim(),
|
||||
conversation_id: conversationId,
|
||||
};
|
||||
|
||||
// Placeholder response for now
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
|
||||
const assistantMessage: Message = {
|
||||
id: `assistant-${Date.now()}`,
|
||||
role: "assistant",
|
||||
content: "This is a placeholder response. The chat API integration is not yet complete.",
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
setMessages((prev) => [...prev, assistantMessage]);
|
||||
|
||||
// Clear quip timers on success
|
||||
clearTimeout(quipTimerId);
|
||||
clearInterval(quipIntervalId);
|
||||
setLoadingQuip(null);
|
||||
} catch (err) {
|
||||
// Clear quip timers on error
|
||||
clearTimeout(quipTimerId);
|
||||
clearInterval(quipIntervalId);
|
||||
setLoadingQuip(null);
|
||||
|
||||
// Error is already captured in errorMsg below
|
||||
const errorMsg = err instanceof Error ? err.message : "Failed to send message";
|
||||
setError(errorMsg);
|
||||
|
||||
const errorMessage: Message = {
|
||||
id: `error-${Date.now()}`,
|
||||
role: "assistant",
|
||||
content: errorMsg,
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
setMessages((prev) => [...prev, errorMessage]);
|
||||
} finally {
|
||||
setIsChatLoading(false);
|
||||
} else {
|
||||
// Clear timers when loading stops
|
||||
if (quipTimerRef.current) {
|
||||
clearTimeout(quipTimerRef.current);
|
||||
quipTimerRef.current = null;
|
||||
}
|
||||
},
|
||||
[conversationId, isChatLoading]
|
||||
);
|
||||
if (quipIntervalRef.current) {
|
||||
clearInterval(quipIntervalRef.current);
|
||||
quipIntervalRef.current = null;
|
||||
}
|
||||
setLoadingQuip(null);
|
||||
}
|
||||
|
||||
const dismissError = useCallback(() => {
|
||||
setError(null);
|
||||
}, []);
|
||||
return () => {
|
||||
if (quipTimerRef.current) clearTimeout(quipTimerRef.current);
|
||||
if (quipIntervalRef.current) clearInterval(quipIntervalRef.current);
|
||||
};
|
||||
}, [isChatLoading]);
|
||||
|
||||
const handleSendMessage = useCallback(
|
||||
async (content: string) => {
|
||||
await sendMessage(content);
|
||||
},
|
||||
[sendMessage]
|
||||
);
|
||||
|
||||
// Show loading state while auth is loading
|
||||
if (authLoading) {
|
||||
@@ -298,10 +186,26 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat({
|
||||
|
||||
return (
|
||||
<div className="flex flex-1 flex-col" style={{ backgroundColor: "rgb(var(--color-background))" }}>
|
||||
{/* Connection Status Indicator */}
|
||||
{user && !isWsConnected && (
|
||||
<div className="border-b px-4 py-2" style={{ backgroundColor: "rgb(var(--surface-0))", borderColor: "rgb(var(--border-default))" }}>
|
||||
<div className="flex items-center gap-2">
|
||||
<div className="h-2 w-2 rounded-full" style={{ backgroundColor: "rgb(var(--semantic-warning))" }} />
|
||||
<span className="text-sm" style={{ color: "rgb(var(--text-secondary))" }}>
|
||||
Reconnecting to server...
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Messages Area */}
|
||||
<div className="flex-1 overflow-y-auto">
|
||||
<div className="mx-auto max-w-4xl px-4 py-6 lg:px-8">
|
||||
<MessageList messages={messages} isLoading={isChatLoading} loadingQuip={loadingQuip} />
|
||||
<MessageList
|
||||
messages={messages as Array<Message & { thinking?: string }>}
|
||||
isLoading={isChatLoading}
|
||||
loadingQuip={loadingQuip}
|
||||
/>
|
||||
<div ref={messagesEndRef} />
|
||||
</div>
|
||||
</div>
|
||||
@@ -338,7 +242,7 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat({
|
||||
</span>
|
||||
</div>
|
||||
<button
|
||||
onClick={dismissError}
|
||||
onClick={clearError}
|
||||
className="rounded p-1 transition-colors hover:bg-black/5"
|
||||
aria-label="Dismiss error"
|
||||
>
|
||||
@@ -367,8 +271,8 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat({
|
||||
>
|
||||
<div className="mx-auto max-w-4xl px-4 py-4 lg:px-8">
|
||||
<ChatInput
|
||||
onSend={sendMessage}
|
||||
disabled={isChatLoading || !accessToken}
|
||||
onSend={handleSendMessage}
|
||||
disabled={isChatLoading || !user}
|
||||
inputRef={inputRef}
|
||||
/>
|
||||
</div>
|
||||
|
||||
Reference in New Issue
Block a user