feat: add chat components from jarvis frontend

- Migrated Chat.tsx with message handling and UI structure
- Migrated ChatInput.tsx with character limits and keyboard shortcuts
- Migrated MessageList.tsx with thinking/reasoning display
- Migrated ConversationSidebar.tsx (simplified placeholder)
- Migrated BackendStatusBanner.tsx (simplified placeholder)
- Created components/chat/index.ts barrel export
- Created app/chat/page.tsx placeholder route

These components are adapted from jarvis-fe but not yet fully functional:
- API calls are placeholders (need to wire up /api/brain/query)
- Auth hooks stubbed (need useAuth implementation)
- Project/conversation hooks stubbed (need implementation)
- Imports changed from @jarvis/* to @mosaic/*

Next steps:
- Implement missing hooks (useAuth, useProjects, useConversations, useApi)
- Wire up backend API endpoints
- Add proper TypeScript types
- Implement full conversation management
This commit is contained in:
Jason Woltje
2026-01-29 21:47:00 -06:00
parent aa267b56d8
commit d54714ea06
8 changed files with 1463 additions and 0 deletions

View File

@@ -0,0 +1,378 @@
"use client";
import { useCallback, useEffect, useRef, useState, useMemo, forwardRef, useImperativeHandle } from "react";
// TODO: These hooks will need to be created or adapted
// import { useAuth } from "@/lib/hooks/useAuth";
// import { useProjects } from "@/lib/hooks/useProjects";
// import { useConversations } from "@/lib/hooks/useConversations";
// import { useApi } from "@/lib/hooks/useApi";
import { MessageList } from "./MessageList";
import { ChatInput } from "./ChatInput";
// TODO: Import types need to be created
// import type { ConversationDetail } from "@/lib/hooks/useConversations";
// import { handleSessionExpired, isSessionExpiring } from "@/lib/api";
// import type { LLMModel, DefaultModel } from "@/lib/api";
// Placeholder types until the actual types are created
type ConversationDetail = any;
type LLMModel = any;
type DefaultModel = any;
export interface Message {
id: string;
role: "user" | "assistant" | "system";
content: string;
thinking?: string; // Chain of thought reasoning from thinking models
createdAt: string;
model?: string; // LLM model used for this response
provider?: string; // LLM provider (ollama, claude, etc.)
// Token usage info
promptTokens?: number;
completionTokens?: number;
totalTokens?: number;
}
const API_URL = process.env.NEXT_PUBLIC_API_URL || "http://localhost:8000";
// Friendly waiting messages (shown after a few seconds of loading)
const WAITING_QUIPS = [
"The AI is warming up... give it a moment.",
"Loading the neural pathways...",
"Waking up the LLM. It's not a morning model.",
"Brewing some thoughts...",
"The AI is stretching its parameters...",
"Summoning intelligence from the void...",
"Teaching electrons to think...",
"Consulting the silicon oracle...",
"The hamsters are spinning up the GPU...",
"Defragmenting the neural networks...",
];
// Error messages for actual timeouts
const TIMEOUT_QUIPS = [
"The AI got lost in thought. Literally. Try again?",
"That took too long, even by AI standards. Give it another go?",
"The model wandered off. Let's try to find it again.",
"Response timed out. The AI may have fallen asleep. Retry?",
"The LLM took an unexpected vacation. One more attempt?",
];
// Error messages for connection failures
const CONNECTION_QUIPS = [
"I seem to have misplaced the server. Check your connection?",
"The server and I are having communication issues. It's not you, it's us.",
"Connection lost. Either the internet is down, or the server is playing hide and seek.",
"Unable to reach the mothership. The tubes appear to be clogged.",
"The server isn't responding. Perhaps it's giving us the silent treatment.",
];
const getRandomQuip = (quips: string[]) => quips[Math.floor(Math.random() * quips.length)];
const WELCOME_MESSAGE: Message = {
id: "welcome",
role: "assistant",
content: "Hello. I'm your AI assistant. How can I help you today?",
createdAt: new Date().toISOString(),
};
export interface ChatRef {
loadConversation: (conversation: ConversationDetail) => void;
startNewConversation: (projectId?: string | null) => void;
getCurrentConversationId: () => string | null;
}
export interface NewConversationData {
id: string;
title: string | null;
project_id: string | null;
created_at: string;
updated_at: string;
}
interface ChatProps {
onConversationChange?: (conversationId: string | null, conversationData?: NewConversationData) => void;
onProjectChange?: () => void;
initialProjectId?: string | null;
onInitialProjectHandled?: () => void;
}
export const Chat = forwardRef<ChatRef, ChatProps>(function Chat({
onConversationChange,
onProjectChange: _onProjectChange,
initialProjectId,
onInitialProjectHandled,
}, ref) {
void _onProjectChange; // Kept for potential future use
// TODO: Replace with actual hooks once they're created
const accessToken = null;
const isLoading = false;
const authLoading = false;
const authError = null;
const projects: any[] = [];
// const { accessToken, isLoading: authLoading, error: authError } = useAuth();
// const { projects } = useProjects();
// const { updateConversationProject } = useConversations();
// const api = useApi();
const [messages, setMessages] = useState<Message[]>([WELCOME_MESSAGE]);
const [isChatLoading, setIsChatLoading] = useState(false);
const [loadingQuip, setLoadingQuip] = useState<string | null>(null);
const [error, setError] = useState<string | null>(null);
const [conversationId, setConversationId] = useState<string | null>(null);
const [conversationTitle, setConversationTitle] = useState<string | null>(null);
const [conversationProjectId, setConversationProjectId] = useState<string | null>(null);
const [pendingProjectId, setPendingProjectId] = useState<string | null>(null);
const [showProjectMenu, setShowProjectMenu] = useState(false);
const [showModelMenu, setShowModelMenu] = useState(false);
const [showFooterProjectMenu, setShowFooterProjectMenu] = useState(false);
const [showFooterModelMenu, setShowFooterModelMenu] = useState(false);
const [isMovingProject, setIsMovingProject] = useState(false);
const [availableModels, setAvailableModels] = useState<LLMModel[]>([]);
const [defaultModel, setDefaultModel] = useState<DefaultModel | null>(null);
const [selectedModel, setSelectedModel] = useState<LLMModel | null>(null);
const [modelLoadError, setModelLoadError] = useState<string | null>(null);
const [isLoadingModels, setIsLoadingModels] = useState(false);
const [useReasoning, setUseReasoning] = useState(false); // Toggle for reasoning/thinking mode
const messagesEndRef = useRef<HTMLDivElement>(null);
const inputRef = useRef<HTMLTextAreaElement>(null);
const projectMenuRef = useRef<HTMLDivElement>(null);
const modelMenuRef = useRef<HTMLDivElement>(null);
const footerProjectMenuRef = useRef<HTMLDivElement>(null);
const footerModelMenuRef = useRef<HTMLDivElement>(null);
// Track conversation ID in ref to prevent stale closure issues
const conversationIdRef = useRef<string | null>(conversationId);
// Expose methods to parent via ref
useImperativeHandle(ref, () => ({
loadConversation: (conversation: ConversationDetail) => {
// TODO: Implement once ConversationDetail type is available
console.log("loadConversation called with:", conversation);
},
startNewConversation: (projectId?: string | null) => {
setConversationId(null);
setConversationTitle(null);
setConversationProjectId(null);
setMessages([WELCOME_MESSAGE]);
setError(null);
setPendingProjectId(projectId || null);
setShowProjectMenu(false);
onConversationChange?.(null);
},
getCurrentConversationId: () => conversationId,
}));
const scrollToBottom = useCallback(() => {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
}, []);
useEffect(() => {
scrollToBottom();
}, [messages, scrollToBottom]);
// Keep conversationIdRef in sync with state to prevent stale closures
useEffect(() => {
conversationIdRef.current = conversationId;
}, [conversationId]);
// Handle auth errors
useEffect(() => {
if (authError === "RefreshAccessTokenError") {
setError("Your session has expired. Please sign in again.");
}
}, [authError]);
// Global keyboard shortcut: Ctrl+/ to focus input
useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
if ((e.ctrlKey || e.metaKey) && e.key === "/") {
e.preventDefault();
inputRef.current?.focus();
}
};
document.addEventListener("keydown", handleKeyDown);
return () => document.removeEventListener("keydown", handleKeyDown);
}, []);
// TODO: Implement click outside handlers for menus
const sendMessage = useCallback(
async (content: string) => {
if (!content.trim() || isChatLoading) {
return;
}
// Add user message immediately
const userMessage: Message = {
id: `user-${Date.now()}`,
role: "user",
content: content.trim(),
createdAt: new Date().toISOString(),
};
setMessages((prev) => [...prev, userMessage]);
setIsChatLoading(true);
setLoadingQuip(null);
setError(null);
// Show a witty loading message after 3 seconds
const quipTimerId = setTimeout(() => {
setLoadingQuip(getRandomQuip(WAITING_QUIPS));
}, 3000);
// Change quip every 5 seconds if still waiting
const quipIntervalId = setInterval(() => {
setLoadingQuip(getRandomQuip(WAITING_QUIPS));
}, 5000);
try {
// TODO: Implement actual API call to /api/brain/query
const requestBody: {
message: string;
conversation_id: string | null;
project_id?: string;
provider_instance_id?: string;
provider?: string;
model?: string;
use_reasoning?: boolean;
} = {
message: content.trim(),
conversation_id: conversationId,
};
// Placeholder response for now
await new Promise(resolve => setTimeout(resolve, 1000));
const assistantMessage: Message = {
id: `assistant-${Date.now()}`,
role: "assistant",
content: "This is a placeholder response. The chat API integration is not yet complete.",
createdAt: new Date().toISOString(),
};
setMessages((prev) => [...prev, assistantMessage]);
// Clear quip timers on success
clearTimeout(quipTimerId);
clearInterval(quipIntervalId);
setLoadingQuip(null);
} catch (err) {
// Clear quip timers on error
clearTimeout(quipTimerId);
clearInterval(quipIntervalId);
setLoadingQuip(null);
console.error("Failed to send message:", err);
const errorMsg = err instanceof Error ? err.message : "Failed to send message";
setError(errorMsg);
const errorMessage: Message = {
id: `error-${Date.now()}`,
role: "assistant",
content: errorMsg,
createdAt: new Date().toISOString(),
};
setMessages((prev) => [...prev, errorMessage]);
} finally {
setIsChatLoading(false);
}
},
[conversationId, isChatLoading]
);
const dismissError = useCallback(() => {
setError(null);
}, []);
// Show loading state while auth is loading
if (authLoading) {
return (
<div className="flex flex-1 items-center justify-center" style={{ backgroundColor: "rgb(var(--color-background))" }}>
<div className="flex items-center gap-3">
<div className="h-5 w-5 animate-spin rounded-full border-2 border-t-transparent" style={{ borderColor: "rgb(var(--accent-primary))", borderTopColor: "transparent" }} />
<span style={{ color: "rgb(var(--text-secondary))" }}>Loading...</span>
</div>
</div>
);
}
return (
<div className="flex flex-1 flex-col" style={{ backgroundColor: "rgb(var(--color-background))" }}>
{/* Messages Area */}
<div className="flex-1 overflow-y-auto">
<div className="mx-auto max-w-4xl px-4 py-6 lg:px-8">
<MessageList messages={messages} isLoading={isChatLoading} loadingQuip={loadingQuip} />
<div ref={messagesEndRef} />
</div>
</div>
{/* Error Alert */}
{error && (
<div className="mx-4 mb-2 lg:mx-auto lg:max-w-4xl lg:px-8">
<div
className="flex items-center justify-between rounded-lg border px-4 py-3"
style={{
backgroundColor: "rgb(var(--semantic-error-light))",
borderColor: "rgb(var(--semantic-error) / 0.3)",
}}
role="alert"
>
<div className="flex items-center gap-3">
<svg
className="h-4 w-4 flex-shrink-0"
style={{ color: "rgb(var(--semantic-error))" }}
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
strokeWidth={2}
>
<circle cx="12" cy="12" r="10" />
<line x1="12" y1="8" x2="12" y2="12" />
<line x1="12" y1="16" x2="12.01" y2="16" />
</svg>
<span
className="text-sm"
style={{ color: "rgb(var(--semantic-error-dark))" }}
>
{error}
</span>
</div>
<button
onClick={dismissError}
className="rounded p-1 transition-colors hover:bg-black/5"
aria-label="Dismiss error"
>
<svg
className="h-4 w-4"
style={{ color: "rgb(var(--semantic-error))" }}
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
strokeWidth={2}
>
<path d="M18 6 6 18M6 6l12 12" />
</svg>
</button>
</div>
</div>
)}
{/* Input Area */}
<div
className="sticky bottom-0 border-t"
style={{
backgroundColor: "rgb(var(--surface-0))",
borderColor: "rgb(var(--border-default))",
}}
>
<div className="mx-auto max-w-4xl px-4 py-4 lg:px-8">
<ChatInput
onSend={sendMessage}
disabled={isChatLoading || !accessToken}
inputRef={inputRef}
/>
</div>
</div>
</div>
);
});