feat(web): port chat UI — model selector, keybindings, thinking display, styled header
All checks were successful
ci/woodpecker/push/ci Pipeline was successful
ci/woodpecker/pr/ci Pipeline was successful

This commit is contained in:
2026-03-19 20:42:48 -05:00
parent 25f880416a
commit 68e056ac91
11 changed files with 1848 additions and 260 deletions

View File

@@ -1,93 +1,172 @@
'use client';
import { useCallback, useEffect, useRef, useState } from 'react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { api } from '@/lib/api';
import { destroySocket, getSocket } from '@/lib/socket';
import type { Conversation, Message } from '@/lib/types';
import type { Conversation, Message, ModelInfo, ProviderInfo } from '@/lib/types';
import { ConversationList } from '@/components/chat/conversation-list';
import { MessageBubble } from '@/components/chat/message-bubble';
import { ChatInput } from '@/components/chat/chat-input';
import { StreamingMessage } from '@/components/chat/streaming-message';
import { AppHeader } from '@/components/layout/app-header';
/**
 * Build one zero-cost entry for the static fallback model list.
 * These entries are placeholders shown until `/api/providers` responds,
 * so every cost field is zero and only the display-relevant capability
 * fields vary per model.
 */
function buildFallbackModel(
  id: string,
  provider: ModelInfo['provider'],
  overrides: Partial<ModelInfo> = {},
): ModelInfo {
  return {
    id,
    provider,
    name: id,
    reasoning: false,
    contextWindow: 128_000,
    maxTokens: 8_192,
    inputTypes: ['text'],
    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
    ...overrides,
  };
}

// Hardcoded defaults used while provider discovery is in flight or has failed.
const FALLBACK_MODELS: ModelInfo[] = [
  buildFallbackModel('claude-3-5-sonnet', 'anthropic', {
    name: 'claude-3.5-sonnet',
    reasoning: true,
    contextWindow: 200_000,
  }),
  buildFallbackModel('gpt-4.1', 'openai'),
  buildFallbackModel('gemini-2.0-flash', 'google', {
    contextWindow: 1_000_000,
    inputTypes: ['text', 'image'],
  }),
];
export default function ChatPage(): React.ReactElement {
const [conversations, setConversations] = useState<Conversation[]>([]);
const [activeId, setActiveId] = useState<string | null>(null);
const [messages, setMessages] = useState<Message[]>([]);
const [sidebarOpen, setSidebarOpen] = useState(true);
const [models, setModels] = useState<ModelInfo[]>(FALLBACK_MODELS);
const [selectedModelId, setSelectedModelId] = useState(FALLBACK_MODELS[0]?.id ?? '');
const [streamingText, setStreamingText] = useState('');
const [streamingThinking, setStreamingThinking] = useState('');
const [isStreaming, setIsStreaming] = useState(false);
const messagesEndRef = useRef<HTMLDivElement>(null);
// Track the active conversation ID in a ref so socket event handlers always
// see the current value without needing to be re-registered.
const activeIdRef = useRef<string | null>(null);
const streamingTextRef = useRef('');
const streamingThinkingRef = useRef('');
activeIdRef.current = activeId;
// Accumulate streamed text in a ref so agent:end can read the full content
// without stale-closure issues.
const streamingTextRef = useRef('');
const selectedModel = useMemo(
() => models.find((model) => model.id === selectedModelId) ?? models[0] ?? null,
[models, selectedModelId],
);
const selectedModelRef = useRef<ModelInfo | null>(selectedModel);
selectedModelRef.current = selectedModel;
// Load conversations on mount
useEffect(() => {
api<Conversation[]>('/api/conversations')
.then(setConversations)
.catch(() => {});
}, []);
// Load messages when active conversation changes
useEffect(() => {
api<ProviderInfo[]>('/api/providers')
.then((providers) =>
providers.filter((provider) => provider.available).flatMap((provider) => provider.models),
)
.then((availableModels) => {
if (availableModels.length === 0) return;
setModels(availableModels);
setSelectedModelId((current) =>
availableModels.some((model) => model.id === current) ? current : availableModels[0]!.id,
);
})
.catch(() => {
setModels(FALLBACK_MODELS);
});
}, []);
useEffect(() => {
if (!activeId) {
setMessages([]);
return;
}
// Clear streaming state when switching conversations
setIsStreaming(false);
setStreamingText('');
setStreamingThinking('');
streamingTextRef.current = '';
streamingThinkingRef.current = '';
api<Message[]>(`/api/conversations/${activeId}/messages`)
.then(setMessages)
.then((fetchedMessages) => setMessages(fetchedMessages.map(normalizeMessage)))
.catch(() => {});
}, [activeId]);
// Auto-scroll to bottom
useEffect(() => {
messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
}, [messages, streamingText]);
}, [messages, streamingText, streamingThinking]);
// Socket.io setup — connect once for the page lifetime
useEffect(() => {
const socket = getSocket();
function onAgentStart(data: { conversationId: string }): void {
// Only update state if the event belongs to the currently viewed conversation
if (activeIdRef.current !== data.conversationId) return;
setIsStreaming(true);
setStreamingText('');
setStreamingThinking('');
streamingTextRef.current = '';
streamingThinkingRef.current = '';
}
function onAgentText(data: { conversationId: string; text: string }): void {
function onAgentText(data: { conversationId: string; text?: string; thinking?: string }): void {
if (activeIdRef.current !== data.conversationId) return;
streamingTextRef.current += data.text;
setStreamingText((prev) => prev + data.text);
if (data.text) {
streamingTextRef.current += data.text;
setStreamingText((prev) => prev + data.text);
}
if (data.thinking) {
streamingThinkingRef.current += data.thinking;
setStreamingThinking((prev) => prev + data.thinking);
}
}
function onAgentEnd(data: { conversationId: string }): void {
function onAgentEnd(data: {
conversationId: string;
thinking?: string;
model?: string;
provider?: string;
promptTokens?: number;
completionTokens?: number;
totalTokens?: number;
}): void {
if (activeIdRef.current !== data.conversationId) return;
const finalText = streamingTextRef.current;
const finalThinking = data.thinking ?? streamingThinkingRef.current;
setIsStreaming(false);
setStreamingText('');
setStreamingThinking('');
streamingTextRef.current = '';
// Append the completed assistant message to the local message list.
// The Pi agent session is in-memory so the assistant response is not
// persisted to the DB — we build the local UI state instead.
streamingThinkingRef.current = '';
if (finalText) {
setMessages((prev) => [
...prev,
{
id: `assistant-${Date.now()}`,
conversationId: data.conversationId,
role: 'assistant' as const,
role: 'assistant',
content: finalText,
thinking: finalThinking || undefined,
model: data.model ?? selectedModelRef.current?.name,
provider: data.provider ?? selectedModelRef.current?.provider,
promptTokens: data.promptTokens,
completionTokens: data.completionTokens,
totalTokens: data.totalTokens,
createdAt: new Date().toISOString(),
},
]);
@@ -97,13 +176,15 @@ export default function ChatPage(): React.ReactElement {
function onError(data: { error: string; conversationId?: string }): void {
setIsStreaming(false);
setStreamingText('');
setStreamingThinking('');
streamingTextRef.current = '';
streamingThinkingRef.current = '';
setMessages((prev) => [
...prev,
{
id: `error-${Date.now()}`,
conversationId: data.conversationId ?? '',
role: 'system' as const,
role: 'system',
content: `Error: ${data.error}`,
createdAt: new Date().toISOString(),
},
@@ -115,7 +196,6 @@ export default function ChatPage(): React.ReactElement {
socket.on('agent:end', onAgentEnd);
socket.on('error', onError);
// Connect if not already connected
if (!socket.connected) {
socket.connect();
}
@@ -125,19 +205,17 @@ export default function ChatPage(): React.ReactElement {
socket.off('agent:text', onAgentText);
socket.off('agent:end', onAgentEnd);
socket.off('error', onError);
// Fully tear down the socket when the chat page unmounts so we get a
// fresh authenticated connection next time the page is visited.
destroySocket();
};
}, []);
const handleNewConversation = useCallback(async () => {
const conv = await api<Conversation>('/api/conversations', {
const conversation = await api<Conversation>('/api/conversations', {
method: 'POST',
body: { title: 'New conversation' },
});
setConversations((prev) => [conv, ...prev]);
setActiveId(conv.id);
setConversations((prev) => [conversation, ...prev]);
setActiveId(conversation.id);
setMessages([]);
}, []);
@@ -146,20 +224,22 @@ export default function ChatPage(): React.ReactElement {
method: 'PATCH',
body: { title },
});
setConversations((prev) => prev.map((c) => (c.id === id ? updated : c)));
setConversations((prev) =>
prev.map((conversation) => (conversation.id === id ? updated : conversation)),
);
}, []);
const handleDelete = useCallback(
async (id: string) => {
try {
await api<void>(`/api/conversations/${id}`, { method: 'DELETE' });
setConversations((prev) => prev.filter((c) => c.id !== id));
setConversations((prev) => prev.filter((conversation) => conversation.id !== id));
if (activeId === id) {
setActiveId(null);
setMessages([]);
}
} catch (err) {
console.error('[ChatPage] Failed to delete conversation:', err);
} catch (error) {
console.error('[ChatPage] Failed to delete conversation:', error);
}
},
[activeId],
@@ -171,8 +251,9 @@ export default function ChatPage(): React.ReactElement {
method: 'PATCH',
body: { archived },
});
setConversations((prev) => prev.map((c) => (c.id === id ? updated : c)));
// If archiving the active conversation, deselect it
setConversations((prev) =>
prev.map((conversation) => (conversation.id === id ? updated : conversation)),
);
if (archived && activeId === id) {
setActiveId(null);
setMessages([]);
@@ -182,75 +263,114 @@ export default function ChatPage(): React.ReactElement {
);
const handleSend = useCallback(
async (content: string) => {
let convId = activeId;
async (content: string, options?: { modelId?: string }) => {
let conversationId = activeId;
// Auto-create conversation if none selected
if (!convId) {
if (!conversationId) {
const autoTitle = content.slice(0, 60);
const conv = await api<Conversation>('/api/conversations', {
const conversation = await api<Conversation>('/api/conversations', {
method: 'POST',
body: { title: autoTitle },
});
setConversations((prev) => [conv, ...prev]);
setActiveId(conv.id);
convId = conv.id;
setConversations((prev) => [conversation, ...prev]);
setActiveId(conversation.id);
conversationId = conversation.id;
} else {
// Auto-title: if the active conversation still has the default "New
// conversation" title and this is the first message, update the title
// from the message content.
const activeConv = conversations.find((c) => c.id === convId);
if (activeConv?.title === 'New conversation' && messages.length === 0) {
const activeConversation = conversations.find(
(conversation) => conversation.id === conversationId,
);
if (activeConversation?.title === 'New conversation' && messages.length === 0) {
const autoTitle = content.slice(0, 60);
api<Conversation>(`/api/conversations/${convId}`, {
api<Conversation>(`/api/conversations/${conversationId}`, {
method: 'PATCH',
body: { title: autoTitle },
})
.then((updated) => {
setConversations((prev) => prev.map((c) => (c.id === convId ? updated : c)));
setConversations((prev) =>
prev.map((conversation) =>
conversation.id === conversationId ? updated : conversation,
),
);
})
.catch(() => {});
}
}
// Optimistic user message in local UI state
setMessages((prev) => [
...prev,
{
id: `user-${Date.now()}`,
conversationId: convId,
role: 'user' as const,
conversationId,
role: 'user',
content,
createdAt: new Date().toISOString(),
},
]);
// Persist the user message to the DB so conversation history is
// available when the page is reloaded or a new session starts.
api<Message>(`/api/conversations/${convId}/messages`, {
api<Message>(`/api/conversations/${conversationId}/messages`, {
method: 'POST',
body: { role: 'user', content },
}).catch(() => {
// Non-fatal: the agent can still process the message even if
// REST persistence fails.
});
}).catch(() => {});
// Send to WebSocket — gateway creates/resumes the agent session and
// streams the response back via agent:start / agent:text / agent:end.
const socket = getSocket();
if (!socket.connected) {
socket.connect();
}
socket.emit('message', { conversationId: convId, content });
socket.emit('message', {
conversationId,
content,
model: options?.modelId ?? selectedModelRef.current?.id,
});
},
[activeId, conversations, messages],
[activeId, conversations, messages.length],
);
// Stop the in-flight streaming response: ask the gateway to cancel, keep any
// partially streamed text as a regular message, reset all streaming state,
// and tear down the socket so the next send gets a fresh connection.
const handleStop = useCallback(() => {
const socket = getSocket();
// Tell the server to cancel the agent run for the active conversation.
// Read from activeIdRef (not activeId) so the value is current even though
// this callback has an empty dependency list.
socket.emit('cancel', { conversationId: activeIdRef.current });
// Snapshot whatever streamed so far — refs, not state, to avoid stale closures.
const partialText = streamingTextRef.current.trim();
const partialThinking = streamingThinkingRef.current.trim();
if (partialText) {
// Preserve the partial response as a local assistant message so the user
// does not lose text that already arrived before the cancel.
setMessages((prev) => [
...prev,
{
id: `assistant-partial-${Date.now()}`,
conversationId: activeIdRef.current ?? '',
role: 'assistant',
content: partialText,
thinking: partialThinking || undefined,
// No server-side completion metadata exists for a cancelled run, so fall
// back to the locally selected model for attribution.
model: selectedModelRef.current?.name,
provider: selectedModelRef.current?.provider,
createdAt: new Date().toISOString(),
},
]);
}
// Clear streaming state in both React state and the mirrored refs.
setIsStreaming(false);
setStreamingText('');
setStreamingThinking('');
streamingTextRef.current = '';
streamingThinkingRef.current = '';
// Fully destroy the socket; getSocket() will create a fresh authenticated
// connection on the next send.
destroySocket();
}, []);
// Return the content of the most recent user-authored message, or null when
// none exists. The chat input uses this to prefill the box when the user
// triggers the edit-last-message keybinding.
const handleEditLastMessage = useCallback((): string | null => {
for (let index = messages.length - 1; index >= 0; index -= 1) {
const candidate = messages[index];
if (candidate?.role === 'user') {
return candidate.content;
}
}
return null;
}, [messages]);
const activeConversation =
conversations.find((conversation) => conversation.id === activeId) ?? null;
return (
<div className="-m-6 flex h-[calc(100vh-3.5rem)]">
<div className="-m-6 flex h-[100dvh] overflow-hidden">
<ConversationList
conversations={conversations}
activeId={activeId}
isOpen={sidebarOpen}
onClose={() => setSidebarOpen(false)}
onSelect={setActiveId}
onNew={handleNewConversation}
onRename={handleRename}
@@ -258,36 +378,90 @@ export default function ChatPage(): React.ReactElement {
onArchive={handleArchive}
/>
<div className="flex flex-1 flex-col">
{activeId ? (
<>
<div className="flex-1 space-y-4 overflow-y-auto p-6">
{messages.map((msg) => (
<MessageBubble key={msg.id} message={msg} />
))}
{isStreaming && <StreamingMessage text={streamingText} />}
<div ref={messagesEndRef} />
</div>
<ChatInput onSend={handleSend} disabled={isStreaming} />
</>
) : (
<div className="flex flex-1 items-center justify-center">
<div className="text-center">
<h2 className="text-lg font-medium text-text-secondary">Welcome to Mosaic Chat</h2>
<p className="mt-1 text-sm text-text-muted">
Select a conversation or start a new one
</p>
<button
type="button"
onClick={handleNewConversation}
className="mt-4 rounded-lg bg-blue-600 px-4 py-2 text-sm font-medium text-white transition-colors hover:bg-blue-700"
>
Start new conversation
</button>
</div>
<div
className="relative flex min-w-0 flex-1 flex-col overflow-hidden"
style={{
background:
'radial-gradient(circle at top, color-mix(in srgb, var(--color-ms-blue-500) 14%, transparent), transparent 35%), var(--color-bg)',
}}
>
<AppHeader
conversationTitle={activeConversation?.title}
isSidebarOpen={sidebarOpen}
onToggleSidebar={() => setSidebarOpen((prev) => !prev)}
/>
<div className="flex-1 overflow-y-auto px-4 py-6 md:px-6">
<div className="mx-auto flex w-full max-w-4xl flex-col gap-4">
{messages.length === 0 && !isStreaming ? (
<div className="flex min-h-full flex-1 items-center justify-center py-16">
<div className="max-w-xl text-center">
<div className="mb-4 text-xs uppercase tracking-[0.3em] text-[var(--color-muted)]">
Mosaic Chat
</div>
<h2 className="text-3xl font-semibold text-[var(--color-text)]">
Start a new session with a better chat interface.
</h2>
<p className="mt-3 text-sm leading-7 text-[var(--color-text-2)]">
Pick a model, send a prompt, and the response area will keep reasoning,
metadata, and streaming status visible without leaving the page.
</p>
</div>
</div>
) : null}
{messages.map((message) => (
<MessageBubble key={message.id} message={message} />
))}
{isStreaming ? (
<StreamingMessage
text={streamingText}
thinking={streamingThinking}
modelName={selectedModel?.name ?? null}
/>
) : null}
<div ref={messagesEndRef} />
</div>
)}
</div>
<div className="sticky bottom-0">
<div className="mx-auto w-full max-w-4xl">
<ChatInput
onSend={handleSend}
onStop={handleStop}
isStreaming={isStreaming}
models={models}
selectedModelId={selectedModelId}
onModelChange={setSelectedModelId}
onRequestEditLastMessage={handleEditLastMessage}
/>
</div>
</div>
</div>
</div>
);
}
/**
 * Lift legacy metadata fields (thinking, model, provider, token counts) up to
 * first-class Message properties so the UI can read them uniformly.
 * Direct properties always win; metadata values only fill in the gaps, and
 * only when they have the expected runtime type.
 */
function normalizeMessage(message: Message): Message {
  const metadata = message.metadata ?? {};
  // Narrowing helpers: yield the value only when it has the expected type.
  const asString = (value: unknown): string | undefined =>
    typeof value === 'string' ? value : undefined;
  const asNumber = (value: unknown): number | undefined =>
    typeof value === 'number' ? value : undefined;
  return {
    ...message,
    thinking: message.thinking ?? asString(metadata.thinking),
    model: message.model ?? asString(metadata.model),
    provider: message.provider ?? asString(metadata.provider),
    promptTokens: message.promptTokens ?? asNumber(metadata.prompt_tokens),
    completionTokens: message.completionTokens ?? asNumber(metadata.completion_tokens),
    totalTokens: message.totalTokens ?? asNumber(metadata.total_tokens),
  };
}