feat(web): polish master chat with model selector, params config, and empty state (#519)
All checks were successful
ci/woodpecker/push/web Pipeline was successful

Co-authored-by: Jason Woltje <jason@diversecanvas.com>
Co-committed-by: Jason Woltje <jason@diversecanvas.com>
This commit was merged in pull request #519.
This commit is contained in:
2026-02-26 03:17:23 +00:00
committed by jason.woltje
parent 417c6ab49c
commit 13aa52aa53
9 changed files with 1020 additions and 13 deletions

View File

@@ -43,6 +43,15 @@ vi.mock("./ChatInput", () => ({
Send Send
</button> </button>
), ),
DEFAULT_TEMPERATURE: 0.7,
DEFAULT_MAX_TOKENS: 4096,
DEFAULT_MODEL: "llama3.2",
AVAILABLE_MODELS: [
{ id: "llama3.2", label: "Llama 3.2" },
{ id: "claude-3.5-sonnet", label: "Claude 3.5 Sonnet" },
{ id: "gpt-4o", label: "GPT-4o" },
{ id: "deepseek-r1", label: "DeepSeek R1" },
],
})); }));
const mockUseAuth = authModule.useAuth as MockedFunction<typeof authModule.useAuth>; const mockUseAuth = authModule.useAuth as MockedFunction<typeof authModule.useAuth>;

View File

@@ -5,7 +5,8 @@ import { useAuth } from "@/lib/auth/auth-context";
import { useChat } from "@/hooks/useChat"; import { useChat } from "@/hooks/useChat";
import { useWebSocket } from "@/hooks/useWebSocket"; import { useWebSocket } from "@/hooks/useWebSocket";
import { MessageList } from "./MessageList"; import { MessageList } from "./MessageList";
import { ChatInput } from "./ChatInput"; import { ChatInput, type ModelId, DEFAULT_TEMPERATURE, DEFAULT_MAX_TOKENS } from "./ChatInput";
import { ChatEmptyState } from "./ChatEmptyState";
import type { Message } from "@/hooks/useChat"; import type { Message } from "@/hooks/useChat";
export interface ChatRef { export interface ChatRef {
@@ -59,6 +60,14 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
const { user, isLoading: authLoading } = useAuth(); const { user, isLoading: authLoading } = useAuth();
// Model and params state — initialized from ChatInput's persisted values
const [selectedModel, setSelectedModel] = useState<ModelId>("llama3.2");
const [temperature, setTemperature] = useState<number>(DEFAULT_TEMPERATURE);
const [maxTokens, setMaxTokens] = useState<number>(DEFAULT_MAX_TOKENS);
// Suggestion fill value: controls ChatInput's textarea content
const [suggestionValue, setSuggestionValue] = useState<string | undefined>(undefined);
const { const {
messages, messages,
isLoading: isChatLoading, isLoading: isChatLoading,
@@ -72,7 +81,9 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
startNewConversation, startNewConversation,
clearError, clearError,
} = useChat({ } = useChat({
model: "llama3.2", model: selectedModel,
temperature,
maxTokens,
...(initialProjectId !== undefined && { projectId: initialProjectId }), ...(initialProjectId !== undefined && { projectId: initialProjectId }),
}); });
@@ -88,6 +99,11 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
const streamingMessageId = const streamingMessageId =
isStreaming && messages.length > 0 ? messages[messages.length - 1]?.id : undefined; isStreaming && messages.length > 0 ? messages[messages.length - 1]?.id : undefined;
// Whether the conversation is empty (only welcome message or no messages)
const isEmptyConversation =
messages.length === 0 ||
(messages.length === 1 && messages[0]?.id === "welcome" && !isChatLoading && !isStreaming);
useImperativeHandle(ref, () => ({ useImperativeHandle(ref, () => ({
loadConversation: async (cId: string): Promise<void> => { loadConversation: async (cId: string): Promise<void> => {
await loadConversation(cId); await loadConversation(cId);
@@ -122,16 +138,29 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
useEffect(() => { useEffect(() => {
const handleKeyDown = (e: KeyboardEvent): void => { const handleKeyDown = (e: KeyboardEvent): void => {
// Cmd/Ctrl + / : Focus input
if ((e.ctrlKey || e.metaKey) && e.key === "/") { if ((e.ctrlKey || e.metaKey) && e.key === "/") {
e.preventDefault(); e.preventDefault();
inputRef.current?.focus(); inputRef.current?.focus();
} }
// Cmd/Ctrl + N : Start new conversation
if ((e.ctrlKey || e.metaKey) && (e.key === "n" || e.key === "N")) {
e.preventDefault();
startNewConversation(null);
inputRef.current?.focus();
}
// Cmd/Ctrl + L : Clear / start new conversation
if ((e.ctrlKey || e.metaKey) && (e.key === "l" || e.key === "L")) {
e.preventDefault();
startNewConversation(null);
inputRef.current?.focus();
}
}; };
document.addEventListener("keydown", handleKeyDown); document.addEventListener("keydown", handleKeyDown);
return (): void => { return (): void => {
document.removeEventListener("keydown", handleKeyDown); document.removeEventListener("keydown", handleKeyDown);
}; };
}, []); }, [startNewConversation]);
// Show loading quips only during non-streaming load (initial fetch wait) // Show loading quips only during non-streaming load (initial fetch wait)
useEffect(() => { useEffect(() => {
@@ -168,6 +197,14 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
[sendMessage] [sendMessage]
); );
const handleSuggestionClick = useCallback((prompt: string): void => {
setSuggestionValue(prompt);
// Clear after a tick so input receives it, then focus
setTimeout(() => {
inputRef.current?.focus();
}, 0);
}, []);
if (authLoading) { if (authLoading) {
return ( return (
<div <div
@@ -214,6 +251,9 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
{/* Messages Area */} {/* Messages Area */}
<div className="flex-1 overflow-y-auto"> <div className="flex-1 overflow-y-auto">
<div className="mx-auto max-w-4xl px-4 py-6 lg:px-8"> <div className="mx-auto max-w-4xl px-4 py-6 lg:px-8">
{isEmptyConversation ? (
<ChatEmptyState onSuggestionClick={handleSuggestionClick} />
) : (
<MessageList <MessageList
messages={messages as (Message & { thinking?: string })[]} messages={messages as (Message & { thinking?: string })[]}
isLoading={isChatLoading} isLoading={isChatLoading}
@@ -221,6 +261,7 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
{...(streamingMessageId != null ? { streamingMessageId } : {})} {...(streamingMessageId != null ? { streamingMessageId } : {})}
loadingQuip={loadingQuip} loadingQuip={loadingQuip}
/> />
)}
<div ref={messagesEndRef} /> <div ref={messagesEndRef} />
</div> </div>
</div> </div>
@@ -288,6 +329,10 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
inputRef={inputRef} inputRef={inputRef}
isStreaming={isStreaming} isStreaming={isStreaming}
onStopStreaming={abortStream} onStopStreaming={abortStream}
onModelChange={setSelectedModel}
onTemperatureChange={setTemperature}
onMaxTokensChange={setMaxTokens}
{...(suggestionValue !== undefined ? { externalValue: suggestionValue } : {})}
/> />
</div> </div>
</div> </div>

View File

@@ -0,0 +1,103 @@
/**
* @file ChatEmptyState.test.tsx
* @description Tests for ChatEmptyState component: greeting, suggestions, click handling
*/
import { render, screen, fireEvent } from "@testing-library/react";
import { describe, it, expect, vi } from "vitest";
import { ChatEmptyState } from "./ChatEmptyState";
describe("ChatEmptyState", () => {
  /** Mount the component and hand back the spy passed as onSuggestionClick. */
  function renderEmptyState(spy = vi.fn()): ReturnType<typeof vi.fn> {
    render(<ChatEmptyState onSuggestionClick={spy} />);
    return spy;
  }

  /** Click the suggestion card with the given test id and return its call args. */
  function clickSuggestion(testId: string): [string] {
    const spy = renderEmptyState();
    fireEvent.click(screen.getByTestId(testId));
    expect(spy).toHaveBeenCalledOnce();
    return spy.mock.calls[0] as [string];
  }

  it("should render the greeting heading", () => {
    renderEmptyState();
    expect(screen.getByRole("heading", { name: /how can i help/i })).toBeDefined();
  });

  it("should render the empty state container", () => {
    renderEmptyState();
    expect(screen.getByTestId("chat-empty-state")).toBeDefined();
  });

  it("should render four suggestion buttons", () => {
    renderEmptyState();
    // Four suggestions, one button each
    expect(screen.getAllByRole("button").length).toBe(4);
  });

  // One visibility test per suggestion label.
  for (const label of [
    "Explain this project",
    "Help me debug",
    "Write a test for",
    "Refactor this code",
  ]) {
    it(`should render '${label}' suggestion`, () => {
      renderEmptyState();
      expect(screen.getByText(label)).toBeDefined();
    });
  }

  it("should call onSuggestionClick with the correct prompt when a suggestion is clicked", () => {
    const [calledWith] = clickSuggestion("suggestion-explain-this-project");
    expect(calledWith).toContain("overview of this project");
  });

  it("should call onSuggestionClick for 'Help me debug' prompt", () => {
    const [calledWith] = clickSuggestion("suggestion-help-me-debug");
    expect(calledWith).toContain("debugging");
  });

  it("should call onSuggestionClick for 'Write a test for' prompt", () => {
    // Only the invocation itself is asserted for this card.
    clickSuggestion("suggestion-write-a-test-for");
  });

  it("should call onSuggestionClick for 'Refactor this code' prompt", () => {
    const [calledWith] = clickSuggestion("suggestion-refactor-this-code");
    expect(calledWith).toContain("refactor");
  });

  it("should have accessible aria-label on each suggestion button", () => {
    renderEmptyState();
    for (const button of screen.getAllByRole("button")) {
      const label = button.getAttribute("aria-label");
      expect(label).toBeTruthy();
      expect(label).toMatch(/suggestion:/i);
    }
  });
});

View File

@@ -0,0 +1,99 @@
"use client";

/** A quick-start card shown in the empty chat state. */
interface Suggestion {
  // Short text rendered as the card title.
  label: string;
  // Full prompt forwarded to the chat input when the card is clicked.
  prompt: string;
}

/** The four starter suggestions rendered as clickable cards. */
const SUGGESTIONS: Suggestion[] = [
  {
    label: "Explain this project",
    prompt: "Can you give me an overview of this project and its key components?",
  },
  {
    label: "Help me debug",
    prompt: "I have a bug I need help debugging. Can you walk me through the process?",
  },
  {
    label: "Write a test for",
    prompt: "Can you help me write a test for the following function or component?",
  },
  {
    label: "Refactor this code",
    prompt: "I have some code I'd like to refactor for better readability and maintainability.",
  },
];

interface ChatEmptyStateProps {
  // Invoked with the suggestion's full prompt text when a card is clicked.
  onSuggestionClick: (prompt: string) => void;
}
export function ChatEmptyState({ onSuggestionClick }: ChatEmptyStateProps): React.JSX.Element {
return (
<div
className="flex flex-col items-center justify-center gap-6 py-12 px-4 text-center"
data-testid="chat-empty-state"
>
{/* Icon */}
<div
className="flex h-16 w-16 items-center justify-center rounded-2xl"
style={{ backgroundColor: "rgb(var(--accent-primary) / 0.12)" }}
>
<svg
className="h-8 w-8"
style={{ color: "rgb(var(--accent-primary))" }}
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
strokeWidth={1.5}
aria-hidden="true"
>
<path d="M8 12h.01M12 12h.01M16 12h.01M21 12c0 4.418-4.03 8-9 8a9.863 9.863 0 01-4.255-.949L3 20l1.395-3.72C3.512 15.042 3 13.574 3 12c0-4.418 4.03-8 9-8s9 3.582 9 8z" />
</svg>
</div>
{/* Greeting */}
<div className="space-y-1">
<h3 className="text-lg font-semibold" style={{ color: "rgb(var(--text-primary))" }}>
How can I help you today?
</h3>
<p className="text-sm max-w-sm" style={{ color: "rgb(var(--text-secondary))" }}>
Ask me anything I can help with code, explanations, debugging, and more.
</p>
</div>
{/* Suggestions */}
<div className="grid grid-cols-1 gap-2 w-full max-w-sm sm:grid-cols-2">
{SUGGESTIONS.map((suggestion) => (
<button
key={suggestion.label}
onClick={() => {
onSuggestionClick(suggestion.prompt);
}}
className="rounded-lg border px-3 py-2.5 text-left text-sm transition-all duration-150 hover:shadow-sm focus:outline-none focus:ring-2"
style={{
backgroundColor: "rgb(var(--surface-1))",
borderColor: "rgb(var(--border-default))",
color: "rgb(var(--text-secondary))",
}}
aria-label={`Suggestion: ${suggestion.label}`}
data-testid={`suggestion-${suggestion.label.toLowerCase().replace(/\s+/g, "-")}`}
>
<span
className="block text-xs font-medium mb-0.5"
style={{ color: "rgb(var(--text-primary))" }}
>
{suggestion.label}
</span>
<span
className="block text-xs leading-relaxed line-clamp-2"
style={{ color: "rgb(var(--text-muted))" }}
>
{suggestion.prompt}
</span>
</button>
))}
</div>
</div>
);
}

View File

@@ -0,0 +1,293 @@
/**
* @file ChatInput.test.tsx
* @description Tests for ChatInput: model selector, temperature/params, localStorage persistence
*/
import { render, screen, fireEvent, waitFor, within } from "@testing-library/react";
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
import {
ChatInput,
AVAILABLE_MODELS,
DEFAULT_MODEL,
DEFAULT_TEMPERATURE,
DEFAULT_MAX_TOKENS,
} from "./ChatInput";
// Mock fetch for version.json: reject so ChatInput's version lookup fails fast.
beforeEach(() => {
  global.fetch = vi.fn().mockRejectedValue(new Error("Not found"));
});
afterEach(() => {
  // Restore mocks and wipe persisted chat prefs so tests stay independent.
  vi.restoreAllMocks();
  localStorage.clear();
});
/** Return the first entry in AVAILABLE_MODELS whose id differs from DEFAULT_MODEL. */
function getNonDefaultModel(): (typeof AVAILABLE_MODELS)[number] {
  for (const candidate of AVAILABLE_MODELS) {
    if (candidate.id !== DEFAULT_MODEL) {
      return candidate;
    }
  }
  throw new Error("No non-default model found");
}
// Covers the model-selector chip: default label, dropdown open/close,
// onModelChange callback, localStorage persistence, and ARIA state.
describe("ChatInput — model selector", () => {
  it("should render the model selector chip showing the default model", () => {
    render(<ChatInput onSend={vi.fn()} />);
    const defaultLabel =
      AVAILABLE_MODELS.find((m) => m.id === DEFAULT_MODEL)?.label ?? DEFAULT_MODEL;
    expect(screen.getByText(defaultLabel)).toBeDefined();
  });

  it("should open the model dropdown when the chip is clicked", () => {
    render(<ChatInput onSend={vi.fn()} />);
    const chip = screen.getByLabelText(/model:/i);
    fireEvent.click(chip);
    // The dropdown (listbox role) should be visible
    const listbox = screen.getByRole("listbox", { name: /available models/i });
    expect(listbox).toBeDefined();
    // All model options should appear in the dropdown
    const options = within(listbox).getAllByRole("option");
    expect(options.length).toBe(AVAILABLE_MODELS.length);
  });

  it("should call onModelChange when a model is selected", async () => {
    const onModelChange = vi.fn();
    render(<ChatInput onSend={vi.fn()} onModelChange={onModelChange} />);
    const chip = screen.getByLabelText(/model:/i);
    fireEvent.click(chip);
    const targetModel = getNonDefaultModel();
    const listbox = screen.getByRole("listbox", { name: /available models/i });
    const targetOption = within(listbox).getByText(targetModel.label);
    fireEvent.click(targetOption);
    await waitFor(() => {
      // The mount effect also reports the initial model, so only require
      // that the selected id shows up among the reported values.
      const calls = onModelChange.mock.calls.map((c: unknown[]) => c[0]);
      expect(calls).toContain(targetModel.id);
    });
  });

  it("should persist the selected model in localStorage", async () => {
    render(<ChatInput onSend={vi.fn()} />);
    const chip = screen.getByLabelText(/model:/i);
    fireEvent.click(chip);
    const targetModel = getNonDefaultModel();
    const listbox = screen.getByRole("listbox", { name: /available models/i });
    const targetOption = within(listbox).getByText(targetModel.label);
    fireEvent.click(targetOption);
    await waitFor(() => {
      expect(localStorage.getItem("chat:selectedModel")).toBe(targetModel.id);
    });
  });

  it("should restore the model from localStorage on mount", async () => {
    const targetModel = getNonDefaultModel();
    localStorage.setItem("chat:selectedModel", targetModel.id);
    render(<ChatInput onSend={vi.fn()} />);
    await waitFor(() => {
      expect(screen.getByText(targetModel.label)).toBeDefined();
    });
  });

  it("should close the dropdown after selecting a model", async () => {
    render(<ChatInput onSend={vi.fn()} />);
    const chip = screen.getByLabelText(/model:/i);
    fireEvent.click(chip);
    const targetModel = getNonDefaultModel();
    const listbox = screen.getByRole("listbox", { name: /available models/i });
    const targetOption = within(listbox).getByText(targetModel.label);
    fireEvent.click(targetOption);
    // After selection, dropdown should close
    await waitFor(() => {
      expect(screen.queryByRole("listbox")).toBeNull();
    });
  });

  it("should have aria-expanded on the model chip button", () => {
    render(<ChatInput onSend={vi.fn()} />);
    const chip = screen.getByLabelText(/model:/i);
    expect(chip.getAttribute("aria-expanded")).toBe("false");
    fireEvent.click(chip);
    expect(chip.getAttribute("aria-expanded")).toBe("true");
  });
});
// Covers the params popover: temperature slider and max-tokens input —
// defaults, change callbacks, persistence, and restoration from localStorage.
describe("ChatInput — temperature and max tokens", () => {
  it("should render the settings/params button", () => {
    render(<ChatInput onSend={vi.fn()} />);
    const settingsBtn = screen.getByLabelText(/chat parameters/i);
    expect(settingsBtn).toBeDefined();
  });

  it("should open the params popover when settings button is clicked", () => {
    render(<ChatInput onSend={vi.fn()} />);
    const settingsBtn = screen.getByLabelText(/chat parameters/i);
    fireEvent.click(settingsBtn);
    expect(screen.getByLabelText(/temperature/i)).toBeDefined();
    expect(screen.getByLabelText(/maximum tokens/i)).toBeDefined();
  });

  it("should show the default temperature value", () => {
    render(<ChatInput onSend={vi.fn()} />);
    fireEvent.click(screen.getByLabelText(/chat parameters/i));
    const slider = screen.getByLabelText(/temperature/i);
    // toBeCloseTo guards against float formatting of the range input's value
    expect(parseFloat((slider as HTMLInputElement).value)).toBeCloseTo(DEFAULT_TEMPERATURE);
  });

  it("should call onTemperatureChange when the slider is moved", async () => {
    const onTemperatureChange = vi.fn();
    render(<ChatInput onSend={vi.fn()} onTemperatureChange={onTemperatureChange} />);
    fireEvent.click(screen.getByLabelText(/chat parameters/i));
    const slider = screen.getByLabelText(/temperature/i);
    fireEvent.change(slider, { target: { value: "1.2" } });
    await waitFor(() => {
      const calls = onTemperatureChange.mock.calls.map((c: unknown[]) => c[0]);
      expect(calls).toContain(1.2);
    });
  });

  it("should persist temperature in localStorage", async () => {
    render(<ChatInput onSend={vi.fn()} />);
    fireEvent.click(screen.getByLabelText(/chat parameters/i));
    const slider = screen.getByLabelText(/temperature/i);
    fireEvent.change(slider, { target: { value: "0.5" } });
    await waitFor(() => {
      expect(localStorage.getItem("chat:temperature")).toBe("0.5");
    });
  });

  it("should restore temperature from localStorage on mount", async () => {
    localStorage.setItem("chat:temperature", "1.5");
    const onTemperatureChange = vi.fn();
    render(<ChatInput onSend={vi.fn()} onTemperatureChange={onTemperatureChange} />);
    await waitFor(() => {
      // The mount effect reports the restored value to the parent
      const calls = onTemperatureChange.mock.calls.map((c: unknown[]) => c[0]);
      expect(calls).toContain(1.5);
    });
  });

  it("should show the default max tokens value", () => {
    render(<ChatInput onSend={vi.fn()} />);
    fireEvent.click(screen.getByLabelText(/chat parameters/i));
    const input = screen.getByLabelText(/maximum tokens/i);
    expect(parseInt((input as HTMLInputElement).value, 10)).toBe(DEFAULT_MAX_TOKENS);
  });

  it("should call onMaxTokensChange when the max tokens input changes", async () => {
    const onMaxTokensChange = vi.fn();
    render(<ChatInput onSend={vi.fn()} onMaxTokensChange={onMaxTokensChange} />);
    fireEvent.click(screen.getByLabelText(/chat parameters/i));
    const input = screen.getByLabelText(/maximum tokens/i);
    fireEvent.change(input, { target: { value: "8192" } });
    await waitFor(() => {
      const calls = onMaxTokensChange.mock.calls.map((c: unknown[]) => c[0]);
      expect(calls).toContain(8192);
    });
  });

  it("should persist max tokens in localStorage", async () => {
    render(<ChatInput onSend={vi.fn()} />);
    fireEvent.click(screen.getByLabelText(/chat parameters/i));
    const input = screen.getByLabelText(/maximum tokens/i);
    fireEvent.change(input, { target: { value: "2000" } });
    await waitFor(() => {
      expect(localStorage.getItem("chat:maxTokens")).toBe("2000");
    });
  });

  it("should restore max tokens from localStorage on mount", async () => {
    localStorage.setItem("chat:maxTokens", "8000");
    const onMaxTokensChange = vi.fn();
    render(<ChatInput onSend={vi.fn()} onMaxTokensChange={onMaxTokensChange} />);
    await waitFor(() => {
      const calls = onMaxTokensChange.mock.calls.map((c: unknown[]) => c[0]);
      expect(calls).toContain(8000);
    });
  });
});
// Covers the externalValue prop used by the empty-state suggestion cards
// to pre-fill the textarea from outside the component.
describe("ChatInput — externalValue (suggestion fill)", () => {
  it("should update the textarea when externalValue is provided", async () => {
    const { rerender } = render(<ChatInput onSend={vi.fn()} />);
    const textarea = screen.getByLabelText(/message input/i);
    expect((textarea as HTMLTextAreaElement).value).toBe("");
    rerender(<ChatInput onSend={vi.fn()} externalValue="Hello suggestion" />);
    await waitFor(() => {
      expect((textarea as HTMLTextAreaElement).value).toBe("Hello suggestion");
    });
  });
});
// Covers the basic send flow plus the streaming stop button.
describe("ChatInput — send behavior", () => {
  it("should call onSend with the message when the send button is clicked", async () => {
    const onSend = vi.fn();
    render(<ChatInput onSend={onSend} />);
    const textarea = screen.getByLabelText(/message input/i);
    fireEvent.change(textarea, { target: { value: "Hello world" } });
    const sendButton = screen.getByLabelText(/send message/i);
    fireEvent.click(sendButton);
    await waitFor(() => {
      expect(onSend).toHaveBeenCalledWith("Hello world");
    });
  });

  it("should clear the textarea after sending", async () => {
    const onSend = vi.fn();
    render(<ChatInput onSend={onSend} />);
    const textarea = screen.getByLabelText(/message input/i);
    fireEvent.change(textarea, { target: { value: "Hello world" } });
    fireEvent.click(screen.getByLabelText(/send message/i));
    await waitFor(() => {
      expect((textarea as HTMLTextAreaElement).value).toBe("");
    });
  });

  it("should show the stop button when streaming", () => {
    render(<ChatInput onSend={vi.fn()} isStreaming={true} />);
    expect(screen.getByLabelText(/stop generating/i)).toBeDefined();
  });

  it("should call onStopStreaming when stop button is clicked", () => {
    const onStop = vi.fn();
    render(<ChatInput onSend={vi.fn()} isStreaming={true} onStopStreaming={onStop} />);
    fireEvent.click(screen.getByLabelText(/stop generating/i));
    expect(onStop).toHaveBeenCalledOnce();
  });
});

View File

@@ -1,7 +1,66 @@
"use client"; "use client";
import type { KeyboardEvent, RefObject } from "react"; import type { KeyboardEvent, RefObject } from "react";
import { useCallback, useState, useEffect } from "react"; import { useCallback, useState, useEffect, useRef } from "react";
/** Models selectable from the chat input's model picker. */
export const AVAILABLE_MODELS = [
  { id: "llama3.2", label: "Llama 3.2" },
  { id: "claude-3.5-sonnet", label: "Claude 3.5 Sonnet" },
  { id: "gpt-4o", label: "GPT-4o" },
  { id: "deepseek-r1", label: "DeepSeek R1" },
] as const;

/** Valid model identifier — derived from AVAILABLE_MODELS so the two never drift. */
export type ModelId = (typeof AVAILABLE_MODELS)[number]["id"];

// localStorage keys under which chat preferences are persisted.
const STORAGE_KEY_MODEL = "chat:selectedModel";
const STORAGE_KEY_TEMPERATURE = "chat:temperature";
const STORAGE_KEY_MAX_TOKENS = "chat:maxTokens";

// Defaults used when no valid persisted value exists.
export const DEFAULT_TEMPERATURE = 0.7;
export const DEFAULT_MAX_TOKENS = 4096;
export const DEFAULT_MODEL: ModelId = "llama3.2";

/**
 * Read the persisted model id from localStorage.
 * Falls back to DEFAULT_MODEL when storage is unavailable (e.g. SSR or
 * blocked by browser settings) or the stored id is no longer in
 * AVAILABLE_MODELS (e.g. a model was removed from the list).
 */
function loadStoredModel(): ModelId {
  try {
    const stored = localStorage.getItem(STORAGE_KEY_MODEL);
    if (stored && AVAILABLE_MODELS.some((m) => m.id === stored)) {
      return stored as ModelId;
    }
  } catch {
    // localStorage not available
  }
  return DEFAULT_MODEL;
}

/**
 * Read the persisted temperature from localStorage.
 * Accepts only values within the slider's range [0, 2]; anything else
 * (missing, unparsable, out of range) falls back to DEFAULT_TEMPERATURE.
 */
function loadStoredTemperature(): number {
  try {
    const stored = localStorage.getItem(STORAGE_KEY_TEMPERATURE);
    if (stored !== null) {
      const parsed = parseFloat(stored);
      // Number.isNaN avoids the implicit coercion of the global isNaN.
      if (!Number.isNaN(parsed) && parsed >= 0 && parsed <= 2) {
        return parsed;
      }
    }
  } catch {
    // localStorage not available
  }
  return DEFAULT_TEMPERATURE;
}

/**
 * Read the persisted max-tokens value from localStorage.
 * Accepts only integers within the input's range [100, 32000]; anything
 * else falls back to DEFAULT_MAX_TOKENS.
 */
function loadStoredMaxTokens(): number {
  try {
    const stored = localStorage.getItem(STORAGE_KEY_MAX_TOKENS);
    if (stored !== null) {
      const parsed = parseInt(stored, 10);
      if (!Number.isNaN(parsed) && parsed >= 100 && parsed <= 32000) {
        return parsed;
      }
    }
  } catch {
    // localStorage not available
  }
  return DEFAULT_MAX_TOKENS;
}
interface ChatInputProps { interface ChatInputProps {
onSend: (message: string) => void; onSend: (message: string) => void;
@@ -9,6 +68,11 @@ interface ChatInputProps {
inputRef?: RefObject<HTMLTextAreaElement | null>; inputRef?: RefObject<HTMLTextAreaElement | null>;
isStreaming?: boolean; isStreaming?: boolean;
onStopStreaming?: () => void; onStopStreaming?: () => void;
onModelChange?: (model: ModelId) => void;
onTemperatureChange?: (temperature: number) => void;
onMaxTokensChange?: (maxTokens: number) => void;
onSuggestionFill?: (text: string) => void;
externalValue?: string;
} }
export function ChatInput({ export function ChatInput({
@@ -17,9 +81,52 @@ export function ChatInput({
inputRef, inputRef,
isStreaming = false, isStreaming = false,
onStopStreaming, onStopStreaming,
onModelChange,
onTemperatureChange,
onMaxTokensChange,
externalValue,
}: ChatInputProps): React.JSX.Element { }: ChatInputProps): React.JSX.Element {
const [message, setMessage] = useState(""); const [message, setMessage] = useState("");
const [version, setVersion] = useState<string | null>(null); const [version, setVersion] = useState<string | null>(null);
const [selectedModel, setSelectedModel] = useState<ModelId>(DEFAULT_MODEL);
const [temperature, setTemperature] = useState<number>(DEFAULT_TEMPERATURE);
const [maxTokens, setMaxTokens] = useState<number>(DEFAULT_MAX_TOKENS);
const [isModelDropdownOpen, setIsModelDropdownOpen] = useState(false);
const [isParamsOpen, setIsParamsOpen] = useState(false);
const modelDropdownRef = useRef<HTMLDivElement>(null);
const paramsDropdownRef = useRef<HTMLDivElement>(null);
// Stable refs for callbacks so the mount effect stays dependency-free
const onModelChangeRef = useRef(onModelChange);
onModelChangeRef.current = onModelChange;
const onTemperatureChangeRef = useRef(onTemperatureChange);
onTemperatureChangeRef.current = onTemperatureChange;
const onMaxTokensChangeRef = useRef(onMaxTokensChange);
onMaxTokensChangeRef.current = onMaxTokensChange;
// Load persisted values from localStorage on mount only
useEffect(() => {
const storedModel = loadStoredModel();
const storedTemperature = loadStoredTemperature();
const storedMaxTokens = loadStoredMaxTokens();
setSelectedModel(storedModel);
setTemperature(storedTemperature);
setMaxTokens(storedMaxTokens);
// Notify parent of initial values via refs to avoid stale closure
onModelChangeRef.current?.(storedModel);
onTemperatureChangeRef.current?.(storedTemperature);
onMaxTokensChangeRef.current?.(storedMaxTokens);
}, []);
// Sync external value (e.g. from suggestion clicks)
useEffect(() => {
if (externalValue !== undefined) {
setMessage(externalValue);
}
}, [externalValue]);
useEffect(() => { useEffect(() => {
interface VersionData { interface VersionData {
@@ -40,6 +147,22 @@ export function ChatInput({
}); });
}, []); }, []);
// Close dropdowns on outside click
useEffect(() => {
const handleClickOutside = (e: MouseEvent): void => {
if (modelDropdownRef.current && !modelDropdownRef.current.contains(e.target as Node)) {
setIsModelDropdownOpen(false);
}
if (paramsDropdownRef.current && !paramsDropdownRef.current.contains(e.target as Node)) {
setIsParamsOpen(false);
}
};
document.addEventListener("mousedown", handleClickOutside);
return (): void => {
document.removeEventListener("mousedown", handleClickOutside);
};
}, []);
const handleSubmit = useCallback(() => { const handleSubmit = useCallback(() => {
if (message.trim() && !disabled && !isStreaming) { if (message.trim() && !disabled && !isStreaming) {
onSend(message); onSend(message);
@@ -65,6 +188,49 @@ export function ChatInput({
[handleSubmit] [handleSubmit]
); );
const handleModelSelect = useCallback(
(model: ModelId): void => {
setSelectedModel(model);
try {
localStorage.setItem(STORAGE_KEY_MODEL, model);
} catch {
// ignore
}
onModelChange?.(model);
setIsModelDropdownOpen(false);
},
[onModelChange]
);
const handleTemperatureChange = useCallback(
(value: number): void => {
setTemperature(value);
try {
localStorage.setItem(STORAGE_KEY_TEMPERATURE, value.toString());
} catch {
// ignore
}
onTemperatureChange?.(value);
},
[onTemperatureChange]
);
const handleMaxTokensChange = useCallback(
(value: number): void => {
setMaxTokens(value);
try {
localStorage.setItem(STORAGE_KEY_MAX_TOKENS, value.toString());
} catch {
// ignore
}
onMaxTokensChange?.(value);
},
[onMaxTokensChange]
);
const selectedModelLabel =
AVAILABLE_MODELS.find((m) => m.id === selectedModel)?.label ?? selectedModel;
const characterCount = message.length; const characterCount = message.length;
const maxCharacters = 4000; const maxCharacters = 4000;
const isNearLimit = characterCount > maxCharacters * 0.9; const isNearLimit = characterCount > maxCharacters * 0.9;
@@ -72,7 +238,230 @@ export function ChatInput({
const isInputDisabled = disabled ?? false; const isInputDisabled = disabled ?? false;
return ( return (
<div className="space-y-3"> <div className="space-y-2">
{/* Model Selector + Params Row */}
<div className="flex items-center gap-2">
{/* Model Selector */}
<div className="relative" ref={modelDropdownRef}>
<button
type="button"
onClick={() => {
setIsModelDropdownOpen((prev) => !prev);
setIsParamsOpen(false);
}}
className="flex items-center gap-1.5 rounded-full border px-2.5 py-1 text-xs font-medium transition-colors hover:bg-black/5 focus:outline-none focus:ring-2"
style={{
borderColor: "rgb(var(--border-default))",
backgroundColor: "rgb(var(--surface-1))",
color: "rgb(var(--text-secondary))",
}}
aria-label={`Model: ${selectedModelLabel}. Click to change`}
aria-expanded={isModelDropdownOpen}
aria-haspopup="listbox"
title="Select AI model"
>
<svg
className="h-3 w-3 flex-shrink-0"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
strokeWidth={2}
aria-hidden="true"
>
<circle cx="12" cy="12" r="3" />
<path d="M12 1v4M12 19v4M4.22 4.22l2.83 2.83M16.95 16.95l2.83 2.83M1 12h4M19 12h4M4.22 19.78l2.83-2.83M16.95 7.05l2.83-2.83" />
</svg>
<span>{selectedModelLabel}</span>
<svg
className={`h-3 w-3 transition-transform ${isModelDropdownOpen ? "rotate-180" : ""}`}
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
strokeWidth={2}
aria-hidden="true"
>
<path d="M6 9l6 6 6-6" />
</svg>
</button>
{/* Model Dropdown */}
{isModelDropdownOpen && (
<div
className="absolute bottom-full left-0 mb-1 z-50 min-w-[160px] rounded-lg border shadow-lg"
style={{
backgroundColor: "rgb(var(--surface-0))",
borderColor: "rgb(var(--border-default))",
}}
role="listbox"
aria-label="Available models"
>
{AVAILABLE_MODELS.map((model) => (
<button
key={model.id}
role="option"
aria-selected={model.id === selectedModel}
onClick={() => {
handleModelSelect(model.id);
}}
className="w-full px-3 py-2 text-left text-xs transition-colors first:rounded-t-lg last:rounded-b-lg hover:bg-black/5"
style={{
color:
model.id === selectedModel
? "rgb(var(--accent-primary))"
: "rgb(var(--text-primary))",
fontWeight: model.id === selectedModel ? 600 : 400,
}}
>
{model.label}
{model.id === selectedModel && (
<svg
className="inline-block ml-1.5 h-3 w-3"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
strokeWidth={3}
aria-hidden="true"
>
<path d="M5 13l4 4L19 7" />
</svg>
)}
</button>
))}
</div>
)}
</div>
{/* Settings / Params Icon */}
<div className="relative" ref={paramsDropdownRef}>
<button
type="button"
onClick={() => {
setIsParamsOpen((prev) => !prev);
setIsModelDropdownOpen(false);
}}
className="flex items-center justify-center rounded-full border p-1 transition-colors hover:bg-black/5 focus:outline-none focus:ring-2"
style={{
borderColor: "rgb(var(--border-default))",
backgroundColor: isParamsOpen ? "rgb(var(--surface-2))" : "rgb(var(--surface-1))",
color: "rgb(var(--text-muted))",
}}
aria-label="Chat parameters"
aria-expanded={isParamsOpen}
aria-haspopup="dialog"
title="Configure temperature and max tokens"
>
<svg
className="h-3.5 w-3.5"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
strokeWidth={2}
aria-hidden="true"
>
<path d="M12 6V4m0 2a2 2 0 100 4m0-4a2 2 0 110 4m-6 8a2 2 0 100-4m0 4a2 2 0 110-4m0 4v2m0-6V4m6 6v10m6-2a2 2 0 100-4m0 4a2 2 0 110-4m0 4v2m0-6V4" />
</svg>
</button>
{/* Params Popover */}
{isParamsOpen && (
<div
className="absolute bottom-full left-0 mb-1 z-50 w-64 rounded-lg border p-4 shadow-lg"
style={{
backgroundColor: "rgb(var(--surface-0))",
borderColor: "rgb(var(--border-default))",
}}
role="dialog"
aria-label="Chat parameters"
>
<h3
className="mb-3 text-xs font-semibold uppercase tracking-wide"
style={{ color: "rgb(var(--text-muted))" }}
>
Parameters
</h3>
{/* Temperature */}
<div className="mb-4">
<div className="mb-1.5 flex items-center justify-between">
<label
className="text-xs font-medium"
style={{ color: "rgb(var(--text-secondary))" }}
htmlFor="temperature-slider"
>
Temperature
</label>
<span
className="text-xs font-mono tabular-nums"
style={{ color: "rgb(var(--accent-primary))" }}
>
{temperature.toFixed(1)}
</span>
</div>
<input
id="temperature-slider"
type="range"
min={0}
max={2}
step={0.1}
value={temperature}
onChange={(e) => {
handleTemperatureChange(parseFloat(e.target.value));
}}
className="w-full h-1.5 rounded-full appearance-none cursor-pointer"
style={{
accentColor: "rgb(var(--accent-primary))",
backgroundColor: "rgb(var(--surface-2))",
}}
aria-label={`Temperature: ${temperature.toFixed(1)}`}
/>
<div
className="mt-1 flex justify-between text-[10px]"
style={{ color: "rgb(var(--text-muted))" }}
>
<span>Precise</span>
<span>Creative</span>
</div>
</div>
{/* Max Tokens */}
<div>
<label
className="mb-1.5 block text-xs font-medium"
style={{ color: "rgb(var(--text-secondary))" }}
htmlFor="max-tokens-input"
>
Max Tokens
</label>
<input
id="max-tokens-input"
type="number"
min={100}
max={32000}
step={100}
value={maxTokens}
onChange={(e) => {
const val = parseInt(e.target.value, 10);
if (!isNaN(val) && val >= 100 && val <= 32000) {
handleMaxTokensChange(val);
}
}}
className="w-full rounded-md border px-2.5 py-1.5 text-xs outline-none focus:ring-2"
style={{
backgroundColor: "rgb(var(--surface-1))",
borderColor: "rgb(var(--border-default))",
color: "rgb(var(--text-primary))",
}}
aria-label="Maximum tokens"
/>
<p className="mt-1 text-[10px]" style={{ color: "rgb(var(--text-muted))" }}>
100 32,000
</p>
</div>
</div>
)}
</div>
</div>
{/* Input Container */} {/* Input Container */}
<div <div
className="relative rounded-lg border transition-all duration-150" className="relative rounded-lg border transition-all duration-150"

View File

@@ -250,6 +250,46 @@ describe("ChatOverlay", () => {
}); });
}); });
describe("new conversation button", () => {
it("should render the new conversation button when chat is open", async () => {
const { useChatOverlay } = await import("../../hooks/useChatOverlay");
vi.mocked(useChatOverlay).mockReturnValue({
isOpen: true,
isMinimized: false,
open: mockOpen,
close: mockClose,
minimize: mockMinimize,
expand: mockExpand,
toggle: mockToggle,
toggleMinimize: mockToggleMinimize,
});
render(<ChatOverlay />);
const newConvBtn = screen.getByRole("button", { name: /new conversation/i });
expect(newConvBtn).toBeDefined();
});
it("should have a tooltip on the new conversation button", async () => {
const { useChatOverlay } = await import("../../hooks/useChatOverlay");
vi.mocked(useChatOverlay).mockReturnValue({
isOpen: true,
isMinimized: false,
open: mockOpen,
close: mockClose,
minimize: mockMinimize,
expand: mockExpand,
toggle: mockToggle,
toggleMinimize: mockToggleMinimize,
});
render(<ChatOverlay />);
const newConvBtn = screen.getByRole("button", { name: /new conversation/i });
expect(newConvBtn.getAttribute("title")).toContain("Cmd+N");
});
});
describe("responsive design", () => { describe("responsive design", () => {
it("should render as a sidebar on desktop", () => { it("should render as a sidebar on desktop", () => {
render(<ChatOverlay />); render(<ChatOverlay />);

View File

@@ -164,6 +164,27 @@ export function ChatOverlay(): React.JSX.Element {
{/* Header Controls */} {/* Header Controls */}
<div className="flex items-center gap-1"> <div className="flex items-center gap-1">
{/* New Conversation Button */}
<button
onClick={() => {
chatRef.current?.startNewConversation(null);
}}
className="rounded p-1.5 transition-colors hover:bg-black/5 focus:outline-none focus:ring-2"
aria-label="New conversation"
title="New conversation (Cmd+N)"
>
<svg
className="h-4 w-4"
style={{ color: "rgb(var(--text-secondary))" }}
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
strokeWidth={2}
>
<path d="M12 4v16m8-8H4" />
</svg>
</button>
{/* Minimize Button */} {/* Minimize Button */}
<button <button
onClick={minimize} onClick={minimize}

View File

@@ -11,9 +11,17 @@
*/ */
export { Chat, type ChatRef, type NewConversationData } from "./Chat"; export { Chat, type ChatRef, type NewConversationData } from "./Chat";
export { ChatInput } from "./ChatInput"; export {
ChatInput,
AVAILABLE_MODELS,
DEFAULT_MODEL,
DEFAULT_TEMPERATURE,
DEFAULT_MAX_TOKENS,
} from "./ChatInput";
export type { ModelId } from "./ChatInput";
export { MessageList } from "./MessageList"; export { MessageList } from "./MessageList";
export { ConversationSidebar, type ConversationSidebarRef } from "./ConversationSidebar"; export { ConversationSidebar, type ConversationSidebarRef } from "./ConversationSidebar";
export { BackendStatusBanner } from "./BackendStatusBanner"; export { BackendStatusBanner } from "./BackendStatusBanner";
export { ChatOverlay } from "./ChatOverlay"; export { ChatOverlay } from "./ChatOverlay";
export { ChatEmptyState } from "./ChatEmptyState";
export type { Message } from "@/hooks/useChat"; export type { Message } from "@/hooks/useChat";