feat(web): polish master chat with model selector, params config, and empty state (#519)
All checks were successful
ci/woodpecker/push/web Pipeline was successful
Co-authored-by: Jason Woltje <jason@diversecanvas.com>
Co-committed-by: Jason Woltje <jason@diversecanvas.com>
This commit was merged in pull request #519.
@@ -5,7 +5,8 @@ import { useAuth } from "@/lib/auth/auth-context";
|
||||
import { useChat } from "@/hooks/useChat";
|
||||
import { useWebSocket } from "@/hooks/useWebSocket";
|
||||
import { MessageList } from "./MessageList";
|
||||
import { ChatInput } from "./ChatInput";
|
||||
import { ChatInput, type ModelId, DEFAULT_TEMPERATURE, DEFAULT_MAX_TOKENS } from "./ChatInput";
|
||||
import { ChatEmptyState } from "./ChatEmptyState";
|
||||
import type { Message } from "@/hooks/useChat";
|
||||
|
||||
export interface ChatRef {
|
||||
@@ -59,6 +60,14 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
|
||||
|
||||
const { user, isLoading: authLoading } = useAuth();
|
||||
|
||||
// Model and params state — initialized from ChatInput's persisted values
|
||||
const [selectedModel, setSelectedModel] = useState<ModelId>("llama3.2");
|
||||
const [temperature, setTemperature] = useState<number>(DEFAULT_TEMPERATURE);
|
||||
const [maxTokens, setMaxTokens] = useState<number>(DEFAULT_MAX_TOKENS);
|
||||
|
||||
// Suggestion fill value: controls ChatInput's textarea content
|
||||
const [suggestionValue, setSuggestionValue] = useState<string | undefined>(undefined);
|
||||
|
||||
const {
|
||||
messages,
|
||||
isLoading: isChatLoading,
|
||||
@@ -72,7 +81,9 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
|
||||
startNewConversation,
|
||||
clearError,
|
||||
} = useChat({
|
||||
model: "llama3.2",
|
||||
model: selectedModel,
|
||||
temperature,
|
||||
maxTokens,
|
||||
...(initialProjectId !== undefined && { projectId: initialProjectId }),
|
||||
});
|
||||
|
||||
@@ -88,6 +99,11 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
|
||||
const streamingMessageId =
|
||||
isStreaming && messages.length > 0 ? messages[messages.length - 1]?.id : undefined;
|
||||
|
||||
// Whether the conversation is empty (only welcome message or no messages)
|
||||
const isEmptyConversation =
|
||||
messages.length === 0 ||
|
||||
(messages.length === 1 && messages[0]?.id === "welcome" && !isChatLoading && !isStreaming);
|
||||
|
||||
useImperativeHandle(ref, () => ({
|
||||
loadConversation: async (cId: string): Promise<void> => {
|
||||
await loadConversation(cId);
|
||||
@@ -122,16 +138,29 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
|
||||
|
||||
useEffect(() => {
|
||||
const handleKeyDown = (e: KeyboardEvent): void => {
|
||||
// Cmd/Ctrl + / : Focus input
|
||||
if ((e.ctrlKey || e.metaKey) && e.key === "/") {
|
||||
e.preventDefault();
|
||||
inputRef.current?.focus();
|
||||
}
|
||||
// Cmd/Ctrl + N : Start new conversation
|
||||
if ((e.ctrlKey || e.metaKey) && (e.key === "n" || e.key === "N")) {
|
||||
e.preventDefault();
|
||||
startNewConversation(null);
|
||||
inputRef.current?.focus();
|
||||
}
|
||||
// Cmd/Ctrl + L : Clear / start new conversation
|
||||
if ((e.ctrlKey || e.metaKey) && (e.key === "l" || e.key === "L")) {
|
||||
e.preventDefault();
|
||||
startNewConversation(null);
|
||||
inputRef.current?.focus();
|
||||
}
|
||||
};
|
||||
document.addEventListener("keydown", handleKeyDown);
|
||||
return (): void => {
|
||||
document.removeEventListener("keydown", handleKeyDown);
|
||||
};
|
||||
}, []);
|
||||
}, [startNewConversation]);
|
||||
|
||||
// Show loading quips only during non-streaming load (initial fetch wait)
|
||||
useEffect(() => {
|
||||
@@ -168,6 +197,14 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
|
||||
[sendMessage]
|
||||
);
|
||||
|
||||
const handleSuggestionClick = useCallback((prompt: string): void => {
|
||||
setSuggestionValue(prompt);
|
||||
// Clear after a tick so input receives it, then focus
|
||||
setTimeout(() => {
|
||||
inputRef.current?.focus();
|
||||
}, 0);
|
||||
}, []);
|
||||
|
||||
if (authLoading) {
|
||||
return (
|
||||
<div
|
||||
@@ -214,13 +251,17 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
|
||||
{/* Messages Area */}
|
||||
<div className="flex-1 overflow-y-auto">
|
||||
<div className="mx-auto max-w-4xl px-4 py-6 lg:px-8">
|
||||
<MessageList
|
||||
messages={messages as (Message & { thinking?: string })[]}
|
||||
isLoading={isChatLoading}
|
||||
isStreaming={isStreaming}
|
||||
{...(streamingMessageId != null ? { streamingMessageId } : {})}
|
||||
loadingQuip={loadingQuip}
|
||||
/>
|
||||
{isEmptyConversation ? (
|
||||
<ChatEmptyState onSuggestionClick={handleSuggestionClick} />
|
||||
) : (
|
||||
<MessageList
|
||||
messages={messages as (Message & { thinking?: string })[]}
|
||||
isLoading={isChatLoading}
|
||||
isStreaming={isStreaming}
|
||||
{...(streamingMessageId != null ? { streamingMessageId } : {})}
|
||||
loadingQuip={loadingQuip}
|
||||
/>
|
||||
)}
|
||||
<div ref={messagesEndRef} />
|
||||
</div>
|
||||
</div>
|
||||
@@ -288,6 +329,10 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
|
||||
inputRef={inputRef}
|
||||
isStreaming={isStreaming}
|
||||
onStopStreaming={abortStream}
|
||||
onModelChange={setSelectedModel}
|
||||
onTemperatureChange={setTemperature}
|
||||
onMaxTokensChange={setMaxTokens}
|
||||
{...(suggestionValue !== undefined ? { externalValue: suggestionValue } : {})}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
Reference in New Issue
Block a user