"use client"; import type { KeyboardEvent, RefObject } from "react"; import { useCallback, useState, useEffect, useRef } from "react"; export const AVAILABLE_MODELS = [ { id: "llama3.2", label: "Llama 3.2" }, { id: "claude-3.5-sonnet", label: "Claude 3.5 Sonnet" }, { id: "gpt-4o", label: "GPT-4o" }, { id: "deepseek-r1", label: "DeepSeek R1" }, ] as const; export type ModelId = (typeof AVAILABLE_MODELS)[number]["id"]; const STORAGE_KEY_MODEL = "chat:selectedModel"; const STORAGE_KEY_TEMPERATURE = "chat:temperature"; const STORAGE_KEY_MAX_TOKENS = "chat:maxTokens"; export const DEFAULT_TEMPERATURE = 0.7; export const DEFAULT_MAX_TOKENS = 4096; export const DEFAULT_MODEL: ModelId = "llama3.2"; function loadStoredModel(): ModelId { try { const stored = localStorage.getItem(STORAGE_KEY_MODEL); if (stored && AVAILABLE_MODELS.some((m) => m.id === stored)) { return stored as ModelId; } } catch { // localStorage not available } return DEFAULT_MODEL; } function loadStoredTemperature(): number { try { const stored = localStorage.getItem(STORAGE_KEY_TEMPERATURE); if (stored !== null) { const parsed = parseFloat(stored); if (!isNaN(parsed) && parsed >= 0 && parsed <= 2) { return parsed; } } } catch { // localStorage not available } return DEFAULT_TEMPERATURE; } function loadStoredMaxTokens(): number { try { const stored = localStorage.getItem(STORAGE_KEY_MAX_TOKENS); if (stored !== null) { const parsed = parseInt(stored, 10); if (!isNaN(parsed) && parsed >= 100 && parsed <= 32000) { return parsed; } } } catch { // localStorage not available } return DEFAULT_MAX_TOKENS; } interface ChatInputProps { onSend: (message: string) => void; disabled?: boolean; inputRef?: RefObject; isStreaming?: boolean; onStopStreaming?: () => void; onModelChange?: (model: ModelId) => void; onTemperatureChange?: (temperature: number) => void; onMaxTokensChange?: (maxTokens: number) => void; onSuggestionFill?: (text: string) => void; externalValue?: string; } export function ChatInput({ onSend, 
disabled, inputRef, isStreaming = false, onStopStreaming, onModelChange, onTemperatureChange, onMaxTokensChange, externalValue, }: ChatInputProps): React.JSX.Element { const [message, setMessage] = useState(""); const [version, setVersion] = useState(null); const [selectedModel, setSelectedModel] = useState(DEFAULT_MODEL); const [temperature, setTemperature] = useState(DEFAULT_TEMPERATURE); const [maxTokens, setMaxTokens] = useState(DEFAULT_MAX_TOKENS); const [isModelDropdownOpen, setIsModelDropdownOpen] = useState(false); const [isParamsOpen, setIsParamsOpen] = useState(false); const modelDropdownRef = useRef(null); const paramsDropdownRef = useRef(null); // Stable refs for callbacks so the mount effect stays dependency-free const onModelChangeRef = useRef(onModelChange); onModelChangeRef.current = onModelChange; const onTemperatureChangeRef = useRef(onTemperatureChange); onTemperatureChangeRef.current = onTemperatureChange; const onMaxTokensChangeRef = useRef(onMaxTokensChange); onMaxTokensChangeRef.current = onMaxTokensChange; // Load persisted values from localStorage on mount only useEffect(() => { const storedModel = loadStoredModel(); const storedTemperature = loadStoredTemperature(); const storedMaxTokens = loadStoredMaxTokens(); setSelectedModel(storedModel); setTemperature(storedTemperature); setMaxTokens(storedMaxTokens); // Notify parent of initial values via refs to avoid stale closure onModelChangeRef.current?.(storedModel); onTemperatureChangeRef.current?.(storedTemperature); onMaxTokensChangeRef.current?.(storedMaxTokens); }, []); // Sync external value (e.g. from suggestion clicks) useEffect(() => { if (externalValue !== undefined) { setMessage(externalValue); } }, [externalValue]); useEffect(() => { interface VersionData { version?: string; commit?: string; } fetch("/version.json") .then((res) => res.json() as Promise) .then((data) => { if (data.version) { const fullVersion = data.commit ? 
`${data.version}+${data.commit}` : data.version; setVersion(fullVersion); } }) .catch(() => { // Silently fail - version display is non-critical }); }, []); // Close dropdowns on outside click useEffect(() => { const handleClickOutside = (e: MouseEvent): void => { if (modelDropdownRef.current && !modelDropdownRef.current.contains(e.target as Node)) { setIsModelDropdownOpen(false); } if (paramsDropdownRef.current && !paramsDropdownRef.current.contains(e.target as Node)) { setIsParamsOpen(false); } }; document.addEventListener("mousedown", handleClickOutside); return (): void => { document.removeEventListener("mousedown", handleClickOutside); }; }, []); const handleSubmit = useCallback(() => { if (message.trim() && !disabled && !isStreaming) { onSend(message); setMessage(""); } }, [message, onSend, disabled, isStreaming]); const handleStop = useCallback(() => { onStopStreaming?.(); }, [onStopStreaming]); const handleKeyDown = useCallback( (e: KeyboardEvent) => { if (e.key === "Enter" && !e.shiftKey) { e.preventDefault(); handleSubmit(); } if (e.key === "Enter" && (e.ctrlKey || e.metaKey)) { e.preventDefault(); handleSubmit(); } }, [handleSubmit] ); const handleModelSelect = useCallback( (model: ModelId): void => { setSelectedModel(model); try { localStorage.setItem(STORAGE_KEY_MODEL, model); } catch { // ignore } onModelChange?.(model); setIsModelDropdownOpen(false); }, [onModelChange] ); const handleTemperatureChange = useCallback( (value: number): void => { setTemperature(value); try { localStorage.setItem(STORAGE_KEY_TEMPERATURE, value.toString()); } catch { // ignore } onTemperatureChange?.(value); }, [onTemperatureChange] ); const handleMaxTokensChange = useCallback( (value: number): void => { setMaxTokens(value); try { localStorage.setItem(STORAGE_KEY_MAX_TOKENS, value.toString()); } catch { // ignore } onMaxTokensChange?.(value); }, [onMaxTokensChange] ); const selectedModelLabel = AVAILABLE_MODELS.find((m) => m.id === selectedModel)?.label ?? 
selectedModel; const characterCount = message.length; const maxCharacters = 4000; const isNearLimit = characterCount > maxCharacters * 0.9; const isOverLimit = characterCount > maxCharacters; const isInputDisabled = disabled ?? false; return (
{/* NOTE(review): the JSX below appears truncated — element tags (containers,
    buttons, range/number inputs) seem to have been stripped by tooling,
    leaving only JSX comments, expression children, and trailing attribute
    fragments (e.g. the bare `onChange`/`className` runs ending in `/>`).
    Recover this markup from version control before editing further. */}
{/* Model Selector + Params Row */}
{/* Model Selector */}
{/* Model Dropdown */} {isModelDropdownOpen && (
{AVAILABLE_MODELS.map((model) => ( ))}
)}
{/* Settings / Params Icon */}
{/* Params Popover */} {isParamsOpen && (

Parameters

{/* Temperature */}
{temperature.toFixed(1)}
{ handleTemperatureChange(parseFloat(e.target.value)); }} className="w-full h-1.5 rounded-full appearance-none cursor-pointer" style={{ accentColor: "rgb(var(--accent-primary))", backgroundColor: "rgb(var(--surface-2))", }} aria-label={`Temperature: ${temperature.toFixed(1)}`} />
Precise Creative
{/* Max Tokens */}
{ const val = parseInt(e.target.value, 10); if (!isNaN(val) && val >= 100 && val <= 32000) { handleMaxTokensChange(val); } }} className="w-full rounded-md border px-2.5 py-1.5 text-xs outline-none focus:ring-2" style={{ backgroundColor: "rgb(var(--surface-1))", borderColor: "rgb(var(--border-default))", color: "rgb(var(--text-primary))", }} aria-label="Maximum tokens" />

100 – 32,000

)}
{/* Input Container */}