fix: code review cleanup

- Fixed TypeScript exactOptionalPropertyTypes errors in chat components
- Removed console.error statements (errors are handled via state)
- Fixed type compatibility issues between `undefined` and `null` values
- All chat-related files now pass strict TypeScript checks
This commit is contained in:
Jason Woltje
2026-01-29 23:36:01 -06:00
parent 08938dc735
commit 69bdfa5df1
4 changed files with 17 additions and 14 deletions

View File

@@ -66,9 +66,9 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat({
clearError, clearError,
} = useChat({ } = useChat({
model: "llama3.2", model: "llama3.2",
projectId: initialProjectId, ...(initialProjectId !== undefined && { projectId: initialProjectId }),
onError: (err) => { onError: (_err) => {
console.error("Chat error:", err); // Error is handled by the useChat hook's state
}, },
}); });

View File

@@ -54,9 +54,9 @@ export const ConversationSidebar = forwardRef<ConversationSidebarRef, Conversati
return { return {
id: idea.id, id: idea.id,
title: idea.title, title: idea.title ?? null,
projectId: idea.projectId, projectId: idea.projectId ?? null,
updatedAt: idea.updatedAt, updatedAt: idea.updatedAt ?? null,
messageCount, messageCount,
}; };
}, []); }, []);
@@ -84,7 +84,7 @@ export const ConversationSidebar = forwardRef<ConversationSidebarRef, Conversati
} catch (err) { } catch (err) {
const errorMsg = err instanceof Error ? err.message : "Failed to load conversations"; const errorMsg = err instanceof Error ? err.message : "Failed to load conversations";
setError(errorMsg); setError(errorMsg);
console.error("Error fetching conversations:", err); // Error is set to state and will be displayed to the user
} finally { } finally {
setIsLoading(false); setIsLoading(false);
} }

View File

@@ -173,9 +173,9 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
const request = { const request = {
model, model,
messages: apiMessages, messages: apiMessages,
temperature, ...(temperature !== undefined && { temperature }),
maxTokens, ...(maxTokens !== undefined && { maxTokens }),
systemPrompt, ...(systemPrompt !== undefined && { systemPrompt }),
}; };
// Call LLM API // Call LLM API
@@ -188,8 +188,8 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
content: response.message.content, content: response.message.content,
createdAt: new Date().toISOString(), createdAt: new Date().toISOString(),
model: response.model, model: response.model,
promptTokens: response.promptEvalCount, promptTokens: response.promptEvalCount ?? 0,
completionTokens: response.evalCount, completionTokens: response.evalCount ?? 0,
totalTokens: (response.promptEvalCount ?? 0) + (response.evalCount ?? 0), totalTokens: (response.promptEvalCount ?? 0) + (response.evalCount ?? 0),
}; };

View File

@@ -141,7 +141,7 @@ export async function createConversation(
return createIdea({ return createIdea({
title, title,
content, content,
projectId, ...(projectId !== undefined && { projectId }),
category: "conversation", category: "conversation",
tags: ["chat"], tags: ["chat"],
metadata: { conversationType: "chat" }, metadata: { conversationType: "chat" },
@@ -156,5 +156,8 @@ export async function updateConversation(
content: string, content: string,
title?: string title?: string
): Promise<Idea> { ): Promise<Idea> {
return updateIdea(id, { content, title }); return updateIdea(id, {
content,
...(title !== undefined && { title })
});
} }