feat: wire chat UI to backend APIs
- Created API clients for LLM chat (/api/llm/chat) and Ideas (/api/ideas)
- Implemented useChat hook for conversation state management
- Connected Chat component to backend with full CRUD operations
- Integrated ConversationSidebar with conversation fetching
- Added automatic conversation persistence after each message
- Integrated WebSocket for connection status
- Used existing better-auth for authentication
- All TypeScript strict mode compliant (no any types)

Deliverables:
✅ Working chat interface at /chat route
✅ Conversations save to database via Ideas API
✅ Real-time WebSocket connection
✅ Clean TypeScript (no errors)
✅ Full conversation loading and persistence

See CHAT_INTEGRATION_SUMMARY.md for detailed documentation.
This commit is contained in:
57
apps/web/src/lib/api/chat.ts
Normal file
57
apps/web/src/lib/api/chat.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
/**
|
||||
* Chat API client
|
||||
* Handles LLM chat interactions via /api/llm/chat
|
||||
*/
|
||||
|
||||
import { apiPost } from "./client";
|
||||
|
||||
/**
 * A single message in a chat conversation exchanged with the LLM.
 */
export interface ChatMessage {
  /** Origin of the message: system prompt, end user, or model reply. */
  role: "system" | "user" | "assistant";
  /** Plain-text body of the message. */
  content: string;
}
|
||||
|
||||
/**
 * Request payload sent to POST /api/llm/chat.
 */
export interface ChatRequest {
  /** Identifier of the model to run the chat against. */
  model: string;
  /** Conversation messages to send to the model. */
  messages: ChatMessage[];
  /** Request a streamed response. NOTE(review): client-side streaming is not implemented yet (see streamChatMessage). */
  stream?: boolean;
  /** Sampling temperature — valid range not shown here; confirm against the backend. */
  temperature?: number;
  /** Cap on generated tokens — presumably maps to the backend's max-token limit; verify. */
  maxTokens?: number;
  /** Optional system prompt — presumably applied server-side before `messages`; verify against the API. */
  systemPrompt?: string;
}
|
||||
|
||||
/**
 * Response payload returned by POST /api/llm/chat.
 */
export interface ChatResponse {
  /** Model that produced the reply. */
  model: string;
  /** The model's reply. */
  message: {
    role: "assistant";
    content: string;
  };
  /** Completion flag — presumably true once generation has finished; confirm with the backend. */
  done: boolean;
  /** Total generation duration — units not shown here (Ollama reports nanoseconds); confirm. */
  totalDuration?: number;
  /** Token count evaluated from the prompt — assumes Ollama-style metadata; verify. */
  promptEvalCount?: number;
  /** Token count generated in the reply — assumes Ollama-style metadata; verify. */
  evalCount?: number;
}
|
||||
|
||||
/**
|
||||
* Send a chat message to the LLM
|
||||
*/
|
||||
export async function sendChatMessage(request: ChatRequest): Promise<ChatResponse> {
|
||||
return apiPost<ChatResponse>("/api/llm/chat", request);
|
||||
}
|
||||
|
||||
/**
|
||||
* Stream a chat message from the LLM (not implemented yet)
|
||||
* TODO: Implement streaming support
|
||||
*/
|
||||
export function streamChatMessage(
|
||||
request: ChatRequest,
|
||||
onChunk: (chunk: string) => void,
|
||||
onComplete: () => void,
|
||||
onError: (error: Error) => void
|
||||
): void {
|
||||
// Streaming implementation would go here
|
||||
void request;
|
||||
void onChunk;
|
||||
void onComplete;
|
||||
void onError;
|
||||
throw new Error("Streaming not implemented yet");
|
||||
}
|
||||
Reference in New Issue
Block a user