Compare commits

1 commit

Comparing feature/ch… ... fix/ms21-m…

| Author | SHA1 | Date |
|---|---|---|
|  | b172b5839c |  |
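Taken together, the hunks below remove the unauthenticated guest-chat path end to end. On the server, the `POST /api/chat/guest` endpoint and the `ChatProxyService.proxyGuestChat` method disappear along with the `GUEST_LLM_*` configuration and the `ConfigService` dependency; authentication moves to a class-level `@UseGuards(AuthGuard)` on `ChatProxyController`, and the defensive missing-user check now throws `UnauthorizedException`. On the client, the `streamGuestChat` helper is deleted, the chat input is disabled for signed-out users (`disabled={isChatLoading || !user}`), and `useChat` handles streaming failures with a single non-streaming `sendChatMessage` fallback instead of the old auth-error detection and guest-mode retry.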
```diff
@@ -1,4 +1,14 @@
-import { Body, Controller, HttpException, Logger, Post, Req, Res, UseGuards } from "@nestjs/common";
+import {
+  Body,
+  Controller,
+  HttpException,
+  Logger,
+  Post,
+  Req,
+  Res,
+  UnauthorizedException,
+  UseGuards,
+} from "@nestjs/common";
 import type { Response } from "express";
 import { AuthGuard } from "../auth/guards/auth.guard";
 import type { MaybeAuthenticatedRequest } from "../auth/types/better-auth-request.interface";
```
```diff
@@ -6,72 +16,16 @@ import { ChatStreamDto } from "./chat-proxy.dto";
 import { ChatProxyService } from "./chat-proxy.service";
 
 @Controller("chat")
+@UseGuards(AuthGuard)
 export class ChatProxyController {
   private readonly logger = new Logger(ChatProxyController.name);
 
   constructor(private readonly chatProxyService: ChatProxyService) {}
 
-  // POST /api/chat/guest
-  // Guest chat endpoint - no authentication required
-  // Uses a shared LLM configuration for unauthenticated users
-  @Post("guest")
-  async guestChat(
-    @Body() body: ChatStreamDto,
-    @Req() req: MaybeAuthenticatedRequest,
-    @Res() res: Response
-  ): Promise<void> {
-    const abortController = new AbortController();
-    req.once("close", () => {
-      abortController.abort();
-    });
-
-    res.setHeader("Content-Type", "text/event-stream");
-    res.setHeader("Cache-Control", "no-cache");
-    res.setHeader("Connection", "keep-alive");
-    res.setHeader("X-Accel-Buffering", "no");
-
-    try {
-      const upstreamResponse = await this.chatProxyService.proxyGuestChat(
-        body.messages,
-        abortController.signal
-      );
-
-      const upstreamContentType = upstreamResponse.headers.get("content-type");
-      if (upstreamContentType) {
-        res.setHeader("Content-Type", upstreamContentType);
-      }
-
-      if (!upstreamResponse.body) {
-        throw new Error("LLM response did not include a stream body");
-      }
-
-      for await (const chunk of upstreamResponse.body as unknown as AsyncIterable<Uint8Array>) {
-        if (res.writableEnded || res.destroyed) {
-          break;
-        }
-
-        res.write(Buffer.from(chunk));
-      }
-    } catch (error: unknown) {
-      this.logStreamError(error);
-
-      if (!res.writableEnded && !res.destroyed) {
-        res.write("event: error\n");
-        res.write(`data: ${JSON.stringify({ error: this.toSafeClientMessage(error) })}\n\n`);
-      }
-    } finally {
-      if (!res.writableEnded && !res.destroyed) {
-        res.end();
-      }
-    }
-  }
-
   // POST /api/chat/stream
   // Request: { messages: Array<{role, content}> }
   // Response: SSE stream of chat completion events
-  // Requires authentication - uses user's personal OpenClaw container
   @Post("stream")
-  @UseGuards(AuthGuard)
   async streamChat(
     @Body() body: ChatStreamDto,
     @Req() req: MaybeAuthenticatedRequest,
```
```diff
@@ -79,8 +33,7 @@ export class ChatProxyController {
   ): Promise<void> {
     const userId = req.user?.id;
     if (!userId) {
-      this.logger.warn("streamChat called without user ID after AuthGuard");
-      throw new HttpException("Authentication required", 401);
+      throw new UnauthorizedException("No authenticated user found on request");
     }
 
     const abortController = new AbortController();
```
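The two controller-side changes are small but easy to misread in diff form. Here is a minimal sketch of the resulting pattern, assuming NestJS's standard guard semantics; `EchoController` and its route are hypothetical, while `AuthGuard` and the request type are the project's own:

```typescript
import { Controller, Get, Req, UnauthorizedException, UseGuards } from "@nestjs/common";
import { AuthGuard } from "../auth/guards/auth.guard";
import type { MaybeAuthenticatedRequest } from "../auth/types/better-auth-request.interface";

@Controller("echo")
@UseGuards(AuthGuard) // class-level guard: applies to every handler below
export class EchoController {
  @Get() // no per-method @UseGuards needed any more
  whoami(@Req() req: MaybeAuthenticatedRequest): { userId: string } {
    const userId = req.user?.id;
    if (!userId) {
      // Should be unreachable behind AuthGuard; kept as a typed 401 backstop.
      throw new UnauthorizedException("No authenticated user found on request");
    }
    return { userId };
  }
}
```

A class-level `@UseGuards` runs the guard for every handler in the class, which is why the per-method decorator on `streamChat` could be dropped; `UnauthorizedException` yields the same 401 status as `new HttpException("Authentication required", 401)`, with NestJS's standard `{ statusCode, message, error }` body.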
```diff
@@ -4,14 +4,11 @@ import {
   Logger,
   ServiceUnavailableException,
 } from "@nestjs/common";
-import { ConfigService } from "@nestjs/config";
 import { ContainerLifecycleService } from "../container-lifecycle/container-lifecycle.service";
 import { PrismaService } from "../prisma/prisma.service";
 import type { ChatMessage } from "./chat-proxy.dto";
 
 const DEFAULT_OPENCLAW_MODEL = "openclaw:default";
-const DEFAULT_GUEST_LLM_URL = "http://10.1.1.42:11434/v1";
-const DEFAULT_GUEST_LLM_MODEL = "llama3.2";
 
 interface ContainerConnection {
   url: string;
```
```diff
@@ -24,8 +21,7 @@ export class ChatProxyService {
 
   constructor(
     private readonly prisma: PrismaService,
-    private readonly containerLifecycle: ContainerLifecycleService,
-    private readonly config: ConfigService
+    private readonly containerLifecycle: ContainerLifecycleService
   ) {}
 
   // Get the user's OpenClaw container URL and mark it active.
```
```diff
@@ -83,65 +79,6 @@ export class ChatProxyService {
     }
   }
 
-  /**
-   * Proxy guest chat request to configured LLM endpoint.
-   * Uses environment variables for configuration:
-   * - GUEST_LLM_URL: OpenAI-compatible endpoint URL
-   * - GUEST_LLM_API_KEY: API key (optional, for cloud providers)
-   * - GUEST_LLM_MODEL: Model name to use
-   */
-  async proxyGuestChat(messages: ChatMessage[], signal?: AbortSignal): Promise<Response> {
-    const llmUrl = this.config.get<string>("GUEST_LLM_URL") ?? DEFAULT_GUEST_LLM_URL;
-    const llmApiKey = this.config.get<string>("GUEST_LLM_API_KEY");
-    const llmModel = this.config.get<string>("GUEST_LLM_MODEL") ?? DEFAULT_GUEST_LLM_MODEL;
-
-    const headers: Record<string, string> = {
-      "Content-Type": "application/json",
-    };
-
-    if (llmApiKey) {
-      headers.Authorization = `Bearer ${llmApiKey}`;
-    }
-
-    const requestInit: RequestInit = {
-      method: "POST",
-      headers,
-      body: JSON.stringify({
-        messages,
-        model: llmModel,
-        stream: true,
-      }),
-    };
-
-    if (signal) {
-      requestInit.signal = signal;
-    }
-
-    try {
-      this.logger.debug(`Guest chat proxying to ${llmUrl} with model ${llmModel}`);
-      const response = await fetch(`${llmUrl}/chat/completions`, requestInit);
-
-      if (!response.ok) {
-        const detail = await this.readResponseText(response);
-        const status = `${String(response.status)} ${response.statusText}`.trim();
-        this.logger.warn(
-          detail ? `Guest LLM returned ${status}: ${detail}` : `Guest LLM returned ${status}`
-        );
-        throw new BadGatewayException(`Guest LLM returned ${status}`);
-      }
-
-      return response;
-    } catch (error: unknown) {
-      if (error instanceof BadGatewayException) {
-        throw error;
-      }
-
-      const message = error instanceof Error ? error.message : String(error);
-      this.logger.warn(`Failed to proxy guest chat request: ${message}`);
-      throw new ServiceUnavailableException("Failed to proxy guest chat to LLM");
-    }
-  }
-
   private async getContainerConnection(userId: string): Promise<ContainerConnection> {
     const connection = await this.containerLifecycle.ensureRunning(userId);
     await this.containerLifecycle.touch(userId);
```
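The deleted `proxyGuestChat` is also a compact illustration of the upstream error-mapping convention this service uses: a reachable-but-failing upstream becomes 502 Bad Gateway, while a transport failure becomes 503 Service Unavailable. A condensed sketch of that pattern, with `callUpstream` as a hypothetical stand-in for the `fetch` call:

```typescript
import { BadGatewayException, ServiceUnavailableException } from "@nestjs/common";

async function proxyWithMappedErrors(callUpstream: () => Promise<Response>): Promise<Response> {
  try {
    const response = await callUpstream();
    if (!response.ok) {
      // The upstream answered, but unsuccessfully: surface it as a gateway error.
      throw new BadGatewayException(`Upstream returned ${String(response.status)}`);
    }
    return response;
  } catch (error: unknown) {
    if (error instanceof BadGatewayException) {
      throw error; // already mapped above
    }
    // The upstream never answered (DNS failure, refused connection, abort, ...).
    throw new ServiceUnavailableException("Failed to reach upstream LLM");
  }
}
```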
```diff
@@ -352,7 +352,7 @@ export const Chat = forwardRef<ChatRef, ChatProps>(function Chat(
       <div className="mx-auto max-w-4xl px-4 py-4 lg:px-8">
         <ChatInput
           onSend={handleSendMessage}
-          disabled={isChatLoading}
+          disabled={isChatLoading || !user}
           inputRef={inputRef}
           isStreaming={isStreaming}
           onStopStreaming={abortStream}
```
```diff
@@ -7,7 +7,6 @@ import { useState, useCallback, useRef } from "react";
 import {
   sendChatMessage,
   streamChatMessage,
-  streamGuestChat,
   type ChatMessage as ApiChatMessage,
 } from "@/lib/api/chat";
 import { createConversation, updateConversation, getIdea, type Idea } from "@/lib/api/ideas";
```
```diff
@@ -279,131 +278,68 @@ export function useChat(options: UseChatOptions = {}): UseChatReturn {
         return;
       }
 
-      // Streaming failed - check if auth error, try guest mode
-      const isAuthError =
-        err instanceof Error &&
-        (err.message.includes("403") ||
-          err.message.includes("401") ||
-          err.message.includes("auth") ||
-          err.message.includes("Forbidden"));
-
-      if (isAuthError) {
-        console.warn("Auth failed, trying guest chat mode");
-
-        // Try guest chat streaming
-        try {
-          await new Promise<void>((guestResolve, guestReject) => {
-            let hasReceivedData = false;
-
-            streamGuestChat(
-              request,
-              (chunk: string) => {
-                if (!hasReceivedData) {
-                  hasReceivedData = true;
-                  setIsLoading(false);
-                  setIsStreaming(true);
-                  setMessages((prev) => {
-                    const updated = [...prev, { ...placeholderMessage }];
-                    messagesRef.current = updated;
-                    return updated;
-                  });
-                }
-
-                setMessages((prev) => {
-                  const updated = prev.map((msg) =>
-                    msg.id === assistantMessageId ? { ...msg, content: msg.content + chunk } : msg
-                  );
-                  messagesRef.current = updated;
-                  return updated;
-                });
-              },
-              () => {
-                streamingSucceeded = true;
-                setIsStreaming(false);
-                guestResolve();
-              },
-              (guestErr: Error) => {
-                guestReject(guestErr);
-              },
-              controller.signal
-            );
-          });
-        } catch (guestErr: unknown) {
-          // Guest also failed
-          setMessages((prev) => {
-            const withoutPlaceholder = prev.filter((m) => m.id !== assistantMessageId);
-            messagesRef.current = withoutPlaceholder;
-            return withoutPlaceholder;
-          });
-          const errorMsg = guestErr instanceof Error ? guestErr.message : "Chat unavailable";
-          setError(`Unable to connect to chat: ${errorMsg}`);
-          setIsLoading(false);
-          return;
-        }
-      } else {
-        // Streaming failed — fall back to non-streaming
-        console.warn("Streaming failed, falling back to non-streaming", {
-          error: err instanceof Error ? err : new Error(String(err)),
-        });
-
-        setMessages((prev) => {
-          const withoutPlaceholder = prev.filter((m) => m.id !== assistantMessageId);
-          messagesRef.current = withoutPlaceholder;
-          return withoutPlaceholder;
-        });
-        setIsStreaming(false);
-
-        try {
-          const response = await sendChatMessage(request);
-
-          const assistantMessage: Message = {
-            id: `assistant-${Date.now().toString()}`,
-            role: "assistant",
-            content: response.message.content,
-            createdAt: new Date().toISOString(),
-            model: response.model,
-            promptTokens: response.promptEvalCount ?? 0,
-            completionTokens: response.evalCount ?? 0,
-            totalTokens: (response.promptEvalCount ?? 0) + (response.evalCount ?? 0),
-          };
-
-          setMessages((prev) => {
-            const updated = [...prev, assistantMessage];
-            messagesRef.current = updated;
-            return updated;
-          });
-
-          streamingSucceeded = true;
-        } catch (fallbackErr: unknown) {
-          const errorMsg =
-            fallbackErr instanceof Error ? fallbackErr.message : "Failed to send message";
-          setError("Unable to send message. Please try again.");
-          onError?.(fallbackErr instanceof Error ? fallbackErr : new Error(errorMsg));
-          console.error("Failed to send chat message", {
-            error: fallbackErr,
-            errorType: "LLM_ERROR",
-            conversationId: conversationIdRef.current,
-            messageLength: content.length,
-            messagePreview: content.substring(0, 50),
-            model,
-            messageCount: messagesRef.current.length,
-            timestamp: new Date().toISOString(),
-          });
-
-          const errorMessage: Message = {
-            id: `error-${String(Date.now())}`,
-            role: "assistant",
-            content: "Something went wrong. Please try again.",
-            createdAt: new Date().toISOString(),
-          };
-          setMessages((prev) => {
-            const updated = [...prev, errorMessage];
-            messagesRef.current = updated;
-            return updated;
-          });
-          setIsLoading(false);
-          return;
-        }
-      }
+      // Streaming failed — fall back to non-streaming
+      console.warn("Streaming failed, falling back to non-streaming", {
+        error: err instanceof Error ? err : new Error(String(err)),
+      });
+
+      setMessages((prev) => {
+        const withoutPlaceholder = prev.filter((m) => m.id !== assistantMessageId);
+        messagesRef.current = withoutPlaceholder;
+        return withoutPlaceholder;
+      });
+      setIsStreaming(false);
+
+      try {
+        const response = await sendChatMessage(request);
+
+        const assistantMessage: Message = {
+          id: `assistant-${Date.now().toString()}`,
+          role: "assistant",
+          content: response.message.content,
+          createdAt: new Date().toISOString(),
+          model: response.model,
+          promptTokens: response.promptEvalCount ?? 0,
+          completionTokens: response.evalCount ?? 0,
+          totalTokens: (response.promptEvalCount ?? 0) + (response.evalCount ?? 0),
+        };
+
+        setMessages((prev) => {
+          const updated = [...prev, assistantMessage];
+          messagesRef.current = updated;
+          return updated;
+        });
+
+        streamingSucceeded = true;
+      } catch (fallbackErr: unknown) {
+        const errorMsg =
+          fallbackErr instanceof Error ? fallbackErr.message : "Failed to send message";
+        setError("Unable to send message. Please try again.");
+        onError?.(fallbackErr instanceof Error ? fallbackErr : new Error(errorMsg));
+        console.error("Failed to send chat message", {
+          error: fallbackErr,
+          errorType: "LLM_ERROR",
+          conversationId: conversationIdRef.current,
+          messageLength: content.length,
+          messagePreview: content.substring(0, 50),
+          model,
+          messageCount: messagesRef.current.length,
+          timestamp: new Date().toISOString(),
+        });
+
+        const errorMessage: Message = {
+          id: `error-${String(Date.now())}`,
+          role: "assistant",
+          content: "Something went wrong. Please try again.",
+          createdAt: new Date().toISOString(),
+        };
+        setMessages((prev) => {
+          const updated = [...prev, errorMessage];
+          messagesRef.current = updated;
+          return updated;
+        });
+        setIsLoading(false);
+        return;
+      }
     }
   }
 
```
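The replacement logic is much shorter than the code it removes: on any streaming failure, drop the optimistic placeholder message and retry once through the non-streaming endpoint. A self-contained sketch of that shape, with simplified stand-in types for the hook's real state setters and API client:

```typescript
type Message = { id: string; role: "assistant" | "user"; content: string };

interface FallbackDeps {
  sendChatMessage: (history: Message[]) => Promise<{ content: string }>;
  setMessages: (update: (prev: Message[]) => Message[]) => void;
  setError: (msg: string) => void;
}

async function fallBackToNonStreaming(
  deps: FallbackDeps,
  history: Message[],
  placeholderId: string
): Promise<void> {
  console.warn("Streaming failed, falling back to non-streaming");

  // Drop the optimistic placeholder so the fallback reply is not duplicated.
  deps.setMessages((prev) => prev.filter((m) => m.id !== placeholderId));

  try {
    // One plain request instead of the old auth-check / guest-mode branching.
    const response = await deps.sendChatMessage(history);
    deps.setMessages((prev) => [
      ...prev,
      { id: `assistant-${Date.now().toString()}`, role: "assistant", content: response.content },
    ]);
  } catch {
    deps.setError("Unable to send message. Please try again.");
  }
}
```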
```diff
@@ -92,141 +92,6 @@ async function ensureCsrfTokenForStream(): Promise<string> {
   return fetchCsrfToken();
 }
 
-/**
- * Stream a guest chat message (no authentication required).
- * Uses /api/chat/guest endpoint with shared LLM configuration.
- *
- * @param request - Chat request
- * @param onChunk - Called with each token string as it arrives
- * @param onComplete - Called when the stream finishes successfully
- * @param onError - Called if the stream encounters an error
- * @param signal - Optional AbortSignal for cancellation
- */
-export function streamGuestChat(
-  request: ChatRequest,
-  onChunk: (chunk: string) => void,
-  onComplete: () => void,
-  onError: (error: Error) => void,
-  signal?: AbortSignal
-): void {
-  void (async (): Promise<void> => {
-    try {
-      const response = await fetch(`${API_BASE_URL}/api/chat/guest`, {
-        method: "POST",
-        headers: {
-          "Content-Type": "application/json",
-        },
-        credentials: "include",
-        body: JSON.stringify({ messages: request.messages, stream: true }),
-        signal: signal ?? null,
-      });
-
-      if (!response.ok) {
-        const errorText = await response.text().catch(() => response.statusText);
-        throw new Error(`Guest chat failed: ${errorText}`);
-      }
-
-      if (!response.body) {
-        throw new Error("Response body is not readable");
-      }
-
-      const reader = response.body.getReader();
-      const decoder = new TextDecoder("utf-8");
-      let buffer = "";
-
-      let readerDone = false;
-      while (!readerDone) {
-        const { done, value } = await reader.read();
-        readerDone = done;
-        if (done) {
-          break;
-        }
-
-        buffer += decoder.decode(value, { stream: true });
-
-        // SSE messages are separated by double newlines
-        const parts = buffer.split("\n\n");
-        buffer = parts.pop() ?? "";
-
-        for (const part of parts) {
-          const trimmed = part.trim();
-          if (!trimmed) continue;
-
-          // Handle event: error format
-          const eventMatch = /^event:\s*(\S+)\n/i.exec(trimmed);
-          const dataMatch = /^data:\s*(.+)$/im.exec(trimmed);
-
-          if (eventMatch?.[1] === "error" && dataMatch?.[1]) {
-            try {
-              const errorData = JSON.parse(dataMatch[1].trim()) as {
-                error?: string;
-              };
-              throw new Error(errorData.error ?? "Stream error occurred");
-            } catch (parseErr) {
-              if (parseErr instanceof SyntaxError) {
-                throw new Error("Stream error occurred");
-              }
-              throw parseErr;
-            }
-          }
-
-          // Standard SSE format: data: {...}
-          for (const line of trimmed.split("\n")) {
-            if (!line.startsWith("data: ")) continue;
-
-            const data = line.slice("data: ".length).trim();
-
-            if (data === "[DONE]") {
-              onComplete();
-              return;
-            }
-
-            try {
-              const parsed: unknown = JSON.parse(data);
-
-              // Handle OpenAI format
-              const openAiChunk = parsed as OpenAiSseChunk;
-              if (openAiChunk.choices?.[0]?.delta?.content) {
-                onChunk(openAiChunk.choices[0].delta.content);
-                continue;
-              }
-
-              // Handle simple token format
-              const simpleChunk = parsed as SimpleTokenChunk;
-              if (simpleChunk.token) {
-                onChunk(simpleChunk.token);
-                continue;
-              }
-
-              if (simpleChunk.done === true) {
-                onComplete();
-                return;
-              }
-
-              const error = openAiChunk.error ?? simpleChunk.error;
-              if (error) {
-                throw new Error(error);
-              }
-            } catch (parseErr) {
-              if (parseErr instanceof SyntaxError) {
-                continue;
-              }
-              throw parseErr;
-            }
-          }
-        }
-      }
-
-      onComplete();
-    } catch (err: unknown) {
-      if (err instanceof DOMException && err.name === "AbortError") {
-        return;
-      }
-      onError(err instanceof Error ? err : new Error(String(err)));
-    }
-  })();
-}
-
 /**
  * Stream a chat message from the LLM using SSE over fetch.
  *
```
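The deleted helper also carried the hand-rolled SSE framing logic: events separated by blank lines, payload lines prefixed with `data: `, and an OpenAI-style `[DONE]` sentinel. A generic sketch of just that framing, independent of the project's retained `streamChatMessage` implementation:

```typescript
// Feed each network chunk into an accumulating buffer, then call this to
// extract complete SSE events; it returns the unconsumed tail so a partial
// event can be completed by the next chunk.
function parseSseBuffer(buffer: string, onData: (json: string) => void): string {
  const parts = buffer.split("\n\n"); // SSE events end with a blank line
  const remainder = parts.pop() ?? ""; // keep any incomplete trailing event

  for (const part of parts) {
    for (const line of part.split("\n")) {
      if (!line.startsWith("data: ")) continue;
      const data = line.slice("data: ".length).trim();
      if (data === "[DONE]") return remainder; // terminal sentinel, stop parsing
      onData(data); // typically JSON.parse'd by the caller
    }
  }
  return remainder;
}
```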