feat(chat): add guest chat mode for unauthenticated users
Some checks failed
ci/woodpecker/push/ci Pipeline failed
- Add POST /api/chat/guest endpoint (no auth required)
- Add proxyGuestChat() method using configurable LLM endpoint
- Add streamGuestChat() function to frontend chat API
- Modify useChat to fall back to guest mode on auth errors (403/401)
- Remove !user check from ChatInput disabled prop
- Configure guest LLM via env vars: GUEST_LLM_URL, GUEST_LLM_API_KEY, GUEST_LLM_MODEL
- Default guest LLM: http://10.1.1.42:11434/v1 (Ollama) with llama3.2 model
This commit is contained in:
@@ -4,11 +4,14 @@ import {
|
||||
Logger,
|
||||
ServiceUnavailableException,
|
||||
} from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { ContainerLifecycleService } from "../container-lifecycle/container-lifecycle.service";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import type { ChatMessage } from "./chat-proxy.dto";
|
||||
|
||||
const DEFAULT_OPENCLAW_MODEL = "openclaw:default";
|
||||
const DEFAULT_GUEST_LLM_URL = "http://10.1.1.42:11434/v1";
|
||||
const DEFAULT_GUEST_LLM_MODEL = "llama3.2";
|
||||
|
||||
interface ContainerConnection {
|
||||
url: string;
|
||||
@@ -21,7 +24,8 @@ export class ChatProxyService {
|
||||
|
||||
/**
 * Wire up injected dependencies via NestJS parameter properties.
 *
 * @param prisma Prisma database access layer.
 * @param containerLifecycle Starts/touches per-user OpenClaw containers.
 * @param config Reads GUEST_LLM_* settings from the environment.
 */
constructor(
  private readonly prisma: PrismaService,
  private readonly containerLifecycle: ContainerLifecycleService,
  private readonly config: ConfigService
) {}
|
||||
|
||||
// Get the user's OpenClaw container URL and mark it active.
|
||||
@@ -79,6 +83,68 @@ export class ChatProxyService {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Proxy guest chat request to configured LLM endpoint.
|
||||
* Uses environment variables for configuration:
|
||||
* - GUEST_LLM_URL: OpenAI-compatible endpoint URL
|
||||
* - GUEST_LLM_API_KEY: API key (optional, for cloud providers)
|
||||
* - GUEST_LLM_MODEL: Model name to use
|
||||
*/
|
||||
async proxyGuestChat(
|
||||
messages: ChatMessage[],
|
||||
signal?: AbortSignal
|
||||
): Promise<Response> {
|
||||
const llmUrl = this.config.get<string>("GUEST_LLM_URL") ?? DEFAULT_GUEST_LLM_URL;
|
||||
const llmApiKey = this.config.get<string>("GUEST_LLM_API_KEY");
|
||||
const llmModel = this.config.get<string>("GUEST_LLM_MODEL") ?? DEFAULT_GUEST_LLM_MODEL;
|
||||
|
||||
const headers: Record<string, string> = {
|
||||
"Content-Type": "application/json",
|
||||
};
|
||||
|
||||
if (llmApiKey) {
|
||||
headers["Authorization"] = `Bearer ${llmApiKey}`;
|
||||
}
|
||||
|
||||
const requestInit: RequestInit = {
|
||||
method: "POST",
|
||||
headers,
|
||||
body: JSON.stringify({
|
||||
messages,
|
||||
model: llmModel,
|
||||
stream: true,
|
||||
}),
|
||||
};
|
||||
|
||||
if (signal) {
|
||||
requestInit.signal = signal;
|
||||
}
|
||||
|
||||
try {
|
||||
this.logger.debug(`Guest chat proxying to ${llmUrl} with model ${llmModel}`);
|
||||
const response = await fetch(`${llmUrl}/chat/completions`, requestInit);
|
||||
|
||||
if (!response.ok) {
|
||||
const detail = await this.readResponseText(response);
|
||||
const status = `${String(response.status)} ${response.statusText}`.trim();
|
||||
this.logger.warn(
|
||||
detail ? `Guest LLM returned ${status}: ${detail}` : `Guest LLM returned ${status}`
|
||||
);
|
||||
throw new BadGatewayException(`Guest LLM returned ${status}`);
|
||||
}
|
||||
|
||||
return response;
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof BadGatewayException) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
this.logger.warn(`Failed to proxy guest chat request: ${message}`);
|
||||
throw new ServiceUnavailableException("Failed to proxy guest chat to LLM");
|
||||
}
|
||||
}
|
||||
|
||||
private async getContainerConnection(userId: string): Promise<ContainerConnection> {
|
||||
const connection = await this.containerLifecycle.ensureRunning(userId);
|
||||
await this.containerLifecycle.touch(userId);
|
||||
|
||||
Reference in New Issue
Block a user