stack/apps/api/src/chat-proxy/chat-proxy.service.ts

import {
  BadGatewayException,
  Injectable,
  Logger,
  ServiceUnavailableException,
} from "@nestjs/common";
import { ConfigService } from "@nestjs/config";
import { ContainerLifecycleService } from "../container-lifecycle/container-lifecycle.service";
import { PrismaService } from "../prisma/prisma.service";
import type { ChatMessage } from "./chat-proxy.dto";
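
// Fallback values used when neither environment variables nor per-user settings supply
// them; the guest defaults appear to target a local Ollama-style OpenAI-compatible endpoint.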
const DEFAULT_OPENCLAW_MODEL = "openclaw:default";
const DEFAULT_GUEST_LLM_URL = "http://10.1.1.42:11434/v1";
const DEFAULT_GUEST_LLM_MODEL = "llama3.2";

interface ContainerConnection {
  url: string;
  token: string;
}
@Injectable()
export class ChatProxyService {
  private readonly logger = new Logger(ChatProxyService.name);

  constructor(
    private readonly prisma: PrismaService,
    private readonly containerLifecycle: ContainerLifecycleService,
    private readonly config: ConfigService
  ) {}

  // Get the user's OpenClaw container URL and mark it active.
  async getContainerUrl(userId: string): Promise<string> {
    const { url } = await this.getContainerConnection(userId);
    return url;
  }

  // Proxy chat request to OpenClaw.
  async proxyChat(
    userId: string,
    messages: ChatMessage[],
    signal?: AbortSignal
  ): Promise<Response> {
    const { url: containerUrl, token: gatewayToken } = await this.getContainerConnection(userId);
    const model = await this.getPreferredModel(userId);

    const requestInit: RequestInit = {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${gatewayToken}`,
      },
      body: JSON.stringify({
        messages,
        model,
        stream: true,
      }),
    };
    if (signal) {
      requestInit.signal = signal;
    }

    try {
      const response = await fetch(`${containerUrl}/v1/chat/completions`, requestInit);
      if (!response.ok) {
        const detail = await this.readResponseText(response);
        const status = `${String(response.status)} ${response.statusText}`.trim();
        this.logger.warn(
          detail ? `OpenClaw returned ${status}: ${detail}` : `OpenClaw returned ${status}`
        );
        throw new BadGatewayException(`OpenClaw returned ${status}`);
      }
      return response;
    } catch (error: unknown) {
      if (error instanceof BadGatewayException) {
        throw error;
      }
      const message = error instanceof Error ? error.message : String(error);
      this.logger.warn(`Failed to proxy chat request: ${message}`);
      throw new ServiceUnavailableException("Failed to proxy chat to OpenClaw");
    }
  }

  /**
   * Proxy guest chat request to configured LLM endpoint.
   * Uses environment variables for configuration:
   * - GUEST_LLM_URL: OpenAI-compatible endpoint URL
   * - GUEST_LLM_API_KEY: API key (optional, for cloud providers)
   * - GUEST_LLM_MODEL: Model name to use
   */
  async proxyGuestChat(messages: ChatMessage[], signal?: AbortSignal): Promise<Response> {
    const llmUrl = this.config.get<string>("GUEST_LLM_URL") ?? DEFAULT_GUEST_LLM_URL;
    const llmApiKey = this.config.get<string>("GUEST_LLM_API_KEY");
    const llmModel = this.config.get<string>("GUEST_LLM_MODEL") ?? DEFAULT_GUEST_LLM_MODEL;

    const headers: Record<string, string> = {
      "Content-Type": "application/json",
    };
    if (llmApiKey) {
      headers.Authorization = `Bearer ${llmApiKey}`;
    }

    const requestInit: RequestInit = {
      method: "POST",
      headers,
      body: JSON.stringify({
        messages,
        model: llmModel,
        stream: true,
      }),
    };
    if (signal) {
      requestInit.signal = signal;
    }

    try {
      this.logger.debug(`Guest chat proxying to ${llmUrl} with model ${llmModel}`);
      const response = await fetch(`${llmUrl}/chat/completions`, requestInit);
      if (!response.ok) {
        const detail = await this.readResponseText(response);
        const status = `${String(response.status)} ${response.statusText}`.trim();
        this.logger.warn(
          detail ? `Guest LLM returned ${status}: ${detail}` : `Guest LLM returned ${status}`
        );
        throw new BadGatewayException(`Guest LLM returned ${status}`);
      }
      return response;
    } catch (error: unknown) {
      if (error instanceof BadGatewayException) {
        throw error;
      }
      const message = error instanceof Error ? error.message : String(error);
      this.logger.warn(`Failed to proxy guest chat request: ${message}`);
      throw new ServiceUnavailableException("Failed to proxy guest chat to LLM");
    }
  }
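
  // Ensure the user's container is running and mark it active before returning its
  // connection details.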
  private async getContainerConnection(userId: string): Promise<ContainerConnection> {
    const connection = await this.containerLifecycle.ensureRunning(userId);
    await this.containerLifecycle.touch(userId);
    return connection;
  }
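
  // Resolve the user's configured primary model, falling back to the OpenClaw default
  // when no model is set.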
  private async getPreferredModel(userId: string): Promise<string> {
    const config = await this.prisma.userAgentConfig.findUnique({
      where: { userId },
      select: { primaryModel: true },
    });
    const primaryModel = config?.primaryModel?.trim();
    if (!primaryModel) {
      return DEFAULT_OPENCLAW_MODEL;
    }
    return primaryModel;
  }
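
  // Best-effort read of an error response body for logging; returns null when the body
  // is empty or cannot be read.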
  private async readResponseText(response: Response): Promise<string | null> {
    try {
      const text = (await response.text()).trim();
      return text.length > 0 ? text : null;
    } catch {
      return null;
    }
  }
}
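
// Illustrative usage only (not part of this service): a streaming controller endpoint
// could forward the proxied response roughly like this, assuming a Node/Express-style
// `res` object and an injected `chatProxy: ChatProxyService`:
//
//   const upstream = await chatProxy.proxyChat(userId, messages, abortController.signal);
//   res.setHeader("Content-Type", upstream.headers.get("content-type") ?? "text/event-stream");
//   if (upstream.body) {
//     for await (const chunk of upstream.body) {
//       res.write(chunk);
//     }
//   }
//   res.end();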