All checks were successful
ci/woodpecker/push/ci Pipeline was successful
Co-authored-by: Jason Woltje <jason@diversecanvas.com> Co-committed-by: Jason Woltje <jason@diversecanvas.com>
227 lines
6.6 KiB
TypeScript
227 lines
6.6 KiB
TypeScript
import {
|
|
BadGatewayException,
|
|
Injectable,
|
|
Logger,
|
|
NotFoundException,
|
|
ServiceUnavailableException,
|
|
} from "@nestjs/common";
|
|
import { ConfigService } from "@nestjs/config";
|
|
import { ContainerLifecycleService } from "../container-lifecycle/container-lifecycle.service";
|
|
import { PrismaService } from "../prisma/prisma.service";
|
|
import type { ChatMessage } from "./chat-proxy.dto";
|
|
|
|
// Fallback model identifier used when neither the agent nor the user has a preferred model.
const DEFAULT_OPENCLAW_MODEL = "openclaw:default";

// Default OpenAI-compatible endpoint for guest (unauthenticated) chat.
// NOTE(review): port 11434 looks like a local Ollama instance — confirm before changing.
const DEFAULT_GUEST_LLM_URL = "http://10.1.1.42:11434/v1";

// Default model name sent to the guest LLM endpoint when GUEST_LLM_MODEL is unset.
const DEFAULT_GUEST_LLM_MODEL = "llama3.2";
|
|
|
|
/**
 * Connection details for a user's running OpenClaw container,
 * as returned by ContainerLifecycleService.ensureRunning.
 */
interface ContainerConnection {
  /** Base URL of the container's HTTP API. */
  url: string;
  /** Bearer token sent in the Authorization header of proxied requests. */
  token: string;
}
|
|
|
|
/**
 * Per-agent settings loaded from the userAgent table for a chat request.
 */
interface AgentConfig {
  /** Internal agent name; forwarded to OpenClaw as the `agent` field. */
  name: string;
  /** Human-readable name (selected from the DB; not sent to OpenClaw here). */
  displayName: string;
  /** Personality prompt; forwarded to OpenClaw as `agent_personality`. */
  personality: string;
  /** Agent-specific model; null falls back to the user's preferred model. */
  primaryModel: string | null;
}
|
|
|
|
@Injectable()
|
|
export class ChatProxyService {
|
|
private readonly logger = new Logger(ChatProxyService.name);
|
|
|
|
constructor(
|
|
private readonly prisma: PrismaService,
|
|
private readonly containerLifecycle: ContainerLifecycleService,
|
|
private readonly config: ConfigService
|
|
) {}
|
|
|
|
// Get the user's OpenClaw container URL and mark it active.
|
|
async getContainerUrl(userId: string): Promise<string> {
|
|
const { url } = await this.getContainerConnection(userId);
|
|
return url;
|
|
}
|
|
|
|
// Proxy chat request to OpenClaw.
|
|
async proxyChat(
|
|
userId: string,
|
|
messages: ChatMessage[],
|
|
signal?: AbortSignal,
|
|
agentName?: string
|
|
): Promise<Response> {
|
|
const { url: containerUrl, token: gatewayToken } = await this.getContainerConnection(userId);
|
|
|
|
// Get agent config if specified
|
|
let agentConfig: AgentConfig | null = null;
|
|
if (agentName) {
|
|
agentConfig = await this.getAgentConfig(userId, agentName);
|
|
}
|
|
|
|
const model = agentConfig?.primaryModel ?? (await this.getPreferredModel(userId));
|
|
|
|
const requestBody: Record<string, unknown> = {
|
|
messages,
|
|
model,
|
|
stream: true,
|
|
};
|
|
|
|
// Add agent config if available
|
|
if (agentConfig) {
|
|
requestBody.agent = agentConfig.name;
|
|
requestBody.agent_personality = agentConfig.personality;
|
|
}
|
|
|
|
const requestInit: RequestInit = {
|
|
method: "POST",
|
|
headers: {
|
|
"Content-Type": "application/json",
|
|
Authorization: `Bearer ${gatewayToken}`,
|
|
},
|
|
body: JSON.stringify(requestBody),
|
|
};
|
|
|
|
if (signal) {
|
|
requestInit.signal = signal;
|
|
}
|
|
|
|
try {
|
|
const response = await fetch(`${containerUrl}/v1/chat/completions`, requestInit);
|
|
|
|
if (!response.ok) {
|
|
const detail = await this.readResponseText(response);
|
|
const status = `${String(response.status)} ${response.statusText}`.trim();
|
|
this.logger.warn(
|
|
detail ? `OpenClaw returned ${status}: ${detail}` : `OpenClaw returned ${status}`
|
|
);
|
|
throw new BadGatewayException(`OpenClaw returned ${status}`);
|
|
}
|
|
|
|
return response;
|
|
} catch (error: unknown) {
|
|
if (error instanceof BadGatewayException) {
|
|
throw error;
|
|
}
|
|
|
|
const message = error instanceof Error ? error.message : String(error);
|
|
this.logger.warn(`Failed to proxy chat request: ${message}`);
|
|
throw new ServiceUnavailableException("Failed to proxy chat to OpenClaw");
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Proxy guest chat request to configured LLM endpoint.
|
|
* Uses environment variables for configuration:
|
|
* - GUEST_LLM_URL: OpenAI-compatible endpoint URL
|
|
* - GUEST_LLM_API_KEY: API key (optional, for cloud providers)
|
|
* - GUEST_LLM_MODEL: Model name to use
|
|
*/
|
|
async proxyGuestChat(messages: ChatMessage[], signal?: AbortSignal): Promise<Response> {
|
|
const llmUrl = this.config.get<string>("GUEST_LLM_URL") ?? DEFAULT_GUEST_LLM_URL;
|
|
const llmApiKey = this.config.get<string>("GUEST_LLM_API_KEY");
|
|
const llmModel = this.config.get<string>("GUEST_LLM_MODEL") ?? DEFAULT_GUEST_LLM_MODEL;
|
|
|
|
const headers: Record<string, string> = {
|
|
"Content-Type": "application/json",
|
|
};
|
|
|
|
if (llmApiKey) {
|
|
headers.Authorization = `Bearer ${llmApiKey}`;
|
|
}
|
|
|
|
const requestInit: RequestInit = {
|
|
method: "POST",
|
|
headers,
|
|
body: JSON.stringify({
|
|
messages,
|
|
model: llmModel,
|
|
stream: true,
|
|
}),
|
|
};
|
|
|
|
if (signal) {
|
|
requestInit.signal = signal;
|
|
}
|
|
|
|
try {
|
|
this.logger.debug(`Guest chat proxying to ${llmUrl} with model ${llmModel}`);
|
|
const response = await fetch(`${llmUrl}/chat/completions`, requestInit);
|
|
|
|
if (!response.ok) {
|
|
const detail = await this.readResponseText(response);
|
|
const status = `${String(response.status)} ${response.statusText}`.trim();
|
|
this.logger.warn(
|
|
detail ? `Guest LLM returned ${status}: ${detail}` : `Guest LLM returned ${status}`
|
|
);
|
|
throw new BadGatewayException(`Guest LLM returned ${status}`);
|
|
}
|
|
|
|
return response;
|
|
} catch (error: unknown) {
|
|
if (error instanceof BadGatewayException) {
|
|
throw error;
|
|
}
|
|
|
|
const message = error instanceof Error ? error.message : String(error);
|
|
this.logger.warn(`Failed to proxy guest chat request: ${message}`);
|
|
throw new ServiceUnavailableException("Failed to proxy guest chat to LLM");
|
|
}
|
|
}
|
|
|
|
private async getContainerConnection(userId: string): Promise<ContainerConnection> {
|
|
const connection = await this.containerLifecycle.ensureRunning(userId);
|
|
await this.containerLifecycle.touch(userId);
|
|
return connection;
|
|
}
|
|
|
|
private async getPreferredModel(userId: string): Promise<string> {
|
|
const config = await this.prisma.userAgentConfig.findUnique({
|
|
where: { userId },
|
|
select: { primaryModel: true },
|
|
});
|
|
|
|
const primaryModel = config?.primaryModel?.trim();
|
|
if (!primaryModel) {
|
|
return DEFAULT_OPENCLAW_MODEL;
|
|
}
|
|
|
|
return primaryModel;
|
|
}
|
|
|
|
private async readResponseText(response: Response): Promise<string | null> {
|
|
try {
|
|
const text = (await response.text()).trim();
|
|
return text.length > 0 ? text : null;
|
|
} catch {
|
|
return null;
|
|
}
|
|
}
|
|
|
|
private async getAgentConfig(userId: string, agentName: string): Promise<AgentConfig> {
|
|
const agent = await this.prisma.userAgent.findUnique({
|
|
where: { userId_name: { userId, name: agentName } },
|
|
select: {
|
|
name: true,
|
|
displayName: true,
|
|
personality: true,
|
|
primaryModel: true,
|
|
isActive: true,
|
|
},
|
|
});
|
|
|
|
if (!agent) {
|
|
throw new NotFoundException(`Agent "${agentName}" not found for user`);
|
|
}
|
|
|
|
if (!agent.isActive) {
|
|
throw new NotFoundException(`Agent "${agentName}" is not active`);
|
|
}
|
|
|
|
return {
|
|
name: agent.name,
|
|
displayName: agent.displayName,
|
|
personality: agent.personality,
|
|
primaryModel: agent.primaryModel,
|
|
};
|
|
}
|
|
}
|