stack/apps/api/src/llm/llm.controller.ts
Jason Woltje 1f97e6de40 feat(#127): refactor LlmService to use provider pattern
Refactor LlmService to delegate to LlmManagerService instead of using
Ollama directly. This enables support for multiple providers and
user-specific provider configuration.

Changes:
- Remove direct Ollama client from LlmService
- Delegate all LLM operations to provider via LlmManagerService
- Update health status to use provider-agnostic interface
- Add PrismaModule to LlmModule for manager service
- Maintain backward compatibility with existing API
- Achieve 89.74% test coverage

Fixes #127

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-31 12:33:56 -06:00
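
The commit message above describes LlmService delegating to a provider resolved by LlmManagerService rather than calling an Ollama client directly. A minimal sketch of that delegation pattern is shown below; the LlmProvider interface shape, the getProvider() method, and the return types are assumptions for illustration, not the actual API of this repository.

// Hypothetical sketch of the provider delegation described in the commit message.
// LlmProvider and LlmManagerService.getProvider() are assumed names, not the real API.
import { Injectable } from "@nestjs/common";

interface LlmProvider {
  checkHealth(): Promise<{ healthy: boolean; provider: string }>;
  listModels(): Promise<string[]>;
}

@Injectable()
class LlmManagerService {
  // Resolves the configured provider (Ollama, a hosted API, ...) for the current user.
  async getProvider(): Promise<LlmProvider> {
    throw new Error("provider lookup omitted in this sketch");
  }
}

@Injectable()
export class LlmService {
  constructor(private readonly manager: LlmManagerService) {}

  // Every operation delegates to whichever provider the manager resolves,
  // instead of talking to an Ollama client directly.
  async checkHealth() {
    const provider = await this.manager.getProvider();
    return provider.checkHealth();
  }

  async listModels() {
    const provider = await this.manager.getProvider();
    return provider.listModels();
  }
}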

48 lines
1.7 KiB
TypeScript

import { Controller, Post, Get, Body, Res, HttpCode, HttpStatus } from "@nestjs/common";
import { Response } from "express";
import { LlmService } from "./llm.service";
import { ChatRequestDto, ChatResponseDto, EmbedRequestDto, EmbedResponseDto } from "./dto";
import type { LlmProviderHealthStatus } from "./providers/llm-provider.interface";

@Controller("llm")
export class LlmController {
  constructor(private readonly llmService: LlmService) {}

  @Get("health")
  async health(): Promise<LlmProviderHealthStatus> {
    return this.llmService.checkHealth();
  }

  @Get("models")
  async listModels(): Promise<{ models: string[] }> {
    return { models: await this.llmService.listModels() };
  }

  @Post("chat")
  @HttpCode(HttpStatus.OK)
  async chat(
    @Body() req: ChatRequestDto,
    @Res({ passthrough: true }) res: Response
  ): Promise<ChatResponseDto | undefined> {
    if (req.stream === true) {
      // Stream the response as Server-Sent Events, bypassing Nest's JSON serialization.
      res.setHeader("Content-Type", "text/event-stream");
      res.setHeader("Cache-Control", "no-cache");
      res.setHeader("Connection", "keep-alive");
      res.setHeader("X-Accel-Buffering", "no"); // disable proxy buffering (e.g. nginx)
      try {
        for await (const chunk of this.llmService.chatStream(req)) {
          res.write("data: " + JSON.stringify(chunk) + "\n\n");
        }
        res.write("data: [DONE]\n\n");
        res.end();
      } catch (e: unknown) {
        // Surface stream errors as a final SSE event rather than a dropped connection.
        res.write(
          "data: " + JSON.stringify({ error: e instanceof Error ? e.message : String(e) }) + "\n\n"
        );
        res.end();
      }
      return;
    }
    return this.llmService.chat(req);
  }

  @Post("embed")
  @HttpCode(HttpStatus.OK)
  async embed(@Body() req: EmbedRequestDto): Promise<EmbedResponseDto> {
    return this.llmService.embed(req);
  }
}
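
Given the SSE framing used by the chat handler above (each chunk on a "data:" line, terminated by a "[DONE]" marker), a client could consume the stream roughly as follows. The base URL and the request body fields (model, messages, stream) are assumptions for illustration, not taken from the actual ChatRequestDto.

// Hypothetical client for the streaming chat endpoint; URL and body fields are assumed.
async function streamChat(prompt: string): Promise<void> {
  const res = await fetch("http://localhost:3000/llm/chat", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      model: "llama3",
      messages: [{ role: "user", content: prompt }],
      stream: true,
    }),
  });
  if (!res.ok || !res.body) throw new Error(`chat request failed: ${res.status}`);

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let buffer = "";

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    // SSE events are separated by a blank line; each event is "data: <payload>".
    const events = buffer.split("\n\n");
    buffer = events.pop() ?? "";
    for (const event of events) {
      const data = event.replace(/^data: /, "").trim();
      if (data === "[DONE]") return;   // terminal marker written by the controller
      console.log(JSON.parse(data));   // one streamed chunk
    }
  }
}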