feat(#21): implement ollama integration
- Add Ollama client library (ollama npm package)
- Create LlmService for chat completion and embeddings
- Support streaming responses via Server-Sent Events
- Add configuration via env vars (OLLAMA_HOST, OLLAMA_TIMEOUT)
- Create endpoints: GET /llm/health, GET /llm/models, POST /llm/chat, POST /llm/embed
- Replace old OllamaModule with new LlmModule
- Add comprehensive tests with >85% coverage

Closes #21
This commit is contained in:
5
apps/api/src/llm/llm.module.ts
Normal file
5
apps/api/src/llm/llm.module.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { LlmController } from "./llm.controller";
|
||||
import { LlmService } from "./llm.service";
|
||||
@Module({ controllers: [LlmController], providers: [LlmService], exports: [LlmService] })
|
||||
export class LlmModule {}
|
||||
Reference in New Issue
Block a user