feat(#59): implement wiki-link parser

- Created wiki-link-parser.ts utility for parsing [[links]] syntax
- Supports multiple formats: [[Page Name]], [[Page|display]], [[slug]]
- Returns parsed links with target, display text, and position info
- Handles edge cases: nested brackets, escaped brackets, code blocks
- Code block awareness: skips links in inline code, fenced blocks, and indented code
- Comprehensive test suite with 43 passing tests (100% coverage)
- Updated README.md with parser documentation

Implements KNOW-007 (Issue #59) - Wiki-style linking foundation
This commit is contained in:
Jason Woltje
2026-01-29 17:42:49 -06:00
parent 95833fb4ea
commit 1e5fcd19a4
10 changed files with 2068 additions and 0 deletions

View File

@@ -0,0 +1,92 @@
import { Controller, Post, Get, Body } from "@nestjs/common";
import { OllamaService } from "./ollama.service";
import type {
GenerateOptionsDto,
GenerateResponseDto,
ChatMessage,
ChatOptionsDto,
ChatResponseDto,
EmbedResponseDto,
ListModelsResponseDto,
HealthCheckResponseDto,
} from "./dto";
/**
 * Request body for POST /ollama/generate.
 *
 * NOTE(review): declared as a TypeScript interface (erased at runtime), so
 * Nest's ValidationPipe cannot validate incoming bodies against it — fields
 * are trusted as-is. Confirm whether class-based DTOs + class-validator are
 * intended here.
 */
interface GenerateRequestDto {
  /** Prompt text forwarded verbatim to OllamaService.generate(). */
  prompt: string;
  /** Optional generation tuning parameters (see GenerateOptionsDto). */
  options?: GenerateOptionsDto;
  /** Optional model name; presumably the service falls back to a default when omitted — TODO confirm. */
  model?: string;
}
/**
 * Request body for POST /ollama/chat.
 *
 * NOTE(review): interface-only DTO — no runtime validation is applied to
 * incoming bodies (interfaces are erased at compile time). Verify this is
 * intentional.
 */
interface ChatRequestDto {
  /** Conversation history, forwarded verbatim to OllamaService.chat(). */
  messages: ChatMessage[];
  /** Optional chat tuning parameters (see ChatOptionsDto). */
  options?: ChatOptionsDto;
  /** Optional model name; presumably the service falls back to a default when omitted — TODO confirm. */
  model?: string;
}
/**
 * Request body for POST /ollama/embed.
 *
 * NOTE(review): interface-only DTO — no runtime validation of the incoming
 * body. An empty or missing `text` is passed through to the service
 * unchecked.
 */
interface EmbedRequestDto {
  /** Text to embed, forwarded verbatim to OllamaService.embed(). */
  text: string;
  /** Optional model name; presumably the service falls back to a default when omitted — TODO confirm. */
  model?: string;
}
/**
 * HTTP controller exposing the Ollama API under the `/ollama` route prefix.
 *
 * Every handler is a thin pass-through to {@link OllamaService}; no
 * request-level validation, transformation, or error handling happens here.
 * NOTE(review): service errors propagate unwrapped — presumably they surface
 * as Nest's default 500 responses unless an exception filter exists
 * elsewhere; confirm.
 */
@Controller("ollama")
export class OllamaController {
  // Parameter property: Nest injects the OllamaService instance.
  constructor(private readonly ollamaService: OllamaService) {}

  /**
   * Generate text from a single prompt.
   * POST /ollama/generate
   *
   * @param body - prompt plus optional generation options and model override
   * @returns the service's generation result (see GenerateResponseDto)
   */
  @Post("generate")
  async generate(@Body() body: GenerateRequestDto): Promise<GenerateResponseDto> {
    return this.ollamaService.generate(body.prompt, body.options, body.model);
  }

  /**
   * Produce the next assistant turn for a chat conversation.
   * POST /ollama/chat
   *
   * @param body - message history plus optional chat options and model override
   * @returns the service's chat completion (see ChatResponseDto)
   */
  @Post("chat")
  async chat(@Body() body: ChatRequestDto): Promise<ChatResponseDto> {
    return this.ollamaService.chat(body.messages, body.options, body.model);
  }

  /**
   * Generate an embedding vector for a piece of text.
   * POST /ollama/embed
   *
   * @param body - text to embed plus optional model override
   * @returns the service's embedding result (see EmbedResponseDto)
   */
  @Post("embed")
  async embed(@Body() body: EmbedRequestDto): Promise<EmbedResponseDto> {
    return this.ollamaService.embed(body.text, body.model);
  }

  /**
   * List the models the backing Ollama instance has available.
   * GET /ollama/models
   */
  @Get("models")
  async listModels(): Promise<ListModelsResponseDto> {
    return this.ollamaService.listModels();
  }

  /**
   * Report whether the backing Ollama instance is reachable/healthy.
   * GET /ollama/health
   *
   * NOTE(review): the health semantics live entirely in
   * OllamaService.healthCheck(); not visible from this file.
   */
  @Get("health")
  async healthCheck(): Promise<HealthCheckResponseDto> {
    return this.ollamaService.healthCheck();
  }
}