diff --git a/apps/gateway/package.json b/apps/gateway/package.json index ff696d2..6602939 100644 --- a/apps/gateway/package.json +++ b/apps/gateway/package.json @@ -18,6 +18,8 @@ "@mosaic/brain": "workspace:^", "@mosaic/coord": "workspace:^", "@mosaic/db": "workspace:^", + "@mosaic/log": "workspace:^", + "@mosaic/memory": "workspace:^", "@mosaic/types": "workspace:^", "@nestjs/common": "^11.0.0", "@nestjs/core": "^11.0.0", @@ -34,6 +36,7 @@ "@sinclair/typebox": "^0.34.48", "better-auth": "^1.5.5", "fastify": "^5.0.0", + "node-cron": "^4.2.1", "reflect-metadata": "^0.2.0", "rxjs": "^7.8.0", "socket.io": "^4.8.0", @@ -41,6 +44,7 @@ }, "devDependencies": { "@types/node": "^22.0.0", + "@types/node-cron": "^3.0.11", "@types/uuid": "^10.0.0", "tsx": "^4.0.0", "typescript": "^5.8.0", diff --git a/apps/gateway/src/agent/agent.service.ts b/apps/gateway/src/agent/agent.service.ts index 9fe14f3..2e90197 100644 --- a/apps/gateway/src/agent/agent.service.ts +++ b/apps/gateway/src/agent/agent.service.ts @@ -7,11 +7,15 @@ import { type ToolDefinition, } from '@mariozechner/pi-coding-agent'; import type { Brain } from '@mosaic/brain'; +import type { Memory } from '@mosaic/memory'; import { BRAIN } from '../brain/brain.tokens.js'; +import { MEMORY } from '../memory/memory.tokens.js'; +import { EmbeddingService } from '../memory/embedding.service.js'; import { CoordService } from '../coord/coord.service.js'; import { ProviderService } from './provider.service.js'; import { createBrainTools } from './tools/brain-tools.js'; import { createCoordTools } from './tools/coord-tools.js'; +import { createMemoryTools } from './tools/memory-tools.js'; import type { SessionInfoDto } from './session.dto.js'; export interface AgentSessionOptions { @@ -42,9 +46,15 @@ export class AgentService implements OnModuleDestroy { constructor( @Inject(ProviderService) private readonly providerService: ProviderService, @Inject(BRAIN) private readonly brain: Brain, + @Inject(MEMORY) private readonly memory: 
Memory, + @Inject(EmbeddingService) private readonly embeddingService: EmbeddingService, @Inject(CoordService) private readonly coordService: CoordService, ) { - this.customTools = [...createBrainTools(brain), ...createCoordTools(coordService)]; + this.customTools = [ + ...createBrainTools(brain), + ...createCoordTools(coordService), + ...createMemoryTools(memory, embeddingService.available ? embeddingService : null), + ]; this.logger.log(`Registered ${this.customTools.length} custom tools`); } diff --git a/apps/gateway/src/agent/tools/memory-tools.ts b/apps/gateway/src/agent/tools/memory-tools.ts new file mode 100644 index 0000000..c8a3b43 --- /dev/null +++ b/apps/gateway/src/agent/tools/memory-tools.ts @@ -0,0 +1,158 @@ +import { Type } from '@sinclair/typebox'; +import type { ToolDefinition } from '@mariozechner/pi-coding-agent'; +import type { Memory } from '@mosaic/memory'; +import type { EmbeddingProvider } from '@mosaic/memory'; + +export function createMemoryTools( + memory: Memory, + embeddingProvider: EmbeddingProvider | null, +): ToolDefinition[] { + const searchMemory: ToolDefinition = { + name: 'memory_search', + label: 'Search Memory', + description: + 'Search across stored insights and knowledge using natural language. 
Returns semantically similar results.', + parameters: Type.Object({ + userId: Type.String({ description: 'User ID to search memory for' }), + query: Type.String({ description: 'Natural language search query' }), + limit: Type.Optional(Type.Number({ description: 'Max results (default 5)' })), + }), + async execute(_toolCallId, params) { + const { userId, query, limit } = params as { + userId: string; + query: string; + limit?: number; + }; + + if (!embeddingProvider) { + return { + content: [ + { + type: 'text' as const, + text: 'Semantic search unavailable — no embedding provider configured', + }, + ], + details: undefined, + }; + } + + const embedding = await embeddingProvider.embed(query); + const results = await memory.insights.searchByEmbedding(userId, embedding, limit ?? 5); + return { + content: [{ type: 'text' as const, text: JSON.stringify(results, null, 2) }], + details: undefined, + }; + }, + }; + + const getPreferences: ToolDefinition = { + name: 'memory_get_preferences', + label: 'Get User Preferences', + description: 'Retrieve stored preferences for a user.', + parameters: Type.Object({ + userId: Type.String({ description: 'User ID' }), + category: Type.Optional( + Type.String({ + description: 'Filter by category: communication, coding, workflow, appearance, general', + }), + ), + }), + async execute(_toolCallId, params) { + const { userId, category } = params as { userId: string; category?: string }; + type Cat = 'communication' | 'coding' | 'workflow' | 'appearance' | 'general'; + const prefs = category + ? 
await memory.preferences.findByUserAndCategory(userId, category as Cat) + : await memory.preferences.findByUser(userId); + return { + content: [{ type: 'text' as const, text: JSON.stringify(prefs, null, 2) }], + details: undefined, + }; + }, + }; + + const savePreference: ToolDefinition = { + name: 'memory_save_preference', + label: 'Save User Preference', + description: + 'Store a learned user preference (e.g., "prefers tables over paragraphs", "timezone: America/Chicago").', + parameters: Type.Object({ + userId: Type.String({ description: 'User ID' }), + key: Type.String({ description: 'Preference key' }), + value: Type.String({ description: 'Preference value (JSON string)' }), + category: Type.Optional( + Type.String({ + description: 'Category: communication, coding, workflow, appearance, general', + }), + ), + }), + async execute(_toolCallId, params) { + const { userId, key, value, category } = params as { + userId: string; + key: string; + value: string; + category?: string; + }; + type Cat = 'communication' | 'coding' | 'workflow' | 'appearance' | 'general'; + let parsedValue: unknown; + try { + parsedValue = JSON.parse(value); + } catch { + parsedValue = value; + } + const pref = await memory.preferences.upsert({ + userId, + key, + value: parsedValue, + category: (category as Cat) ?? 
'general', + source: 'agent', + }); + return { + content: [{ type: 'text' as const, text: JSON.stringify(pref, null, 2) }], + details: undefined, + }; + }, + }; + + const saveInsight: ToolDefinition = { + name: 'memory_save_insight', + label: 'Save Insight', + description: + 'Store a learned insight, decision, or knowledge extracted from the current interaction.', + parameters: Type.Object({ + userId: Type.String({ description: 'User ID' }), + content: Type.String({ description: 'The insight or knowledge to store' }), + category: Type.Optional( + Type.String({ + description: 'Category: decision, learning, preference, fact, pattern, general', + }), + ), + }), + async execute(_toolCallId, params) { + const { userId, content, category } = params as { + userId: string; + content: string; + category?: string; + }; + type Cat = 'decision' | 'learning' | 'preference' | 'fact' | 'pattern' | 'general'; + + let embedding: number[] | null = null; + if (embeddingProvider) { + embedding = await embeddingProvider.embed(content); + } + + const insight = await memory.insights.create({ + userId, + content, + embedding, + source: 'agent', + category: (category as Cat) ?? 
'learning', + }); + return { + content: [{ type: 'text' as const, text: JSON.stringify(insight, null, 2) }], + details: undefined, + }; + }, + }; + + return [searchMemory, getPreferences, savePreference, saveInsight]; +} diff --git a/apps/gateway/src/app.module.ts b/apps/gateway/src/app.module.ts index 694123b..17ab9b9 100644 --- a/apps/gateway/src/app.module.ts +++ b/apps/gateway/src/app.module.ts @@ -10,6 +10,9 @@ import { ProjectsModule } from './projects/projects.module.js'; import { MissionsModule } from './missions/missions.module.js'; import { TasksModule } from './tasks/tasks.module.js'; import { CoordModule } from './coord/coord.module.js'; +import { MemoryModule } from './memory/memory.module.js'; +import { LogModule } from './log/log.module.js'; +import { SkillsModule } from './skills/skills.module.js'; @Module({ imports: [ @@ -23,6 +26,9 @@ import { CoordModule } from './coord/coord.module.js'; MissionsModule, TasksModule, CoordModule, + MemoryModule, + LogModule, + SkillsModule, ], controllers: [HealthController], }) diff --git a/apps/gateway/src/log/cron.service.ts b/apps/gateway/src/log/cron.service.ts new file mode 100644 index 0000000..536e396 --- /dev/null +++ b/apps/gateway/src/log/cron.service.ts @@ -0,0 +1,44 @@ +import { Injectable, Logger, type OnModuleInit, type OnModuleDestroy } from '@nestjs/common'; +import cron from 'node-cron'; +import { SummarizationService } from './summarization.service.js'; + +@Injectable() +export class CronService implements OnModuleInit, OnModuleDestroy { + private readonly logger = new Logger(CronService.name); + private readonly tasks: cron.ScheduledTask[] = []; + + constructor(private readonly summarization: SummarizationService) {} + + onModuleInit(): void { + const summarizationSchedule = process.env['SUMMARIZATION_CRON'] ?? '0 */6 * * *'; // every 6 hours + const tierManagementSchedule = process.env['TIER_MANAGEMENT_CRON'] ?? 
'0 3 * * *'; // daily at 3am + + this.tasks.push( + cron.schedule(summarizationSchedule, () => { + this.summarization.runSummarization().catch((err) => { + this.logger.error(`Scheduled summarization failed: ${err}`); + }); + }), + ); + + this.tasks.push( + cron.schedule(tierManagementSchedule, () => { + this.summarization.runTierManagement().catch((err) => { + this.logger.error(`Scheduled tier management failed: ${err}`); + }); + }), + ); + + this.logger.log( + `Cron scheduled: summarization="${summarizationSchedule}", tier="${tierManagementSchedule}"`, + ); + } + + onModuleDestroy(): void { + for (const task of this.tasks) { + task.stop(); + } + this.tasks.length = 0; + this.logger.log('Cron tasks stopped'); + } +} diff --git a/apps/gateway/src/log/log.controller.ts b/apps/gateway/src/log/log.controller.ts new file mode 100644 index 0000000..91ad6da --- /dev/null +++ b/apps/gateway/src/log/log.controller.ts @@ -0,0 +1,62 @@ +import { Body, Controller, Get, Inject, Param, Post, Query, UseGuards } from '@nestjs/common'; +import type { LogService } from '@mosaic/log'; +import { LOG_SERVICE } from './log.tokens.js'; +import { AuthGuard } from '../auth/auth.guard.js'; +import type { IngestLogDto, QueryLogsDto } from './log.dto.js'; + +@Controller('api/logs') +@UseGuards(AuthGuard) +export class LogController { + constructor(@Inject(LOG_SERVICE) private readonly logService: LogService) {} + + @Post() + async ingest(@Query('userId') userId: string, @Body() dto: IngestLogDto) { + return this.logService.logs.ingest({ + sessionId: dto.sessionId, + userId, + level: dto.level, + category: dto.category, + content: dto.content, + metadata: dto.metadata, + }); + } + + @Post('batch') + async ingestBatch(@Query('userId') userId: string, @Body() dtos: IngestLogDto[]) { + const entries = dtos.map((dto) => ({ + sessionId: dto.sessionId, + userId, + level: dto.level as 'debug' | 'info' | 'warn' | 'error' | undefined, + category: dto.category as + | 'decision' + | 'tool_use' + | 
'learning' + | 'error' + | 'general' + | undefined, + content: dto.content, + metadata: dto.metadata, + })); + return this.logService.logs.ingestBatch(entries); + } + + @Get() + async query(@Query('userId') userId: string, @Query() params: QueryLogsDto) { + return this.logService.logs.query({ + userId, + sessionId: params.sessionId, + level: params.level, + category: params.category, + tier: params.tier, + since: params.since ? new Date(params.since) : undefined, + until: params.until ? new Date(params.until) : undefined, + limit: params.limit ? Number(params.limit) : undefined, + offset: params.offset ? Number(params.offset) : undefined, + }); + } + + @Get(':id') + async findOne(@Param('id') id: string) { + return this.logService.logs.findById(id); + } +} diff --git a/apps/gateway/src/log/log.dto.ts b/apps/gateway/src/log/log.dto.ts new file mode 100644 index 0000000..b0eac35 --- /dev/null +++ b/apps/gateway/src/log/log.dto.ts @@ -0,0 +1,18 @@ +export interface IngestLogDto { + sessionId: string; + level?: 'debug' | 'info' | 'warn' | 'error'; + category?: 'decision' | 'tool_use' | 'learning' | 'error' | 'general'; + content: string; + metadata?: Record<string, unknown>; +} + +export interface QueryLogsDto { + sessionId?: string; + level?: 'debug' | 'info' | 'warn' | 'error'; + category?: 'decision' | 'tool_use' | 'learning' | 'error' | 'general'; + tier?: 'hot' | 'warm' | 'cold'; + since?: string; + until?: string; + limit?: string; + offset?: string; +} diff --git a/apps/gateway/src/log/log.module.ts b/apps/gateway/src/log/log.module.ts new file mode 100644 index 0000000..063f12a --- /dev/null +++ b/apps/gateway/src/log/log.module.ts @@ -0,0 +1,24 @@ +import { Global, Module } from '@nestjs/common'; +import { createLogService, type LogService } from '@mosaic/log'; +import type { Db } from '@mosaic/db'; +import { DB } from '../database/database.module.js'; +import { LOG_SERVICE } from './log.tokens.js'; +import { LogController } from './log.controller.js'; +import { 
SummarizationService } from './summarization.service.js'; +import { CronService } from './cron.service.js'; + +@Global() +@Module({ + providers: [ + { + provide: LOG_SERVICE, + useFactory: (db: Db): LogService => createLogService(db), + inject: [DB], + }, + SummarizationService, + CronService, + ], + controllers: [LogController], + exports: [LOG_SERVICE, SummarizationService], +}) +export class LogModule {} diff --git a/apps/gateway/src/log/log.tokens.ts b/apps/gateway/src/log/log.tokens.ts new file mode 100644 index 0000000..3117b2a --- /dev/null +++ b/apps/gateway/src/log/log.tokens.ts @@ -0,0 +1 @@ +export const LOG_SERVICE = 'LOG_SERVICE'; diff --git a/apps/gateway/src/log/summarization.service.ts b/apps/gateway/src/log/summarization.service.ts new file mode 100644 index 0000000..cd511c9 --- /dev/null +++ b/apps/gateway/src/log/summarization.service.ts @@ -0,0 +1,178 @@ +import { Inject, Injectable, Logger } from '@nestjs/common'; +import type { LogService } from '@mosaic/log'; +import type { Memory } from '@mosaic/memory'; +import { LOG_SERVICE } from './log.tokens.js'; +import { MEMORY } from '../memory/memory.tokens.js'; +import { EmbeddingService } from '../memory/embedding.service.js'; +import type { Db } from '@mosaic/db'; +import { sql, summarizationJobs } from '@mosaic/db'; +import { DB } from '../database/database.module.js'; + +const SUMMARIZATION_PROMPT = `You are a knowledge extraction assistant. Given the following agent interaction logs, extract the key decisions, learnings, and patterns. Output a concise summary (2-4 sentences) that captures the most important information for future reference. Focus on actionable insights, not raw events. 
+ +Logs: +{logs} + +Summary:`; + +interface ChatCompletion { + choices: Array<{ message: { content: string } }>; +} + +@Injectable() +export class SummarizationService { + private readonly logger = new Logger(SummarizationService.name); + private readonly apiKey: string | undefined; + private readonly baseUrl: string; + private readonly model: string; + + constructor( + @Inject(LOG_SERVICE) private readonly logService: LogService, + @Inject(MEMORY) private readonly memory: Memory, + private readonly embeddings: EmbeddingService, + @Inject(DB) private readonly db: Db, + ) { + this.apiKey = process.env['OPENAI_API_KEY']; + this.baseUrl = process.env['SUMMARIZATION_API_URL'] ?? 'https://api.openai.com/v1'; + this.model = process.env['SUMMARIZATION_MODEL'] ?? 'gpt-4o-mini'; + } + + /** + * Run one summarization cycle: + * 1. Find hot logs older than 24h with decision/learning/tool_use categories + * 2. Group by session + * 3. Summarize each group via cheap LLM + * 4. Store as insights with embeddings + * 5. Transition processed logs to warm tier + */ + async runSummarization(): Promise<{ logsProcessed: number; insightsCreated: number }> { + const cutoff = new Date(Date.now() - 24 * 60 * 60 * 1000); // 24h ago + + // Create job record + const [job] = await this.db + .insert(summarizationJobs) + .values({ status: 'running', startedAt: new Date() }) + .returning(); + + try { + const logs = await this.logService.logs.getLogsForSummarization(cutoff, 200); + if (logs.length === 0) { + await this.db + .update(summarizationJobs) + .set({ status: 'completed', completedAt: new Date() }) + .where(sql`id = ${job!.id}`); + return { logsProcessed: 0, insightsCreated: 0 }; + } + + // Group logs by session + const bySession = new Map(); + for (const log of logs) { + const group = bySession.get(log.sessionId) ?? 
[]; + group.push(log); + bySession.set(log.sessionId, group); + } + + let insightsCreated = 0; + + for (const [sessionId, sessionLogs] of bySession) { + const userId = sessionLogs[0]?.userId; + if (!userId) continue; + + const logsText = sessionLogs.map((l) => `[${l.category}] ${l.content}`).join('\n'); + + const summary = await this.summarize(logsText); + if (!summary) continue; + + const embedding = this.embeddings.available + ? await this.embeddings.embed(summary) + : undefined; + + await this.memory.insights.create({ + userId, + content: summary, + embedding: embedding ?? null, + source: 'summarization', + category: 'learning', + metadata: { sessionId, logCount: sessionLogs.length }, + }); + insightsCreated++; + } + + // Transition processed logs to warm + await this.logService.logs.promoteToWarm(cutoff); + + await this.db + .update(summarizationJobs) + .set({ + status: 'completed', + logsProcessed: logs.length, + insightsCreated, + completedAt: new Date(), + }) + .where(sql`id = ${job!.id}`); + + this.logger.log(`Summarization complete: ${logs.length} logs → ${insightsCreated} insights`); + return { logsProcessed: logs.length, insightsCreated }; + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + await this.db + .update(summarizationJobs) + .set({ status: 'failed', errorMessage: message, completedAt: new Date() }) + .where(sql`id = ${job!.id}`); + this.logger.error(`Summarization failed: ${message}`); + throw error; + } + } + + /** + * Run tier management: + * - Warm logs older than 30 days → cold + * - Cold logs older than 90 days → purged + * - Decay old insight relevance scores + */ + async runTierManagement(): Promise<void> { + const warmCutoff = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000); + const coldCutoff = new Date(Date.now() - 90 * 24 * 60 * 60 * 1000); + const decayCutoff = new Date(Date.now() - 14 * 24 * 60 * 60 * 1000); + + const promoted = await this.logService.logs.promoteToCold(warmCutoff); + const purged = await this.logService.logs.purge(coldCutoff); + const decayed = await this.memory.insights.decayOldInsights(decayCutoff); + + this.logger.log( + `Tier management: ${promoted} logs→cold, ${purged} purged, ${decayed} insights decayed`, + ); + } + + private async summarize(logsText: string): Promise<string | null> { + if (!this.apiKey) { + this.logger.warn('No API key configured — skipping summarization'); + return null; + } + + const prompt = SUMMARIZATION_PROMPT.replace('{logs}', logsText); + + const response = await fetch(`${this.baseUrl}/chat/completions`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${this.apiKey}`, + }, + body: JSON.stringify({ + model: this.model, + messages: [{ role: 'user', content: prompt }], + max_tokens: 300, + temperature: 0.3, + }), + }); + + if (!response.ok) { + const body = await response.text(); + this.logger.error(`Summarization API error: ${response.status} ${body}`); + return null; + } + + const json = (await response.json()) as ChatCompletion; + return json.choices[0]?.message.content ?? 
null; + } +} diff --git a/apps/gateway/src/memory/embedding.service.ts b/apps/gateway/src/memory/embedding.service.ts new file mode 100644 index 0000000..14f8551 --- /dev/null +++ b/apps/gateway/src/memory/embedding.service.ts @@ -0,0 +1,69 @@ +import { Injectable, Logger } from '@nestjs/common'; +import type { EmbeddingProvider } from '@mosaic/memory'; + +const DEFAULT_MODEL = 'text-embedding-3-small'; +const DEFAULT_DIMENSIONS = 1536; + +interface EmbeddingResponse { + data: Array<{ embedding: number[]; index: number }>; + model: string; + usage: { prompt_tokens: number; total_tokens: number }; +} + +/** + * Generates embeddings via the OpenAI-compatible embeddings API. + * Supports OpenAI, Azure OpenAI, and any provider with a compatible endpoint. + */ +@Injectable() +export class EmbeddingService implements EmbeddingProvider { + private readonly logger = new Logger(EmbeddingService.name); + private readonly apiKey: string | undefined; + private readonly baseUrl: string; + private readonly model: string; + + readonly dimensions = DEFAULT_DIMENSIONS; + + constructor() { + this.apiKey = process.env['OPENAI_API_KEY']; + this.baseUrl = process.env['EMBEDDING_API_URL'] ?? 'https://api.openai.com/v1'; + this.model = process.env['EMBEDDING_MODEL'] ?? 
DEFAULT_MODEL; + } + + get available(): boolean { + return !!this.apiKey; + } + + async embed(text: string): Promise<number[]> { + const results = await this.embedBatch([text]); + return results[0]!; + } + + async embedBatch(texts: string[]): Promise<number[][]> { + if (!this.apiKey) { + this.logger.warn('No OPENAI_API_KEY configured — returning zero vectors'); + return texts.map(() => new Array(this.dimensions).fill(0)); + } + + const response = await fetch(`${this.baseUrl}/embeddings`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${this.apiKey}`, + }, + body: JSON.stringify({ + model: this.model, + input: texts, + dimensions: this.dimensions, + }), + }); + + if (!response.ok) { + const body = await response.text(); + this.logger.error(`Embedding API error: ${response.status} ${body}`); + throw new Error(`Embedding API returned ${response.status}`); + } + + const json = (await response.json()) as EmbeddingResponse; + return json.data.sort((a, b) => a.index - b.index).map((d) => d.embedding); + } +} diff --git a/apps/gateway/src/memory/memory.controller.ts b/apps/gateway/src/memory/memory.controller.ts new file mode 100644 index 0000000..ff69238 --- /dev/null +++ b/apps/gateway/src/memory/memory.controller.ts @@ -0,0 +1,126 @@ +import { + Body, + Controller, + Delete, + Get, + HttpCode, + HttpStatus, + Inject, + NotFoundException, + Param, + Post, + Query, + UseGuards, +} from '@nestjs/common'; +import type { Memory } from '@mosaic/memory'; +import { MEMORY } from './memory.tokens.js'; +import { AuthGuard } from '../auth/auth.guard.js'; +import { EmbeddingService } from './embedding.service.js'; +import type { UpsertPreferenceDto, CreateInsightDto, SearchMemoryDto } from './memory.dto.js'; + +@Controller('api/memory') +@UseGuards(AuthGuard) +export class MemoryController { + constructor( + @Inject(MEMORY) private readonly memory: Memory, + private readonly embeddings: EmbeddingService, + ) {} + + // ─── Preferences 
──────────────────────────────────────────────────── + + @Get('preferences') + async listPreferences(@Query('userId') userId: string, @Query('category') category?: string) { + if (category) { + return this.memory.preferences.findByUserAndCategory( + userId, + category as Parameters[1], + ); + } + return this.memory.preferences.findByUser(userId); + } + + @Get('preferences/:key') + async getPreference(@Query('userId') userId: string, @Param('key') key: string) { + const pref = await this.memory.preferences.findByUserAndKey(userId, key); + if (!pref) throw new NotFoundException('Preference not found'); + return pref; + } + + @Post('preferences') + async upsertPreference(@Query('userId') userId: string, @Body() dto: UpsertPreferenceDto) { + return this.memory.preferences.upsert({ + userId, + key: dto.key, + value: dto.value, + category: dto.category, + source: dto.source, + }); + } + + @Delete('preferences/:key') + @HttpCode(HttpStatus.NO_CONTENT) + async removePreference(@Query('userId') userId: string, @Param('key') key: string) { + const deleted = await this.memory.preferences.remove(userId, key); + if (!deleted) throw new NotFoundException('Preference not found'); + } + + // ─── Insights ─────────────────────────────────────────────────────── + + @Get('insights') + async listInsights(@Query('userId') userId: string, @Query('limit') limit?: string) { + return this.memory.insights.findByUser(userId, limit ? Number(limit) : undefined); + } + + @Get('insights/:id') + async getInsight(@Param('id') id: string) { + const insight = await this.memory.insights.findById(id); + if (!insight) throw new NotFoundException('Insight not found'); + return insight; + } + + @Post('insights') + async createInsight(@Query('userId') userId: string, @Body() dto: CreateInsightDto) { + const embedding = this.embeddings.available + ? 
await this.embeddings.embed(dto.content) + : undefined; + + return this.memory.insights.create({ + userId, + content: dto.content, + source: dto.source, + category: dto.category, + metadata: dto.metadata, + embedding: embedding ?? null, + }); + } + + @Delete('insights/:id') + @HttpCode(HttpStatus.NO_CONTENT) + async removeInsight(@Param('id') id: string) { + const deleted = await this.memory.insights.remove(id); + if (!deleted) throw new NotFoundException('Insight not found'); + } + + // ─── Search ───────────────────────────────────────────────────────── + + @Post('search') + async searchMemory(@Query('userId') userId: string, @Body() dto: SearchMemoryDto) { + if (!this.embeddings.available) { + return { + query: dto.query, + results: [], + message: 'Semantic search requires OPENAI_API_KEY for embeddings', + }; + } + + const queryEmbedding = await this.embeddings.embed(dto.query); + const results = await this.memory.insights.searchByEmbedding( + userId, + queryEmbedding, + dto.limit ?? 10, + dto.maxDistance ?? 
0.8, + ); + + return { query: dto.query, results }; + } +} diff --git a/apps/gateway/src/memory/memory.dto.ts b/apps/gateway/src/memory/memory.dto.ts new file mode 100644 index 0000000..5182ee2 --- /dev/null +++ b/apps/gateway/src/memory/memory.dto.ts @@ -0,0 +1,19 @@ +export interface UpsertPreferenceDto { + key: string; + value: unknown; + category?: 'communication' | 'coding' | 'workflow' | 'appearance' | 'general'; + source?: string; +} + +export interface CreateInsightDto { + content: string; + source?: 'agent' | 'user' | 'summarization' | 'system'; + category?: 'decision' | 'learning' | 'preference' | 'fact' | 'pattern' | 'general'; + metadata?: Record<string, unknown>; +} + +export interface SearchMemoryDto { + query: string; + limit?: number; + maxDistance?: number; +} diff --git a/apps/gateway/src/memory/memory.module.ts b/apps/gateway/src/memory/memory.module.ts new file mode 100644 index 0000000..0108047 --- /dev/null +++ b/apps/gateway/src/memory/memory.module.ts @@ -0,0 +1,22 @@ +import { Global, Module } from '@nestjs/common'; +import { createMemory, type Memory } from '@mosaic/memory'; +import type { Db } from '@mosaic/db'; +import { DB } from '../database/database.module.js'; +import { MEMORY } from './memory.tokens.js'; +import { MemoryController } from './memory.controller.js'; +import { EmbeddingService } from './embedding.service.js'; + +@Global() +@Module({ + providers: [ + { + provide: MEMORY, + useFactory: (db: Db): Memory => createMemory(db), + inject: [DB], + }, + EmbeddingService, + ], + controllers: [MemoryController], + exports: [MEMORY, EmbeddingService], +}) +export class MemoryModule {} diff --git a/apps/gateway/src/memory/memory.tokens.ts b/apps/gateway/src/memory/memory.tokens.ts new file mode 100644 index 0000000..53baa78 --- /dev/null +++ b/apps/gateway/src/memory/memory.tokens.ts @@ -0,0 +1 @@ +export const MEMORY = 'MEMORY'; diff --git a/apps/gateway/src/skills/skills.controller.ts b/apps/gateway/src/skills/skills.controller.ts new file mode 
100644 index 0000000..8661db6 --- /dev/null +++ b/apps/gateway/src/skills/skills.controller.ts @@ -0,0 +1,67 @@ +import { + Body, + Controller, + Delete, + Get, + HttpCode, + HttpStatus, + NotFoundException, + Param, + Patch, + Post, + UseGuards, +} from '@nestjs/common'; +import { SkillsService } from './skills.service.js'; +import { AuthGuard } from '../auth/auth.guard.js'; +import type { CreateSkillDto, UpdateSkillDto } from './skills.dto.js'; + +@Controller('api/skills') +@UseGuards(AuthGuard) +export class SkillsController { + constructor(private readonly skills: SkillsService) {} + + @Get() + async list() { + return this.skills.findAll(); + } + + @Get(':id') + async findOne(@Param('id') id: string) { + const skill = await this.skills.findById(id); + if (!skill) throw new NotFoundException('Skill not found'); + return skill; + } + + @Post() + async create(@Body() dto: CreateSkillDto) { + return this.skills.create({ + name: dto.name, + description: dto.description, + version: dto.version, + source: dto.source, + config: dto.config, + enabled: dto.enabled, + }); + } + + @Patch(':id') + async update(@Param('id') id: string, @Body() dto: UpdateSkillDto) { + const skill = await this.skills.update(id, dto); + if (!skill) throw new NotFoundException('Skill not found'); + return skill; + } + + @Patch(':id/toggle') + async toggle(@Param('id') id: string, @Body() body: { enabled: boolean }) { + const skill = await this.skills.toggle(id, body.enabled); + if (!skill) throw new NotFoundException('Skill not found'); + return skill; + } + + @Delete(':id') + @HttpCode(HttpStatus.NO_CONTENT) + async remove(@Param('id') id: string) { + const deleted = await this.skills.remove(id); + if (!deleted) throw new NotFoundException('Skill not found'); + } +} diff --git a/apps/gateway/src/skills/skills.dto.ts b/apps/gateway/src/skills/skills.dto.ts new file mode 100644 index 0000000..e2a1f4d --- /dev/null +++ b/apps/gateway/src/skills/skills.dto.ts @@ -0,0 +1,15 @@ +export interface 
CreateSkillDto { + name: string; + description?: string; + version?: string; + source?: 'builtin' | 'community' | 'custom'; + config?: Record<string, unknown>; + enabled?: boolean; +} + +export interface UpdateSkillDto { + description?: string; + version?: string; + config?: Record<string, unknown>; + enabled?: boolean; +} diff --git a/apps/gateway/src/skills/skills.module.ts b/apps/gateway/src/skills/skills.module.ts new file mode 100644 index 0000000..49ab88d --- /dev/null +++ b/apps/gateway/src/skills/skills.module.ts @@ -0,0 +1,10 @@ +import { Module } from '@nestjs/common'; +import { SkillsService } from './skills.service.js'; +import { SkillsController } from './skills.controller.js'; + +@Module({ + providers: [SkillsService], + controllers: [SkillsController], + exports: [SkillsService], +}) +export class SkillsModule {} diff --git a/apps/gateway/src/skills/skills.service.ts b/apps/gateway/src/skills/skills.service.ts new file mode 100644 index 0000000..b015e0d --- /dev/null +++ b/apps/gateway/src/skills/skills.service.ts @@ -0,0 +1,52 @@ +import { Inject, Injectable } from '@nestjs/common'; +import { eq, type Db, skills } from '@mosaic/db'; +import { DB } from '../database/database.module.js'; + +type Skill = typeof skills.$inferSelect; +type NewSkill = typeof skills.$inferInsert; + +@Injectable() +export class SkillsService { + constructor(@Inject(DB) private readonly db: Db) {} + + async findAll(): Promise<Skill[]> { + return this.db.select().from(skills); + } + + async findEnabled(): Promise<Skill[]> { + return this.db.select().from(skills).where(eq(skills.enabled, true)); + } + + async findById(id: string): Promise<Skill | undefined> { + const rows = await this.db.select().from(skills).where(eq(skills.id, id)); + return rows[0]; + } + + async findByName(name: string): Promise<Skill | undefined> { + const rows = await this.db.select().from(skills).where(eq(skills.name, name)); + return rows[0]; + } + + async create(data: NewSkill): Promise<Skill> { + const rows = await this.db.insert(skills).values(data).returning(); + return rows[0]!; + } + + async 
update(id: string, data: Partial<NewSkill>): Promise<Skill | undefined> { + const rows = await this.db + .update(skills) + .set({ ...data, updatedAt: new Date() }) + .where(eq(skills.id, id)) + .returning(); + return rows[0]; + } + + async remove(id: string): Promise<boolean> { + const rows = await this.db.delete(skills).where(eq(skills.id, id)).returning(); + return rows.length > 0; + } + + async toggle(id: string, enabled: boolean): Promise<Skill | undefined> { + return this.update(id, { enabled }); + } +} diff --git a/docs/MISSION-MANIFEST.md b/docs/MISSION-MANIFEST.md index 6d5a3ee..66b1634 100644 --- a/docs/MISSION-MANIFEST.md +++ b/docs/MISSION-MANIFEST.md @@ -8,8 +8,8 @@ **ID:** mvp-20260312 **Statement:** Build Mosaic Stack v0.1.0 — a self-hosted, multi-user AI agent platform with web dashboard, TUI, remote control, shared memory, mission orchestration, and extensible skill/plugin architecture. All TypeScript. Pi as agent harness. Brain as knowledge layer. Queue as coordination backbone. **Phase:** Execution -**Current Milestone:** Phase 4: Memory & Intelligence (v0.0.5) -**Progress:** 4 / 8 milestones +**Current Milestone:** Phase 5: Remote Control (v0.0.6) +**Progress:** 5 / 8 milestones **Status:** active **Last Updated:** 2026-03-13 UTC @@ -35,7 +35,7 @@ | 1 | ms-158 | Phase 1: Core API (v0.0.2) | done | — | — | 2026-03-13 | 2026-03-13 | | 2 | ms-159 | Phase 2: Agent Layer (v0.0.3) | done | — | — | 2026-03-13 | 2026-03-12 | | 3 | ms-160 | Phase 3: Web Dashboard (v0.0.4) | done | — | — | 2026-03-12 | 2026-03-13 | -| 4 | ms-161 | Phase 4: Memory & Intelligence (v0.0.5) | not-started | — | — | — | — | +| 4 | ms-161 | Phase 4: Memory & Intelligence (v0.0.5) | done | — | — | 2026-03-13 | 2026-03-13 | | 5 | ms-162 | Phase 5: Remote Control (v0.0.6) | not-started | — | — | — | — | | 6 | ms-163 | Phase 6: CLI & Tools (v0.0.7) | not-started | — | — | — | — | | 7 | ms-164 | Phase 7: Polish & Beta (v0.1.0) | not-started | — | — | — | — | diff --git a/docs/TASKS.md b/docs/TASKS.md index 49fd6db..430210f 100644 --- 
a/docs/TASKS.md +++ b/docs/TASKS.md @@ -37,13 +37,13 @@ | P3-006 | done | Phase 3 | Settings — provider config, profile, integrations | #88 | #31 | | P3-007 | done | Phase 3 | Admin panel — user management, RBAC | #89 | #32 | | P3-008 | done | Phase 3 | Verify Phase 3 — web dashboard functional E2E | — | #33 | -| P4-001 | not-started | Phase 4 | @mosaic/memory — preference + insight stores | — | #34 | -| P4-002 | not-started | Phase 4 | Semantic search — pgvector embeddings + search API | — | #35 | -| P4-003 | not-started | Phase 4 | @mosaic/log — log ingest, parsing, tiered storage | — | #36 | -| P4-004 | not-started | Phase 4 | Summarization pipeline — Haiku-tier LLM + cron | — | #37 | -| P4-005 | not-started | Phase 4 | Memory integration — inject into agent sessions | — | #38 | -| P4-006 | not-started | Phase 4 | Skill management — catalog, install, config | — | #39 | -| P4-007 | not-started | Phase 4 | Verify Phase 4 — memory + log pipeline working | — | #40 | +| P4-001 | done | Phase 4 | @mosaic/memory — preference + insight stores | — | #34 | +| P4-002 | done | Phase 4 | Semantic search — pgvector embeddings + search API | — | #35 | +| P4-003 | done | Phase 4 | @mosaic/log — log ingest, parsing, tiered storage | — | #36 | +| P4-004 | done | Phase 4 | Summarization pipeline — Haiku-tier LLM + cron | — | #37 | +| P4-005 | done | Phase 4 | Memory integration — inject into agent sessions | — | #38 | +| P4-006 | done | Phase 4 | Skill management — catalog, install, config | — | #39 | +| P4-007 | done | Phase 4 | Verify Phase 4 — memory + log pipeline working | — | #40 | | P5-001 | not-started | Phase 5 | Plugin host — gateway plugin loading + channel interface | — | #41 | | P5-002 | done | Phase 5 | @mosaic/discord-plugin — Discord bot + channel plugin | #61 | #42 | | P5-003 | not-started | Phase 5 | @mosaic/telegram-plugin — Telegraf bot + channel plugin | — | #43 | diff --git a/docs/scratchpads/mvp-20260312.md b/docs/scratchpads/mvp-20260312.md index 
188f90c..9881cc9 100644 --- a/docs/scratchpads/mvp-20260312.md +++ b/docs/scratchpads/mvp-20260312.md @@ -114,3 +114,9 @@ User confirmed: start the planning gate. | Session | Date | Milestone | Tasks Done | Outcome | | ------- | ---------- | --------- | ---------- | -------------------------------------------------------------------------------------------------------------------------- | | 10 | 2026-03-13 | Phase 3 | P3-008 | Phase 3 verification: typecheck 18/18, lint 18/18, format clean, build green (10 routes), 10 tests pass. Phase 3 complete. | + +### Session 10 (continued) — Phase 4 Memory & Intelligence + +| Session | Date | Milestone | Tasks Done | Outcome | +| ------- | ---------- | --------- | --------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| 10 | 2026-03-13 | Phase 4 | P4-001 through P4-007 | Full memory + log system: DB schema (preferences, insights w/ pgvector, agent_logs, skills, summarization_jobs), @mosaic/memory + @mosaic/log packages, embedding service, summarization pipeline w/ cron, memory tools in agent sessions, skill management CRUD. All gates green. 
| diff --git a/packages/db/src/index.ts b/packages/db/src/index.ts index 4e8e998..f14fb70 100644 --- a/packages/db/src/index.ts +++ b/packages/db/src/index.ts @@ -1,4 +1,18 @@ export { createDb, type Db, type DbHandle } from './client.js'; export { runMigrations } from './migrate.js'; export * from './schema.js'; -export { eq, and, or, desc, asc, sql, inArray, isNull, isNotNull } from 'drizzle-orm'; +export { + eq, + and, + or, + desc, + asc, + sql, + inArray, + isNull, + isNotNull, + gt, + lt, + gte, + lte, +} from 'drizzle-orm'; diff --git a/packages/db/src/schema.ts b/packages/db/src/schema.ts index 0ee28d0..a5c0993 100644 --- a/packages/db/src/schema.ts +++ b/packages/db/src/schema.ts @@ -3,7 +3,18 @@ * drizzle-kit reads this file directly (avoids CJS/ESM extension issues). */ -import { pgTable, text, timestamp, boolean, uuid, jsonb, index } from 'drizzle-orm/pg-core'; +import { + pgTable, + text, + timestamp, + boolean, + uuid, + jsonb, + index, + real, + integer, + customType, +} from 'drizzle-orm/pg-core'; // ─── Auth (BetterAuth-compatible) ──────────────────────────────────────────── @@ -211,3 +222,152 @@ export const messages = pgTable( }, (t) => [index('messages_conversation_id_idx').on(t.conversationId)], ); + +// ─── pgvector custom type ─────────────────────────────────────────────────── + +const vector = customType<{ data: number[]; driverParam: string; config: { dimensions: number } }>({ + dataType(config) { + return `vector(${config?.dimensions ?? 
1536})`; + }, + fromDriver(value: unknown): number[] { + const str = value as string; + return str + .slice(1, -1) + .split(',') + .map((v) => Number(v)); + }, + toDriver(value: number[]): string { + return `[${value.join(',')}]`; + }, +}); + +// ─── Memory ───────────────────────────────────────────────────────────────── + +export const preferences = pgTable( + 'preferences', + { + id: uuid('id').primaryKey().defaultRandom(), + userId: text('user_id') + .notNull() + .references(() => users.id, { onDelete: 'cascade' }), + key: text('key').notNull(), + value: jsonb('value').notNull(), + category: text('category', { + enum: ['communication', 'coding', 'workflow', 'appearance', 'general'], + }) + .notNull() + .default('general'), + source: text('source'), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(), + }, + (t) => [ + index('preferences_user_id_idx').on(t.userId), + index('preferences_user_key_idx').on(t.userId, t.key), + ], +); + +export const insights = pgTable( + 'insights', + { + id: uuid('id').primaryKey().defaultRandom(), + userId: text('user_id') + .notNull() + .references(() => users.id, { onDelete: 'cascade' }), + content: text('content').notNull(), + embedding: vector('embedding', { dimensions: 1536 }), + source: text('source', { + enum: ['agent', 'user', 'summarization', 'system'], + }) + .notNull() + .default('agent'), + category: text('category', { + enum: ['decision', 'learning', 'preference', 'fact', 'pattern', 'general'], + }) + .notNull() + .default('general'), + relevanceScore: real('relevance_score').notNull().default(1.0), + metadata: jsonb('metadata'), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(), + decayedAt: timestamp('decayed_at', { withTimezone: true }), + }, + (t) => [ + 
index('insights_user_id_idx').on(t.userId), + index('insights_category_idx').on(t.category), + index('insights_relevance_idx').on(t.relevanceScore), + ], +); + +// ─── Agent Logs ───────────────────────────────────────────────────────────── + +export const agentLogs = pgTable( + 'agent_logs', + { + id: uuid('id').primaryKey().defaultRandom(), + sessionId: text('session_id').notNull(), + userId: text('user_id').references(() => users.id, { onDelete: 'set null' }), + level: text('level', { enum: ['debug', 'info', 'warn', 'error'] }) + .notNull() + .default('info'), + category: text('category', { + enum: ['decision', 'tool_use', 'learning', 'error', 'general'], + }) + .notNull() + .default('general'), + content: text('content').notNull(), + metadata: jsonb('metadata'), + tier: text('tier', { enum: ['hot', 'warm', 'cold'] }) + .notNull() + .default('hot'), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + summarizedAt: timestamp('summarized_at', { withTimezone: true }), + archivedAt: timestamp('archived_at', { withTimezone: true }), + }, + (t) => [ + index('agent_logs_session_id_idx').on(t.sessionId), + index('agent_logs_user_id_idx').on(t.userId), + index('agent_logs_tier_idx').on(t.tier), + index('agent_logs_created_at_idx').on(t.createdAt), + ], +); + +// ─── Skills ───────────────────────────────────────────────────────────────── + +export const skills = pgTable( + 'skills', + { + id: uuid('id').primaryKey().defaultRandom(), + name: text('name').notNull().unique(), + description: text('description'), + version: text('version'), + source: text('source', { enum: ['builtin', 'community', 'custom'] }) + .notNull() + .default('custom'), + config: jsonb('config'), + enabled: boolean('enabled').notNull().default(true), + installedBy: text('installed_by').references(() => users.id, { onDelete: 'set null' }), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + updatedAt: timestamp('updated_at', { 
withTimezone: true }).notNull().defaultNow(), + }, + (t) => [index('skills_enabled_idx').on(t.enabled)], +); + +// ─── Summarization Jobs ───────────────────────────────────────────────────── + +export const summarizationJobs = pgTable( + 'summarization_jobs', + { + id: uuid('id').primaryKey().defaultRandom(), + status: text('status', { enum: ['pending', 'running', 'completed', 'failed'] }) + .notNull() + .default('pending'), + logsProcessed: integer('logs_processed').notNull().default(0), + insightsCreated: integer('insights_created').notNull().default(0), + errorMessage: text('error_message'), + startedAt: timestamp('started_at', { withTimezone: true }), + completedAt: timestamp('completed_at', { withTimezone: true }), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }, + (t) => [index('summarization_jobs_status_idx').on(t.status)], +); diff --git a/packages/log/package.json b/packages/log/package.json index b1ae41f..08cf723 100644 --- a/packages/log/package.json +++ b/packages/log/package.json @@ -1,6 +1,7 @@ { "name": "@mosaic/log", "version": "0.0.0", + "type": "module", "main": "dist/index.js", "types": "dist/index.d.ts", "exports": { @@ -15,6 +16,10 @@ "typecheck": "tsc --noEmit", "test": "vitest run --passWithNoTests" }, + "dependencies": { + "@mosaic/db": "workspace:*", + "drizzle-orm": "^0.45.1" + }, "devDependencies": { "typescript": "^5.8.0", "vitest": "^2.0.0" diff --git a/packages/log/src/agent-logs.ts b/packages/log/src/agent-logs.ts new file mode 100644 index 0000000..36237f4 --- /dev/null +++ b/packages/log/src/agent-logs.ts @@ -0,0 +1,117 @@ +import { eq, and, desc, lt, sql, type Db, agentLogs } from '@mosaic/db'; + +export type AgentLog = typeof agentLogs.$inferSelect; +export type NewAgentLog = typeof agentLogs.$inferInsert; + +export type LogLevel = 'debug' | 'info' | 'warn' | 'error'; +export type LogCategory = 'decision' | 'tool_use' | 'learning' | 'error' | 'general'; +export type LogTier = 'hot' | 
'warm' | 'cold'; + +export interface LogQuery { + userId?: string; + sessionId?: string; + level?: LogLevel; + category?: LogCategory; + tier?: LogTier; + since?: Date; + until?: Date; + limit?: number; + offset?: number; +} + +export function createAgentLogsRepo(db: Db) { + return { + async ingest(entry: NewAgentLog): Promise { + const rows = await db.insert(agentLogs).values(entry).returning(); + return rows[0]!; + }, + + async ingestBatch(entries: NewAgentLog[]): Promise { + if (entries.length === 0) return []; + return db.insert(agentLogs).values(entries).returning(); + }, + + async query(params: LogQuery): Promise { + const conditions = []; + + if (params.userId) conditions.push(eq(agentLogs.userId, params.userId)); + if (params.sessionId) conditions.push(eq(agentLogs.sessionId, params.sessionId)); + if (params.level) conditions.push(eq(agentLogs.level, params.level)); + if (params.category) conditions.push(eq(agentLogs.category, params.category)); + if (params.tier) conditions.push(eq(agentLogs.tier, params.tier)); + if (params.since) conditions.push(sql`${agentLogs.createdAt} >= ${params.since}`); + if (params.until) conditions.push(sql`${agentLogs.createdAt} <= ${params.until}`); + + const where = conditions.length > 0 ? and(...conditions) : undefined; + + return db + .select() + .from(agentLogs) + .where(where) + .orderBy(desc(agentLogs.createdAt)) + .limit(params.limit ?? 100) + .offset(params.offset ?? 0); + }, + + async findById(id: string): Promise { + const rows = await db.select().from(agentLogs).where(eq(agentLogs.id, id)); + return rows[0]; + }, + + /** + * Transition hot logs older than the cutoff to warm tier. + * Returns the number of logs transitioned. 
+ */ + async promoteToWarm(olderThan: Date): Promise { + const result = await db + .update(agentLogs) + .set({ tier: 'warm', summarizedAt: new Date() }) + .where(and(eq(agentLogs.tier, 'hot'), lt(agentLogs.createdAt, olderThan))) + .returning(); + return result.length; + }, + + /** + * Transition warm logs older than the cutoff to cold tier. + */ + async promoteToCold(olderThan: Date): Promise { + const result = await db + .update(agentLogs) + .set({ tier: 'cold', archivedAt: new Date() }) + .where(and(eq(agentLogs.tier, 'warm'), lt(agentLogs.createdAt, olderThan))) + .returning(); + return result.length; + }, + + /** + * Delete cold logs older than the retention period. + */ + async purge(olderThan: Date): Promise { + const result = await db + .delete(agentLogs) + .where(and(eq(agentLogs.tier, 'cold'), lt(agentLogs.createdAt, olderThan))) + .returning(); + return result.length; + }, + + /** + * Get hot logs ready for summarization (decisions + learnings). + */ + async getLogsForSummarization(olderThan: Date, limit = 100): Promise { + return db + .select() + .from(agentLogs) + .where( + and( + eq(agentLogs.tier, 'hot'), + lt(agentLogs.createdAt, olderThan), + sql`${agentLogs.category} IN ('decision', 'learning', 'tool_use')`, + ), + ) + .orderBy(agentLogs.createdAt) + .limit(limit); + }, + }; +} + +export type AgentLogsRepo = ReturnType; diff --git a/packages/log/src/index.ts b/packages/log/src/index.ts index 0c18d5d..f73585f 100644 --- a/packages/log/src/index.ts +++ b/packages/log/src/index.ts @@ -1 +1,11 @@ -export const VERSION = '0.0.0'; +export { createLogService, type LogService } from './log-service.js'; +export { + createAgentLogsRepo, + type AgentLogsRepo, + type AgentLog, + type NewAgentLog, + type LogLevel, + type LogCategory, + type LogTier, + type LogQuery, +} from './agent-logs.js'; diff --git a/packages/log/src/log-service.ts b/packages/log/src/log-service.ts new file mode 100644 index 0000000..aa050a9 --- /dev/null +++ 
b/packages/log/src/log-service.ts @@ -0,0 +1,12 @@ +import type { Db } from '@mosaic/db'; +import { createAgentLogsRepo, type AgentLogsRepo } from './agent-logs.js'; + +export interface LogService { + logs: AgentLogsRepo; +} + +export function createLogService(db: Db): LogService { + return { + logs: createAgentLogsRepo(db), + }; +} diff --git a/packages/memory/package.json b/packages/memory/package.json index cce07cd..f669d56 100644 --- a/packages/memory/package.json +++ b/packages/memory/package.json @@ -1,6 +1,7 @@ { "name": "@mosaic/memory", "version": "0.0.0", + "type": "module", "main": "dist/index.js", "types": "dist/index.d.ts", "exports": { @@ -16,7 +17,9 @@ "test": "vitest run --passWithNoTests" }, "dependencies": { - "@mosaic/types": "workspace:*" + "@mosaic/db": "workspace:*", + "@mosaic/types": "workspace:*", + "drizzle-orm": "^0.45.1" }, "devDependencies": { "typescript": "^5.8.0", diff --git a/packages/memory/src/index.ts b/packages/memory/src/index.ts index 0c18d5d..ebe925c 100644 --- a/packages/memory/src/index.ts +++ b/packages/memory/src/index.ts @@ -1 +1,15 @@ -export const VERSION = '0.0.0'; +export { createMemory, type Memory } from './memory.js'; +export { + createPreferencesRepo, + type PreferencesRepo, + type Preference, + type NewPreference, +} from './preferences.js'; +export { + createInsightsRepo, + type InsightsRepo, + type Insight, + type NewInsight, + type SearchResult, +} from './insights.js'; +export type { VectorStore, VectorSearchResult, EmbeddingProvider } from './vector-store.js'; diff --git a/packages/memory/src/insights.ts b/packages/memory/src/insights.ts new file mode 100644 index 0000000..d844caa --- /dev/null +++ b/packages/memory/src/insights.ts @@ -0,0 +1,89 @@ +import { eq, and, desc, sql, lt, type Db, insights } from '@mosaic/db'; + +export type Insight = typeof insights.$inferSelect; +export type NewInsight = typeof insights.$inferInsert; + +export interface SearchResult { + insight: Insight; + distance: number; +} + 
+export function createInsightsRepo(db: Db) { + return { + async findByUser(userId: string, limit = 50): Promise { + return db + .select() + .from(insights) + .where(eq(insights.userId, userId)) + .orderBy(desc(insights.createdAt)) + .limit(limit); + }, + + async findById(id: string): Promise { + const rows = await db.select().from(insights).where(eq(insights.id, id)); + return rows[0]; + }, + + async create(data: NewInsight): Promise { + const rows = await db.insert(insights).values(data).returning(); + return rows[0]!; + }, + + async update(id: string, data: Partial): Promise { + const rows = await db + .update(insights) + .set({ ...data, updatedAt: new Date() }) + .where(eq(insights.id, id)) + .returning(); + return rows[0]; + }, + + async remove(id: string): Promise { + const rows = await db.delete(insights).where(eq(insights.id, id)).returning(); + return rows.length > 0; + }, + + /** + * Semantic search using pgvector cosine distance. + * Requires the vector extension and an embedding for the query. + */ + async searchByEmbedding( + userId: string, + queryEmbedding: number[], + limit = 10, + maxDistance = 0.8, + ): Promise { + const embeddingStr = `[${queryEmbedding.join(',')}]`; + const rows = await db.execute(sql` + SELECT *, + (embedding <=> ${embeddingStr}::vector) AS distance + FROM insights + WHERE user_id = ${userId} + AND embedding IS NOT NULL + AND (embedding <=> ${embeddingStr}::vector) < ${maxDistance} + ORDER BY distance ASC + LIMIT ${limit} + `); + + return rows as unknown as SearchResult[]; + }, + + /** + * Decay relevance scores for old insights that haven't been accessed recently. 
+ */ + async decayOldInsights(olderThan: Date, decayFactor = 0.95): Promise { + const result = await db + .update(insights) + .set({ + relevanceScore: sql`${insights.relevanceScore} * ${decayFactor}`, + decayedAt: new Date(), + updatedAt: new Date(), + }) + .where(and(lt(insights.updatedAt, olderThan), sql`${insights.relevanceScore} > 0.1`)) + .returning(); + return result.length; + }, + }; +} + +export type InsightsRepo = ReturnType; diff --git a/packages/memory/src/memory.ts b/packages/memory/src/memory.ts new file mode 100644 index 0000000..ef46aca --- /dev/null +++ b/packages/memory/src/memory.ts @@ -0,0 +1,15 @@ +import type { Db } from '@mosaic/db'; +import { createPreferencesRepo, type PreferencesRepo } from './preferences.js'; +import { createInsightsRepo, type InsightsRepo } from './insights.js'; + +export interface Memory { + preferences: PreferencesRepo; + insights: InsightsRepo; +} + +export function createMemory(db: Db): Memory { + return { + preferences: createPreferencesRepo(db), + insights: createInsightsRepo(db), + }; +} diff --git a/packages/memory/src/preferences.ts b/packages/memory/src/preferences.ts new file mode 100644 index 0000000..9b3a811 --- /dev/null +++ b/packages/memory/src/preferences.ts @@ -0,0 +1,59 @@ +import { eq, and, type Db, preferences } from '@mosaic/db'; + +export type Preference = typeof preferences.$inferSelect; +export type NewPreference = typeof preferences.$inferInsert; + +export function createPreferencesRepo(db: Db) { + return { + async findByUser(userId: string): Promise { + return db.select().from(preferences).where(eq(preferences.userId, userId)); + }, + + async findByUserAndKey(userId: string, key: string): Promise { + const rows = await db + .select() + .from(preferences) + .where(and(eq(preferences.userId, userId), eq(preferences.key, key))); + return rows[0]; + }, + + async findByUserAndCategory( + userId: string, + category: Preference['category'], + ): Promise { + return db + .select() + .from(preferences) + 
.where(and(eq(preferences.userId, userId), eq(preferences.category, category))); + }, + + async upsert(data: NewPreference): Promise { + const existing = await db + .select() + .from(preferences) + .where(and(eq(preferences.userId, data.userId), eq(preferences.key, data.key))); + + if (existing[0]) { + const rows = await db + .update(preferences) + .set({ value: data.value, category: data.category, updatedAt: new Date() }) + .where(eq(preferences.id, existing[0].id)) + .returning(); + return rows[0]!; + } + + const rows = await db.insert(preferences).values(data).returning(); + return rows[0]!; + }, + + async remove(userId: string, key: string): Promise { + const rows = await db + .delete(preferences) + .where(and(eq(preferences.userId, userId), eq(preferences.key, key))) + .returning(); + return rows.length > 0; + }, + }; +} + +export type PreferencesRepo = ReturnType; diff --git a/packages/memory/src/vector-store.ts b/packages/memory/src/vector-store.ts new file mode 100644 index 0000000..f268d49 --- /dev/null +++ b/packages/memory/src/vector-store.ts @@ -0,0 +1,39 @@ +/** + * VectorStore interface — abstraction over pgvector that allows future + * swap to Qdrant, Pinecone, etc. + */ +export interface VectorStore { + /** Store an embedding with an associated document ID. */ + store(documentId: string, embedding: number[], metadata?: Record): Promise; + + /** Search for similar embeddings, returning document IDs and distances. */ + search( + queryEmbedding: number[], + limit?: number, + filter?: Record, + ): Promise; + + /** Delete an embedding by document ID. */ + remove(documentId: string): Promise; +} + +export interface VectorSearchResult { + documentId: string; + distance: number; + metadata?: Record; +} + +/** + * EmbeddingProvider interface — generates embeddings from text. + * Implemented by the gateway using the configured LLM provider. + */ +export interface EmbeddingProvider { + /** Generate an embedding vector for the given text. 
*/ + embed(text: string): Promise; + + /** Generate embeddings for multiple texts in batch. */ + embedBatch(texts: string[]): Promise; + + /** The dimensionality of the embeddings this provider generates. */ + dimensions: number; +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b51a464..48827c7 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -59,6 +59,12 @@ importers: '@mosaic/db': specifier: workspace:^ version: link:../../packages/db + '@mosaic/log': + specifier: workspace:^ + version: link:../../packages/log + '@mosaic/memory': + specifier: workspace:^ + version: link:../../packages/memory '@mosaic/types': specifier: workspace:^ version: link:../../packages/types @@ -107,6 +113,9 @@ importers: fastify: specifier: ^5.0.0 version: 5.8.2 + node-cron: + specifier: ^4.2.1 + version: 4.2.1 reflect-metadata: specifier: ^0.2.0 version: 0.2.2 @@ -123,6 +132,9 @@ importers: '@types/node': specifier: ^22.0.0 version: 22.19.15 + '@types/node-cron': + specifier: ^3.0.11 + version: 3.0.11 '@types/uuid': specifier: ^10.0.0 version: 10.0.0 @@ -321,6 +333,13 @@ importers: version: 2.1.9(@types/node@22.19.15)(lightningcss@1.31.1) packages/log: + dependencies: + '@mosaic/db': + specifier: workspace:* + version: link:../db + drizzle-orm: + specifier: ^0.45.1 + version: 0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8) devDependencies: typescript: specifier: ^5.8.0 @@ -331,9 +350,15 @@ importers: packages/memory: dependencies: + '@mosaic/db': + specifier: workspace:* + version: link:../db '@mosaic/types': specifier: workspace:* version: link:../types + drizzle-orm: + specifier: ^0.45.1 + version: 0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8) devDependencies: typescript: specifier: ^5.8.0 @@ -2728,6 +2753,9 @@ packages: '@types/mysql@2.15.27': resolution: {integrity: sha512-YfWiV16IY0OeBfBCk8+hXKmdTKrKlwKN1MNKAPBu5JYxLwBEZl7QzeEpGnlZb3VMGJrrGmB84gXiH+ofs/TezA==} + '@types/node-cron@3.0.11': + resolution: 
{integrity: sha512-0ikrnug3/IyneSHqCBeslAhlK2aBfYek1fGo4bP4QnZPmiqSGRK+Oy7ZMisLWkesffJvQ1cqAcBnJC+8+nxIAg==} + '@types/node@22.19.15': resolution: {integrity: sha512-F0R/h2+dsy5wJAUe3tAU6oqa2qbWY5TpNfL/RGmo1y38hiyO1w3x2jPtt76wmuaJI4DQnOBu21cNXQ2STIUUWg==} @@ -4155,6 +4183,10 @@ packages: sass: optional: true + node-cron@4.2.1: + resolution: {integrity: sha512-lgimEHPE/QDgFlywTd8yTR61ptugX3Qer29efeyWw2rv259HtGBNn1vZVmp8lB9uo9wC0t/AT4iGqXxia+CJFg==} + engines: {node: '>=6.0.0'} + node-domexception@1.0.0: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} @@ -7529,6 +7561,8 @@ snapshots: dependencies: '@types/node': 22.19.15 + '@types/node-cron@3.0.11': {} + '@types/node@22.19.15': dependencies: undici-types: 6.21.0 @@ -8982,6 +9016,8 @@ snapshots: - '@babel/core' - babel-plugin-macros + node-cron@4.2.1: {} + node-domexception@1.0.0: {} node-fetch@3.3.2: