feat(Phase 4): Memory & Intelligence — memory, log, summarization, skills (#91)
Co-authored-by: Jason Woltje <jason@diversecanvas.com> Co-committed-by: Jason Woltje <jason@diversecanvas.com>
This commit was merged in pull request #91.
This commit is contained in:
@@ -18,6 +18,8 @@
|
|||||||
"@mosaic/brain": "workspace:^",
|
"@mosaic/brain": "workspace:^",
|
||||||
"@mosaic/coord": "workspace:^",
|
"@mosaic/coord": "workspace:^",
|
||||||
"@mosaic/db": "workspace:^",
|
"@mosaic/db": "workspace:^",
|
||||||
|
"@mosaic/log": "workspace:^",
|
||||||
|
"@mosaic/memory": "workspace:^",
|
||||||
"@mosaic/types": "workspace:^",
|
"@mosaic/types": "workspace:^",
|
||||||
"@nestjs/common": "^11.0.0",
|
"@nestjs/common": "^11.0.0",
|
||||||
"@nestjs/core": "^11.0.0",
|
"@nestjs/core": "^11.0.0",
|
||||||
@@ -34,6 +36,7 @@
|
|||||||
"@sinclair/typebox": "^0.34.48",
|
"@sinclair/typebox": "^0.34.48",
|
||||||
"better-auth": "^1.5.5",
|
"better-auth": "^1.5.5",
|
||||||
"fastify": "^5.0.0",
|
"fastify": "^5.0.0",
|
||||||
|
"node-cron": "^4.2.1",
|
||||||
"reflect-metadata": "^0.2.0",
|
"reflect-metadata": "^0.2.0",
|
||||||
"rxjs": "^7.8.0",
|
"rxjs": "^7.8.0",
|
||||||
"socket.io": "^4.8.0",
|
"socket.io": "^4.8.0",
|
||||||
@@ -41,6 +44,7 @@
|
|||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/node": "^22.0.0",
|
"@types/node": "^22.0.0",
|
||||||
|
"@types/node-cron": "^3.0.11",
|
||||||
"@types/uuid": "^10.0.0",
|
"@types/uuid": "^10.0.0",
|
||||||
"tsx": "^4.0.0",
|
"tsx": "^4.0.0",
|
||||||
"typescript": "^5.8.0",
|
"typescript": "^5.8.0",
|
||||||
|
|||||||
@@ -7,11 +7,15 @@ import {
|
|||||||
type ToolDefinition,
|
type ToolDefinition,
|
||||||
} from '@mariozechner/pi-coding-agent';
|
} from '@mariozechner/pi-coding-agent';
|
||||||
import type { Brain } from '@mosaic/brain';
|
import type { Brain } from '@mosaic/brain';
|
||||||
|
import type { Memory } from '@mosaic/memory';
|
||||||
import { BRAIN } from '../brain/brain.tokens.js';
|
import { BRAIN } from '../brain/brain.tokens.js';
|
||||||
|
import { MEMORY } from '../memory/memory.tokens.js';
|
||||||
|
import { EmbeddingService } from '../memory/embedding.service.js';
|
||||||
import { CoordService } from '../coord/coord.service.js';
|
import { CoordService } from '../coord/coord.service.js';
|
||||||
import { ProviderService } from './provider.service.js';
|
import { ProviderService } from './provider.service.js';
|
||||||
import { createBrainTools } from './tools/brain-tools.js';
|
import { createBrainTools } from './tools/brain-tools.js';
|
||||||
import { createCoordTools } from './tools/coord-tools.js';
|
import { createCoordTools } from './tools/coord-tools.js';
|
||||||
|
import { createMemoryTools } from './tools/memory-tools.js';
|
||||||
import type { SessionInfoDto } from './session.dto.js';
|
import type { SessionInfoDto } from './session.dto.js';
|
||||||
|
|
||||||
export interface AgentSessionOptions {
|
export interface AgentSessionOptions {
|
||||||
@@ -42,9 +46,15 @@ export class AgentService implements OnModuleDestroy {
|
|||||||
constructor(
|
constructor(
|
||||||
@Inject(ProviderService) private readonly providerService: ProviderService,
|
@Inject(ProviderService) private readonly providerService: ProviderService,
|
||||||
@Inject(BRAIN) private readonly brain: Brain,
|
@Inject(BRAIN) private readonly brain: Brain,
|
||||||
|
@Inject(MEMORY) private readonly memory: Memory,
|
||||||
|
@Inject(EmbeddingService) private readonly embeddingService: EmbeddingService,
|
||||||
@Inject(CoordService) private readonly coordService: CoordService,
|
@Inject(CoordService) private readonly coordService: CoordService,
|
||||||
) {
|
) {
|
||||||
this.customTools = [...createBrainTools(brain), ...createCoordTools(coordService)];
|
this.customTools = [
|
||||||
|
...createBrainTools(brain),
|
||||||
|
...createCoordTools(coordService),
|
||||||
|
...createMemoryTools(memory, embeddingService.available ? embeddingService : null),
|
||||||
|
];
|
||||||
this.logger.log(`Registered ${this.customTools.length} custom tools`);
|
this.logger.log(`Registered ${this.customTools.length} custom tools`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
158
apps/gateway/src/agent/tools/memory-tools.ts
Normal file
158
apps/gateway/src/agent/tools/memory-tools.ts
Normal file
@@ -0,0 +1,158 @@
|
|||||||
|
import { Type } from '@sinclair/typebox';
|
||||||
|
import type { ToolDefinition } from '@mariozechner/pi-coding-agent';
|
||||||
|
import type { Memory } from '@mosaic/memory';
|
||||||
|
import type { EmbeddingProvider } from '@mosaic/memory';
|
||||||
|
|
||||||
|
export function createMemoryTools(
|
||||||
|
memory: Memory,
|
||||||
|
embeddingProvider: EmbeddingProvider | null,
|
||||||
|
): ToolDefinition[] {
|
||||||
|
const searchMemory: ToolDefinition = {
|
||||||
|
name: 'memory_search',
|
||||||
|
label: 'Search Memory',
|
||||||
|
description:
|
||||||
|
'Search across stored insights and knowledge using natural language. Returns semantically similar results.',
|
||||||
|
parameters: Type.Object({
|
||||||
|
userId: Type.String({ description: 'User ID to search memory for' }),
|
||||||
|
query: Type.String({ description: 'Natural language search query' }),
|
||||||
|
limit: Type.Optional(Type.Number({ description: 'Max results (default 5)' })),
|
||||||
|
}),
|
||||||
|
async execute(_toolCallId, params) {
|
||||||
|
const { userId, query, limit } = params as {
|
||||||
|
userId: string;
|
||||||
|
query: string;
|
||||||
|
limit?: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!embeddingProvider) {
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text' as const,
|
||||||
|
text: 'Semantic search unavailable — no embedding provider configured',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const embedding = await embeddingProvider.embed(query);
|
||||||
|
const results = await memory.insights.searchByEmbedding(userId, embedding, limit ?? 5);
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text' as const, text: JSON.stringify(results, null, 2) }],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const getPreferences: ToolDefinition = {
|
||||||
|
name: 'memory_get_preferences',
|
||||||
|
label: 'Get User Preferences',
|
||||||
|
description: 'Retrieve stored preferences for a user.',
|
||||||
|
parameters: Type.Object({
|
||||||
|
userId: Type.String({ description: 'User ID' }),
|
||||||
|
category: Type.Optional(
|
||||||
|
Type.String({
|
||||||
|
description: 'Filter by category: communication, coding, workflow, appearance, general',
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
}),
|
||||||
|
async execute(_toolCallId, params) {
|
||||||
|
const { userId, category } = params as { userId: string; category?: string };
|
||||||
|
type Cat = 'communication' | 'coding' | 'workflow' | 'appearance' | 'general';
|
||||||
|
const prefs = category
|
||||||
|
? await memory.preferences.findByUserAndCategory(userId, category as Cat)
|
||||||
|
: await memory.preferences.findByUser(userId);
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text' as const, text: JSON.stringify(prefs, null, 2) }],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const savePreference: ToolDefinition = {
|
||||||
|
name: 'memory_save_preference',
|
||||||
|
label: 'Save User Preference',
|
||||||
|
description:
|
||||||
|
'Store a learned user preference (e.g., "prefers tables over paragraphs", "timezone: America/Chicago").',
|
||||||
|
parameters: Type.Object({
|
||||||
|
userId: Type.String({ description: 'User ID' }),
|
||||||
|
key: Type.String({ description: 'Preference key' }),
|
||||||
|
value: Type.String({ description: 'Preference value (JSON string)' }),
|
||||||
|
category: Type.Optional(
|
||||||
|
Type.String({
|
||||||
|
description: 'Category: communication, coding, workflow, appearance, general',
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
}),
|
||||||
|
async execute(_toolCallId, params) {
|
||||||
|
const { userId, key, value, category } = params as {
|
||||||
|
userId: string;
|
||||||
|
key: string;
|
||||||
|
value: string;
|
||||||
|
category?: string;
|
||||||
|
};
|
||||||
|
type Cat = 'communication' | 'coding' | 'workflow' | 'appearance' | 'general';
|
||||||
|
let parsedValue: unknown;
|
||||||
|
try {
|
||||||
|
parsedValue = JSON.parse(value);
|
||||||
|
} catch {
|
||||||
|
parsedValue = value;
|
||||||
|
}
|
||||||
|
const pref = await memory.preferences.upsert({
|
||||||
|
userId,
|
||||||
|
key,
|
||||||
|
value: parsedValue,
|
||||||
|
category: (category as Cat) ?? 'general',
|
||||||
|
source: 'agent',
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text' as const, text: JSON.stringify(pref, null, 2) }],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const saveInsight: ToolDefinition = {
|
||||||
|
name: 'memory_save_insight',
|
||||||
|
label: 'Save Insight',
|
||||||
|
description:
|
||||||
|
'Store a learned insight, decision, or knowledge extracted from the current interaction.',
|
||||||
|
parameters: Type.Object({
|
||||||
|
userId: Type.String({ description: 'User ID' }),
|
||||||
|
content: Type.String({ description: 'The insight or knowledge to store' }),
|
||||||
|
category: Type.Optional(
|
||||||
|
Type.String({
|
||||||
|
description: 'Category: decision, learning, preference, fact, pattern, general',
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
}),
|
||||||
|
async execute(_toolCallId, params) {
|
||||||
|
const { userId, content, category } = params as {
|
||||||
|
userId: string;
|
||||||
|
content: string;
|
||||||
|
category?: string;
|
||||||
|
};
|
||||||
|
type Cat = 'decision' | 'learning' | 'preference' | 'fact' | 'pattern' | 'general';
|
||||||
|
|
||||||
|
let embedding: number[] | null = null;
|
||||||
|
if (embeddingProvider) {
|
||||||
|
embedding = await embeddingProvider.embed(content);
|
||||||
|
}
|
||||||
|
|
||||||
|
const insight = await memory.insights.create({
|
||||||
|
userId,
|
||||||
|
content,
|
||||||
|
embedding,
|
||||||
|
source: 'agent',
|
||||||
|
category: (category as Cat) ?? 'learning',
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text' as const, text: JSON.stringify(insight, null, 2) }],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
return [searchMemory, getPreferences, savePreference, saveInsight];
|
||||||
|
}
|
||||||
@@ -10,6 +10,9 @@ import { ProjectsModule } from './projects/projects.module.js';
|
|||||||
import { MissionsModule } from './missions/missions.module.js';
|
import { MissionsModule } from './missions/missions.module.js';
|
||||||
import { TasksModule } from './tasks/tasks.module.js';
|
import { TasksModule } from './tasks/tasks.module.js';
|
||||||
import { CoordModule } from './coord/coord.module.js';
|
import { CoordModule } from './coord/coord.module.js';
|
||||||
|
import { MemoryModule } from './memory/memory.module.js';
|
||||||
|
import { LogModule } from './log/log.module.js';
|
||||||
|
import { SkillsModule } from './skills/skills.module.js';
|
||||||
|
|
||||||
@Module({
|
@Module({
|
||||||
imports: [
|
imports: [
|
||||||
@@ -23,6 +26,9 @@ import { CoordModule } from './coord/coord.module.js';
|
|||||||
MissionsModule,
|
MissionsModule,
|
||||||
TasksModule,
|
TasksModule,
|
||||||
CoordModule,
|
CoordModule,
|
||||||
|
MemoryModule,
|
||||||
|
LogModule,
|
||||||
|
SkillsModule,
|
||||||
],
|
],
|
||||||
controllers: [HealthController],
|
controllers: [HealthController],
|
||||||
})
|
})
|
||||||
|
|||||||
44
apps/gateway/src/log/cron.service.ts
Normal file
44
apps/gateway/src/log/cron.service.ts
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
import { Injectable, Logger, type OnModuleInit, type OnModuleDestroy } from '@nestjs/common';
|
||||||
|
import cron from 'node-cron';
|
||||||
|
import { SummarizationService } from './summarization.service.js';
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class CronService implements OnModuleInit, OnModuleDestroy {
|
||||||
|
private readonly logger = new Logger(CronService.name);
|
||||||
|
private readonly tasks: cron.ScheduledTask[] = [];
|
||||||
|
|
||||||
|
constructor(private readonly summarization: SummarizationService) {}
|
||||||
|
|
||||||
|
onModuleInit(): void {
|
||||||
|
const summarizationSchedule = process.env['SUMMARIZATION_CRON'] ?? '0 */6 * * *'; // every 6 hours
|
||||||
|
const tierManagementSchedule = process.env['TIER_MANAGEMENT_CRON'] ?? '0 3 * * *'; // daily at 3am
|
||||||
|
|
||||||
|
this.tasks.push(
|
||||||
|
cron.schedule(summarizationSchedule, () => {
|
||||||
|
this.summarization.runSummarization().catch((err) => {
|
||||||
|
this.logger.error(`Scheduled summarization failed: ${err}`);
|
||||||
|
});
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.tasks.push(
|
||||||
|
cron.schedule(tierManagementSchedule, () => {
|
||||||
|
this.summarization.runTierManagement().catch((err) => {
|
||||||
|
this.logger.error(`Scheduled tier management failed: ${err}`);
|
||||||
|
});
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
this.logger.log(
|
||||||
|
`Cron scheduled: summarization="${summarizationSchedule}", tier="${tierManagementSchedule}"`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
onModuleDestroy(): void {
|
||||||
|
for (const task of this.tasks) {
|
||||||
|
task.stop();
|
||||||
|
}
|
||||||
|
this.tasks.length = 0;
|
||||||
|
this.logger.log('Cron tasks stopped');
|
||||||
|
}
|
||||||
|
}
|
||||||
62
apps/gateway/src/log/log.controller.ts
Normal file
62
apps/gateway/src/log/log.controller.ts
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
import { Body, Controller, Get, Inject, Param, Post, Query, UseGuards } from '@nestjs/common';
|
||||||
|
import type { LogService } from '@mosaic/log';
|
||||||
|
import { LOG_SERVICE } from './log.tokens.js';
|
||||||
|
import { AuthGuard } from '../auth/auth.guard.js';
|
||||||
|
import type { IngestLogDto, QueryLogsDto } from './log.dto.js';
|
||||||
|
|
||||||
|
@Controller('api/logs')
|
||||||
|
@UseGuards(AuthGuard)
|
||||||
|
export class LogController {
|
||||||
|
constructor(@Inject(LOG_SERVICE) private readonly logService: LogService) {}
|
||||||
|
|
||||||
|
@Post()
|
||||||
|
async ingest(@Query('userId') userId: string, @Body() dto: IngestLogDto) {
|
||||||
|
return this.logService.logs.ingest({
|
||||||
|
sessionId: dto.sessionId,
|
||||||
|
userId,
|
||||||
|
level: dto.level,
|
||||||
|
category: dto.category,
|
||||||
|
content: dto.content,
|
||||||
|
metadata: dto.metadata,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
@Post('batch')
|
||||||
|
async ingestBatch(@Query('userId') userId: string, @Body() dtos: IngestLogDto[]) {
|
||||||
|
const entries = dtos.map((dto) => ({
|
||||||
|
sessionId: dto.sessionId,
|
||||||
|
userId,
|
||||||
|
level: dto.level as 'debug' | 'info' | 'warn' | 'error' | undefined,
|
||||||
|
category: dto.category as
|
||||||
|
| 'decision'
|
||||||
|
| 'tool_use'
|
||||||
|
| 'learning'
|
||||||
|
| 'error'
|
||||||
|
| 'general'
|
||||||
|
| undefined,
|
||||||
|
content: dto.content,
|
||||||
|
metadata: dto.metadata,
|
||||||
|
}));
|
||||||
|
return this.logService.logs.ingestBatch(entries);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Get()
|
||||||
|
async query(@Query('userId') userId: string, @Query() params: QueryLogsDto) {
|
||||||
|
return this.logService.logs.query({
|
||||||
|
userId,
|
||||||
|
sessionId: params.sessionId,
|
||||||
|
level: params.level,
|
||||||
|
category: params.category,
|
||||||
|
tier: params.tier,
|
||||||
|
since: params.since ? new Date(params.since) : undefined,
|
||||||
|
until: params.until ? new Date(params.until) : undefined,
|
||||||
|
limit: params.limit ? Number(params.limit) : undefined,
|
||||||
|
offset: params.offset ? Number(params.offset) : undefined,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
@Get(':id')
|
||||||
|
async findOne(@Param('id') id: string) {
|
||||||
|
return this.logService.logs.findById(id);
|
||||||
|
}
|
||||||
|
}
|
||||||
18
apps/gateway/src/log/log.dto.ts
Normal file
18
apps/gateway/src/log/log.dto.ts
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
export interface IngestLogDto {
|
||||||
|
sessionId: string;
|
||||||
|
level?: 'debug' | 'info' | 'warn' | 'error';
|
||||||
|
category?: 'decision' | 'tool_use' | 'learning' | 'error' | 'general';
|
||||||
|
content: string;
|
||||||
|
metadata?: Record<string, unknown>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface QueryLogsDto {
|
||||||
|
sessionId?: string;
|
||||||
|
level?: 'debug' | 'info' | 'warn' | 'error';
|
||||||
|
category?: 'decision' | 'tool_use' | 'learning' | 'error' | 'general';
|
||||||
|
tier?: 'hot' | 'warm' | 'cold';
|
||||||
|
since?: string;
|
||||||
|
until?: string;
|
||||||
|
limit?: string;
|
||||||
|
offset?: string;
|
||||||
|
}
|
||||||
24
apps/gateway/src/log/log.module.ts
Normal file
24
apps/gateway/src/log/log.module.ts
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
import { Global, Module } from '@nestjs/common';
|
||||||
|
import { createLogService, type LogService } from '@mosaic/log';
|
||||||
|
import type { Db } from '@mosaic/db';
|
||||||
|
import { DB } from '../database/database.module.js';
|
||||||
|
import { LOG_SERVICE } from './log.tokens.js';
|
||||||
|
import { LogController } from './log.controller.js';
|
||||||
|
import { SummarizationService } from './summarization.service.js';
|
||||||
|
import { CronService } from './cron.service.js';
|
||||||
|
|
||||||
|
@Global()
|
||||||
|
@Module({
|
||||||
|
providers: [
|
||||||
|
{
|
||||||
|
provide: LOG_SERVICE,
|
||||||
|
useFactory: (db: Db): LogService => createLogService(db),
|
||||||
|
inject: [DB],
|
||||||
|
},
|
||||||
|
SummarizationService,
|
||||||
|
CronService,
|
||||||
|
],
|
||||||
|
controllers: [LogController],
|
||||||
|
exports: [LOG_SERVICE, SummarizationService],
|
||||||
|
})
|
||||||
|
export class LogModule {}
|
||||||
1
apps/gateway/src/log/log.tokens.ts
Normal file
1
apps/gateway/src/log/log.tokens.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export const LOG_SERVICE = 'LOG_SERVICE';
|
||||||
178
apps/gateway/src/log/summarization.service.ts
Normal file
178
apps/gateway/src/log/summarization.service.ts
Normal file
@@ -0,0 +1,178 @@
|
|||||||
|
import { Inject, Injectable, Logger } from '@nestjs/common';
|
||||||
|
import type { LogService } from '@mosaic/log';
|
||||||
|
import type { Memory } from '@mosaic/memory';
|
||||||
|
import { LOG_SERVICE } from './log.tokens.js';
|
||||||
|
import { MEMORY } from '../memory/memory.tokens.js';
|
||||||
|
import { EmbeddingService } from '../memory/embedding.service.js';
|
||||||
|
import type { Db } from '@mosaic/db';
|
||||||
|
import { sql, summarizationJobs } from '@mosaic/db';
|
||||||
|
import { DB } from '../database/database.module.js';
|
||||||
|
|
||||||
|
const SUMMARIZATION_PROMPT = `You are a knowledge extraction assistant. Given the following agent interaction logs, extract the key decisions, learnings, and patterns. Output a concise summary (2-4 sentences) that captures the most important information for future reference. Focus on actionable insights, not raw events.
|
||||||
|
|
||||||
|
Logs:
|
||||||
|
{logs}
|
||||||
|
|
||||||
|
Summary:`;
|
||||||
|
|
||||||
|
interface ChatCompletion {
|
||||||
|
choices: Array<{ message: { content: string } }>;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class SummarizationService {
|
||||||
|
private readonly logger = new Logger(SummarizationService.name);
|
||||||
|
private readonly apiKey: string | undefined;
|
||||||
|
private readonly baseUrl: string;
|
||||||
|
private readonly model: string;
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
@Inject(LOG_SERVICE) private readonly logService: LogService,
|
||||||
|
@Inject(MEMORY) private readonly memory: Memory,
|
||||||
|
private readonly embeddings: EmbeddingService,
|
||||||
|
@Inject(DB) private readonly db: Db,
|
||||||
|
) {
|
||||||
|
this.apiKey = process.env['OPENAI_API_KEY'];
|
||||||
|
this.baseUrl = process.env['SUMMARIZATION_API_URL'] ?? 'https://api.openai.com/v1';
|
||||||
|
this.model = process.env['SUMMARIZATION_MODEL'] ?? 'gpt-4o-mini';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run one summarization cycle:
|
||||||
|
* 1. Find hot logs older than 24h with decision/learning/tool_use categories
|
||||||
|
* 2. Group by session
|
||||||
|
* 3. Summarize each group via cheap LLM
|
||||||
|
* 4. Store as insights with embeddings
|
||||||
|
* 5. Transition processed logs to warm tier
|
||||||
|
*/
|
||||||
|
async runSummarization(): Promise<{ logsProcessed: number; insightsCreated: number }> {
|
||||||
|
const cutoff = new Date(Date.now() - 24 * 60 * 60 * 1000); // 24h ago
|
||||||
|
|
||||||
|
// Create job record
|
||||||
|
const [job] = await this.db
|
||||||
|
.insert(summarizationJobs)
|
||||||
|
.values({ status: 'running', startedAt: new Date() })
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
try {
|
||||||
|
const logs = await this.logService.logs.getLogsForSummarization(cutoff, 200);
|
||||||
|
if (logs.length === 0) {
|
||||||
|
await this.db
|
||||||
|
.update(summarizationJobs)
|
||||||
|
.set({ status: 'completed', completedAt: new Date() })
|
||||||
|
.where(sql`id = ${job!.id}`);
|
||||||
|
return { logsProcessed: 0, insightsCreated: 0 };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Group logs by session
|
||||||
|
const bySession = new Map<string, typeof logs>();
|
||||||
|
for (const log of logs) {
|
||||||
|
const group = bySession.get(log.sessionId) ?? [];
|
||||||
|
group.push(log);
|
||||||
|
bySession.set(log.sessionId, group);
|
||||||
|
}
|
||||||
|
|
||||||
|
let insightsCreated = 0;
|
||||||
|
|
||||||
|
for (const [sessionId, sessionLogs] of bySession) {
|
||||||
|
const userId = sessionLogs[0]?.userId;
|
||||||
|
if (!userId) continue;
|
||||||
|
|
||||||
|
const logsText = sessionLogs.map((l) => `[${l.category}] ${l.content}`).join('\n');
|
||||||
|
|
||||||
|
const summary = await this.summarize(logsText);
|
||||||
|
if (!summary) continue;
|
||||||
|
|
||||||
|
const embedding = this.embeddings.available
|
||||||
|
? await this.embeddings.embed(summary)
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
await this.memory.insights.create({
|
||||||
|
userId,
|
||||||
|
content: summary,
|
||||||
|
embedding: embedding ?? null,
|
||||||
|
source: 'summarization',
|
||||||
|
category: 'learning',
|
||||||
|
metadata: { sessionId, logCount: sessionLogs.length },
|
||||||
|
});
|
||||||
|
insightsCreated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Transition processed logs to warm
|
||||||
|
await this.logService.logs.promoteToWarm(cutoff);
|
||||||
|
|
||||||
|
await this.db
|
||||||
|
.update(summarizationJobs)
|
||||||
|
.set({
|
||||||
|
status: 'completed',
|
||||||
|
logsProcessed: logs.length,
|
||||||
|
insightsCreated,
|
||||||
|
completedAt: new Date(),
|
||||||
|
})
|
||||||
|
.where(sql`id = ${job!.id}`);
|
||||||
|
|
||||||
|
this.logger.log(`Summarization complete: ${logs.length} logs → ${insightsCreated} insights`);
|
||||||
|
return { logsProcessed: logs.length, insightsCreated };
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
|
await this.db
|
||||||
|
.update(summarizationJobs)
|
||||||
|
.set({ status: 'failed', errorMessage: message, completedAt: new Date() })
|
||||||
|
.where(sql`id = ${job!.id}`);
|
||||||
|
this.logger.error(`Summarization failed: ${message}`);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run tier management:
|
||||||
|
* - Warm logs older than 30 days → cold
|
||||||
|
* - Cold logs older than 90 days → purged
|
||||||
|
* - Decay old insight relevance scores
|
||||||
|
*/
|
||||||
|
async runTierManagement(): Promise<void> {
|
||||||
|
const warmCutoff = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
|
||||||
|
const coldCutoff = new Date(Date.now() - 90 * 24 * 60 * 60 * 1000);
|
||||||
|
const decayCutoff = new Date(Date.now() - 14 * 24 * 60 * 60 * 1000);
|
||||||
|
|
||||||
|
const promoted = await this.logService.logs.promoteToCold(warmCutoff);
|
||||||
|
const purged = await this.logService.logs.purge(coldCutoff);
|
||||||
|
const decayed = await this.memory.insights.decayOldInsights(decayCutoff);
|
||||||
|
|
||||||
|
this.logger.log(
|
||||||
|
`Tier management: ${promoted} logs→cold, ${purged} purged, ${decayed} insights decayed`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private async summarize(logsText: string): Promise<string | null> {
|
||||||
|
if (!this.apiKey) {
|
||||||
|
this.logger.warn('No API key configured — skipping summarization');
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const prompt = SUMMARIZATION_PROMPT.replace('{logs}', logsText);
|
||||||
|
|
||||||
|
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
Authorization: `Bearer ${this.apiKey}`,
|
||||||
|
},
|
||||||
|
body: JSON.stringify({
|
||||||
|
model: this.model,
|
||||||
|
messages: [{ role: 'user', content: prompt }],
|
||||||
|
max_tokens: 300,
|
||||||
|
temperature: 0.3,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const body = await response.text();
|
||||||
|
this.logger.error(`Summarization API error: ${response.status} ${body}`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const json = (await response.json()) as ChatCompletion;
|
||||||
|
return json.choices[0]?.message.content ?? null;
|
||||||
|
}
|
||||||
|
}
|
||||||
69
apps/gateway/src/memory/embedding.service.ts
Normal file
69
apps/gateway/src/memory/embedding.service.ts
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
import { Injectable, Logger } from '@nestjs/common';
|
||||||
|
import type { EmbeddingProvider } from '@mosaic/memory';
|
||||||
|
|
||||||
|
const DEFAULT_MODEL = 'text-embedding-3-small';
|
||||||
|
const DEFAULT_DIMENSIONS = 1536;
|
||||||
|
|
||||||
|
interface EmbeddingResponse {
|
||||||
|
data: Array<{ embedding: number[]; index: number }>;
|
||||||
|
model: string;
|
||||||
|
usage: { prompt_tokens: number; total_tokens: number };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generates embeddings via the OpenAI-compatible embeddings API.
|
||||||
|
* Supports OpenAI, Azure OpenAI, and any provider with a compatible endpoint.
|
||||||
|
*/
|
||||||
|
@Injectable()
|
||||||
|
export class EmbeddingService implements EmbeddingProvider {
|
||||||
|
private readonly logger = new Logger(EmbeddingService.name);
|
||||||
|
private readonly apiKey: string | undefined;
|
||||||
|
private readonly baseUrl: string;
|
||||||
|
private readonly model: string;
|
||||||
|
|
||||||
|
readonly dimensions = DEFAULT_DIMENSIONS;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.apiKey = process.env['OPENAI_API_KEY'];
|
||||||
|
this.baseUrl = process.env['EMBEDDING_API_URL'] ?? 'https://api.openai.com/v1';
|
||||||
|
this.model = process.env['EMBEDDING_MODEL'] ?? DEFAULT_MODEL;
|
||||||
|
}
|
||||||
|
|
||||||
|
get available(): boolean {
|
||||||
|
return !!this.apiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
async embed(text: string): Promise<number[]> {
|
||||||
|
const results = await this.embedBatch([text]);
|
||||||
|
return results[0]!;
|
||||||
|
}
|
||||||
|
|
||||||
|
async embedBatch(texts: string[]): Promise<number[][]> {
|
||||||
|
if (!this.apiKey) {
|
||||||
|
this.logger.warn('No OPENAI_API_KEY configured — returning zero vectors');
|
||||||
|
return texts.map(() => new Array<number>(this.dimensions).fill(0));
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(`${this.baseUrl}/embeddings`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
Authorization: `Bearer ${this.apiKey}`,
|
||||||
|
},
|
||||||
|
body: JSON.stringify({
|
||||||
|
model: this.model,
|
||||||
|
input: texts,
|
||||||
|
dimensions: this.dimensions,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const body = await response.text();
|
||||||
|
this.logger.error(`Embedding API error: ${response.status} ${body}`);
|
||||||
|
throw new Error(`Embedding API returned ${response.status}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const json = (await response.json()) as EmbeddingResponse;
|
||||||
|
return json.data.sort((a, b) => a.index - b.index).map((d) => d.embedding);
|
||||||
|
}
|
||||||
|
}
|
||||||
126
apps/gateway/src/memory/memory.controller.ts
Normal file
126
apps/gateway/src/memory/memory.controller.ts
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
import {
|
||||||
|
Body,
|
||||||
|
Controller,
|
||||||
|
Delete,
|
||||||
|
Get,
|
||||||
|
HttpCode,
|
||||||
|
HttpStatus,
|
||||||
|
Inject,
|
||||||
|
NotFoundException,
|
||||||
|
Param,
|
||||||
|
Post,
|
||||||
|
Query,
|
||||||
|
UseGuards,
|
||||||
|
} from '@nestjs/common';
|
||||||
|
import type { Memory } from '@mosaic/memory';
|
||||||
|
import { MEMORY } from './memory.tokens.js';
|
||||||
|
import { AuthGuard } from '../auth/auth.guard.js';
|
||||||
|
import { EmbeddingService } from './embedding.service.js';
|
||||||
|
import type { UpsertPreferenceDto, CreateInsightDto, SearchMemoryDto } from './memory.dto.js';
|
||||||
|
|
||||||
|
@Controller('api/memory')
|
||||||
|
@UseGuards(AuthGuard)
|
||||||
|
export class MemoryController {
|
||||||
|
constructor(
|
||||||
|
@Inject(MEMORY) private readonly memory: Memory,
|
||||||
|
private readonly embeddings: EmbeddingService,
|
||||||
|
) {}
|
||||||
|
|
||||||
|
// ─── Preferences ────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@Get('preferences')
|
||||||
|
async listPreferences(@Query('userId') userId: string, @Query('category') category?: string) {
|
||||||
|
if (category) {
|
||||||
|
return this.memory.preferences.findByUserAndCategory(
|
||||||
|
userId,
|
||||||
|
category as Parameters<typeof this.memory.preferences.findByUserAndCategory>[1],
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return this.memory.preferences.findByUser(userId);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Get('preferences/:key')
|
||||||
|
async getPreference(@Query('userId') userId: string, @Param('key') key: string) {
|
||||||
|
const pref = await this.memory.preferences.findByUserAndKey(userId, key);
|
||||||
|
if (!pref) throw new NotFoundException('Preference not found');
|
||||||
|
return pref;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Post('preferences')
|
||||||
|
async upsertPreference(@Query('userId') userId: string, @Body() dto: UpsertPreferenceDto) {
|
||||||
|
return this.memory.preferences.upsert({
|
||||||
|
userId,
|
||||||
|
key: dto.key,
|
||||||
|
value: dto.value,
|
||||||
|
category: dto.category,
|
||||||
|
source: dto.source,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
@Delete('preferences/:key')
|
||||||
|
@HttpCode(HttpStatus.NO_CONTENT)
|
||||||
|
async removePreference(@Query('userId') userId: string, @Param('key') key: string) {
|
||||||
|
const deleted = await this.memory.preferences.remove(userId, key);
|
||||||
|
if (!deleted) throw new NotFoundException('Preference not found');
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Insights ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@Get('insights')
|
||||||
|
async listInsights(@Query('userId') userId: string, @Query('limit') limit?: string) {
|
||||||
|
return this.memory.insights.findByUser(userId, limit ? Number(limit) : undefined);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Get('insights/:id')
|
||||||
|
async getInsight(@Param('id') id: string) {
|
||||||
|
const insight = await this.memory.insights.findById(id);
|
||||||
|
if (!insight) throw new NotFoundException('Insight not found');
|
||||||
|
return insight;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Post('insights')
|
||||||
|
async createInsight(@Query('userId') userId: string, @Body() dto: CreateInsightDto) {
|
||||||
|
const embedding = this.embeddings.available
|
||||||
|
? await this.embeddings.embed(dto.content)
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
return this.memory.insights.create({
|
||||||
|
userId,
|
||||||
|
content: dto.content,
|
||||||
|
source: dto.source,
|
||||||
|
category: dto.category,
|
||||||
|
metadata: dto.metadata,
|
||||||
|
embedding: embedding ?? null,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
@Delete('insights/:id')
|
||||||
|
@HttpCode(HttpStatus.NO_CONTENT)
|
||||||
|
async removeInsight(@Param('id') id: string) {
|
||||||
|
const deleted = await this.memory.insights.remove(id);
|
||||||
|
if (!deleted) throw new NotFoundException('Insight not found');
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Search ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
@Post('search')
|
||||||
|
async searchMemory(@Query('userId') userId: string, @Body() dto: SearchMemoryDto) {
|
||||||
|
if (!this.embeddings.available) {
|
||||||
|
return {
|
||||||
|
query: dto.query,
|
||||||
|
results: [],
|
||||||
|
message: 'Semantic search requires OPENAI_API_KEY for embeddings',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const queryEmbedding = await this.embeddings.embed(dto.query);
|
||||||
|
const results = await this.memory.insights.searchByEmbedding(
|
||||||
|
userId,
|
||||||
|
queryEmbedding,
|
||||||
|
dto.limit ?? 10,
|
||||||
|
dto.maxDistance ?? 0.8,
|
||||||
|
);
|
||||||
|
|
||||||
|
return { query: dto.query, results };
|
||||||
|
}
|
||||||
|
}
|
||||||
19
apps/gateway/src/memory/memory.dto.ts
Normal file
19
apps/gateway/src/memory/memory.dto.ts
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
/** Request body for POST preferences — create or update one preference. */
export interface UpsertPreferenceDto {
  key: string;
  // Arbitrary JSON value; stored as-is.
  value: unknown;
  category?: 'communication' | 'coding' | 'workflow' | 'appearance' | 'general';
  // Free-form origin label for the preference.
  source?: string;
}
|
||||||
|
|
||||||
|
/** Request body for POST insights — store a new insight (embedding is computed server-side). */
export interface CreateInsightDto {
  content: string;
  source?: 'agent' | 'user' | 'summarization' | 'system';
  category?: 'decision' | 'learning' | 'preference' | 'fact' | 'pattern' | 'general';
  metadata?: Record<string, unknown>;
}
|
||||||
|
|
||||||
|
/** Request body for POST search — semantic search over stored insights. */
export interface SearchMemoryDto {
  query: string;
  // Max results; the controller defaults this to 10.
  limit?: number;
  // Max vector distance to accept; the controller defaults this to 0.8.
  maxDistance?: number;
}
|
||||||
22
apps/gateway/src/memory/memory.module.ts
Normal file
22
apps/gateway/src/memory/memory.module.ts
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
import { Global, Module } from '@nestjs/common';
|
||||||
|
import { createMemory, type Memory } from '@mosaic/memory';
|
||||||
|
import type { Db } from '@mosaic/db';
|
||||||
|
import { DB } from '../database/database.module.js';
|
||||||
|
import { MEMORY } from './memory.tokens.js';
|
||||||
|
import { MemoryController } from './memory.controller.js';
|
||||||
|
import { EmbeddingService } from './embedding.service.js';
|
||||||
|
|
||||||
|
/**
 * Global Nest module wiring the shared memory layer into the gateway.
 * Provides the Memory facade (built from the shared Drizzle handle via a
 * factory) under the MEMORY token, plus the EmbeddingService; both are
 * exported app-wide because the module is @Global().
 */
@Global()
@Module({
  providers: [
    {
      provide: MEMORY,
      // Factory builds the @mosaic/memory facade over the injected DB handle.
      useFactory: (db: Db): Memory => createMemory(db),
      inject: [DB],
    },
    EmbeddingService,
  ],
  controllers: [MemoryController],
  exports: [MEMORY, EmbeddingService],
})
export class MemoryModule {}
|
||||||
1
apps/gateway/src/memory/memory.tokens.ts
Normal file
1
apps/gateway/src/memory/memory.tokens.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
/** DI token under which MemoryModule provides the Memory facade. */
export const MEMORY = 'MEMORY';
|
||||||
67
apps/gateway/src/skills/skills.controller.ts
Normal file
67
apps/gateway/src/skills/skills.controller.ts
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
import {
|
||||||
|
Body,
|
||||||
|
Controller,
|
||||||
|
Delete,
|
||||||
|
Get,
|
||||||
|
HttpCode,
|
||||||
|
HttpStatus,
|
||||||
|
NotFoundException,
|
||||||
|
Param,
|
||||||
|
Patch,
|
||||||
|
Post,
|
||||||
|
UseGuards,
|
||||||
|
} from '@nestjs/common';
|
||||||
|
import { SkillsService } from './skills.service.js';
|
||||||
|
import { AuthGuard } from '../auth/auth.guard.js';
|
||||||
|
import type { CreateSkillDto, UpdateSkillDto } from './skills.dto.js';
|
||||||
|
|
||||||
|
@Controller('api/skills')
|
||||||
|
@UseGuards(AuthGuard)
|
||||||
|
export class SkillsController {
|
||||||
|
constructor(private readonly skills: SkillsService) {}
|
||||||
|
|
||||||
|
@Get()
|
||||||
|
async list() {
|
||||||
|
return this.skills.findAll();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Get(':id')
|
||||||
|
async findOne(@Param('id') id: string) {
|
||||||
|
const skill = await this.skills.findById(id);
|
||||||
|
if (!skill) throw new NotFoundException('Skill not found');
|
||||||
|
return skill;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Post()
|
||||||
|
async create(@Body() dto: CreateSkillDto) {
|
||||||
|
return this.skills.create({
|
||||||
|
name: dto.name,
|
||||||
|
description: dto.description,
|
||||||
|
version: dto.version,
|
||||||
|
source: dto.source,
|
||||||
|
config: dto.config,
|
||||||
|
enabled: dto.enabled,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
@Patch(':id')
|
||||||
|
async update(@Param('id') id: string, @Body() dto: UpdateSkillDto) {
|
||||||
|
const skill = await this.skills.update(id, dto);
|
||||||
|
if (!skill) throw new NotFoundException('Skill not found');
|
||||||
|
return skill;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Patch(':id/toggle')
|
||||||
|
async toggle(@Param('id') id: string, @Body() body: { enabled: boolean }) {
|
||||||
|
const skill = await this.skills.toggle(id, body.enabled);
|
||||||
|
if (!skill) throw new NotFoundException('Skill not found');
|
||||||
|
return skill;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Delete(':id')
|
||||||
|
@HttpCode(HttpStatus.NO_CONTENT)
|
||||||
|
async remove(@Param('id') id: string) {
|
||||||
|
const deleted = await this.skills.remove(id);
|
||||||
|
if (!deleted) throw new NotFoundException('Skill not found');
|
||||||
|
}
|
||||||
|
}
|
||||||
15
apps/gateway/src/skills/skills.dto.ts
Normal file
15
apps/gateway/src/skills/skills.dto.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
/** Request body for POST /api/skills — only the unique name is required. */
export interface CreateSkillDto {
  name: string;
  description?: string;
  version?: string;
  source?: 'builtin' | 'community' | 'custom';
  // Skill-specific configuration blob, stored as JSON.
  config?: Record<string, unknown>;
  enabled?: boolean;
}
|
||||||
|
|
||||||
|
/** Request body for PATCH /api/skills/:id — partial update; name is immutable here. */
export interface UpdateSkillDto {
  description?: string;
  version?: string;
  config?: Record<string, unknown>;
  enabled?: boolean;
}
|
||||||
10
apps/gateway/src/skills/skills.module.ts
Normal file
10
apps/gateway/src/skills/skills.module.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import { Module } from '@nestjs/common';
|
||||||
|
import { SkillsService } from './skills.service.js';
|
||||||
|
import { SkillsController } from './skills.controller.js';
|
||||||
|
|
||||||
|
/** Nest module bundling the skill-catalog service and its REST controller. */
@Module({
  providers: [SkillsService],
  controllers: [SkillsController],
  // Exported so other modules can query/enable skills programmatically.
  exports: [SkillsService],
})
export class SkillsModule {}
|
||||||
52
apps/gateway/src/skills/skills.service.ts
Normal file
52
apps/gateway/src/skills/skills.service.ts
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
import { Inject, Injectable } from '@nestjs/common';
|
||||||
|
import { eq, type Db, skills } from '@mosaic/db';
|
||||||
|
import { DB } from '../database/database.module.js';
|
||||||
|
|
||||||
|
// Row shapes derived from the Drizzle `skills` table definition.
type Skill = typeof skills.$inferSelect; // a persisted skill row
type NewSkill = typeof skills.$inferInsert; // insert payload (columns with defaults optional)
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class SkillsService {
|
||||||
|
constructor(@Inject(DB) private readonly db: Db) {}
|
||||||
|
|
||||||
|
async findAll(): Promise<Skill[]> {
|
||||||
|
return this.db.select().from(skills);
|
||||||
|
}
|
||||||
|
|
||||||
|
async findEnabled(): Promise<Skill[]> {
|
||||||
|
return this.db.select().from(skills).where(eq(skills.enabled, true));
|
||||||
|
}
|
||||||
|
|
||||||
|
async findById(id: string): Promise<Skill | undefined> {
|
||||||
|
const rows = await this.db.select().from(skills).where(eq(skills.id, id));
|
||||||
|
return rows[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
async findByName(name: string): Promise<Skill | undefined> {
|
||||||
|
const rows = await this.db.select().from(skills).where(eq(skills.name, name));
|
||||||
|
return rows[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
async create(data: NewSkill): Promise<Skill> {
|
||||||
|
const rows = await this.db.insert(skills).values(data).returning();
|
||||||
|
return rows[0]!;
|
||||||
|
}
|
||||||
|
|
||||||
|
async update(id: string, data: Partial<NewSkill>): Promise<Skill | undefined> {
|
||||||
|
const rows = await this.db
|
||||||
|
.update(skills)
|
||||||
|
.set({ ...data, updatedAt: new Date() })
|
||||||
|
.where(eq(skills.id, id))
|
||||||
|
.returning();
|
||||||
|
return rows[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
async remove(id: string): Promise<boolean> {
|
||||||
|
const rows = await this.db.delete(skills).where(eq(skills.id, id)).returning();
|
||||||
|
return rows.length > 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
async toggle(id: string, enabled: boolean): Promise<Skill | undefined> {
|
||||||
|
return this.update(id, { enabled });
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -8,8 +8,8 @@
|
|||||||
**ID:** mvp-20260312
|
**ID:** mvp-20260312
|
||||||
**Statement:** Build Mosaic Stack v0.1.0 — a self-hosted, multi-user AI agent platform with web dashboard, TUI, remote control, shared memory, mission orchestration, and extensible skill/plugin architecture. All TypeScript. Pi as agent harness. Brain as knowledge layer. Queue as coordination backbone.
|
**Statement:** Build Mosaic Stack v0.1.0 — a self-hosted, multi-user AI agent platform with web dashboard, TUI, remote control, shared memory, mission orchestration, and extensible skill/plugin architecture. All TypeScript. Pi as agent harness. Brain as knowledge layer. Queue as coordination backbone.
|
||||||
**Phase:** Execution
|
**Phase:** Execution
|
||||||
**Current Milestone:** Phase 4: Memory & Intelligence (v0.0.5)
|
**Current Milestone:** Phase 5: Remote Control (v0.0.6)
|
||||||
**Progress:** 4 / 8 milestones
|
**Progress:** 5 / 8 milestones
|
||||||
**Status:** active
|
**Status:** active
|
||||||
**Last Updated:** 2026-03-13 UTC
|
**Last Updated:** 2026-03-13 UTC
|
||||||
|
|
||||||
@@ -35,7 +35,7 @@
|
|||||||
| 1 | ms-158 | Phase 1: Core API (v0.0.2) | done | — | — | 2026-03-13 | 2026-03-13 |
|
| 1 | ms-158 | Phase 1: Core API (v0.0.2) | done | — | — | 2026-03-13 | 2026-03-13 |
|
||||||
| 2 | ms-159 | Phase 2: Agent Layer (v0.0.3) | done | — | — | 2026-03-13 | 2026-03-12 |
|
| 2 | ms-159 | Phase 2: Agent Layer (v0.0.3) | done | — | — | 2026-03-13 | 2026-03-12 |
|
||||||
| 3 | ms-160 | Phase 3: Web Dashboard (v0.0.4) | done | — | — | 2026-03-12 | 2026-03-13 |
|
| 3 | ms-160 | Phase 3: Web Dashboard (v0.0.4) | done | — | — | 2026-03-12 | 2026-03-13 |
|
||||||
| 4 | ms-161 | Phase 4: Memory & Intelligence (v0.0.5) | not-started | — | — | — | — |
|
| 4 | ms-161 | Phase 4: Memory & Intelligence (v0.0.5) | done | — | — | 2026-03-13 | 2026-03-13 |
|
||||||
| 5 | ms-162 | Phase 5: Remote Control (v0.0.6) | not-started | — | — | — | — |
|
| 5 | ms-162 | Phase 5: Remote Control (v0.0.6) | not-started | — | — | — | — |
|
||||||
| 6 | ms-163 | Phase 6: CLI & Tools (v0.0.7) | not-started | — | — | — | — |
|
| 6 | ms-163 | Phase 6: CLI & Tools (v0.0.7) | not-started | — | — | — | — |
|
||||||
| 7 | ms-164 | Phase 7: Polish & Beta (v0.1.0) | not-started | — | — | — | — |
|
| 7 | ms-164 | Phase 7: Polish & Beta (v0.1.0) | not-started | — | — | — | — |
|
||||||
|
|||||||
@@ -37,13 +37,13 @@
|
|||||||
| P3-006 | done | Phase 3 | Settings — provider config, profile, integrations | #88 | #31 |
|
| P3-006 | done | Phase 3 | Settings — provider config, profile, integrations | #88 | #31 |
|
||||||
| P3-007 | done | Phase 3 | Admin panel — user management, RBAC | #89 | #32 |
|
| P3-007 | done | Phase 3 | Admin panel — user management, RBAC | #89 | #32 |
|
||||||
| P3-008 | done | Phase 3 | Verify Phase 3 — web dashboard functional E2E | — | #33 |
|
| P3-008 | done | Phase 3 | Verify Phase 3 — web dashboard functional E2E | — | #33 |
|
||||||
| P4-001 | not-started | Phase 4 | @mosaic/memory — preference + insight stores | — | #34 |
|
| P4-001 | done | Phase 4 | @mosaic/memory — preference + insight stores | — | #34 |
|
||||||
| P4-002 | not-started | Phase 4 | Semantic search — pgvector embeddings + search API | — | #35 |
|
| P4-002 | done | Phase 4 | Semantic search — pgvector embeddings + search API | — | #35 |
|
||||||
| P4-003 | not-started | Phase 4 | @mosaic/log — log ingest, parsing, tiered storage | — | #36 |
|
| P4-003 | done | Phase 4 | @mosaic/log — log ingest, parsing, tiered storage | — | #36 |
|
||||||
| P4-004 | not-started | Phase 4 | Summarization pipeline — Haiku-tier LLM + cron | — | #37 |
|
| P4-004 | done | Phase 4 | Summarization pipeline — Haiku-tier LLM + cron | — | #37 |
|
||||||
| P4-005 | not-started | Phase 4 | Memory integration — inject into agent sessions | — | #38 |
|
| P4-005 | done | Phase 4 | Memory integration — inject into agent sessions | — | #38 |
|
||||||
| P4-006 | not-started | Phase 4 | Skill management — catalog, install, config | — | #39 |
|
| P4-006 | done | Phase 4 | Skill management — catalog, install, config | — | #39 |
|
||||||
| P4-007 | not-started | Phase 4 | Verify Phase 4 — memory + log pipeline working | — | #40 |
|
| P4-007 | done | Phase 4 | Verify Phase 4 — memory + log pipeline working | — | #40 |
|
||||||
| P5-001 | not-started | Phase 5 | Plugin host — gateway plugin loading + channel interface | — | #41 |
|
| P5-001 | not-started | Phase 5 | Plugin host — gateway plugin loading + channel interface | — | #41 |
|
||||||
| P5-002 | done | Phase 5 | @mosaic/discord-plugin — Discord bot + channel plugin | #61 | #42 |
|
| P5-002 | done | Phase 5 | @mosaic/discord-plugin — Discord bot + channel plugin | #61 | #42 |
|
||||||
| P5-003 | not-started | Phase 5 | @mosaic/telegram-plugin — Telegraf bot + channel plugin | — | #43 |
|
| P5-003 | not-started | Phase 5 | @mosaic/telegram-plugin — Telegraf bot + channel plugin | — | #43 |
|
||||||
|
|||||||
@@ -114,3 +114,9 @@ User confirmed: start the planning gate.
|
|||||||
| Session | Date | Milestone | Tasks Done | Outcome |
|
| Session | Date | Milestone | Tasks Done | Outcome |
|
||||||
| ------- | ---------- | --------- | ---------- | -------------------------------------------------------------------------------------------------------------------------- |
|
| ------- | ---------- | --------- | ---------- | -------------------------------------------------------------------------------------------------------------------------- |
|
||||||
| 10 | 2026-03-13 | Phase 3 | P3-008 | Phase 3 verification: typecheck 18/18, lint 18/18, format clean, build green (10 routes), 10 tests pass. Phase 3 complete. |
|
| 10 | 2026-03-13 | Phase 3 | P3-008 | Phase 3 verification: typecheck 18/18, lint 18/18, format clean, build green (10 routes), 10 tests pass. Phase 3 complete. |
|
||||||
|
|
||||||
|
### Session 10 (continued) — Phase 4 Memory & Intelligence
|
||||||
|
|
||||||
|
| Session | Date | Milestone | Tasks Done | Outcome |
|
||||||
|
| ------- | ---------- | --------- | --------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||||
|
| 10 | 2026-03-13 | Phase 4 | P4-001 through P4-007 | Full memory + log system: DB schema (preferences, insights w/ pgvector, agent_logs, skills, summarization_jobs), @mosaic/memory + @mosaic/log packages, embedding service, summarization pipeline w/ cron, memory tools in agent sessions, skill management CRUD. All gates green. |
|
||||||
|
|||||||
@@ -1,4 +1,18 @@
|
|||||||
export { createDb, type Db, type DbHandle } from './client.js';
|
export { createDb, type Db, type DbHandle } from './client.js';
|
||||||
export { runMigrations } from './migrate.js';
|
export { runMigrations } from './migrate.js';
|
||||||
export * from './schema.js';
|
export * from './schema.js';
|
||||||
export { eq, and, or, desc, asc, sql, inArray, isNull, isNotNull } from 'drizzle-orm';
|
export {
|
||||||
|
eq,
|
||||||
|
and,
|
||||||
|
or,
|
||||||
|
desc,
|
||||||
|
asc,
|
||||||
|
sql,
|
||||||
|
inArray,
|
||||||
|
isNull,
|
||||||
|
isNotNull,
|
||||||
|
gt,
|
||||||
|
lt,
|
||||||
|
gte,
|
||||||
|
lte,
|
||||||
|
} from 'drizzle-orm';
|
||||||
|
|||||||
@@ -3,7 +3,18 @@
|
|||||||
* drizzle-kit reads this file directly (avoids CJS/ESM extension issues).
|
* drizzle-kit reads this file directly (avoids CJS/ESM extension issues).
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { pgTable, text, timestamp, boolean, uuid, jsonb, index } from 'drizzle-orm/pg-core';
|
import {
|
||||||
|
pgTable,
|
||||||
|
text,
|
||||||
|
timestamp,
|
||||||
|
boolean,
|
||||||
|
uuid,
|
||||||
|
jsonb,
|
||||||
|
index,
|
||||||
|
real,
|
||||||
|
integer,
|
||||||
|
customType,
|
||||||
|
} from 'drizzle-orm/pg-core';
|
||||||
|
|
||||||
// ─── Auth (BetterAuth-compatible) ────────────────────────────────────────────
|
// ─── Auth (BetterAuth-compatible) ────────────────────────────────────────────
|
||||||
|
|
||||||
@@ -211,3 +222,152 @@ export const messages = pgTable(
|
|||||||
},
|
},
|
||||||
(t) => [index('messages_conversation_id_idx').on(t.conversationId)],
|
(t) => [index('messages_conversation_id_idx').on(t.conversationId)],
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// ─── pgvector custom type ───────────────────────────────────────────────────
|
||||||
|
|
||||||
|
const vector = customType<{ data: number[]; driverParam: string; config: { dimensions: number } }>({
|
||||||
|
dataType(config) {
|
||||||
|
return `vector(${config?.dimensions ?? 1536})`;
|
||||||
|
},
|
||||||
|
fromDriver(value: unknown): number[] {
|
||||||
|
const str = value as string;
|
||||||
|
return str
|
||||||
|
.slice(1, -1)
|
||||||
|
.split(',')
|
||||||
|
.map((v) => Number(v));
|
||||||
|
},
|
||||||
|
toDriver(value: number[]): string {
|
||||||
|
return `[${value.join(',')}]`;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// ─── Memory ─────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// User preference store: per-user key/value pairs with a coarse category.
export const preferences = pgTable(
  'preferences',
  {
    id: uuid('id').primaryKey().defaultRandom(),
    // Owning user; rows are removed when the user is deleted.
    userId: text('user_id')
      .notNull()
      .references(() => users.id, { onDelete: 'cascade' }),
    key: text('key').notNull(),
    // Arbitrary JSON payload for the preference value.
    value: jsonb('value').notNull(),
    category: text('category', {
      enum: ['communication', 'coding', 'workflow', 'appearance', 'general'],
    })
      .notNull()
      .default('general'),
    // Free-form origin label; semantics not enforced at the schema level.
    source: text('source'),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (t) => [
    index('preferences_user_id_idx').on(t.userId),
    // Composite index backing the by-user-and-key lookup path.
    index('preferences_user_key_idx').on(t.userId, t.key),
  ],
);
|
||||||
|
|
||||||
|
// Long-term memory insights; `embedding` enables pgvector similarity search.
export const insights = pgTable(
  'insights',
  {
    id: uuid('id').primaryKey().defaultRandom(),
    // Owning user; rows are removed when the user is deleted.
    userId: text('user_id')
      .notNull()
      .references(() => users.id, { onDelete: 'cascade' }),
    content: text('content').notNull(),
    // Nullable: embedding is only populated when an embedding provider is configured.
    embedding: vector('embedding', { dimensions: 1536 }),
    source: text('source', {
      enum: ['agent', 'user', 'summarization', 'system'],
    })
      .notNull()
      .default('agent'),
    category: text('category', {
      enum: ['decision', 'learning', 'preference', 'fact', 'pattern', 'general'],
    })
      .notNull()
      .default('general'),
    // Starts at full relevance; decayedAt records when decay was last applied.
    relevanceScore: real('relevance_score').notNull().default(1.0),
    metadata: jsonb('metadata'),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
    decayedAt: timestamp('decayed_at', { withTimezone: true }),
  },
  (t) => [
    index('insights_user_id_idx').on(t.userId),
    index('insights_category_idx').on(t.category),
    index('insights_relevance_idx').on(t.relevanceScore),
  ],
);
|
||||||
|
|
||||||
|
// ─── Agent Logs ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Agent activity log with tiered retention: hot → warm (summarized) → cold (archived).
export const agentLogs = pgTable(
  'agent_logs',
  {
    id: uuid('id').primaryKey().defaultRandom(),
    sessionId: text('session_id').notNull(),
    // Nullable on purpose: logs survive user deletion with userId set to null.
    userId: text('user_id').references(() => users.id, { onDelete: 'set null' }),
    level: text('level', { enum: ['debug', 'info', 'warn', 'error'] })
      .notNull()
      .default('info'),
    category: text('category', {
      enum: ['decision', 'tool_use', 'learning', 'error', 'general'],
    })
      .notNull()
      .default('general'),
    content: text('content').notNull(),
    metadata: jsonb('metadata'),
    // Storage tier; new logs start hot and are demoted by the log lifecycle jobs.
    tier: text('tier', { enum: ['hot', 'warm', 'cold'] })
      .notNull()
      .default('hot'),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    // Set when the row is promoted to warm (summarization pass has consumed it).
    summarizedAt: timestamp('summarized_at', { withTimezone: true }),
    // Set when the row is promoted to cold.
    archivedAt: timestamp('archived_at', { withTimezone: true }),
  },
  (t) => [
    index('agent_logs_session_id_idx').on(t.sessionId),
    index('agent_logs_user_id_idx').on(t.userId),
    index('agent_logs_tier_idx').on(t.tier),
    index('agent_logs_created_at_idx').on(t.createdAt),
  ],
);
|
||||||
|
|
||||||
|
// ─── Skills ─────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Skill catalog: installable agent capabilities with per-skill JSON config.
export const skills = pgTable(
  'skills',
  {
    id: uuid('id').primaryKey().defaultRandom(),
    // Unique name is the external identifier used by findByName lookups.
    name: text('name').notNull().unique(),
    description: text('description'),
    version: text('version'),
    source: text('source', { enum: ['builtin', 'community', 'custom'] })
      .notNull()
      .default('custom'),
    config: jsonb('config'),
    enabled: boolean('enabled').notNull().default(true),
    // Kept (nulled) if the installing user is deleted.
    installedBy: text('installed_by').references(() => users.id, { onDelete: 'set null' }),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (t) => [index('skills_enabled_idx').on(t.enabled)],
);
|
||||||
|
|
||||||
|
// ─── Summarization Jobs ─────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Bookkeeping for summarization runs over agent logs (status + counters).
export const summarizationJobs = pgTable(
  'summarization_jobs',
  {
    id: uuid('id').primaryKey().defaultRandom(),
    status: text('status', { enum: ['pending', 'running', 'completed', 'failed'] })
      .notNull()
      .default('pending'),
    logsProcessed: integer('logs_processed').notNull().default(0),
    insightsCreated: integer('insights_created').notNull().default(0),
    // Populated only for failed runs.
    errorMessage: text('error_message'),
    startedAt: timestamp('started_at', { withTimezone: true }),
    completedAt: timestamp('completed_at', { withTimezone: true }),
    createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
  },
  (t) => [index('summarization_jobs_status_idx').on(t.status)],
);
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/log",
|
"name": "@mosaic/log",
|
||||||
"version": "0.0.0",
|
"version": "0.0.0",
|
||||||
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
@@ -15,6 +16,10 @@
|
|||||||
"typecheck": "tsc --noEmit",
|
"typecheck": "tsc --noEmit",
|
||||||
"test": "vitest run --passWithNoTests"
|
"test": "vitest run --passWithNoTests"
|
||||||
},
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@mosaic/db": "workspace:*",
|
||||||
|
"drizzle-orm": "^0.45.1"
|
||||||
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"typescript": "^5.8.0",
|
"typescript": "^5.8.0",
|
||||||
"vitest": "^2.0.0"
|
"vitest": "^2.0.0"
|
||||||
|
|||||||
117
packages/log/src/agent-logs.ts
Normal file
117
packages/log/src/agent-logs.ts
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
import { eq, and, desc, lt, sql, type Db, agentLogs } from '@mosaic/db';
|
||||||
|
|
||||||
|
// Row shapes derived from the Drizzle `agentLogs` table.
export type AgentLog = typeof agentLogs.$inferSelect;
export type NewAgentLog = typeof agentLogs.$inferInsert;

// Literal unions mirroring the enum columns on agent_logs.
export type LogLevel = 'debug' | 'info' | 'warn' | 'error';
export type LogCategory = 'decision' | 'tool_use' | 'learning' | 'error' | 'general';
export type LogTier = 'hot' | 'warm' | 'cold';

/** Filter + pagination options for querying agent logs; every field optional. */
export interface LogQuery {
  userId?: string;
  sessionId?: string;
  level?: LogLevel;
  category?: LogCategory;
  tier?: LogTier;
  since?: Date; // inclusive lower bound on createdAt
  until?: Date; // inclusive upper bound on createdAt
  limit?: number; // defaults to 100 in query()
  offset?: number; // defaults to 0 in query()
}
|
||||||
|
|
||||||
|
/**
 * Repository of ingest, query, and lifecycle operations over agent_logs.
 * Lifecycle: logs enter hot, are promoted to warm after summarization,
 * then to cold, and are finally purged after the retention window.
 */
export function createAgentLogsRepo(db: Db) {
  return {
    /** Insert a single log entry and return the persisted row. */
    async ingest(entry: NewAgentLog): Promise<AgentLog> {
      const rows = await db.insert(agentLogs).values(entry).returning();
      return rows[0]!;
    },

    /** Bulk insert; returns [] for an empty batch without touching the DB. */
    async ingestBatch(entries: NewAgentLog[]): Promise<AgentLog[]> {
      if (entries.length === 0) return [];
      return db.insert(agentLogs).values(entries).returning();
    },

    /** Filtered, paginated query, newest first. Unset filters are skipped. */
    async query(params: LogQuery): Promise<AgentLog[]> {
      const conditions = [];

      if (params.userId) conditions.push(eq(agentLogs.userId, params.userId));
      if (params.sessionId) conditions.push(eq(agentLogs.sessionId, params.sessionId));
      if (params.level) conditions.push(eq(agentLogs.level, params.level));
      if (params.category) conditions.push(eq(agentLogs.category, params.category));
      if (params.tier) conditions.push(eq(agentLogs.tier, params.tier));
      if (params.since) conditions.push(sql`${agentLogs.createdAt} >= ${params.since}`);
      if (params.until) conditions.push(sql`${agentLogs.createdAt} <= ${params.until}`);

      // No filters at all → undefined where-clause → full table scan (paginated).
      const where = conditions.length > 0 ? and(...conditions) : undefined;

      return db
        .select()
        .from(agentLogs)
        .where(where)
        .orderBy(desc(agentLogs.createdAt))
        .limit(params.limit ?? 100)
        .offset(params.offset ?? 0);
    },

    /** Fetch a single log row by id; undefined when absent. */
    async findById(id: string): Promise<AgentLog | undefined> {
      const rows = await db.select().from(agentLogs).where(eq(agentLogs.id, id));
      return rows[0];
    },

    /**
     * Transition hot logs older than the cutoff to warm tier.
     * Also stamps summarizedAt on the promoted rows.
     * Returns the number of logs transitioned.
     */
    async promoteToWarm(olderThan: Date): Promise<number> {
      const result = await db
        .update(agentLogs)
        .set({ tier: 'warm', summarizedAt: new Date() })
        .where(and(eq(agentLogs.tier, 'hot'), lt(agentLogs.createdAt, olderThan)))
        .returning();
      return result.length;
    },

    /**
     * Transition warm logs older than the cutoff to cold tier,
     * stamping archivedAt. Returns the number transitioned.
     */
    async promoteToCold(olderThan: Date): Promise<number> {
      const result = await db
        .update(agentLogs)
        .set({ tier: 'cold', archivedAt: new Date() })
        .where(and(eq(agentLogs.tier, 'warm'), lt(agentLogs.createdAt, olderThan)))
        .returning();
      return result.length;
    },

    /**
     * Delete cold logs older than the retention period.
     * Returns the number of rows deleted.
     */
    async purge(olderThan: Date): Promise<number> {
      const result = await db
        .delete(agentLogs)
        .where(and(eq(agentLogs.tier, 'cold'), lt(agentLogs.createdAt, olderThan)))
        .returning();
      return result.length;
    },

    /**
     * Get hot logs ready for summarization (decisions + learnings).
     * Oldest first, capped at `limit`; only the categories listed in the
     * IN clause are eligible for summarization.
     */
    async getLogsForSummarization(olderThan: Date, limit = 100): Promise<AgentLog[]> {
      return db
        .select()
        .from(agentLogs)
        .where(
          and(
            eq(agentLogs.tier, 'hot'),
            lt(agentLogs.createdAt, olderThan),
            sql`${agentLogs.category} IN ('decision', 'learning', 'tool_use')`,
          ),
        )
        .orderBy(agentLogs.createdAt)
        .limit(limit);
    },
  };
}

/** Inferred type of the repo returned by createAgentLogsRepo. */
export type AgentLogsRepo = ReturnType<typeof createAgentLogsRepo>;
|
||||||
@@ -1 +1,11 @@
|
|||||||
export const VERSION = '0.0.0';
|
export { createLogService, type LogService } from './log-service.js';
|
||||||
|
export {
|
||||||
|
createAgentLogsRepo,
|
||||||
|
type AgentLogsRepo,
|
||||||
|
type AgentLog,
|
||||||
|
type NewAgentLog,
|
||||||
|
type LogLevel,
|
||||||
|
type LogCategory,
|
||||||
|
type LogTier,
|
||||||
|
type LogQuery,
|
||||||
|
} from './agent-logs.js';
|
||||||
|
|||||||
12
packages/log/src/log-service.ts
Normal file
12
packages/log/src/log-service.ts
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
import type { Db } from '@mosaic/db';
|
||||||
|
import { createAgentLogsRepo, type AgentLogsRepo } from './agent-logs.js';
|
||||||
|
|
||||||
|
/** Facade over the log package; currently exposes only the agent-logs repository. */
export interface LogService {
  logs: AgentLogsRepo;
}
|
||||||
|
|
||||||
|
export function createLogService(db: Db): LogService {
|
||||||
|
return {
|
||||||
|
logs: createAgentLogsRepo(db),
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/memory",
|
"name": "@mosaic/memory",
|
||||||
"version": "0.0.0",
|
"version": "0.0.0",
|
||||||
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
@@ -16,7 +17,9 @@
|
|||||||
"test": "vitest run --passWithNoTests"
|
"test": "vitest run --passWithNoTests"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@mosaic/types": "workspace:*"
|
"@mosaic/db": "workspace:*",
|
||||||
|
"@mosaic/types": "workspace:*",
|
||||||
|
"drizzle-orm": "^0.45.1"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"typescript": "^5.8.0",
|
"typescript": "^5.8.0",
|
||||||
|
|||||||
@@ -1 +1,15 @@
|
|||||||
export const VERSION = '0.0.0';
|
// Public surface of @mosaic/memory.

// Service facade wiring all repositories to one Db handle.
export { createMemory, type Memory } from './memory.js';

// Per-user key/value preferences repository.
export {
  createPreferencesRepo,
  type PreferencesRepo,
  type Preference,
  type NewPreference,
} from './preferences.js';

// Insights repository with pgvector semantic search.
export {
  createInsightsRepo,
  type InsightsRepo,
  type Insight,
  type NewInsight,
  type SearchResult,
} from './insights.js';

// Type-only contracts for pluggable vector storage / embedding backends.
export type { VectorStore, VectorSearchResult, EmbeddingProvider } from './vector-store.js';
|
||||||
|
|||||||
89
packages/memory/src/insights.ts
Normal file
89
packages/memory/src/insights.ts
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
import { eq, and, desc, sql, lt, type Db, insights } from '@mosaic/db';
|
||||||
|
|
||||||
|
export type Insight = typeof insights.$inferSelect;
|
||||||
|
export type NewInsight = typeof insights.$inferInsert;
|
||||||
|
|
||||||
|
export interface SearchResult {
|
||||||
|
insight: Insight;
|
||||||
|
distance: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createInsightsRepo(db: Db) {
|
||||||
|
return {
|
||||||
|
async findByUser(userId: string, limit = 50): Promise<Insight[]> {
|
||||||
|
return db
|
||||||
|
.select()
|
||||||
|
.from(insights)
|
||||||
|
.where(eq(insights.userId, userId))
|
||||||
|
.orderBy(desc(insights.createdAt))
|
||||||
|
.limit(limit);
|
||||||
|
},
|
||||||
|
|
||||||
|
async findById(id: string): Promise<Insight | undefined> {
|
||||||
|
const rows = await db.select().from(insights).where(eq(insights.id, id));
|
||||||
|
return rows[0];
|
||||||
|
},
|
||||||
|
|
||||||
|
async create(data: NewInsight): Promise<Insight> {
|
||||||
|
const rows = await db.insert(insights).values(data).returning();
|
||||||
|
return rows[0]!;
|
||||||
|
},
|
||||||
|
|
||||||
|
async update(id: string, data: Partial<NewInsight>): Promise<Insight | undefined> {
|
||||||
|
const rows = await db
|
||||||
|
.update(insights)
|
||||||
|
.set({ ...data, updatedAt: new Date() })
|
||||||
|
.where(eq(insights.id, id))
|
||||||
|
.returning();
|
||||||
|
return rows[0];
|
||||||
|
},
|
||||||
|
|
||||||
|
async remove(id: string): Promise<boolean> {
|
||||||
|
const rows = await db.delete(insights).where(eq(insights.id, id)).returning();
|
||||||
|
return rows.length > 0;
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Semantic search using pgvector cosine distance.
|
||||||
|
* Requires the vector extension and an embedding for the query.
|
||||||
|
*/
|
||||||
|
async searchByEmbedding(
|
||||||
|
userId: string,
|
||||||
|
queryEmbedding: number[],
|
||||||
|
limit = 10,
|
||||||
|
maxDistance = 0.8,
|
||||||
|
): Promise<SearchResult[]> {
|
||||||
|
const embeddingStr = `[${queryEmbedding.join(',')}]`;
|
||||||
|
const rows = await db.execute(sql`
|
||||||
|
SELECT *,
|
||||||
|
(embedding <=> ${embeddingStr}::vector) AS distance
|
||||||
|
FROM insights
|
||||||
|
WHERE user_id = ${userId}
|
||||||
|
AND embedding IS NOT NULL
|
||||||
|
AND (embedding <=> ${embeddingStr}::vector) < ${maxDistance}
|
||||||
|
ORDER BY distance ASC
|
||||||
|
LIMIT ${limit}
|
||||||
|
`);
|
||||||
|
|
||||||
|
return rows as unknown as SearchResult[];
|
||||||
|
},
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Decay relevance scores for old insights that haven't been accessed recently.
|
||||||
|
*/
|
||||||
|
async decayOldInsights(olderThan: Date, decayFactor = 0.95): Promise<number> {
|
||||||
|
const result = await db
|
||||||
|
.update(insights)
|
||||||
|
.set({
|
||||||
|
relevanceScore: sql`${insights.relevanceScore} * ${decayFactor}`,
|
||||||
|
decayedAt: new Date(),
|
||||||
|
updatedAt: new Date(),
|
||||||
|
})
|
||||||
|
.where(and(lt(insights.updatedAt, olderThan), sql`${insights.relevanceScore} > 0.1`))
|
||||||
|
.returning();
|
||||||
|
return result.length;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export type InsightsRepo = ReturnType<typeof createInsightsRepo>;
|
||||||
15
packages/memory/src/memory.ts
Normal file
15
packages/memory/src/memory.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import type { Db } from '@mosaic/db';
|
||||||
|
import { createPreferencesRepo, type PreferencesRepo } from './preferences.js';
|
||||||
|
import { createInsightsRepo, type InsightsRepo } from './insights.js';
|
||||||
|
|
||||||
|
export interface Memory {
|
||||||
|
preferences: PreferencesRepo;
|
||||||
|
insights: InsightsRepo;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createMemory(db: Db): Memory {
|
||||||
|
return {
|
||||||
|
preferences: createPreferencesRepo(db),
|
||||||
|
insights: createInsightsRepo(db),
|
||||||
|
};
|
||||||
|
}
|
||||||
59
packages/memory/src/preferences.ts
Normal file
59
packages/memory/src/preferences.ts
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
import { eq, and, type Db, preferences } from '@mosaic/db';
|
||||||
|
|
||||||
|
export type Preference = typeof preferences.$inferSelect;
|
||||||
|
export type NewPreference = typeof preferences.$inferInsert;
|
||||||
|
|
||||||
|
export function createPreferencesRepo(db: Db) {
|
||||||
|
return {
|
||||||
|
async findByUser(userId: string): Promise<Preference[]> {
|
||||||
|
return db.select().from(preferences).where(eq(preferences.userId, userId));
|
||||||
|
},
|
||||||
|
|
||||||
|
async findByUserAndKey(userId: string, key: string): Promise<Preference | undefined> {
|
||||||
|
const rows = await db
|
||||||
|
.select()
|
||||||
|
.from(preferences)
|
||||||
|
.where(and(eq(preferences.userId, userId), eq(preferences.key, key)));
|
||||||
|
return rows[0];
|
||||||
|
},
|
||||||
|
|
||||||
|
async findByUserAndCategory(
|
||||||
|
userId: string,
|
||||||
|
category: Preference['category'],
|
||||||
|
): Promise<Preference[]> {
|
||||||
|
return db
|
||||||
|
.select()
|
||||||
|
.from(preferences)
|
||||||
|
.where(and(eq(preferences.userId, userId), eq(preferences.category, category)));
|
||||||
|
},
|
||||||
|
|
||||||
|
async upsert(data: NewPreference): Promise<Preference> {
|
||||||
|
const existing = await db
|
||||||
|
.select()
|
||||||
|
.from(preferences)
|
||||||
|
.where(and(eq(preferences.userId, data.userId), eq(preferences.key, data.key)));
|
||||||
|
|
||||||
|
if (existing[0]) {
|
||||||
|
const rows = await db
|
||||||
|
.update(preferences)
|
||||||
|
.set({ value: data.value, category: data.category, updatedAt: new Date() })
|
||||||
|
.where(eq(preferences.id, existing[0].id))
|
||||||
|
.returning();
|
||||||
|
return rows[0]!;
|
||||||
|
}
|
||||||
|
|
||||||
|
const rows = await db.insert(preferences).values(data).returning();
|
||||||
|
return rows[0]!;
|
||||||
|
},
|
||||||
|
|
||||||
|
async remove(userId: string, key: string): Promise<boolean> {
|
||||||
|
const rows = await db
|
||||||
|
.delete(preferences)
|
||||||
|
.where(and(eq(preferences.userId, userId), eq(preferences.key, key)))
|
||||||
|
.returning();
|
||||||
|
return rows.length > 0;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export type PreferencesRepo = ReturnType<typeof createPreferencesRepo>;
|
||||||
39
packages/memory/src/vector-store.ts
Normal file
39
packages/memory/src/vector-store.ts
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
/**
 * VectorStore interface — abstraction over pgvector that allows future
 * swap to Qdrant, Pinecone, etc.
 */
export interface VectorStore {
  /** Store an embedding with an associated document ID. */
  store(documentId: string, embedding: number[], metadata?: Record<string, unknown>): Promise<void>;

  /** Search for similar embeddings, returning document IDs and distances. */
  search(
    queryEmbedding: number[],
    limit?: number,
    filter?: Record<string, unknown>,
  ): Promise<VectorSearchResult[]>;

  /** Delete an embedding by document ID. */
  remove(documentId: string): Promise<void>;
}

/** A single hit returned by VectorStore.search. */
export interface VectorSearchResult {
  documentId: string;
  // Smaller means more similar; the exact metric is defined by the backing store.
  distance: number;
  metadata?: Record<string, unknown>;
}

/**
 * EmbeddingProvider interface — generates embeddings from text.
 * Implemented by the gateway using the configured LLM provider.
 */
export interface EmbeddingProvider {
  /** Generate an embedding vector for the given text. */
  embed(text: string): Promise<number[]>;

  /** Generate embeddings for multiple texts in batch. */
  embedBatch(texts: string[]): Promise<number[][]>;

  /** The dimensionality of the embeddings this provider generates. */
  dimensions: number;
}
|
||||||
36
pnpm-lock.yaml
generated
36
pnpm-lock.yaml
generated
@@ -59,6 +59,12 @@ importers:
|
|||||||
'@mosaic/db':
|
'@mosaic/db':
|
||||||
specifier: workspace:^
|
specifier: workspace:^
|
||||||
version: link:../../packages/db
|
version: link:../../packages/db
|
||||||
|
'@mosaic/log':
|
||||||
|
specifier: workspace:^
|
||||||
|
version: link:../../packages/log
|
||||||
|
'@mosaic/memory':
|
||||||
|
specifier: workspace:^
|
||||||
|
version: link:../../packages/memory
|
||||||
'@mosaic/types':
|
'@mosaic/types':
|
||||||
specifier: workspace:^
|
specifier: workspace:^
|
||||||
version: link:../../packages/types
|
version: link:../../packages/types
|
||||||
@@ -107,6 +113,9 @@ importers:
|
|||||||
fastify:
|
fastify:
|
||||||
specifier: ^5.0.0
|
specifier: ^5.0.0
|
||||||
version: 5.8.2
|
version: 5.8.2
|
||||||
|
node-cron:
|
||||||
|
specifier: ^4.2.1
|
||||||
|
version: 4.2.1
|
||||||
reflect-metadata:
|
reflect-metadata:
|
||||||
specifier: ^0.2.0
|
specifier: ^0.2.0
|
||||||
version: 0.2.2
|
version: 0.2.2
|
||||||
@@ -123,6 +132,9 @@ importers:
|
|||||||
'@types/node':
|
'@types/node':
|
||||||
specifier: ^22.0.0
|
specifier: ^22.0.0
|
||||||
version: 22.19.15
|
version: 22.19.15
|
||||||
|
'@types/node-cron':
|
||||||
|
specifier: ^3.0.11
|
||||||
|
version: 3.0.11
|
||||||
'@types/uuid':
|
'@types/uuid':
|
||||||
specifier: ^10.0.0
|
specifier: ^10.0.0
|
||||||
version: 10.0.0
|
version: 10.0.0
|
||||||
@@ -321,6 +333,13 @@ importers:
|
|||||||
version: 2.1.9(@types/node@22.19.15)(lightningcss@1.31.1)
|
version: 2.1.9(@types/node@22.19.15)(lightningcss@1.31.1)
|
||||||
|
|
||||||
packages/log:
|
packages/log:
|
||||||
|
dependencies:
|
||||||
|
'@mosaic/db':
|
||||||
|
specifier: workspace:*
|
||||||
|
version: link:../db
|
||||||
|
drizzle-orm:
|
||||||
|
specifier: ^0.45.1
|
||||||
|
version: 0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8)
|
||||||
devDependencies:
|
devDependencies:
|
||||||
typescript:
|
typescript:
|
||||||
specifier: ^5.8.0
|
specifier: ^5.8.0
|
||||||
@@ -331,9 +350,15 @@ importers:
|
|||||||
|
|
||||||
packages/memory:
|
packages/memory:
|
||||||
dependencies:
|
dependencies:
|
||||||
|
'@mosaic/db':
|
||||||
|
specifier: workspace:*
|
||||||
|
version: link:../db
|
||||||
'@mosaic/types':
|
'@mosaic/types':
|
||||||
specifier: workspace:*
|
specifier: workspace:*
|
||||||
version: link:../types
|
version: link:../types
|
||||||
|
drizzle-orm:
|
||||||
|
specifier: ^0.45.1
|
||||||
|
version: 0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8)
|
||||||
devDependencies:
|
devDependencies:
|
||||||
typescript:
|
typescript:
|
||||||
specifier: ^5.8.0
|
specifier: ^5.8.0
|
||||||
@@ -2728,6 +2753,9 @@ packages:
|
|||||||
'@types/mysql@2.15.27':
|
'@types/mysql@2.15.27':
|
||||||
resolution: {integrity: sha512-YfWiV16IY0OeBfBCk8+hXKmdTKrKlwKN1MNKAPBu5JYxLwBEZl7QzeEpGnlZb3VMGJrrGmB84gXiH+ofs/TezA==}
|
resolution: {integrity: sha512-YfWiV16IY0OeBfBCk8+hXKmdTKrKlwKN1MNKAPBu5JYxLwBEZl7QzeEpGnlZb3VMGJrrGmB84gXiH+ofs/TezA==}
|
||||||
|
|
||||||
|
'@types/node-cron@3.0.11':
|
||||||
|
resolution: {integrity: sha512-0ikrnug3/IyneSHqCBeslAhlK2aBfYek1fGo4bP4QnZPmiqSGRK+Oy7ZMisLWkesffJvQ1cqAcBnJC+8+nxIAg==}
|
||||||
|
|
||||||
'@types/node@22.19.15':
|
'@types/node@22.19.15':
|
||||||
resolution: {integrity: sha512-F0R/h2+dsy5wJAUe3tAU6oqa2qbWY5TpNfL/RGmo1y38hiyO1w3x2jPtt76wmuaJI4DQnOBu21cNXQ2STIUUWg==}
|
resolution: {integrity: sha512-F0R/h2+dsy5wJAUe3tAU6oqa2qbWY5TpNfL/RGmo1y38hiyO1w3x2jPtt76wmuaJI4DQnOBu21cNXQ2STIUUWg==}
|
||||||
|
|
||||||
@@ -4155,6 +4183,10 @@ packages:
|
|||||||
sass:
|
sass:
|
||||||
optional: true
|
optional: true
|
||||||
|
|
||||||
|
node-cron@4.2.1:
|
||||||
|
resolution: {integrity: sha512-lgimEHPE/QDgFlywTd8yTR61ptugX3Qer29efeyWw2rv259HtGBNn1vZVmp8lB9uo9wC0t/AT4iGqXxia+CJFg==}
|
||||||
|
engines: {node: '>=6.0.0'}
|
||||||
|
|
||||||
node-domexception@1.0.0:
|
node-domexception@1.0.0:
|
||||||
resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==}
|
resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==}
|
||||||
engines: {node: '>=10.5.0'}
|
engines: {node: '>=10.5.0'}
|
||||||
@@ -7529,6 +7561,8 @@ snapshots:
|
|||||||
dependencies:
|
dependencies:
|
||||||
'@types/node': 22.19.15
|
'@types/node': 22.19.15
|
||||||
|
|
||||||
|
'@types/node-cron@3.0.11': {}
|
||||||
|
|
||||||
'@types/node@22.19.15':
|
'@types/node@22.19.15':
|
||||||
dependencies:
|
dependencies:
|
||||||
undici-types: 6.21.0
|
undici-types: 6.21.0
|
||||||
@@ -8982,6 +9016,8 @@ snapshots:
|
|||||||
- '@babel/core'
|
- '@babel/core'
|
||||||
- babel-plugin-macros
|
- babel-plugin-macros
|
||||||
|
|
||||||
|
node-cron@4.2.1: {}
|
||||||
|
|
||||||
node-domexception@1.0.0: {}
|
node-domexception@1.0.0: {}
|
||||||
|
|
||||||
node-fetch@3.3.2:
|
node-fetch@3.3.2:
|
||||||
|
|||||||
Reference in New Issue
Block a user