Compare commits
42 Commits
2b99908de4
...
fix/pnpm-b
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1274df7ffc | ||
| 0b0fe10b37 | |||
| acfb31f8f6 | |||
|
|
fd83bd4f2d | ||
|
|
ce3ca1dbd1 | ||
|
|
95e7b071d4 | ||
| d4c5797a65 | |||
| 70a51ba711 | |||
| db8023bdbb | |||
| 9e597ecf87 | |||
| a23c117ea4 | |||
| 0cf80dab8c | |||
|
|
04a80fb9ba | ||
|
|
626adac363 | ||
|
|
35fbd88a1d | ||
| 381b0eed7b | |||
|
|
25383ea645 | ||
|
|
e7db9ddf98 | ||
|
|
7bb878718d | ||
|
|
46a31d4e71 | ||
|
|
e128a7a322 | ||
|
|
27b1898ec6 | ||
|
|
d19ef45bb0 | ||
|
|
5e852df6c3 | ||
|
|
e0eca771c6 | ||
|
|
9d22ef4cc9 | ||
|
|
41961a6980 | ||
|
|
e797676a02 | ||
|
|
05d61e62be | ||
|
|
73043773d8 | ||
| 0be9729e40 | |||
|
|
e83674ac51 | ||
|
|
a6e59bf829 | ||
| e46f0641f6 | |||
|
|
07efaa9580 | ||
|
|
361fece023 | ||
| 80e69016b0 | |||
|
|
e084a88a9d | ||
| 990a88362f | |||
|
|
ea9782b2dc | ||
| 8efbaf100e | |||
|
|
15830e2f2a |
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/gateway",
|
"name": "@mosaic/gateway",
|
||||||
"version": "0.0.0",
|
"version": "0.0.2",
|
||||||
"private": true,
|
"private": true,
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/main.js",
|
"main": "dist/main.js",
|
||||||
@@ -19,12 +19,14 @@
|
|||||||
"@modelcontextprotocol/sdk": "^1.27.1",
|
"@modelcontextprotocol/sdk": "^1.27.1",
|
||||||
"@mosaic/auth": "workspace:^",
|
"@mosaic/auth": "workspace:^",
|
||||||
"@mosaic/brain": "workspace:^",
|
"@mosaic/brain": "workspace:^",
|
||||||
|
"@mosaic/config": "workspace:^",
|
||||||
"@mosaic/coord": "workspace:^",
|
"@mosaic/coord": "workspace:^",
|
||||||
"@mosaic/db": "workspace:^",
|
"@mosaic/db": "workspace:^",
|
||||||
"@mosaic/discord-plugin": "workspace:^",
|
"@mosaic/discord-plugin": "workspace:^",
|
||||||
"@mosaic/log": "workspace:^",
|
"@mosaic/log": "workspace:^",
|
||||||
"@mosaic/memory": "workspace:^",
|
"@mosaic/memory": "workspace:^",
|
||||||
"@mosaic/queue": "workspace:^",
|
"@mosaic/queue": "workspace:^",
|
||||||
|
"@mosaic/storage": "workspace:^",
|
||||||
"@mosaic/telegram-plugin": "workspace:^",
|
"@mosaic/telegram-plugin": "workspace:^",
|
||||||
"@mosaic/types": "workspace:^",
|
"@mosaic/types": "workspace:^",
|
||||||
"@nestjs/common": "^11.0.0",
|
"@nestjs/common": "^11.0.0",
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import { Module } from '@nestjs/common';
|
import { Module } from '@nestjs/common';
|
||||||
import { APP_GUARD } from '@nestjs/core';
|
import { APP_GUARD } from '@nestjs/core';
|
||||||
import { HealthController } from './health/health.controller.js';
|
import { HealthController } from './health/health.controller.js';
|
||||||
|
import { ConfigModule } from './config/config.module.js';
|
||||||
import { DatabaseModule } from './database/database.module.js';
|
import { DatabaseModule } from './database/database.module.js';
|
||||||
import { AuthModule } from './auth/auth.module.js';
|
import { AuthModule } from './auth/auth.module.js';
|
||||||
import { BrainModule } from './brain/brain.module.js';
|
import { BrainModule } from './brain/brain.module.js';
|
||||||
@@ -28,6 +29,7 @@ import { ThrottlerGuard, ThrottlerModule } from '@nestjs/throttler';
|
|||||||
@Module({
|
@Module({
|
||||||
imports: [
|
imports: [
|
||||||
ThrottlerModule.forRoot([{ name: 'default', ttl: 60_000, limit: 60 }]),
|
ThrottlerModule.forRoot([{ name: 'default', ttl: 60_000, limit: 60 }]),
|
||||||
|
ConfigModule,
|
||||||
DatabaseModule,
|
DatabaseModule,
|
||||||
AuthModule,
|
AuthModule,
|
||||||
BrainModule,
|
BrainModule,
|
||||||
|
|||||||
16
apps/gateway/src/config/config.module.ts
Normal file
16
apps/gateway/src/config/config.module.ts
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
import { Global, Module } from '@nestjs/common';
|
||||||
|
import { loadConfig, type MosaicConfig } from '@mosaic/config';
|
||||||
|
|
||||||
|
export const MOSAIC_CONFIG = 'MOSAIC_CONFIG';
|
||||||
|
|
||||||
|
@Global()
|
||||||
|
@Module({
|
||||||
|
providers: [
|
||||||
|
{
|
||||||
|
provide: MOSAIC_CONFIG,
|
||||||
|
useFactory: (): MosaicConfig => loadConfig(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
exports: [MOSAIC_CONFIG],
|
||||||
|
})
|
||||||
|
export class ConfigModule {}
|
||||||
@@ -1,28 +1,42 @@
|
|||||||
import { Global, Inject, Module, type OnApplicationShutdown } from '@nestjs/common';
|
import { Global, Inject, Module, type OnApplicationShutdown } from '@nestjs/common';
|
||||||
import { createDb, type Db, type DbHandle } from '@mosaic/db';
|
import { createDb, type Db, type DbHandle } from '@mosaic/db';
|
||||||
|
import { createStorageAdapter, type StorageAdapter } from '@mosaic/storage';
|
||||||
|
import type { MosaicConfig } from '@mosaic/config';
|
||||||
|
import { MOSAIC_CONFIG } from '../config/config.module.js';
|
||||||
|
|
||||||
export const DB_HANDLE = 'DB_HANDLE';
|
export const DB_HANDLE = 'DB_HANDLE';
|
||||||
export const DB = 'DB';
|
export const DB = 'DB';
|
||||||
|
export const STORAGE_ADAPTER = 'STORAGE_ADAPTER';
|
||||||
|
|
||||||
@Global()
|
@Global()
|
||||||
@Module({
|
@Module({
|
||||||
providers: [
|
providers: [
|
||||||
{
|
{
|
||||||
provide: DB_HANDLE,
|
provide: DB_HANDLE,
|
||||||
useFactory: (): DbHandle => createDb(),
|
useFactory: (config: MosaicConfig): DbHandle =>
|
||||||
|
createDb(config.storage.type === 'postgres' ? config.storage.url : undefined),
|
||||||
|
inject: [MOSAIC_CONFIG],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
provide: DB,
|
provide: DB,
|
||||||
useFactory: (handle: DbHandle): Db => handle.db,
|
useFactory: (handle: DbHandle): Db => handle.db,
|
||||||
inject: [DB_HANDLE],
|
inject: [DB_HANDLE],
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
provide: STORAGE_ADAPTER,
|
||||||
|
useFactory: (config: MosaicConfig): StorageAdapter => createStorageAdapter(config.storage),
|
||||||
|
inject: [MOSAIC_CONFIG],
|
||||||
|
},
|
||||||
],
|
],
|
||||||
exports: [DB],
|
exports: [DB, STORAGE_ADAPTER],
|
||||||
})
|
})
|
||||||
export class DatabaseModule implements OnApplicationShutdown {
|
export class DatabaseModule implements OnApplicationShutdown {
|
||||||
constructor(@Inject(DB_HANDLE) private readonly handle: DbHandle) {}
|
constructor(
|
||||||
|
@Inject(DB_HANDLE) private readonly handle: DbHandle,
|
||||||
|
@Inject(STORAGE_ADAPTER) private readonly storageAdapter: StorageAdapter,
|
||||||
|
) {}
|
||||||
|
|
||||||
async onApplicationShutdown(): Promise<void> {
|
async onApplicationShutdown(): Promise<void> {
|
||||||
await this.handle.close();
|
await Promise.all([this.handle.close(), this.storageAdapter.close()]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,11 +1,29 @@
|
|||||||
import { Global, Module } from '@nestjs/common';
|
import { Global, Module } from '@nestjs/common';
|
||||||
import { createMemory, type Memory } from '@mosaic/memory';
|
import {
|
||||||
|
createMemory,
|
||||||
|
type Memory,
|
||||||
|
createMemoryAdapter,
|
||||||
|
type MemoryAdapter,
|
||||||
|
type MemoryConfig,
|
||||||
|
} from '@mosaic/memory';
|
||||||
import type { Db } from '@mosaic/db';
|
import type { Db } from '@mosaic/db';
|
||||||
import { DB } from '../database/database.module.js';
|
import type { StorageAdapter } from '@mosaic/storage';
|
||||||
|
import type { MosaicConfig } from '@mosaic/config';
|
||||||
|
import { MOSAIC_CONFIG } from '../config/config.module.js';
|
||||||
|
import { DB, STORAGE_ADAPTER } from '../database/database.module.js';
|
||||||
import { MEMORY } from './memory.tokens.js';
|
import { MEMORY } from './memory.tokens.js';
|
||||||
import { MemoryController } from './memory.controller.js';
|
import { MemoryController } from './memory.controller.js';
|
||||||
import { EmbeddingService } from './embedding.service.js';
|
import { EmbeddingService } from './embedding.service.js';
|
||||||
|
|
||||||
|
export const MEMORY_ADAPTER = 'MEMORY_ADAPTER';
|
||||||
|
|
||||||
|
function buildMemoryConfig(config: MosaicConfig, storageAdapter: StorageAdapter): MemoryConfig {
|
||||||
|
if (config.memory.type === 'keyword') {
|
||||||
|
return { type: 'keyword', storage: storageAdapter };
|
||||||
|
}
|
||||||
|
return { type: config.memory.type };
|
||||||
|
}
|
||||||
|
|
||||||
@Global()
|
@Global()
|
||||||
@Module({
|
@Module({
|
||||||
providers: [
|
providers: [
|
||||||
@@ -14,9 +32,15 @@ import { EmbeddingService } from './embedding.service.js';
|
|||||||
useFactory: (db: Db): Memory => createMemory(db),
|
useFactory: (db: Db): Memory => createMemory(db),
|
||||||
inject: [DB],
|
inject: [DB],
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
provide: MEMORY_ADAPTER,
|
||||||
|
useFactory: (config: MosaicConfig, storageAdapter: StorageAdapter): MemoryAdapter =>
|
||||||
|
createMemoryAdapter(buildMemoryConfig(config, storageAdapter)),
|
||||||
|
inject: [MOSAIC_CONFIG, STORAGE_ADAPTER],
|
||||||
|
},
|
||||||
EmbeddingService,
|
EmbeddingService,
|
||||||
],
|
],
|
||||||
controllers: [MemoryController],
|
controllers: [MemoryController],
|
||||||
exports: [MEMORY, EmbeddingService],
|
exports: [MEMORY, MEMORY_ADAPTER, EmbeddingService],
|
||||||
})
|
})
|
||||||
export class MemoryModule {}
|
export class MemoryModule {}
|
||||||
|
|||||||
@@ -1,9 +1,21 @@
|
|||||||
import { Global, Module } from '@nestjs/common';
|
import { Global, Module } from '@nestjs/common';
|
||||||
|
import { createQueueAdapter, type QueueAdapter } from '@mosaic/queue';
|
||||||
|
import type { MosaicConfig } from '@mosaic/config';
|
||||||
|
import { MOSAIC_CONFIG } from '../config/config.module.js';
|
||||||
import { QueueService } from './queue.service.js';
|
import { QueueService } from './queue.service.js';
|
||||||
|
|
||||||
|
export const QUEUE_ADAPTER = 'QUEUE_ADAPTER';
|
||||||
|
|
||||||
@Global()
|
@Global()
|
||||||
@Module({
|
@Module({
|
||||||
providers: [QueueService],
|
providers: [
|
||||||
exports: [QueueService],
|
QueueService,
|
||||||
|
{
|
||||||
|
provide: QUEUE_ADAPTER,
|
||||||
|
useFactory: (config: MosaicConfig): QueueAdapter => createQueueAdapter(config.queue),
|
||||||
|
inject: [MOSAIC_CONFIG],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
exports: [QueueService, QUEUE_ADAPTER],
|
||||||
})
|
})
|
||||||
export class QueueModule {}
|
export class QueueModule {}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/web",
|
"name": "@mosaic/web",
|
||||||
"version": "0.0.0",
|
"version": "0.0.2",
|
||||||
"private": true,
|
"private": true,
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "next build",
|
"build": "next build",
|
||||||
|
|||||||
@@ -1,73 +1,30 @@
|
|||||||
# Tasks — Harness Foundation
|
# Tasks — Storage Abstraction Retrofit
|
||||||
|
|
||||||
> Single-writer: orchestrator only. Workers read but never modify.
|
> Single-writer: orchestrator only. Workers read but never modify.
|
||||||
>
|
>
|
||||||
|
> **Mission:** Decouple gateway from hardcoded Postgres/Valkey backends. Introduce interface-driven middleware so the gateway is backend-agnostic. Default to local tier (SQLite + JSON) for zero-dependency installs.
|
||||||
|
>
|
||||||
> **`agent` column values:** `codex` | `sonnet` | `haiku` | `glm-5` | `opus` | `—` (auto/default)
|
> **`agent` column values:** `codex` | `sonnet` | `haiku` | `glm-5` | `opus` | `—` (auto/default)
|
||||||
|
|
||||||
| id | status | agent | milestone | description | pr | notes |
|
| id | status | agent | description | tokens |
|
||||||
| ------ | ------ | ------ | ------------------ | ------------------------------------------------------------------ | ---- | ----------- |
|
| --------- | ----------- | ------ | ---------------------------------------------------------------- | ------ |
|
||||||
| M1-001 | done | sonnet | M1: Persistence | Wire ChatGateway → ConversationsRepo for user messages | #292 | #224 closed |
|
| SA-P1-001 | done | sonnet | Define QueueAdapter interface in packages/queue/src/types.ts | 3K |
|
||||||
| M1-002 | done | sonnet | M1: Persistence | Wire agent event relay → ConversationsRepo for assistant responses | #292 | #225 closed |
|
| SA-P1-002 | done | sonnet | Define StorageAdapter interface in packages/storage/src/types.ts | 3K |
|
||||||
| M1-003 | done | sonnet | M1: Persistence | Store message metadata: model, provider, tokens, tool calls | #292 | #226 closed |
|
| SA-P1-003 | done | sonnet | Define MemoryAdapter interface in packages/memory/src/types.ts | 3K |
|
||||||
| M1-004 | done | sonnet | M1: Persistence | Load message history into Pi session on resume | #301 | #227 closed |
|
| SA-P1-004 | done | sonnet | Create adapter factory pattern + config types | 3K |
|
||||||
| M1-005 | done | sonnet | M1: Persistence | Context window management: summarize when >80% | #301 | #228 closed |
|
| SA-P2-001 | done | sonnet | Refactor @mosaic/queue: wrap ioredis as BullMQ adapter | 3K |
|
||||||
| M1-006 | done | sonnet | M1: Persistence | Conversation search endpoint | #299 | #229 closed |
|
| SA-P2-002 | done | sonnet | Create @mosaic/storage: wrap Drizzle as Postgres adapter | 6K |
|
||||||
| M1-007 | done | sonnet | M1: Persistence | TUI /history command | #297 | #230 closed |
|
| SA-P2-003 | done | sonnet | Refactor @mosaic/memory: extract pgvector adapter | 4K |
|
||||||
| M1-008 | done | sonnet | M1: Persistence | Verify persistence — 20 tests | #304 | #231 closed |
|
| SA-P2-004 | done | sonnet | Update gateway modules to use factories + DI tokens | 5K |
|
||||||
| M2-001 | done | sonnet | M2: Security | InsightsRepo userId on searchByEmbedding | #290 | #232 closed |
|
| SA-P2-005 | done | opus | Verify Phase 2: all tests pass, typecheck clean | — |
|
||||||
| M2-002 | done | sonnet | M2: Security | InsightsRepo userId on findByUser/decay | #290 | #233 closed |
|
| SA-P3-001 | done | sonnet | Implement local queue adapter: JSON file persistence | 5K |
|
||||||
| M2-003 | done | sonnet | M2: Security | PreferencesRepo userId verified | #294 | #234 closed |
|
| SA-P3-002 | done | sonnet | Implement SQLite storage adapter with better-sqlite3 | 8K |
|
||||||
| M2-004 | done | sonnet | M2: Security | Memory tools userId injection fixed | #294 | #235 closed |
|
| SA-P3-003 | done | sonnet | Implement keyword memory adapter — no vector dependency | 4K |
|
||||||
| M2-005 | done | sonnet | M2: Security | ConversationsRepo ownership checks | #293 | #236 closed |
|
| SA-P3-004 | done | opus | Verify Phase 3: 42 new tests, 347 total passing | — |
|
||||||
| M2-006 | done | sonnet | M2: Security | AgentsRepo findAccessible scoped | #293 | #237 closed |
|
| SA-P4-001 | done | sonnet | MosaicConfig schema + loader with tier auto-detection | 6K |
|
||||||
| M2-007 | done | sonnet | M2: Security | Cross-user isolation — 28 tests | #305 | #238 closed |
|
| SA-P4-002 | done | sonnet | CLI: mosaic gateway init — interactive wizard | 4K |
|
||||||
| M2-008 | done | sonnet | M2: Security | Valkey SCAN + /gc admin-only | #298 | #239 closed |
|
| SA-P4-003 | done | sonnet | CLI: mosaic gateway start/stop/status lifecycle | 5K |
|
||||||
| M3-001 | done | sonnet | M3: Providers | IProviderAdapter + OllamaAdapter | #306 | #240 closed |
|
| SA-P4-004 | done | opus | Verify Phase 4: 381 tests passing, 40/40 tasks clean | — |
|
||||||
| M3-002 | done | sonnet | M3: Providers | AnthropicAdapter | #309 | #241 closed |
|
| SA-P5-001 | not-started | codex | Migration tooling: mosaic storage export/import | — |
|
||||||
| M3-003 | done | sonnet | M3: Providers | OpenAIAdapter | #310 | #242 closed |
|
| SA-P5-002 | not-started | codex | Docker Compose profiles: local vs team | — |
|
||||||
| M3-004 | done | sonnet | M3: Providers | OpenRouterAdapter | #311 | #243 closed |
|
| SA-P5-003 | not-started | codex | Final verification + docs: README, architecture diagram | — |
|
||||||
| M3-005 | done | sonnet | M3: Providers | ZaiAdapter (GLM-5) | #314 | #244 closed |
|
|
||||||
| M3-006 | done | sonnet | M3: Providers | Ollama embedding support | #311 | #245 closed |
|
|
||||||
| M3-007 | done | sonnet | M3: Providers | Provider health checks | #308 | #246 closed |
|
|
||||||
| M3-008 | done | sonnet | M3: Providers | Model capability matrix | #303 | #247 closed |
|
|
||||||
| M3-009 | done | sonnet | M3: Providers | EmbeddingService → Ollama default | #308 | #248 closed |
|
|
||||||
| M3-010 | done | sonnet | M3: Providers | OAuth token storage (AES-256-GCM) | #317 | #249 closed |
|
|
||||||
| M3-011 | done | sonnet | M3: Providers | Provider credentials CRUD | #317 | #250 closed |
|
|
||||||
| M3-012 | done | sonnet | M3: Providers | Verify providers — 40 tests | #319 | #251 closed |
|
|
||||||
| M4-001 | done | sonnet | M4: Routing | routing_rules DB schema | #315 | #252 closed |
|
|
||||||
| M4-002 | done | sonnet | M4: Routing | Condition types | #315 | #253 closed |
|
|
||||||
| M4-003 | done | sonnet | M4: Routing | Action types | #315 | #254 closed |
|
|
||||||
| M4-004 | done | sonnet | M4: Routing | Default routing rules (11 seeds) | #316 | #255 closed |
|
|
||||||
| M4-005 | done | sonnet | M4: Routing | Task classifier (60+ tests) | #316 | #256 closed |
|
|
||||||
| M4-006 | done | sonnet | M4: Routing | Routing decision pipeline | #318 | #257 closed |
|
|
||||||
| M4-007 | done | sonnet | M4: Routing | /model override | #323 | #258 closed |
|
|
||||||
| M4-008 | done | sonnet | M4: Routing | Routing transparency in session:info | #323 | #259 closed |
|
|
||||||
| M4-009 | done | sonnet | M4: Routing | Routing rules CRUD API | #320 | #260 closed |
|
|
||||||
| M4-010 | done | sonnet | M4: Routing | Per-user routing overrides | #320 | #261 closed |
|
|
||||||
| M4-011 | done | sonnet | M4: Routing | Agent specialization capabilities | #320 | #262 closed |
|
|
||||||
| M4-012 | done | sonnet | M4: Routing | Routing wired into ChatGateway | #323 | #263 closed |
|
|
||||||
| M4-013 | done | sonnet | M4: Routing | Verify routing — 9 E2E tests | #323 | #264 closed |
|
|
||||||
| M5-001 | done | sonnet | M5: Sessions | Agent config loaded on session create | #323 | #265 closed |
|
|
||||||
| M5-002 | done | sonnet | M5: Sessions | /model command end-to-end | #323 | #266 closed |
|
|
||||||
| M5-003 | done | sonnet | M5: Sessions | /agent command mid-session | #323 | #267 closed |
|
|
||||||
| M5-004 | done | sonnet | M5: Sessions | Session ↔ conversation binding | #321 | #268 closed |
|
|
||||||
| M5-005 | done | sonnet | M5: Sessions | Session info broadcast | #321 | #269 closed |
|
|
||||||
| M5-006 | done | sonnet | M5: Sessions | /agent new from TUI | #321 | #270 closed |
|
|
||||||
| M5-007 | done | sonnet | M5: Sessions | Session metrics | #321 | #271 closed |
|
|
||||||
| M5-008 | done | sonnet | M5: Sessions | Verify sessions — 28 tests | #324 | #272 closed |
|
|
||||||
| M6-001 | done | sonnet | M6: Jobs | BullMQ + Valkey config | #324 | #273 closed |
|
|
||||||
| M6-002 | done | sonnet | M6: Jobs | Queue service with typed jobs | #324 | #274 closed |
|
|
||||||
| M6-003 | done | sonnet | M6: Jobs | Summarization → BullMQ | #324 | #275 closed |
|
|
||||||
| M6-004 | done | sonnet | M6: Jobs | GC → BullMQ | #324 | #276 closed |
|
|
||||||
| M6-005 | done | sonnet | M6: Jobs | Tier management → BullMQ | #324 | #277 closed |
|
|
||||||
| M6-006 | done | sonnet | M6: Jobs | Admin jobs API | #325 | #278 closed |
|
|
||||||
| M6-007 | done | sonnet | M6: Jobs | Job event logging | #325 | #279 closed |
|
|
||||||
| M6-008 | done | sonnet | M6: Jobs | Verify jobs | #324 | #280 closed |
|
|
||||||
| M7-001 | done | sonnet | M7: Channel Design | IChannelAdapter interface | #325 | #281 closed |
|
|
||||||
| M7-002 | done | sonnet | M7: Channel Design | Channel message protocol | #325 | #282 closed |
|
|
||||||
| M7-003 | done | sonnet | M7: Channel Design | Matrix integration design | #326 | #283 closed |
|
|
||||||
| M7-004 | done | sonnet | M7: Channel Design | Conversation multiplexing | #326 | #284 closed |
|
|
||||||
| M7-005 | done | sonnet | M7: Channel Design | Remote auth bridging | #326 | #285 closed |
|
|
||||||
| M7-006 | done | sonnet | M7: Channel Design | Agent-to-agent via Matrix | #326 | #286 closed |
|
|
||||||
| M7-007 | done | sonnet | M7: Channel Design | Multi-user isolation in Matrix | #326 | #287 closed |
|
|
||||||
| M7-008 | done | sonnet | M7: Channel Design | channel-protocol.md published | #326 | #288 closed |
|
|
||||||
|
|||||||
555
docs/design/storage-abstraction-middleware.md
Normal file
555
docs/design/storage-abstraction-middleware.md
Normal file
@@ -0,0 +1,555 @@
|
|||||||
|
# Storage & Queue Abstraction — Middleware Architecture
|
||||||
|
|
||||||
|
Design
|
||||||
|
Status: Design (retrofit required)
|
||||||
|
date: 2026-04-02
|
||||||
|
context: Agents coupled directly to infrastructure backends, bypassing intended middleware layer
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## The Problem
|
||||||
|
|
||||||
|
Current packages are **direct adapters**, not **middleware**:
|
||||||
|
| Package | Current State | Intended Design |
|
||||||
|
|---------|---------------|-----------------|
|
||||||
|
| `@mosaic/queue` | `ioredis` hardcoded | Interface → BullMQ OR local-files |
|
||||||
|
| `@mosaic/db` | Drizzle + Postgres hardcoded | Interface → Postgres OR SQLite OR JSON/MD |
|
||||||
|
| `@mosaic/memory` | pgvector required | Interface → pgvector OR sqlite-vec OR keyword-search |
|
||||||
|
|
||||||
|
The gateway and TUI import these packages directly, which means they're coupled to specific infrastructure. Users cannot run Mosaic Stack without Postgres + Valkey.
|
||||||
|
|
||||||
|
## The Intended Architecture
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────────────────────────────────────────────────────────┐
|
||||||
|
│ Gateway / TUI / CLI │
|
||||||
|
│ (agnostic of storage backend, talks to middleware) │
|
||||||
|
└───────────────────────────┬─────────────────────────────────────┘
|
||||||
|
│
|
||||||
|
┌───────────────────┼───────────────────┐
|
||||||
|
│ │ │
|
||||||
|
▼─────────────────┴─────────────────┴─────────────────┘
|
||||||
|
| | | |
|
||||||
|
▼─────────────────┴───────────────────┴─────────────────┘
|
||||||
|
| | | |
|
||||||
|
Queue Storage Memory
|
||||||
|
| | | |
|
||||||
|
┌─────────┬─────────┬─────────┬─────────────────────────────────┐
|
||||||
|
| BullMQ | | Local | | Postgres | SQLite | JSON/MD | pgvector | sqlite-vec | keyword |
|
||||||
|
|(Valkey)| |(files) | | | | | |
|
||||||
|
└─────────┴─────────┴─────────┴─────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
The gateway imports the interface, not the backend. At startup it reads config and instantiates the correct adapter.
|
||||||
|
|
||||||
|
## The Drift
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// What should have happened:
|
||||||
|
gateway/queue.service.ts → @mosaic/queue (interface) → queue.adapter.ts
|
||||||
|
|
||||||
|
// What actually happened:
|
||||||
|
gateway/queue.service.ts → @mosaic/queue → ioredis (hardcoded)
|
||||||
|
```
|
||||||
|
|
||||||
|
## The Current State Analysis
|
||||||
|
|
||||||
|
### `@mosaic/queue` (packages/queue/src/queue.ts)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import Redis from 'ioredis'; // ← Direct import of backend
|
||||||
|
|
||||||
|
export function createQueue(config?: QueueConfig): QueueHandle {
|
||||||
|
const url = config?.url ?? process.env['VALKEY_URL'] ?? DEFAULT_VALKEY_URL;
|
||||||
|
const redis = new Redis(url, { maxRetriesPerRequest: 3 });
|
||||||
|
// ...queue ops directly on redis...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Problem:** `ioredis` is imported in the package, not the adapter interface. Consumers cannot swap backends.
|
||||||
|
|
||||||
|
### `@mosaic/db` (packages/db/src/client.ts)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { drizzle, type PostgresJsDatabase } from 'drizzle-orm/postgres-js';
|
||||||
|
import postgres from 'postgres';
|
||||||
|
|
||||||
|
export function createDb(url?: string): DbHandle {
|
||||||
|
const connectionString = url ?? process.env['DATABASE_URL'] ?? DEFAULT_DATABASE_URL;
|
||||||
|
const sql = postgres(connectionString, { max: 20, idle_timeout: 30, connect_timeout: 5 });
|
||||||
|
const db = drizzle(sql, { schema });
|
||||||
|
// ...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Problem:** Drizzle + Postgres is hardcoded. No SQLite, JSON, or file-based options.
|
||||||
|
|
||||||
|
### `@mosaic/memory` (packages/memory/src/memory.ts)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import type { Db } from '@mosaic/db'; // ← Depends on Drizzle/PG
|
||||||
|
|
||||||
|
export function createMemory(db: Db): Memory {
|
||||||
|
return {
|
||||||
|
preferences: createPreferencesRepo(db),
|
||||||
|
insights: createInsightsRepo(db),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Problem:** Memory package is tightly coupled to `@mosaic/db` (which is Postgres-only). No alternative storage backends.
|
||||||
|
|
||||||
|
## The Target Interfaces
|
||||||
|
|
||||||
|
### Queue Interface
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// packages/queue/src/types.ts
|
||||||
|
export interface QueueAdapter {
|
||||||
|
readonly name: string;
|
||||||
|
|
||||||
|
enqueue(queueName: string, payload: TaskPayload): Promise<void>;
|
||||||
|
dequeue(queueName: string): Promise<TaskPayload | null>;
|
||||||
|
length(queueName: string): Promise<number>;
|
||||||
|
publish(channel: string, message: string): Promise<void>;
|
||||||
|
subscribe(channel: string, handler: (message: string) => void): () => void;
|
||||||
|
close(): Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TaskPayload {
|
||||||
|
id: string;
|
||||||
|
type: string;
|
||||||
|
data: Record<string, unknown>;
|
||||||
|
createdAt: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface QueueConfig {
|
||||||
|
type: 'bullmq' | 'local';
|
||||||
|
url?: string; // For bullmq: Valkey/Redis URL
|
||||||
|
dataDir?: string; // For local: directory for JSON persistence
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Storage Interface
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// packages/storage/src/types.ts
|
||||||
|
export interface StorageAdapter {
|
||||||
|
readonly name: string;
|
||||||
|
|
||||||
|
// Entity CRUD
|
||||||
|
create<T>(collection: string, data: T): Promise<T>;
|
||||||
|
read<T>(collection: string, id: string): Promise<T | null>;
|
||||||
|
update<T>(collection: string, id: string, data: Partial<T>): Promise<T | null>;
|
||||||
|
delete(collection: string, id: string): Promise<boolean>;
|
||||||
|
|
||||||
|
// Queries
|
||||||
|
find<T>(collection: string, filter: Record<string, unknown>): Promise<T[]>;
|
||||||
|
findOne<T>(collection: string, filter: Record<string, unknown>): Promise<T | null>;
|
||||||
|
|
||||||
|
// Bulk operations
|
||||||
|
createMany<T>(collection: string, items: T[]): Promise<T[]>;
|
||||||
|
updateMany<T>(collection: string, ids: string[], data: Partial<T>): Promise<number>;
|
||||||
|
deleteMany(collection: string, ids: string[]): Promise<number>;
|
||||||
|
|
||||||
|
// Raw queries (for complex queries)
|
||||||
|
query<T>(collection: string, query: string, params?: unknown[]): Promise<T[]>;
|
||||||
|
|
||||||
|
// Transaction support
|
||||||
|
transaction<T>(fn: (tx: StorageTransaction) => Promise<T>): Promise<T>;
|
||||||
|
|
||||||
|
close(): Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface StorageTransaction {
|
||||||
|
commit(): Promise<void>;
|
||||||
|
rollback(): Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface StorageConfig {
|
||||||
|
type: 'postgres' | 'sqlite' | 'files';
|
||||||
|
url?: string; // For postgres
|
||||||
|
path?: string; // For sqlite/files
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Memory Interface (Vector + Preferences)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// packages/memory/src/types.ts
|
||||||
|
export interface MemoryAdapter {
|
||||||
|
readonly name: string;
|
||||||
|
|
||||||
|
// Preferences (key-value storage)
|
||||||
|
getPreference(userId: string, key: string): Promise<unknown | null>;
|
||||||
|
setPreference(userId: string, key: string, value: unknown): Promise<void>;
|
||||||
|
deletePreference(userId: string, key: string): Promise<boolean>;
|
||||||
|
listPreferences(
|
||||||
|
userId: string,
|
||||||
|
category?: string,
|
||||||
|
): Promise<Array<{ key: string; value: unknown }>>;
|
||||||
|
|
||||||
|
// Insights (with optional vector search)
|
||||||
|
storeInsight(insight: NewInsight): Promise<Insight>;
|
||||||
|
getInsight(id: string): Promise<Insight | null>;
|
||||||
|
searchInsights(query: string, limit?: number, filter?: InsightFilter): Promise<SearchResult[]>;
|
||||||
|
deleteInsight(id: string): Promise<boolean>;
|
||||||
|
|
||||||
|
// Embedding provider (optional, null = no vector search)
|
||||||
|
readonly embedder?: EmbeddingProvider | null;
|
||||||
|
|
||||||
|
close(): Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface NewInsight {
|
||||||
|
id: string;
|
||||||
|
userId: string;
|
||||||
|
content: string;
|
||||||
|
embedding?: number[]; // If embedder is available
|
||||||
|
source: 'agent' | 'user' | 'summarization' | 'system';
|
||||||
|
category: 'decision' | 'learning' | 'preference' | 'fact' | 'pattern' | 'general';
|
||||||
|
relevanceScore: number;
|
||||||
|
metadata?: Record<string, unknown>;
|
||||||
|
createdAt: Date;
|
||||||
|
decayedAt?: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface InsightFilter {
|
||||||
|
userId?: string;
|
||||||
|
category?: string;
|
||||||
|
source?: string;
|
||||||
|
minRelevance?: number;
|
||||||
|
fromDate?: Date;
|
||||||
|
toDate?: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SearchResult {
|
||||||
|
documentId: string;
|
||||||
|
content: string;
|
||||||
|
distance: number;
|
||||||
|
metadata?: Record<string, unknown>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface MemoryConfig {
|
||||||
|
type: 'pgvector' | 'sqlite-vec' | 'keyword';
|
||||||
|
storage: StorageAdapter;
|
||||||
|
embedder?: EmbeddingProvider;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface EmbeddingProvider {
|
||||||
|
embed(text: string): Promise<number[]>;
|
||||||
|
embedBatch(texts: string[]): Promise<number[][]>;
|
||||||
|
readonly dimensions: number;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Three Tiers
|
||||||
|
|
||||||
|
### Tier 1: Local (Zero Dependencies)
|
||||||
|
|
||||||
|
**Target:** Single user, single machine, no external services
|
||||||
|
|
||||||
|
| Component | Backend | Storage |
|
||||||
|
| --------- | --------------------------------------------- | ------------ |
|
||||||
|
| Queue | In-process + JSON files in `~/.mosaic/queue/` |
|
||||||
|
| Storage | SQLite (better-sqlite3) `~/.mosaic/data.db` |
|
||||||
|
| Memory | Keyword search | SQLite table |
|
||||||
|
| Vector | None | N/A |
|
||||||
|
|
||||||
|
**Dependencies:**
|
||||||
|
|
||||||
|
- `better-sqlite3` (bundled)
|
||||||
|
- No Postgres, No Valkey, No pgvector
|
||||||
|
|
||||||
|
**Upgrade path:**
|
||||||
|
|
||||||
|
1. Run `mosaic gateway configure` → select "local" tier
|
||||||
|
2. Gateway starts with SQLite database
|
||||||
|
3. Optional: run `mosaic gateway upgrade --tier team` to migrate to Postgres
|
||||||
|
|
||||||
|
### Tier 2: Team (Postgres + Valkey)
|
||||||
|
|
||||||
|
**Target:** Multiple users, shared server, CI/CD environments
|
||||||
|
|
||||||
|
| Component | Backend | Storage |
|
||||||
|
| --------- | -------------- | ------------------------------ |
|
||||||
|
| Queue | BullMQ | Valkey |
|
||||||
|
| Storage | Postgres | Shared PG instance |
|
||||||
|
| Memory | pgvector | Postgres with vector extension |
|
||||||
|
| Vector | LLM embeddings | Configured provider |
|
||||||
|
|
||||||
|
**Dependencies:**
|
||||||
|
|
||||||
|
- PostgreSQL 17+ with pgvector extension
|
||||||
|
- Valkey (Redis-compatible)
|
||||||
|
- LLM provider for embeddings
|
||||||
|
|
||||||
|
**Migration from Local → Team:**
|
||||||
|
|
||||||
|
1. `mosaic gateway backup` → creates dump of SQLite database
|
||||||
|
2. `mosaic gateway upgrade --tier team` → restores to Postgres
|
||||||
|
3. Queue replays from BullMQ (may need manual reconciliation for in-flight jobs)
|
||||||
|
4. Memory embeddings regenerated if vector search was new
|
||||||
|
|
||||||
|
### Tier 3: Enterprise (Clustered)
|
||||||
|
|
||||||
|
**Target:** Large teams, multi-region, high availability
|
||||||
|
|
||||||
|
| Component | Backend | Storage |
|
||||||
|
| --------- | --------------------------- | ----------------------------- |
|
||||||
|
| Queue | BullMQ cluster | Multiple Valkey nodes |
|
||||||
|
| Storage | Postgres cluster | Primary + replicas |
|
||||||
|
| Memory | Dedicated vector DB | Qdrant, Pinecone, or pgvector |
|
||||||
|
| Vector | Dedicated embedding service | Separate microservice |
|
||||||
|
|
||||||
|
## MarkdownDB Integration
|
||||||
|
|
||||||
|
For file-based storage, we use [MarkdownDB](https://markdowndb.com) to parse MD files into queryable data.
|
||||||
|
|
||||||
|
**What it provides:**
|
||||||
|
|
||||||
|
- Parses frontmatter (YAML/JSON/TOML)
|
||||||
|
- Extracts links, tags, metadata
|
||||||
|
- Builds index in JSON or SQLite
|
||||||
|
- Queryable via SQL-like interface
|
||||||
|
|
||||||
|
**Usage in Mosaic:**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Local tier with MD files for documents
|
||||||
|
const storage = createStorageAdapter({
|
||||||
|
type: 'files',
|
||||||
|
path: path.join(mosaicHome, 'docs'),
|
||||||
|
markdowndb: {
|
||||||
|
parseFrontmatter: true,
|
||||||
|
extractLinks: true,
|
||||||
|
indexFile: 'index.json',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Dream Mode — Memory Consolidation
|
||||||
|
|
||||||
|
An automated equivalent of Claude Code's "Dream: Memory Consolidation" cycle.
|
||||||
|
|
||||||
|
**Trigger:** Every 24 hours (if 5+ sessions active)
|
||||||
|
|
||||||
|
**Phases:**
|
||||||
|
|
||||||
|
1. **Orient** — What happened, what's the current state
|
||||||
|
- Scan recent session logs
|
||||||
|
- Identify active tasks, missions, conversations
|
||||||
|
- Calculate time window (last 24h)
|
||||||
|
|
||||||
|
2. **Gather** — Pull in relevant context
|
||||||
|
- Load conversations, decisions, agent logs
|
||||||
|
- Extract key interactions and outcomes
|
||||||
|
- Identify patterns and learnings
|
||||||
|
|
||||||
|
3. **Consolidate** — Summarize and compress
|
||||||
|
- Generate summary of the last 24h
|
||||||
|
- Extract key decisions and their rationale
|
||||||
|
- Identify recurring patterns
|
||||||
|
- Compress verbose logs into concise insights
|
||||||
|
|
||||||
|
4. **Prune** — Archive and cleanup
|
||||||
|
- Archive raw session files to dated folders
|
||||||
|
- Delete redundant/temporary data
|
||||||
|
- Update MEMORY.md with consolidated content
|
||||||
|
- Update insight relevance scores
|
||||||
|
|
||||||
|
**Implementation:**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// In @mosaic/dream (new package)
|
||||||
|
export async function runDreamCycle(config: DreamConfig): Promise<DreamResult> {
|
||||||
|
const memory = await loadMemoryAdapter(config.storage);
|
||||||
|
|
||||||
|
// Orient
|
||||||
|
const sessions = await memory.getRecentSessions(24 * 60 * 60 * 1000);
|
||||||
|
if (sessions.length < 5) return { skipped: true, reason: 'insufficient_sessions' };
|
||||||
|
|
||||||
|
// Gather
|
||||||
|
const context = await gatherContext(memory, sessions);
|
||||||
|
|
||||||
|
// Consolidate
|
||||||
|
const consolidated = await consolidateWithLLM(context, config.llm);
|
||||||
|
|
||||||
|
// Prune
|
||||||
|
await pruneArchivedData(memory, config.retention);
|
||||||
|
|
||||||
|
// Store consolidated insights
|
||||||
|
await memory.storeInsights(consolidated.insights);
|
||||||
|
|
||||||
|
return {
|
||||||
|
sessionsProcessed: sessions.length,
|
||||||
|
insightsCreated: consolidated.insights.length,
|
||||||
|
bytesPruned: consolidated.bytesRemoved,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Retrofit Plan
|
||||||
|
|
||||||
|
### Phase 1: Interface Extraction (2-3 days)
|
||||||
|
|
||||||
|
**Goal:** Define interfaces without changing existing behavior
|
||||||
|
|
||||||
|
1. Create `packages/queue/src/types.ts` with `QueueAdapter` interface
|
||||||
|
2. Create `packages/storage/src/types.ts` with `StorageAdapter` interface
|
||||||
|
3. Create `packages/memory/src/types.ts` with `MemoryAdapter` interface (refactor existing)
|
||||||
|
4. Add adapter registry pattern to each package
|
||||||
|
5. No breaking changes — existing code continues to work
|
||||||
|
|
||||||
|
### Phase 2: Refactor Existing to Adapters (3-5 days)
|
||||||
|
|
||||||
|
**Goal:** Move existing implementations behind adapters
|
||||||
|
|
||||||
|
#### 2.1 Queue Refactor
|
||||||
|
|
||||||
|
1. Rename `packages/queue/src/queue.ts` → `packages/queue/src/adapters/bullmq.ts`
|
||||||
|
2. Create `packages/queue/src/index.ts` to export factory function
|
||||||
|
3. Factory function reads config, instantiates correct adapter
|
||||||
|
4. Update gateway imports to use factory
|
||||||
|
|
||||||
|
#### 2.2 Storage Refactor
|
||||||
|
|
||||||
|
1. Create `packages/storage/` (new package)
|
||||||
|
2. Move Drizzle logic to `packages/storage/src/adapters/postgres.ts`
|
||||||
|
3. Create SQLite adapter in `packages/storage/src/adapters/sqlite.ts`
|
||||||
|
4. Update gateway to use storage factory
|
||||||
|
5. Deprecate direct `@mosaic/db` imports
|
||||||
|
|
||||||
|
#### 2.3 Memory Refactor
|
||||||
|
|
||||||
|
1. Extract existing logic to `packages/memory/src/adapters/pgvector.ts`
|
||||||
|
2. Create keyword adapter in `packages/memory/src/adapters/keyword.ts`
|
||||||
|
3. Update vector-store.ts to be adapter-agnostic
|
||||||
|
|
||||||
|
### Phase 3: Local Tier Implementation (2-3 days)
|
||||||
|
|
||||||
|
**Goal:** Zero-dependency baseline
|
||||||
|
|
||||||
|
1. Implement `packages/queue/src/adapters/local.ts` (in-process + JSON persistence)
|
||||||
|
2. Implement `packages/storage/src/adapters/files.ts` (JSON + MD via MarkdownDB)
|
||||||
|
3. Implement `packages/memory/src/adapters/keyword.ts` (TF-IDF search)
|
||||||
|
4. Add `packages/dream/` for consolidation cycle
|
||||||
|
5. Wire up local tier in gateway startup
|
||||||
|
|
||||||
|
### Phase 4: Configuration System (1-2 days)
|
||||||
|
|
||||||
|
**Goal:** Runtime backend selection
|
||||||
|
|
||||||
|
1. Create `packages/config/src/storage.ts` for storage configuration
|
||||||
|
2. Add `mosaic.config.ts` schema with storage tier settings
|
||||||
|
3. Update gateway to read config on startup
|
||||||
|
4. Add `mosaic gateway configure` CLI command
|
||||||
|
5. Add tier migration commands (`mosaic gateway upgrade`)
|
||||||
|
|
||||||
|
### Phase 5: Testing & Documentation (2-3 days)
|
||||||
|
|
||||||
|
1. Unit tests for each adapter
|
||||||
|
2. Integration tests for factory pattern
|
||||||
|
3. Migration tests (local → team)
|
||||||
|
4. Update README and architecture docs
|
||||||
|
5. Add configuration guide
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## File Changes Summary
|
||||||
|
|
||||||
|
### New Files
|
||||||
|
|
||||||
|
```
|
||||||
|
packages/
|
||||||
|
├── config/
|
||||||
|
│ └── src/
|
||||||
|
│ ├── storage.ts # Storage config schema
|
||||||
|
│ └── index.ts
|
||||||
|
├── dream/ # NEW: Dream mode consolidation
|
||||||
|
│ ├── src/
|
||||||
|
│ │ ├── index.ts
|
||||||
|
│ │ ├── orient.ts
|
||||||
|
│ │ ├── gather.ts
|
||||||
|
│ │ ├── consolidate.ts
|
||||||
|
│ │ └── prune.ts
|
||||||
|
│ └── package.json
|
||||||
|
├── queue/
|
||||||
|
│ └── src/
|
||||||
|
│ ├── types.ts # NEW: QueueAdapter interface
|
||||||
|
│ ├── index.ts # NEW: Factory function
|
||||||
|
│ └── adapters/
|
||||||
|
│ ├── bullmq.ts # MOVED from queue.ts
|
||||||
|
│ └── local.ts # NEW: In-process adapter
|
||||||
|
├── storage/ # NEW: Storage abstraction
|
||||||
|
│ ├── src/
|
||||||
|
│ │ ├── types.ts # StorageAdapter interface
|
||||||
|
│ │ ├── index.ts # Factory function
|
||||||
|
│ │ └── adapters/
|
||||||
|
│ │ ├── postgres.ts # MOVED from @mosaic/db
|
||||||
|
│ │ ├── sqlite.ts # NEW: SQLite adapter
|
||||||
|
│ │ └── files.ts # NEW: JSON/MD adapter
|
||||||
|
│ └── package.json
|
||||||
|
└── memory/
|
||||||
|
└── src/
|
||||||
|
├── types.ts # UPDATED: MemoryAdapter interface
|
||||||
|
├── index.ts # UPDATED: Factory function
|
||||||
|
└── adapters/
|
||||||
|
├── pgvector.ts # EXTRACTED from existing code
|
||||||
|
├── sqlite-vec.ts # NEW: SQLite with vectors
|
||||||
|
└── keyword.ts # NEW: TF-IDF search
|
||||||
|
```
|
||||||
|
|
||||||
|
### Modified Files
|
||||||
|
|
||||||
|
```
|
||||||
|
packages/
|
||||||
|
├── db/ # DEPRECATED: Logic moved to storage adapters
|
||||||
|
├── queue/
|
||||||
|
│ └── src/
|
||||||
|
│ └── queue.ts # → adapters/bullmq.ts
|
||||||
|
├── memory/
|
||||||
|
│ ├── src/
|
||||||
|
│ │ ├── memory.ts # → use factory
|
||||||
|
│ │ ├── insights.ts # → use factory
|
||||||
|
│ │ └── preferences.ts # → use factory
|
||||||
|
│ └── package.json # Remove pgvector from dependencies
|
||||||
|
└── gateway/
|
||||||
|
└── src/
|
||||||
|
├── database/
|
||||||
|
│ └── database.module.ts # Update to use storage factory
|
||||||
|
├── memory/
|
||||||
|
│ └── memory.module.ts # Update to use memory factory
|
||||||
|
└── queue/
|
||||||
|
└── queue.module.ts # Update to use queue factory
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Breaking Changes
|
||||||
|
|
||||||
|
1. **`@mosaic/db`** → **`@mosaic/storage`** (with migration guide)
|
||||||
|
2. Direct `ioredis` imports → Use `@mosaic/queue` factory
|
||||||
|
3. Direct `pgvector` queries → Use `@mosaic/memory` factory
|
||||||
|
4. Gateway startup now requires storage config (defaults to local)
|
||||||
|
|
||||||
|
## Non-Breaking Migration Path
|
||||||
|
|
||||||
|
1. Existing deployments with Postgres/Valkey continue to work (default config)
|
||||||
|
2. New deployments can choose local tier
|
||||||
|
3. Migration commands available when ready to upgrade
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Success Criteria
|
||||||
|
|
||||||
|
- [ ] Local tier runs with zero external dependencies
|
||||||
|
- [ ] All three tiers (local, team, enterprise) work correctly
|
||||||
|
- [ ] Factory pattern correctly selects backend at runtime
|
||||||
|
- [ ] Migration from local → team preserves all data
|
||||||
|
- [ ] Dream mode consolidates 24h of sessions
|
||||||
|
- [ ] Documentation covers all three tiers and migration paths
|
||||||
|
- [ ] All existing tests pass
|
||||||
|
- [ ] New adapters have >80% coverage
|
||||||
6
mosaic.config.json
Normal file
6
mosaic.config.json
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
{
|
||||||
|
"tier": "local",
|
||||||
|
"storage": { "type": "sqlite", "path": ".mosaic/data.db" },
|
||||||
|
"queue": { "type": "local", "dataDir": ".mosaic/queue" },
|
||||||
|
"memory": { "type": "keyword" }
|
||||||
|
}
|
||||||
@@ -23,5 +23,10 @@
|
|||||||
"turbo": "^2.0.0",
|
"turbo": "^2.0.0",
|
||||||
"typescript": "^5.8.0",
|
"typescript": "^5.8.0",
|
||||||
"vitest": "^2.0.0"
|
"vitest": "^2.0.0"
|
||||||
|
},
|
||||||
|
"pnpm": {
|
||||||
|
"onlyBuiltDependencies": [
|
||||||
|
"better-sqlite3"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/agent",
|
"name": "@mosaic/agent",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.2",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/auth",
|
"name": "@mosaic/auth",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.2",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/brain",
|
"name": "@mosaic/brain",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.2",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/cli",
|
"name": "@mosaic/cli",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.10",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
@@ -22,6 +22,7 @@
|
|||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@clack/prompts": "^0.9.0",
|
"@clack/prompts": "^0.9.0",
|
||||||
|
"@mosaic/config": "workspace:^",
|
||||||
"@mosaic/mosaic": "workspace:^",
|
"@mosaic/mosaic": "workspace:^",
|
||||||
"@mosaic/prdy": "workspace:^",
|
"@mosaic/prdy": "workspace:^",
|
||||||
"@mosaic/quality-rails": "workspace:^",
|
"@mosaic/quality-rails": "workspace:^",
|
||||||
|
|||||||
@@ -2,10 +2,12 @@
|
|||||||
|
|
||||||
import { createRequire } from 'module';
|
import { createRequire } from 'module';
|
||||||
import { Command } from 'commander';
|
import { Command } from 'commander';
|
||||||
import { createQualityRailsCli } from '@mosaic/quality-rails';
|
import { registerQualityRails } from '@mosaic/quality-rails';
|
||||||
import { registerAgentCommand } from './commands/agent.js';
|
import { registerAgentCommand } from './commands/agent.js';
|
||||||
import { registerMissionCommand } from './commands/mission.js';
|
import { registerMissionCommand } from './commands/mission.js';
|
||||||
import { registerPrdyCommand } from './commands/prdy.js';
|
// prdy is registered via launch.ts
|
||||||
|
import { registerLaunchCommands } from './commands/launch.js';
|
||||||
|
import { registerGatewayCommand } from './commands/gateway.js';
|
||||||
|
|
||||||
const _require = createRequire(import.meta.url);
|
const _require = createRequire(import.meta.url);
|
||||||
const CLI_VERSION: string = (_require('../package.json') as { version: string }).version;
|
const CLI_VERSION: string = (_require('../package.json') as { version: string }).version;
|
||||||
@@ -22,6 +24,10 @@ const program = new Command();
|
|||||||
|
|
||||||
program.name('mosaic').description('Mosaic Stack CLI').version(CLI_VERSION);
|
program.name('mosaic').description('Mosaic Stack CLI').version(CLI_VERSION);
|
||||||
|
|
||||||
|
// ─── runtime launchers + framework commands ────────────────────────────
|
||||||
|
|
||||||
|
registerLaunchCommands(program);
|
||||||
|
|
||||||
// ─── login ──────────────────────────────────────────────────────────────
|
// ─── login ──────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
program
|
program
|
||||||
@@ -285,6 +291,10 @@ sessionsCmd
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// ─── gateway ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
registerGatewayCommand(program);
|
||||||
|
|
||||||
// ─── agent ─────────────────────────────────────────────────────────────
|
// ─── agent ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
registerAgentCommand(program);
|
registerAgentCommand(program);
|
||||||
@@ -293,17 +303,9 @@ registerAgentCommand(program);
|
|||||||
|
|
||||||
registerMissionCommand(program);
|
registerMissionCommand(program);
|
||||||
|
|
||||||
// ─── prdy ──────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
registerPrdyCommand(program);
|
|
||||||
|
|
||||||
// ─── quality-rails ──────────────────────────────────────────────────────
|
// ─── quality-rails ──────────────────────────────────────────────────────
|
||||||
|
|
||||||
const qrWrapper = createQualityRailsCli();
|
registerQualityRails(program);
|
||||||
const qrCmd = qrWrapper.commands.find((c) => c.name() === 'quality-rails');
|
|
||||||
if (qrCmd !== undefined) {
|
|
||||||
program.addCommand(qrCmd as unknown as Command);
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─── update ─────────────────────────────────────────────────────────────
|
// ─── update ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
|||||||
198
packages/cli/src/commands/gateway.ts
Normal file
198
packages/cli/src/commands/gateway.ts
Normal file
@@ -0,0 +1,198 @@
|
|||||||
|
import { createInterface } from 'node:readline';
|
||||||
|
import { spawn } from 'node:child_process';
|
||||||
|
import { existsSync, mkdirSync, readFileSync, unlinkSync, writeFileSync } from 'node:fs';
|
||||||
|
import { dirname, resolve } from 'node:path';
|
||||||
|
import type { Command } from 'commander';
|
||||||
|
import {
|
||||||
|
DEFAULT_LOCAL_CONFIG,
|
||||||
|
DEFAULT_TEAM_CONFIG,
|
||||||
|
loadConfig,
|
||||||
|
type MosaicConfig,
|
||||||
|
type StorageTier,
|
||||||
|
} from '@mosaic/config';
|
||||||
|
|
||||||
|
function ask(rl: ReturnType<typeof createInterface>, question: string): Promise<string> {
|
||||||
|
return new Promise((res) => rl.question(question, res));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Implements `mosaic gateway init`: choose a storage tier, assemble the
 * matching config object, and write it as pretty-printed JSON to `opts.output`.
 *
 * The tier may be supplied via `--tier` (validated; exits 1 on anything other
 * than "local"/"team") or prompted for on stdin. The "team" tier additionally
 * prompts for DATABASE_URL / VALKEY_URL, falling back to the bracketed
 * defaults when the answers are blank.
 */
async function runInit(opts: { tier?: string; output: string }): Promise<void> {
  const outputPath = resolve(opts.output);
  let tier: StorageTier;

  if (opts.tier) {
    if (opts.tier !== 'local' && opts.tier !== 'team') {
      console.error(`Invalid tier "${opts.tier}" — expected "local" or "team"`);
      process.exit(1);
    }
    // Narrowed to 'local' | 'team' by the guard above (process.exit returns never).
    tier = opts.tier;
  } else {
    // No --tier flag: ask interactively.
    const rl = createInterface({ input: process.stdin, output: process.stdout });
    const answer = await ask(rl, 'Select tier (local/team) [local]: ');
    rl.close();
    const trimmed = answer.trim().toLowerCase();
    // Anything other than an explicit "team" defaults to the local tier.
    tier = trimmed === 'team' ? 'team' : 'local';
  }

  let config: MosaicConfig;

  if (tier === 'local') {
    // Zero-dependency baseline shipped with @mosaic/config.
    config = DEFAULT_LOCAL_CONFIG;
  } else {
    // Team tier: collect connection URLs for Postgres and Valkey.
    const rl = createInterface({ input: process.stdin, output: process.stdout });
    const dbUrl = await ask(
      rl,
      'DATABASE_URL [postgresql://mosaic:mosaic@localhost:5432/mosaic]: ',
    );
    const valkeyUrl = await ask(rl, 'VALKEY_URL [redis://localhost:6379]: ');
    rl.close();

    // Blank answers fall back to the defaults shown in the prompts.
    config = {
      ...DEFAULT_TEAM_CONFIG,
      storage: {
        type: 'postgres',
        url: dbUrl.trim() || 'postgresql://mosaic:mosaic@localhost:5432/mosaic',
      },
      queue: {
        type: 'bullmq',
        url: valkeyUrl.trim() || 'redis://localhost:6379',
      },
    };
  }

  // Trailing newline keeps the generated JSON file POSIX-friendly.
  writeFileSync(outputPath, JSON.stringify(config, null, 2) + '\n');
  console.log(`\nWrote ${outputPath}`);
  console.log('\nNext steps:');
  console.log(' 1. Review the generated config');
  console.log(' 2. Run: pnpm --filter @mosaic/gateway exec tsx src/main.ts');
}
|
||||||
|
|
||||||
|
// Location of the daemonized gateway's PID file, relative to the invocation cwd.
const PID_FILE = resolve(process.cwd(), '.mosaic/gateway.pid');
|
||||||
|
|
||||||
|
function writePidFile(pid: number): void {
|
||||||
|
const dir = dirname(PID_FILE);
|
||||||
|
if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
|
||||||
|
writeFileSync(PID_FILE, String(pid));
|
||||||
|
}
|
||||||
|
|
||||||
|
function readPidFile(): number | null {
|
||||||
|
if (!existsSync(PID_FILE)) return null;
|
||||||
|
const raw = readFileSync(PID_FILE, 'utf-8').trim();
|
||||||
|
const pid = Number(raw);
|
||||||
|
return Number.isFinite(pid) ? pid : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isProcessRunning(pid: number): boolean {
|
||||||
|
try {
|
||||||
|
process.kill(pid, 0);
|
||||||
|
return true;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Print a four-line summary of the active config: tier plus the selected
 * storage/queue/memory backend types. Used by `gateway start` and
 * `gateway status`.
 */
function printConfigSummary(config: MosaicConfig): void {
  console.log(` Tier: ${config.tier}`);
  console.log(` Storage: ${config.storage.type}`);
  console.log(` Queue: ${config.queue.type}`);
  console.log(` Memory: ${config.memory.type}`);
}
|
||||||
|
|
||||||
|
/**
 * Register the `mosaic gateway` command group on the CLI program.
 *
 * Subcommands:
 * - `init`   — generate mosaic.config.json (interactive, or via --tier)
 * - `start`  — spawn the gateway entry point (foreground, or --daemon)
 * - `stop`   — SIGTERM the daemonized gateway using the PID file
 * - `status` — report process state plus the active config summary
 */
export function registerGatewayCommand(program: Command): void {
  const gateway = program.command('gateway').description('Gateway management commands');

  gateway
    .command('init')
    .description('Generate a mosaic.config.json for the gateway')
    .option('--tier <tier>', 'Storage tier: local or team (skips interactive prompt)')
    .option('--output <path>', 'Output file path', './mosaic.config.json')
    .action(async (opts: { tier?: string; output: string }) => {
      await runInit(opts);
    });

  gateway
    .command('start')
    .description('Start the Mosaic gateway process')
    .option('--port <port>', 'Port to listen on (overrides config)')
    .option('--daemon', 'Run in background and write PID to .mosaic/gateway.pid')
    .action((opts: { port?: string; daemon?: boolean }) => {
      const config = loadConfig();
      // NOTE(review): falls back to a hard-coded '4000'; the loaded config is
      // printed but not consulted for the port — confirm against gateway main.ts.
      const port = opts.port ?? '4000';

      console.log('Starting gateway…');
      printConfigSummary(config);
      console.log(` Port: ${port}`);

      // The gateway is launched from source via tsx, relative to the repo root (cwd).
      const entryPoint = resolve(process.cwd(), 'apps/gateway/src/main.ts');
      const env = { ...process.env, GATEWAY_PORT: port };

      if (opts.daemon) {
        // Detached + ignored stdio so the child survives this CLI process;
        // record its PID so `stop`/`status` can find it later.
        const child = spawn('npx', ['tsx', entryPoint], {
          env,
          stdio: 'ignore',
          detached: true,
        });

        child.unref();

        if (child.pid) {
          writePidFile(child.pid);
          console.log(`\nGateway started in background (PID ${child.pid})`);
          console.log(`PID file: ${PID_FILE}`);
        }
      } else {
        // Foreground: inherit stdio and mirror the child's exit code.
        const child = spawn('npx', ['tsx', entryPoint], {
          env,
          stdio: 'inherit',
        });

        child.on('exit', (code) => {
          process.exit(code ?? 0);
        });
      }
    });

  gateway
    .command('stop')
    .description('Stop the running gateway process')
    .action(() => {
      const pid = readPidFile();

      if (pid === null) {
        console.error('No PID file found at', PID_FILE);
        process.exit(1);
      }

      // Self-heal: a PID file pointing at a dead process is removed, not killed.
      if (!isProcessRunning(pid)) {
        console.log(`Process ${pid} is not running. Removing stale PID file.`);
        unlinkSync(PID_FILE);
        return;
      }

      process.kill(pid, 'SIGTERM');
      unlinkSync(PID_FILE);
      console.log(`Gateway stopped (PID ${pid})`);
    });

  gateway
    .command('status')
    .description('Show gateway process status')
    .action(() => {
      const config = loadConfig();
      const pid = readPidFile();

      if (pid !== null && isProcessRunning(pid)) {
        console.log('Gateway: running');
        console.log(` PID: ${pid}`);
      } else {
        console.log('Gateway: stopped');
        if (pid !== null) {
          // Stale PID file (process gone): report it, then drop it.
          console.log(` (stale PID file for ${pid})`);
          unlinkSync(PID_FILE);
        }
      }

      console.log('');
      console.log('Config:');
      printConfigSummary(config);
    });
}
|
||||||
772
packages/cli/src/commands/launch.ts
Normal file
772
packages/cli/src/commands/launch.ts
Normal file
@@ -0,0 +1,772 @@
|
|||||||
|
/**
|
||||||
|
* Native runtime launcher — replaces the bash mosaic-launch script.
|
||||||
|
*
|
||||||
|
* Builds a composed runtime prompt from AGENTS.md + RUNTIME.md + USER.md +
|
||||||
|
* TOOLS.md + mission context + PRD status, then exec's into the target CLI.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { execFileSync, execSync, spawnSync } from 'node:child_process';
|
||||||
|
import { existsSync, mkdirSync, readFileSync, writeFileSync, readdirSync, rmSync } from 'node:fs';
|
||||||
|
import { createRequire } from 'node:module';
|
||||||
|
import { homedir } from 'node:os';
|
||||||
|
import { join, dirname } from 'node:path';
|
||||||
|
import type { Command } from 'commander';
|
||||||
|
|
||||||
|
// Root of the Mosaic framework install; overridable via $MOSAIC_HOME.
const MOSAIC_HOME = process.env['MOSAIC_HOME'] ?? join(homedir(), '.config', 'mosaic');

// The coding CLIs this launcher knows how to exec into.
type RuntimeName = 'claude' | 'codex' | 'opencode' | 'pi';

// Human-readable labels keyed by runtime name.
const RUNTIME_LABELS: Record<RuntimeName, string> = {
  claude: 'Claude Code',
  codex: 'Codex',
  opencode: 'OpenCode',
  pi: 'Pi',
};
|
||||||
|
|
||||||
|
// ─── Pre-flight checks ──────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
function checkMosaicHome(): void {
|
||||||
|
if (!existsSync(MOSAIC_HOME)) {
|
||||||
|
console.error(`[mosaic] ERROR: ${MOSAIC_HOME} not found.`);
|
||||||
|
console.error(
|
||||||
|
'[mosaic] Install: bash <(curl -fsSL https://git.mosaicstack.dev/mosaic/mosaic-stack/raw/branch/main/tools/install.sh)',
|
||||||
|
);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function checkFile(path: string, label: string): void {
|
||||||
|
if (!existsSync(path)) {
|
||||||
|
console.error(`[mosaic] ERROR: ${label} not found: ${path}`);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function checkRuntime(cmd: string): void {
|
||||||
|
try {
|
||||||
|
execSync(`which ${cmd}`, { stdio: 'ignore' });
|
||||||
|
} catch {
|
||||||
|
console.error(`[mosaic] ERROR: '${cmd}' not found in PATH.`);
|
||||||
|
console.error(`[mosaic] Install ${cmd} before launching.`);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Ensure the user's SOUL.md identity file exists, running setup when missing.
 *
 * Preference order: re-invoke this same CLI entry point with the `wizard`
 * subcommand; fall back to the legacy bash `mosaic-init` script; exit 1 when
 * neither path is available.
 */
function checkSoul(): void {
  const soulPath = join(MOSAIC_HOME, 'SOUL.md');
  if (!existsSync(soulPath)) {
    console.log('[mosaic] SOUL.md not found. Running setup wizard...');

    // Prefer the TypeScript wizard (idempotent, detects existing files).
    // process.argv[1] is this CLI's own script path; re-run it with 'wizard'.
    try {
      const result = spawnSync(process.execPath, [process.argv[1]!, 'wizard'], {
        stdio: 'inherit',
      });
      // Success only counts if the wizard actually produced SOUL.md.
      if (result.status === 0 && existsSync(soulPath)) return;
    } catch {
      // Fall through to legacy init
    }

    // Fallback: legacy bash mosaic-init.
    // NOTE(review): fwScript is defined elsewhere in this file — presumably it
    // resolves a framework script path under MOSAIC_HOME; confirm.
    const initBin = fwScript('mosaic-init');
    if (existsSync(initBin)) {
      spawnSync(initBin, [], { stdio: 'inherit' });
    } else {
      console.error('[mosaic] Setup failed. Run: mosaic wizard');
      process.exit(1);
    }
  }
}
|
||||||
|
|
||||||
|
function checkSequentialThinking(runtime: string): void {
|
||||||
|
const checker = fwScript('mosaic-ensure-sequential-thinking');
|
||||||
|
if (!existsSync(checker)) return; // Skip if checker doesn't exist
|
||||||
|
const result = spawnSync(checker, ['--check', '--runtime', runtime], { stdio: 'ignore' });
|
||||||
|
if (result.status !== 0) {
|
||||||
|
console.error('[mosaic] ERROR: sequential-thinking MCP is required but not configured.');
|
||||||
|
console.error(`[mosaic] Fix: ${checker} --runtime ${runtime}`);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── File helpers ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
function readOptional(path: string): string {
|
||||||
|
try {
|
||||||
|
return readFileSync(path, 'utf-8');
|
||||||
|
} catch {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function readJson(path: string): Record<string, unknown> | null {
|
||||||
|
try {
|
||||||
|
return JSON.parse(readFileSync(path, 'utf-8')) as Record<string, unknown>;
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Mission context ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/** Snapshot of the project's orchestration mission, parsed from mission.json. */
interface MissionInfo {
  // Human-readable mission name ('unnamed' when absent from the manifest).
  name: string;
  // Stable identifier (the manifest's mission_id field).
  id: string;
  // Lifecycle status; only 'active' or 'paused' missions are surfaced.
  status: string;
  // Total number of milestones declared in the manifest.
  milestoneCount: number;
  // Number of milestones whose status is 'completed'.
  completedCount: number;
}
|
||||||
|
|
||||||
|
function detectMission(): MissionInfo | null {
|
||||||
|
const missionFile = '.mosaic/orchestrator/mission.json';
|
||||||
|
const data = readJson(missionFile);
|
||||||
|
if (!data) return null;
|
||||||
|
|
||||||
|
const status = String(data['status'] ?? 'inactive');
|
||||||
|
if (status !== 'active' && status !== 'paused') return null;
|
||||||
|
|
||||||
|
const milestones = Array.isArray(data['milestones']) ? data['milestones'] : [];
|
||||||
|
const completed = milestones.filter(
|
||||||
|
(m) =>
|
||||||
|
typeof m === 'object' &&
|
||||||
|
m !== null &&
|
||||||
|
(m as Record<string, unknown>)['status'] === 'completed',
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
name: String(data['name'] ?? 'unnamed'),
|
||||||
|
id: String(data['mission_id'] ?? ''),
|
||||||
|
status,
|
||||||
|
milestoneCount: milestones.length,
|
||||||
|
completedCount: completed.length,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Render the blocking "active mission" prompt block injected ahead of the
 * composed runtime prompt. Pure string templating over `mission`; the text
 * instructs the assistant to read the mission docs before responding.
 */
function buildMissionBlock(mission: MissionInfo): string {
  return `# ACTIVE MISSION — HARD GATE (Read Before Anything Else)

An active orchestration mission exists in this project. This is a BLOCKING requirement.

**Mission:** ${mission.name}
**ID:** ${mission.id}
**Status:** ${mission.status}
**Milestones:** ${mission.completedCount} / ${mission.milestoneCount} completed

## MANDATORY — Before ANY Response to the User

You MUST complete these steps before responding to any user message, including simple greetings:

1. Read \`~/.config/mosaic/guides/ORCHESTRATOR-PROTOCOL.md\` (mission lifecycle protocol)
2. Read \`docs/MISSION-MANIFEST.md\` for full mission scope, milestones, and success criteria
3. Read the latest scratchpad in \`docs/scratchpads/\` for session history, decisions, and corrections
4. Read \`docs/TASKS.md\` for current task state (what is done, what is next)
5. After reading all four, acknowledge the mission state to the user before proceeding

If the user gives a task, execute it within the mission context. If no task is given, present mission status and ask how to proceed.

`;
}
|
||||||
|
|
||||||
|
// ─── PRD status ──────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
function buildPrdBlock(): string {
|
||||||
|
const prdFile = 'docs/PRD.md';
|
||||||
|
if (!existsSync(prdFile)) return '';
|
||||||
|
|
||||||
|
const content = readFileSync(prdFile, 'utf-8');
|
||||||
|
const patterns = [
|
||||||
|
/^#{2,3} .*(problem statement|objective)/im,
|
||||||
|
/^#{2,3} .*(scope|non.goal|out of scope|in.scope)/im,
|
||||||
|
/^#{2,3} .*(user stor|stakeholder|user.*requirement)/im,
|
||||||
|
/^#{2,3} .*functional requirement/im,
|
||||||
|
/^#{2,3} .*non.functional/im,
|
||||||
|
/^#{2,3} .*acceptance criteria/im,
|
||||||
|
/^#{2,3} .*(technical consideration|constraint|dependenc)/im,
|
||||||
|
/^#{2,3} .*(risk|open question)/im,
|
||||||
|
/^#{2,3} .*(success metric|test|verification)/im,
|
||||||
|
/^#{2,3} .*(milestone|delivery|scope version)/im,
|
||||||
|
];
|
||||||
|
|
||||||
|
let sections = 0;
|
||||||
|
for (const pattern of patterns) {
|
||||||
|
if (pattern.test(content)) sections++;
|
||||||
|
}
|
||||||
|
|
||||||
|
const assumptions = (content.match(/ASSUMPTION:/g) ?? []).length;
|
||||||
|
const status = sections < 10 ? `incomplete (${sections}/10 sections)` : 'ready';
|
||||||
|
|
||||||
|
return `
|
||||||
|
# PRD Status
|
||||||
|
|
||||||
|
- **File:** docs/PRD.md
|
||||||
|
- **Status:** ${status}
|
||||||
|
- **Assumptions:** ${assumptions}
|
||||||
|
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Runtime prompt builder ──────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/**
 * Compose the full runtime prompt for `runtime`.
 *
 * Concatenates, in order: active-mission block (if any), PRD status block
 * (if any), the launcher hard-gate contract, AGENTS.md (required), USER.md
 * and TOOLS.md (optional), and the runtime-specific RUNTIME.md contract
 * (required — exits via checkFile when missing).
 */
function buildRuntimePrompt(runtime: RuntimeName): string {
  // Each supported runtime ships its own RUNTIME.md contract under MOSAIC_HOME.
  const runtimeContractPaths: Record<RuntimeName, string> = {
    claude: join(MOSAIC_HOME, 'runtime', 'claude', 'RUNTIME.md'),
    codex: join(MOSAIC_HOME, 'runtime', 'codex', 'RUNTIME.md'),
    opencode: join(MOSAIC_HOME, 'runtime', 'opencode', 'RUNTIME.md'),
    pi: join(MOSAIC_HOME, 'runtime', 'pi', 'RUNTIME.md'),
  };

  const runtimeFile = runtimeContractPaths[runtime];
  // Hard requirement: exits the process if the contract file is absent.
  checkFile(runtimeFile, `Runtime contract for ${runtime}`);

  const parts: string[] = [];

  // Mission context (injected first so it outranks everything else).
  const mission = detectMission();
  if (mission) {
    parts.push(buildMissionBlock(mission));
  }

  // PRD status ('' when docs/PRD.md does not exist).
  const prdBlock = buildPrdBlock();
  if (prdBlock) parts.push(prdBlock);

  // Hard gate: mandatory mode-declaration contract for the first response.
  parts.push(`# Mosaic Launcher Runtime Contract (Hard Gate)

This contract is injected by \`mosaic\` launch and is mandatory.

First assistant response MUST start with exactly one mode declaration line:
1. Orchestration mission: \`Now initiating Orchestrator mode...\`
2. Implementation mission: \`Now initiating Delivery mode...\`
3. Review-only mission: \`Now initiating Review mode...\`

No tool call or implementation step may occur before that first line.

Mosaic hard gates OVERRIDE runtime-default caution for routine delivery operations.
For required push/merge/issue-close/release actions, execute without routine confirmation prompts.
`);

  // AGENTS.md is required — readFileSync throws if it is missing.
  parts.push(readFileSync(join(MOSAIC_HOME, 'AGENTS.md'), 'utf-8'));

  // USER.md is optional.
  const user = readOptional(join(MOSAIC_HOME, 'USER.md'));
  if (user) parts.push('\n\n# User Profile\n\n' + user);

  // TOOLS.md is optional.
  const tools = readOptional(join(MOSAIC_HOME, 'TOOLS.md'));
  if (tools) parts.push('\n\n# Machine Tools\n\n' + tools);

  // Runtime-specific contract (existence verified above).
  parts.push('\n\n# Runtime-Specific Contract\n\n' + readFileSync(runtimeFile, 'utf-8'));

  return parts.join('\n');
}
|
||||||
|
|
||||||
|
// ─── Session lock ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
function writeSessionLock(runtime: string): void {
|
||||||
|
const missionFile = '.mosaic/orchestrator/mission.json';
|
||||||
|
const lockFile = '.mosaic/orchestrator/session.lock';
|
||||||
|
const data = readJson(missionFile);
|
||||||
|
if (!data) return;
|
||||||
|
|
||||||
|
const status = String(data['status'] ?? 'inactive');
|
||||||
|
if (status !== 'active' && status !== 'paused') return;
|
||||||
|
|
||||||
|
const sessionId = `${runtime}-${new Date().toISOString().replace(/[:.]/g, '-')}-${process.pid}`;
|
||||||
|
const lock = {
|
||||||
|
session_id: sessionId,
|
||||||
|
runtime,
|
||||||
|
pid: process.pid,
|
||||||
|
started_at: new Date().toISOString(),
|
||||||
|
project_path: process.cwd(),
|
||||||
|
milestone_id: '',
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
mkdirSync(dirname(lockFile), { recursive: true });
|
||||||
|
writeFileSync(lockFile, JSON.stringify(lock, null, 2) + '\n');
|
||||||
|
|
||||||
|
// Clean up on exit
|
||||||
|
const cleanup = () => {
|
||||||
|
try {
|
||||||
|
rmSync(lockFile, { force: true });
|
||||||
|
} catch {
|
||||||
|
// best-effort
|
||||||
|
}
|
||||||
|
};
|
||||||
|
process.on('exit', cleanup);
|
||||||
|
process.on('SIGINT', () => {
|
||||||
|
cleanup();
|
||||||
|
process.exit(130);
|
||||||
|
});
|
||||||
|
process.on('SIGTERM', () => {
|
||||||
|
cleanup();
|
||||||
|
process.exit(143);
|
||||||
|
});
|
||||||
|
} catch {
|
||||||
|
// Non-fatal
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Resumable session advisory ──────────────────────────────────────────────
|
||||||
|
|
||||||
|
function checkResumableSession(): void {
|
||||||
|
const lockFile = '.mosaic/orchestrator/session.lock';
|
||||||
|
const missionFile = '.mosaic/orchestrator/mission.json';
|
||||||
|
|
||||||
|
if (existsSync(lockFile)) {
|
||||||
|
const lock = readJson(lockFile);
|
||||||
|
if (lock) {
|
||||||
|
const pid = Number(lock['pid'] ?? 0);
|
||||||
|
if (pid > 0) {
|
||||||
|
try {
|
||||||
|
process.kill(pid, 0); // Check if alive
|
||||||
|
} catch {
|
||||||
|
// Process is dead — stale lock
|
||||||
|
rmSync(lockFile, { force: true });
|
||||||
|
console.log(`[mosaic] Cleaned up stale session lock (PID ${pid} no longer running).\n`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (existsSync(missionFile)) {
|
||||||
|
const data = readJson(missionFile);
|
||||||
|
if (data && data['status'] === 'active') {
|
||||||
|
console.log('[mosaic] Active mission detected. Generate continuation prompt with:');
|
||||||
|
console.log('[mosaic] mosaic coord continue\n');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Write config for runtimes that read from fixed paths ────────────────────
|
||||||
|
|
||||||
|
function ensureRuntimeConfig(runtime: RuntimeName, destPath: string): void {
|
||||||
|
const prompt = buildRuntimePrompt(runtime);
|
||||||
|
mkdirSync(dirname(destPath), { recursive: true });
|
||||||
|
const existing = readOptional(destPath);
|
||||||
|
if (existing !== prompt) {
|
||||||
|
writeFileSync(destPath, prompt);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Pi skill/extension discovery ────────────────────────────────────────────
|
||||||
|
|
||||||
|
function discoverPiSkills(): string[] {
|
||||||
|
const args: string[] = [];
|
||||||
|
for (const skillsRoot of [join(MOSAIC_HOME, 'skills'), join(MOSAIC_HOME, 'skills-local')]) {
|
||||||
|
if (!existsSync(skillsRoot)) continue;
|
||||||
|
try {
|
||||||
|
for (const entry of readdirSync(skillsRoot, { withFileTypes: true })) {
|
||||||
|
if (!entry.isDirectory()) continue;
|
||||||
|
const skillDir = join(skillsRoot, entry.name);
|
||||||
|
if (existsSync(join(skillDir, 'SKILL.md'))) {
|
||||||
|
args.push('--skill', skillDir);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// skip
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return args;
|
||||||
|
}
|
||||||
|
|
||||||
|
function discoverPiExtension(): string[] {
|
||||||
|
const ext = join(MOSAIC_HOME, 'runtime', 'pi', 'mosaic-extension.ts');
|
||||||
|
return existsSync(ext) ? ['--extension', ext] : [];
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Launch functions ────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
function getMissionPrompt(): string {
|
||||||
|
const mission = detectMission();
|
||||||
|
if (!mission) return '';
|
||||||
|
return `Active mission detected: ${mission.name}. Read the mission state files and report status.`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Pre-flight-check, then launch the given runtime CLI with Mosaic's system
 * prompt injected. Never returns: control passes to execRuntime, which exits
 * with the runtime's status.
 *
 * When a mission is active and the user passed no args, an auto-generated
 * mission prompt is appended (claude/codex/pi cases; opencode takes none).
 */
function launchRuntime(runtime: RuntimeName, args: string[], yolo: boolean): never {
  // Pre-flight: framework home, AGENTS.md, SOUL, and the runtime binary must exist.
  checkMosaicHome();
  checkFile(join(MOSAIC_HOME, 'AGENTS.md'), 'AGENTS.md');
  checkSoul();
  checkRuntime(runtime);

  // Pi doesn't need sequential-thinking (has native thinking levels)
  if (runtime !== 'pi') {
    checkSequentialThinking(runtime);
  }

  // Clears stale locks / prints mission-resume hints before launching.
  checkResumableSession();

  const missionPrompt = getMissionPrompt();
  // Only auto-inject the mission prompt when the user supplied no args of their own.
  const hasMissionNoArgs = missionPrompt && args.length === 0;
  const label = RUNTIME_LABELS[runtime];
  const modeStr = yolo ? ' in YOLO mode' : '';
  const missionStr = hasMissionNoArgs ? ' (active mission detected)' : '';

  writeSessionLock(runtime);

  switch (runtime) {
    case 'claude': {
      // Claude accepts the prompt directly via --append-system-prompt.
      const prompt = buildRuntimePrompt('claude');
      const cliArgs = yolo ? ['--dangerously-skip-permissions'] : [];
      cliArgs.push('--append-system-prompt', prompt);
      if (hasMissionNoArgs) {
        cliArgs.push(missionPrompt);
      } else {
        cliArgs.push(...args);
      }
      console.log(`[mosaic] Launching ${label}${modeStr}${missionStr}...`);
      execRuntime('claude', cliArgs);
      break;
    }

    case 'codex': {
      // Codex reads instructions from a fixed home path; write them there first.
      ensureRuntimeConfig('codex', join(homedir(), '.codex', 'instructions.md'));
      const cliArgs = yolo ? ['--dangerously-bypass-approvals-and-sandbox'] : [];
      if (hasMissionNoArgs) {
        cliArgs.push(missionPrompt);
      } else {
        cliArgs.push(...args);
      }
      console.log(`[mosaic] Launching ${label}${modeStr}${missionStr}...`);
      execRuntime('codex', cliArgs);
      break;
    }

    case 'opencode': {
      // OpenCode reads AGENTS.md from its config dir; no prompt/mission args are passed.
      ensureRuntimeConfig('opencode', join(homedir(), '.config', 'opencode', 'AGENTS.md'));
      console.log(`[mosaic] Launching ${label}${modeStr}...`);
      execRuntime('opencode', args);
      break;
    }

    case 'pi': {
      // Pi takes the prompt plus discovered skills/extension as CLI flags.
      const prompt = buildRuntimePrompt('pi');
      const cliArgs = ['--append-system-prompt', prompt];
      cliArgs.push(...discoverPiSkills());
      cliArgs.push(...discoverPiExtension());
      if (hasMissionNoArgs) {
        cliArgs.push(missionPrompt);
      } else {
        cliArgs.push(...args);
      }
      console.log(`[mosaic] Launching ${label}${modeStr}${missionStr}...`);
      execRuntime('pi', cliArgs);
      break;
    }
  }

  process.exit(0); // Unreachable but satisfies never
}
|
||||||
|
|
||||||
|
/** exec into the runtime, replacing the current process. */
|
||||||
|
function execRuntime(cmd: string, args: string[]): void {
|
||||||
|
try {
|
||||||
|
// Use execFileSync with inherited stdio to replace the process
|
||||||
|
const result = spawnSync(cmd, args, {
|
||||||
|
stdio: 'inherit',
|
||||||
|
env: process.env,
|
||||||
|
});
|
||||||
|
process.exit(result.status ?? 0);
|
||||||
|
} catch (err) {
|
||||||
|
console.error(`[mosaic] Failed to launch ${cmd}:`, err instanceof Error ? err.message : err);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Framework script/tool delegation ───────────────────────────────────────
|
||||||
|
|
||||||
|
function delegateToScript(scriptPath: string, args: string[], env?: Record<string, string>): never {
|
||||||
|
if (!existsSync(scriptPath)) {
|
||||||
|
console.error(`[mosaic] Script not found: ${scriptPath}`);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
execFileSync('bash', [scriptPath, ...args], {
|
||||||
|
stdio: 'inherit',
|
||||||
|
env: { ...process.env, ...env },
|
||||||
|
});
|
||||||
|
process.exit(0);
|
||||||
|
} catch (err) {
|
||||||
|
process.exit((err as { status?: number }).status ?? 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve a path under the framework tools directory. Prefers the version
|
||||||
|
* bundled in the @mosaic/mosaic npm package (always matches the installed
|
||||||
|
* CLI version) over the deployed copy in ~/.config/mosaic/ (may be stale).
|
||||||
|
*/
|
||||||
|
function resolveTool(...segments: string[]): string {
|
||||||
|
try {
|
||||||
|
const req = createRequire(import.meta.url);
|
||||||
|
const mosaicPkg = dirname(req.resolve('@mosaic/mosaic/package.json'));
|
||||||
|
const bundled = join(mosaicPkg, 'framework', 'tools', ...segments);
|
||||||
|
if (existsSync(bundled)) return bundled;
|
||||||
|
} catch {
|
||||||
|
// Fall through to deployed copy
|
||||||
|
}
|
||||||
|
return join(MOSAIC_HOME, 'tools', ...segments);
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Resolve a shared framework helper script under tools/_scripts/. */
function fwScript(name: string): string {
  return resolveTool('_scripts', name);
}
|
||||||
|
|
||||||
|
/** Resolve a script belonging to a specific tool directory (e.g. 'orchestrator', 'prdy'). */
function toolScript(toolDir: string, name: string): string {
  return resolveTool(toolDir, name);
}
|
||||||
|
|
||||||
|
// ─── Coord (mission orchestrator) ───────────────────────────────────────────
|
||||||
|
|
||||||
|
// Maps `mosaic coord <subcmd>` (including aliases) to its orchestrator shell script.
const COORD_SUBCMDS: Record<string, string> = {
  status: 'session-status.sh',
  session: 'session-status.sh', // alias of status
  init: 'mission-init.sh',
  mission: 'mission-status.sh',
  progress: 'mission-status.sh', // alias of mission
  continue: 'continue-prompt.sh',
  next: 'continue-prompt.sh', // alias of continue
  run: 'session-run.sh',
  start: 'session-run.sh', // alias of run
  smoke: 'smoke-test.sh',
  test: 'smoke-test.sh', // alias of smoke
  resume: 'session-resume.sh',
  recover: 'session-resume.sh', // alias of resume
};
|
||||||
|
|
||||||
|
/**
 * `mosaic coord` — dispatch mission-coordinator subcommands to their
 * orchestrator shell scripts (see COORD_SUBCMDS). Runtime-selection flags
 * (--claude/--codex/--pi) are stripped and forwarded via the
 * MOSAIC_COORD_RUNTIME env var; --yolo is re-inserted as the script's first
 * positional flag. Unknown subcommands print usage and exit 1 ('help' exits 0).
 */
function runCoord(args: string[]): never {
  checkMosaicHome();
  let runtime = 'claude';
  let yoloFlag = '';
  const coordArgs: string[] = [];

  // Partition args: runtime selector / yolo flag / everything else.
  for (const arg of args) {
    if (arg === '--claude' || arg === '--codex' || arg === '--pi') {
      runtime = arg.slice(2);
    } else if (arg === '--yolo') {
      yoloFlag = '--yolo';
    } else {
      coordArgs.push(arg);
    }
  }

  const subcmd = coordArgs[0] ?? 'help';
  const subArgs = coordArgs.slice(1);
  const script = COORD_SUBCMDS[subcmd];

  // No matching script: show usage. Exit 0 only for an explicit 'help'.
  if (!script) {
    console.log(`mosaic coord — mission coordinator tools

Commands:
init --name <name> [opts] Initialize a new mission
mission [--project <path>] Show mission progress dashboard
status [--project <path>] Check agent session health
continue [--project <path>] Generate continuation prompt
run [--project <path>] Launch runtime with mission context
smoke Run orchestration smoke checks
resume [--project <path>] Crash recovery

Runtime: --claude (default) | --codex | --pi | --yolo`);
    process.exit(subcmd === 'help' ? 0 : 1);
  }

  if (yoloFlag) subArgs.unshift(yoloFlag);
  delegateToScript(toolScript('orchestrator', script), subArgs, {
    MOSAIC_COORD_RUNTIME: runtime,
  });
}
|
||||||
|
|
||||||
|
// ─── Prdy (PRD tools via framework scripts) ─────────────────────────────────
|
||||||
|
|
||||||
|
// Maps `mosaic prdy <subcmd>` (including aliases) to its PRD shell script.
const PRDY_SUBCMDS: Record<string, string> = {
  init: 'prdy-init.sh',
  update: 'prdy-update.sh',
  validate: 'prdy-validate.sh',
  check: 'prdy-validate.sh', // alias of validate
  status: 'prdy-status.sh',
};
|
||||||
|
|
||||||
|
/**
 * `mosaic prdy` — dispatch PRD subcommands to their shell scripts (see
 * PRDY_SUBCMDS). Runtime-selection flags (--claude/--codex/--pi) are
 * stripped and forwarded via the MOSAIC_PRDY_RUNTIME env var. Unknown
 * subcommands print usage and exit 1 ('help' exits 0).
 */
function runPrdyLocal(args: string[]): never {
  checkMosaicHome();
  let runtime = 'claude';
  const prdyArgs: string[] = [];

  // Partition args: runtime selector vs. everything else.
  for (const arg of args) {
    if (arg === '--claude' || arg === '--codex' || arg === '--pi') {
      runtime = arg.slice(2);
    } else {
      prdyArgs.push(arg);
    }
  }

  const subcmd = prdyArgs[0] ?? 'help';
  const subArgs = prdyArgs.slice(1);
  const script = PRDY_SUBCMDS[subcmd];

  // No matching script: show usage. Exit 0 only for an explicit 'help'.
  if (!script) {
    console.log(`mosaic prdy — PRD creation and validation

Commands:
init [--project <path>] [--name <feature>] Create docs/PRD.md
update [--project <path>] Update existing PRD
validate [--project <path>] Check PRD completeness
status [--project <path>] Quick PRD health check

Runtime: --claude (default) | --codex | --pi`);
    process.exit(subcmd === 'help' ? 0 : 1);
  }

  delegateToScript(toolScript('prdy', script), subArgs, {
    MOSAIC_PRDY_RUNTIME: runtime,
  });
}
|
||||||
|
|
||||||
|
// ─── Seq (sequential-thinking MCP) ──────────────────────────────────────────
|
||||||
|
|
||||||
|
function runSeq(args: string[]): never {
|
||||||
|
checkMosaicHome();
|
||||||
|
const action = args[0] ?? 'check';
|
||||||
|
const rest = args.slice(1);
|
||||||
|
const checker = fwScript('mosaic-ensure-sequential-thinking');
|
||||||
|
|
||||||
|
switch (action) {
|
||||||
|
case 'check':
|
||||||
|
delegateToScript(checker, ['--check', ...rest]);
|
||||||
|
break; // unreachable
|
||||||
|
case 'fix':
|
||||||
|
case 'apply':
|
||||||
|
delegateToScript(checker, rest);
|
||||||
|
break;
|
||||||
|
case 'start': {
|
||||||
|
console.log('[mosaic] Starting sequential-thinking MCP server...');
|
||||||
|
try {
|
||||||
|
execFileSync('npx', ['-y', '@modelcontextprotocol/server-sequential-thinking', ...rest], {
|
||||||
|
stdio: 'inherit',
|
||||||
|
});
|
||||||
|
process.exit(0);
|
||||||
|
} catch (err) {
|
||||||
|
process.exit((err as { status?: number }).status ?? 1);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
console.error(`[mosaic] Unknown seq subcommand '${action}'. Use: check|fix|start`);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Upgrade ────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
function runUpgrade(args: string[]): never {
|
||||||
|
checkMosaicHome();
|
||||||
|
const subcmd = args[0];
|
||||||
|
|
||||||
|
if (!subcmd || subcmd === 'release') {
|
||||||
|
delegateToScript(fwScript('mosaic-release-upgrade'), args.slice(subcmd === 'release' ? 1 : 0));
|
||||||
|
} else if (subcmd === 'check') {
|
||||||
|
delegateToScript(fwScript('mosaic-release-upgrade'), ['--dry-run', ...args.slice(1)]);
|
||||||
|
} else if (subcmd === 'project') {
|
||||||
|
delegateToScript(fwScript('mosaic-upgrade'), args.slice(1));
|
||||||
|
} else if (subcmd.startsWith('-')) {
|
||||||
|
delegateToScript(fwScript('mosaic-release-upgrade'), args);
|
||||||
|
} else {
|
||||||
|
delegateToScript(fwScript('mosaic-upgrade'), args);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Commander registration ─────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/**
 * Register all launcher-related CLI commands on the Commander program:
 * per-runtime launchers, yolo mode, coord, prdy, seq, upgrade, and direct
 * framework-script delegates. Unknown options and excess args are allowed
 * on every command so flags pass through to the underlying tools untouched.
 */
export function registerLaunchCommands(program: Command): void {
  // Runtime launchers
  for (const runtime of ['claude', 'codex', 'opencode', 'pi'] as const) {
    program
      .command(runtime)
      .description(`Launch ${RUNTIME_LABELS[runtime]} with Mosaic injection`)
      .allowUnknownOption(true)
      .allowExcessArguments(true)
      .action((_opts: unknown, cmd: Command) => {
        launchRuntime(runtime, cmd.args, false);
      });
  }

  // Yolo mode
  program
    .command('yolo <runtime>')
    .description('Launch a runtime in dangerous-permissions mode (claude|codex|opencode|pi)')
    .allowUnknownOption(true)
    .allowExcessArguments(true)
    .action((runtime: string, _opts: unknown, cmd: Command) => {
      // Validate the runtime name ourselves — commander only knows it's a string.
      const valid: RuntimeName[] = ['claude', 'codex', 'opencode', 'pi'];
      if (!valid.includes(runtime as RuntimeName)) {
        console.error(
          `[mosaic] ERROR: Unsupported yolo runtime '${runtime}'. Use: ${valid.join('|')}`,
        );
        process.exit(1);
      }
      launchRuntime(runtime as RuntimeName, cmd.args, true);
    });

  // Coord (mission orchestrator)
  program
    .command('coord')
    .description('Mission coordinator tools (init, status, run, continue, resume)')
    .allowUnknownOption(true)
    .allowExcessArguments(true)
    .action((_opts: unknown, cmd: Command) => {
      runCoord(cmd.args);
    });

  // Prdy (PRD tools via local framework scripts)
  program
    .command('prdy')
    .description('PRD creation and validation (init, update, validate, status)')
    .allowUnknownOption(true)
    .allowExcessArguments(true)
    .action((_opts: unknown, cmd: Command) => {
      runPrdyLocal(cmd.args);
    });

  // Seq (sequential-thinking MCP management)
  program
    .command('seq')
    .description('sequential-thinking MCP management (check/fix/start)')
    .allowUnknownOption(true)
    .allowExcessArguments(true)
    .action((_opts: unknown, cmd: Command) => {
      runSeq(cmd.args);
    });

  // Upgrade (release + project)
  program
    .command('upgrade')
    .description('Upgrade Mosaic release or project files')
    .allowUnknownOption(true)
    .allowExcessArguments(true)
    .action((_opts: unknown, cmd: Command) => {
      runUpgrade(cmd.args);
    });

  // Direct framework script delegates — each maps 1:1 to a framework shell script.
  const directCommands: Record<string, { desc: string; script: string }> = {
    init: { desc: 'Generate SOUL.md (agent identity contract)', script: 'mosaic-init' },
    doctor: { desc: 'Health audit — detect drift and missing files', script: 'mosaic-doctor' },
    sync: { desc: 'Sync skills from canonical source', script: 'mosaic-sync-skills' },
    bootstrap: {
      desc: 'Bootstrap a repo with Mosaic standards',
      script: 'mosaic-bootstrap-repo',
    },
  };

  for (const [name, { desc, script }] of Object.entries(directCommands)) {
    program
      .command(name)
      .description(desc)
      .allowUnknownOption(true)
      .allowExcessArguments(true)
      .action((_opts: unknown, cmd: Command) => {
        checkMosaicHome();
        delegateToScript(fwScript(script), cmd.args);
      });
  }
}
|
||||||
35
packages/config/package.json
Normal file
35
packages/config/package.json
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
{
|
||||||
|
"name": "@mosaic/config",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"type": "module",
|
||||||
|
"main": "dist/index.js",
|
||||||
|
"types": "dist/index.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": {
|
||||||
|
"types": "./dist/index.d.ts",
|
||||||
|
"default": "./dist/index.js"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"build": "tsc",
|
||||||
|
"lint": "eslint src",
|
||||||
|
"typecheck": "tsc --noEmit",
|
||||||
|
"test": "vitest run --passWithNoTests"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@mosaic/memory": "workspace:^",
|
||||||
|
"@mosaic/queue": "workspace:^",
|
||||||
|
"@mosaic/storage": "workspace:^"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"typescript": "^5.8.0",
|
||||||
|
"vitest": "^2.0.0"
|
||||||
|
},
|
||||||
|
"publishConfig": {
|
||||||
|
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||||
|
"access": "public"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
]
|
||||||
|
}
|
||||||
7
packages/config/src/index.ts
Normal file
7
packages/config/src/index.ts
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
export type { MosaicConfig, StorageTier, MemoryConfigRef } from './mosaic-config.js';
|
||||||
|
export {
|
||||||
|
DEFAULT_LOCAL_CONFIG,
|
||||||
|
DEFAULT_TEAM_CONFIG,
|
||||||
|
loadConfig,
|
||||||
|
validateConfig,
|
||||||
|
} from './mosaic-config.js';
|
||||||
140
packages/config/src/mosaic-config.ts
Normal file
140
packages/config/src/mosaic-config.ts
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
import { readFileSync, existsSync } from 'node:fs';
|
||||||
|
import { resolve } from 'node:path';
|
||||||
|
import type { StorageConfig } from '@mosaic/storage';
|
||||||
|
import type { QueueAdapterConfig as QueueConfig } from '@mosaic/queue';
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Types */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
/** Deployment tier; 'local' selects sqlite/local-queue defaults, 'team' selects postgres/bullmq. */
export type StorageTier = 'local' | 'team';

/** Selects the memory backend implementation. */
export interface MemoryConfigRef {
  // 'pgvector' and 'sqlite-vec' are vector backends; 'keyword' needs no embeddings.
  type: 'pgvector' | 'sqlite-vec' | 'keyword';
}

/** Top-level Mosaic configuration: tier plus one config section per subsystem. */
export interface MosaicConfig {
  tier: StorageTier;
  storage: StorageConfig;
  queue: QueueConfig;
  memory: MemoryConfigRef;
}
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Defaults */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
/** Default single-machine config: SQLite storage, on-disk local queue, keyword memory. */
export const DEFAULT_LOCAL_CONFIG: MosaicConfig = {
  tier: 'local',
  storage: { type: 'sqlite', path: '.mosaic/data.db' },
  queue: { type: 'local', dataDir: '.mosaic/queue' },
  memory: { type: 'keyword' },
};

/** Default shared-infrastructure config: Postgres storage, BullMQ queue, pgvector memory. */
export const DEFAULT_TEAM_CONFIG: MosaicConfig = {
  tier: 'team',
  // NOTE(review): default URL assumes a local dev Postgres with mosaic/mosaic credentials —
  // production deployments are expected to override via config file or DATABASE_URL.
  storage: { type: 'postgres', url: 'postgresql://mosaic:mosaic@localhost:5432/mosaic' },
  queue: { type: 'bullmq' },
  memory: { type: 'pgvector' },
};
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Validation */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
// Allowed `type` discriminators for each config section. These presumably mirror
// the adapter unions in @mosaic/storage and @mosaic/queue — verify when those change.
const VALID_TIERS = new Set<string>(['local', 'team']);
const VALID_STORAGE_TYPES = new Set<string>(['postgres', 'sqlite', 'files']);
const VALID_QUEUE_TYPES = new Set<string>(['bullmq', 'local']);
const VALID_MEMORY_TYPES = new Set<string>(['pgvector', 'sqlite-vec', 'keyword']);
|
||||||
|
|
||||||
|
export function validateConfig(raw: unknown): MosaicConfig {
|
||||||
|
if (typeof raw !== 'object' || raw === null) {
|
||||||
|
throw new Error('MosaicConfig must be a non-null object');
|
||||||
|
}
|
||||||
|
|
||||||
|
const obj = raw as Record<string, unknown>;
|
||||||
|
|
||||||
|
// tier
|
||||||
|
const tier = obj['tier'];
|
||||||
|
if (typeof tier !== 'string' || !VALID_TIERS.has(tier)) {
|
||||||
|
throw new Error(`Invalid tier "${String(tier)}" — expected "local" or "team"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// storage
|
||||||
|
const storage = obj['storage'];
|
||||||
|
if (typeof storage !== 'object' || storage === null) {
|
||||||
|
throw new Error('config.storage must be a non-null object');
|
||||||
|
}
|
||||||
|
const storageType = (storage as Record<string, unknown>)['type'];
|
||||||
|
if (typeof storageType !== 'string' || !VALID_STORAGE_TYPES.has(storageType)) {
|
||||||
|
throw new Error(`Invalid storage.type "${String(storageType)}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// queue
|
||||||
|
const queue = obj['queue'];
|
||||||
|
if (typeof queue !== 'object' || queue === null) {
|
||||||
|
throw new Error('config.queue must be a non-null object');
|
||||||
|
}
|
||||||
|
const queueType = (queue as Record<string, unknown>)['type'];
|
||||||
|
if (typeof queueType !== 'string' || !VALID_QUEUE_TYPES.has(queueType)) {
|
||||||
|
throw new Error(`Invalid queue.type "${String(queueType)}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// memory
|
||||||
|
const memory = obj['memory'];
|
||||||
|
if (typeof memory !== 'object' || memory === null) {
|
||||||
|
throw new Error('config.memory must be a non-null object');
|
||||||
|
}
|
||||||
|
const memoryType = (memory as Record<string, unknown>)['type'];
|
||||||
|
if (typeof memoryType !== 'string' || !VALID_MEMORY_TYPES.has(memoryType)) {
|
||||||
|
throw new Error(`Invalid memory.type "${String(memoryType)}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
tier: tier as StorageTier,
|
||||||
|
storage: storage as StorageConfig,
|
||||||
|
queue: queue as QueueConfig,
|
||||||
|
memory: memory as MemoryConfigRef,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Loader */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
function detectFromEnv(): MosaicConfig {
|
||||||
|
if (process.env['DATABASE_URL']) {
|
||||||
|
return {
|
||||||
|
...DEFAULT_TEAM_CONFIG,
|
||||||
|
storage: {
|
||||||
|
type: 'postgres',
|
||||||
|
url: process.env['DATABASE_URL'],
|
||||||
|
},
|
||||||
|
queue: {
|
||||||
|
type: 'bullmq',
|
||||||
|
url: process.env['VALKEY_URL'],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return DEFAULT_LOCAL_CONFIG;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function loadConfig(configPath?: string): MosaicConfig {
|
||||||
|
// 1. Explicit path or default location
|
||||||
|
const paths = configPath
|
||||||
|
? [resolve(configPath)]
|
||||||
|
: [
|
||||||
|
resolve(process.cwd(), 'mosaic.config.json'),
|
||||||
|
resolve(process.cwd(), '../../mosaic.config.json'), // monorepo root when cwd is apps/gateway
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const p of paths) {
|
||||||
|
if (existsSync(p)) {
|
||||||
|
const raw: unknown = JSON.parse(readFileSync(p, 'utf-8'));
|
||||||
|
return validateConfig(raw);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Fall back to env-var detection
|
||||||
|
return detectFromEnv();
|
||||||
|
}
|
||||||
9
packages/config/tsconfig.json
Normal file
9
packages/config/tsconfig.json
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"extends": "../../tsconfig.base.json",
|
||||||
|
"compilerOptions": {
|
||||||
|
"outDir": "dist",
|
||||||
|
"rootDir": "src"
|
||||||
|
},
|
||||||
|
"include": ["src/**/*"],
|
||||||
|
"exclude": ["node_modules", "dist"]
|
||||||
|
}
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/coord",
|
"name": "@mosaic/coord",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.2",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/db",
|
"name": "@mosaic/db",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.2",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/design-tokens",
|
"name": "@mosaic/design-tokens",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.2",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/forge",
|
"name": "@mosaic/forge",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.2",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/log",
|
"name": "@mosaic/log",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.2",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/macp",
|
"name": "@mosaic/macp",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.2",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/memory",
|
"name": "@mosaic/memory",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.2",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
@@ -18,6 +18,7 @@
|
|||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@mosaic/db": "workspace:*",
|
"@mosaic/db": "workspace:*",
|
||||||
|
"@mosaic/storage": "workspace:*",
|
||||||
"@mosaic/types": "workspace:*",
|
"@mosaic/types": "workspace:*",
|
||||||
"drizzle-orm": "^0.45.1"
|
"drizzle-orm": "^0.45.1"
|
||||||
},
|
},
|
||||||
|
|||||||
298
packages/memory/src/adapters/keyword.test.ts
Normal file
298
packages/memory/src/adapters/keyword.test.ts
Normal file
@@ -0,0 +1,298 @@
|
|||||||
|
import { describe, it, expect, beforeEach } from 'vitest';
|
||||||
|
import type { StorageAdapter } from '@mosaic/storage';
|
||||||
|
import { KeywordAdapter } from './keyword.js';
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* In-memory mock StorageAdapter */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
function createMockStorage(): StorageAdapter {
|
||||||
|
const collections = new Map<string, Map<string, Record<string, unknown>>>();
|
||||||
|
let idCounter = 0;
|
||||||
|
|
||||||
|
function getCollection(name: string): Map<string, Record<string, unknown>> {
|
||||||
|
if (!collections.has(name)) collections.set(name, new Map());
|
||||||
|
return collections.get(name)!;
|
||||||
|
}
|
||||||
|
|
||||||
|
const adapter: StorageAdapter = {
|
||||||
|
name: 'mock',
|
||||||
|
|
||||||
|
async create<T extends Record<string, unknown>>(
|
||||||
|
collection: string,
|
||||||
|
data: T,
|
||||||
|
): Promise<T & { id: string }> {
|
||||||
|
const id = String(++idCounter);
|
||||||
|
const record = { ...data, id };
|
||||||
|
getCollection(collection).set(id, record);
|
||||||
|
return record as T & { id: string };
|
||||||
|
},
|
||||||
|
|
||||||
|
async read<T extends Record<string, unknown>>(
|
||||||
|
collection: string,
|
||||||
|
id: string,
|
||||||
|
): Promise<T | null> {
|
||||||
|
const record = getCollection(collection).get(id);
|
||||||
|
return (record as T) ?? null;
|
||||||
|
},
|
||||||
|
|
||||||
|
async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
|
||||||
|
const col = getCollection(collection);
|
||||||
|
const existing = col.get(id);
|
||||||
|
if (!existing) return false;
|
||||||
|
col.set(id, { ...existing, ...data });
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
|
||||||
|
async delete(collection: string, id: string): Promise<boolean> {
|
||||||
|
return getCollection(collection).delete(id);
|
||||||
|
},
|
||||||
|
|
||||||
|
async find<T extends Record<string, unknown>>(
|
||||||
|
collection: string,
|
||||||
|
filter?: Record<string, unknown>,
|
||||||
|
): Promise<T[]> {
|
||||||
|
const col = getCollection(collection);
|
||||||
|
const results: T[] = [];
|
||||||
|
for (const record of col.values()) {
|
||||||
|
if (filter && !matchesFilter(record, filter)) continue;
|
||||||
|
results.push(record as T);
|
||||||
|
}
|
||||||
|
return results;
|
||||||
|
},
|
||||||
|
|
||||||
|
async findOne<T extends Record<string, unknown>>(
|
||||||
|
collection: string,
|
||||||
|
filter: Record<string, unknown>,
|
||||||
|
): Promise<T | null> {
|
||||||
|
const col = getCollection(collection);
|
||||||
|
for (const record of col.values()) {
|
||||||
|
if (matchesFilter(record, filter)) return record as T;
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
},
|
||||||
|
|
||||||
|
async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
|
||||||
|
const rows = await adapter.find(collection, filter);
|
||||||
|
return rows.length;
|
||||||
|
},
|
||||||
|
|
||||||
|
async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
|
||||||
|
return fn(adapter);
|
||||||
|
},
|
||||||
|
|
||||||
|
async migrate(): Promise<void> {},
|
||||||
|
async close(): Promise<void> {},
|
||||||
|
};
|
||||||
|
|
||||||
|
return adapter;
|
||||||
|
}
|
||||||
|
|
||||||
|
function matchesFilter(record: Record<string, unknown>, filter: Record<string, unknown>): boolean {
|
||||||
|
for (const [key, value] of Object.entries(filter)) {
|
||||||
|
if (record[key] !== value) return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Tests */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
describe('KeywordAdapter', () => {
|
||||||
|
let adapter: KeywordAdapter;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
adapter = new KeywordAdapter({ type: 'keyword', storage: createMockStorage() });
|
||||||
|
});
|
||||||
|
|
||||||
|
/* ---- Preferences ---- */
|
||||||
|
|
||||||
|
describe('preferences', () => {
|
||||||
|
it('should set and get a preference', async () => {
|
||||||
|
await adapter.setPreference('u1', 'theme', 'dark');
|
||||||
|
const value = await adapter.getPreference('u1', 'theme');
|
||||||
|
expect(value).toBe('dark');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return null for missing preference', async () => {
|
||||||
|
const value = await adapter.getPreference('u1', 'nonexistent');
|
||||||
|
expect(value).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should upsert an existing preference', async () => {
|
||||||
|
await adapter.setPreference('u1', 'theme', 'dark');
|
||||||
|
await adapter.setPreference('u1', 'theme', 'light');
|
||||||
|
const value = await adapter.getPreference('u1', 'theme');
|
||||||
|
expect(value).toBe('light');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should delete a preference', async () => {
|
||||||
|
await adapter.setPreference('u1', 'theme', 'dark');
|
||||||
|
const deleted = await adapter.deletePreference('u1', 'theme');
|
||||||
|
expect(deleted).toBe(true);
|
||||||
|
const value = await adapter.getPreference('u1', 'theme');
|
||||||
|
expect(value).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return false when deleting nonexistent preference', async () => {
|
||||||
|
const deleted = await adapter.deletePreference('u1', 'nope');
|
||||||
|
expect(deleted).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should list preferences by userId', async () => {
|
||||||
|
await adapter.setPreference('u1', 'theme', 'dark', 'appearance');
|
||||||
|
await adapter.setPreference('u1', 'lang', 'en', 'locale');
|
||||||
|
await adapter.setPreference('u2', 'theme', 'light', 'appearance');
|
||||||
|
|
||||||
|
const prefs = await adapter.listPreferences('u1');
|
||||||
|
expect(prefs).toHaveLength(2);
|
||||||
|
expect(prefs.map((p) => p.key).sort()).toEqual(['lang', 'theme']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should filter preferences by category', async () => {
|
||||||
|
await adapter.setPreference('u1', 'theme', 'dark', 'appearance');
|
||||||
|
await adapter.setPreference('u1', 'lang', 'en', 'locale');
|
||||||
|
|
||||||
|
const prefs = await adapter.listPreferences('u1', 'appearance');
|
||||||
|
expect(prefs).toHaveLength(1);
|
||||||
|
expect(prefs[0]!.key).toBe('theme');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
/* ---- Insights ---- */
|
||||||
|
|
||||||
|
describe('insights', () => {
|
||||||
|
it('should store and retrieve an insight', async () => {
|
||||||
|
const insight = await adapter.storeInsight({
|
||||||
|
userId: 'u1',
|
||||||
|
content: 'TypeScript is great for type safety',
|
||||||
|
source: 'chat',
|
||||||
|
category: 'technical',
|
||||||
|
relevanceScore: 0.9,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(insight.id).toBeDefined();
|
||||||
|
expect(insight.content).toBe('TypeScript is great for type safety');
|
||||||
|
|
||||||
|
const fetched = await adapter.getInsight(insight.id);
|
||||||
|
expect(fetched).not.toBeNull();
|
||||||
|
expect(fetched!.content).toBe('TypeScript is great for type safety');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return null for missing insight', async () => {
|
||||||
|
const result = await adapter.getInsight('nonexistent');
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should delete an insight', async () => {
|
||||||
|
const insight = await adapter.storeInsight({
|
||||||
|
userId: 'u1',
|
||||||
|
content: 'test',
|
||||||
|
source: 'chat',
|
||||||
|
category: 'general',
|
||||||
|
relevanceScore: 0.5,
|
||||||
|
});
|
||||||
|
|
||||||
|
const deleted = await adapter.deleteInsight(insight.id);
|
||||||
|
expect(deleted).toBe(true);
|
||||||
|
|
||||||
|
const fetched = await adapter.getInsight(insight.id);
|
||||||
|
expect(fetched).toBeNull();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
/* ---- Keyword Search ---- */
|
||||||
|
|
||||||
|
describe('searchInsights', () => {
|
||||||
|
beforeEach(async () => {
|
||||||
|
await adapter.storeInsight({
|
||||||
|
userId: 'u1',
|
||||||
|
content: 'TypeScript provides excellent type safety for JavaScript projects',
|
||||||
|
source: 'chat',
|
||||||
|
category: 'technical',
|
||||||
|
relevanceScore: 0.9,
|
||||||
|
});
|
||||||
|
await adapter.storeInsight({
|
||||||
|
userId: 'u1',
|
||||||
|
content: 'React hooks simplify state management in components',
|
||||||
|
source: 'chat',
|
||||||
|
category: 'technical',
|
||||||
|
relevanceScore: 0.8,
|
||||||
|
});
|
||||||
|
await adapter.storeInsight({
|
||||||
|
userId: 'u1',
|
||||||
|
content: 'TypeScript and React work great together for type safe components',
|
||||||
|
source: 'chat',
|
||||||
|
category: 'technical',
|
||||||
|
relevanceScore: 0.85,
|
||||||
|
});
|
||||||
|
await adapter.storeInsight({
|
||||||
|
userId: 'u2',
|
||||||
|
content: 'TypeScript is popular',
|
||||||
|
source: 'chat',
|
||||||
|
category: 'general',
|
||||||
|
relevanceScore: 0.5,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should find insights by exact keyword', async () => {
|
||||||
|
const results = await adapter.searchInsights('u1', 'hooks');
|
||||||
|
expect(results).toHaveLength(1);
|
||||||
|
expect(results[0]!.content).toContain('hooks');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be case-insensitive', async () => {
|
||||||
|
const results = await adapter.searchInsights('u1', 'TYPESCRIPT');
|
||||||
|
expect(results.length).toBeGreaterThanOrEqual(1);
|
||||||
|
for (const r of results) {
|
||||||
|
expect(r.content.toLowerCase()).toContain('typescript');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should rank multi-word matches higher', async () => {
|
||||||
|
const results = await adapter.searchInsights('u1', 'TypeScript React');
|
||||||
|
// The insight mentioning both "TypeScript" and "React" should rank first (score=2)
|
||||||
|
expect(results[0]!.score).toBe(2);
|
||||||
|
expect(results[0]!.content).toContain('TypeScript');
|
||||||
|
expect(results[0]!.content).toContain('React');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return empty for no matches', async () => {
|
||||||
|
const results = await adapter.searchInsights('u1', 'python django');
|
||||||
|
expect(results).toHaveLength(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should filter by userId', async () => {
|
||||||
|
const results = await adapter.searchInsights('u2', 'TypeScript');
|
||||||
|
expect(results).toHaveLength(1);
|
||||||
|
expect(results[0]!.content).toBe('TypeScript is popular');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should respect limit option', async () => {
|
||||||
|
const results = await adapter.searchInsights('u1', 'TypeScript', { limit: 1 });
|
||||||
|
expect(results).toHaveLength(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return empty for empty query', async () => {
|
||||||
|
const results = await adapter.searchInsights('u1', ' ');
|
||||||
|
expect(results).toHaveLength(0);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
/* ---- Lifecycle ---- */
|
||||||
|
|
||||||
|
describe('lifecycle', () => {
|
||||||
|
it('should have name "keyword"', () => {
|
||||||
|
expect(adapter.name).toBe('keyword');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should have null embedder', () => {
|
||||||
|
expect(adapter.embedder).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should close without error', async () => {
|
||||||
|
await expect(adapter.close()).resolves.toBeUndefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
195
packages/memory/src/adapters/keyword.ts
Normal file
195
packages/memory/src/adapters/keyword.ts
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
import type { StorageAdapter } from '@mosaic/storage';
|
||||||
|
import type {
|
||||||
|
MemoryAdapter,
|
||||||
|
MemoryConfig,
|
||||||
|
NewInsight,
|
||||||
|
Insight,
|
||||||
|
InsightSearchResult,
|
||||||
|
} from '../types.js';
|
||||||
|
import type { EmbeddingProvider } from '../vector-store.js';
|
||||||
|
|
||||||
|
type KeywordConfig = Extract<MemoryConfig, { type: 'keyword' }>;
|
||||||
|
|
||||||
|
const PREFERENCES = 'preferences';
|
||||||
|
const INSIGHTS = 'insights';
|
||||||
|
|
||||||
|
type PreferenceRecord = Record<string, unknown> & {
|
||||||
|
id: string;
|
||||||
|
userId: string;
|
||||||
|
key: string;
|
||||||
|
value: unknown;
|
||||||
|
category: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
type InsightRecord = Record<string, unknown> & {
|
||||||
|
id: string;
|
||||||
|
userId: string;
|
||||||
|
content: string;
|
||||||
|
source: string;
|
||||||
|
category: string;
|
||||||
|
relevanceScore: number;
|
||||||
|
metadata: Record<string, unknown>;
|
||||||
|
createdAt: string;
|
||||||
|
updatedAt?: string;
|
||||||
|
decayedAt?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export class KeywordAdapter implements MemoryAdapter {
|
||||||
|
readonly name = 'keyword';
|
||||||
|
readonly embedder: EmbeddingProvider | null = null;
|
||||||
|
|
||||||
|
private storage: StorageAdapter;
|
||||||
|
|
||||||
|
constructor(config: KeywordConfig) {
|
||||||
|
this.storage = config.storage;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Preferences */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
async getPreference(userId: string, key: string): Promise<unknown | null> {
|
||||||
|
const row = await this.storage.findOne<PreferenceRecord>(PREFERENCES, { userId, key });
|
||||||
|
return row?.value ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async setPreference(
|
||||||
|
userId: string,
|
||||||
|
key: string,
|
||||||
|
value: unknown,
|
||||||
|
category?: string,
|
||||||
|
): Promise<void> {
|
||||||
|
const existing = await this.storage.findOne<PreferenceRecord>(PREFERENCES, { userId, key });
|
||||||
|
if (existing) {
|
||||||
|
await this.storage.update(PREFERENCES, existing.id, {
|
||||||
|
value,
|
||||||
|
...(category !== undefined ? { category } : {}),
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
await this.storage.create(PREFERENCES, {
|
||||||
|
userId,
|
||||||
|
key,
|
||||||
|
value,
|
||||||
|
category: category ?? 'general',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async deletePreference(userId: string, key: string): Promise<boolean> {
|
||||||
|
const existing = await this.storage.findOne<PreferenceRecord>(PREFERENCES, { userId, key });
|
||||||
|
if (!existing) return false;
|
||||||
|
return this.storage.delete(PREFERENCES, existing.id);
|
||||||
|
}
|
||||||
|
|
||||||
|
async listPreferences(
|
||||||
|
userId: string,
|
||||||
|
category?: string,
|
||||||
|
): Promise<Array<{ key: string; value: unknown; category: string }>> {
|
||||||
|
const filter: Record<string, unknown> = { userId };
|
||||||
|
if (category !== undefined) filter.category = category;
|
||||||
|
|
||||||
|
const rows = await this.storage.find<PreferenceRecord>(PREFERENCES, filter);
|
||||||
|
return rows.map((r) => ({ key: r.key, value: r.value, category: r.category }));
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Insights */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
async storeInsight(insight: NewInsight): Promise<Insight> {
|
||||||
|
const now = new Date();
|
||||||
|
const row = await this.storage.create<Record<string, unknown>>(INSIGHTS, {
|
||||||
|
userId: insight.userId,
|
||||||
|
content: insight.content,
|
||||||
|
source: insight.source,
|
||||||
|
category: insight.category,
|
||||||
|
relevanceScore: insight.relevanceScore,
|
||||||
|
metadata: insight.metadata ?? {},
|
||||||
|
createdAt: now.toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: row.id,
|
||||||
|
userId: insight.userId,
|
||||||
|
content: insight.content,
|
||||||
|
source: insight.source,
|
||||||
|
category: insight.category,
|
||||||
|
relevanceScore: insight.relevanceScore,
|
||||||
|
metadata: insight.metadata,
|
||||||
|
createdAt: now,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async getInsight(id: string): Promise<Insight | null> {
|
||||||
|
const row = await this.storage.read<InsightRecord>(INSIGHTS, id);
|
||||||
|
if (!row) return null;
|
||||||
|
return toInsight(row);
|
||||||
|
}
|
||||||
|
|
||||||
|
async searchInsights(
|
||||||
|
userId: string,
|
||||||
|
query: string,
|
||||||
|
opts?: { limit?: number; embedding?: number[] },
|
||||||
|
): Promise<InsightSearchResult[]> {
|
||||||
|
const limit = opts?.limit ?? 10;
|
||||||
|
const words = query
|
||||||
|
.toLowerCase()
|
||||||
|
.split(/\s+/)
|
||||||
|
.filter((w) => w.length > 0);
|
||||||
|
|
||||||
|
if (words.length === 0) return [];
|
||||||
|
|
||||||
|
const rows = await this.storage.find<InsightRecord>(INSIGHTS, { userId });
|
||||||
|
|
||||||
|
const scored: InsightSearchResult[] = [];
|
||||||
|
for (const row of rows) {
|
||||||
|
const content = row.content.toLowerCase();
|
||||||
|
let score = 0;
|
||||||
|
for (const word of words) {
|
||||||
|
if (content.includes(word)) score++;
|
||||||
|
}
|
||||||
|
if (score > 0) {
|
||||||
|
scored.push({
|
||||||
|
id: row.id,
|
||||||
|
content: row.content,
|
||||||
|
score,
|
||||||
|
metadata: row.metadata ?? undefined,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
scored.sort((a, b) => b.score - a.score);
|
||||||
|
return scored.slice(0, limit);
|
||||||
|
}
|
||||||
|
|
||||||
|
async deleteInsight(id: string): Promise<boolean> {
|
||||||
|
return this.storage.delete(INSIGHTS, id);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Lifecycle */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
async close(): Promise<void> {
|
||||||
|
// no-op — storage adapter manages its own lifecycle
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Helpers */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
function toInsight(row: InsightRecord): Insight {
|
||||||
|
return {
|
||||||
|
id: row.id,
|
||||||
|
userId: row.userId,
|
||||||
|
content: row.content,
|
||||||
|
source: row.source,
|
||||||
|
category: row.category,
|
||||||
|
relevanceScore: row.relevanceScore,
|
||||||
|
metadata: row.metadata ?? undefined,
|
||||||
|
createdAt: new Date(row.createdAt),
|
||||||
|
updatedAt: row.updatedAt ? new Date(row.updatedAt) : undefined,
|
||||||
|
decayedAt: row.decayedAt ? new Date(row.decayedAt) : undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
177
packages/memory/src/adapters/pgvector.ts
Normal file
177
packages/memory/src/adapters/pgvector.ts
Normal file
@@ -0,0 +1,177 @@
|
|||||||
|
import { createDb, type DbHandle } from '@mosaic/db';
|
||||||
|
import type {
|
||||||
|
MemoryAdapter,
|
||||||
|
MemoryConfig,
|
||||||
|
NewInsight as AdapterNewInsight,
|
||||||
|
Insight as AdapterInsight,
|
||||||
|
InsightSearchResult,
|
||||||
|
} from '../types.js';
|
||||||
|
import type { EmbeddingProvider } from '../vector-store.js';
|
||||||
|
import {
|
||||||
|
createPreferencesRepo,
|
||||||
|
type PreferencesRepo,
|
||||||
|
type Preference,
|
||||||
|
type NewPreference,
|
||||||
|
} from '../preferences.js';
|
||||||
|
import {
|
||||||
|
createInsightsRepo,
|
||||||
|
type InsightsRepo,
|
||||||
|
type NewInsight as DbNewInsight,
|
||||||
|
} from '../insights.js';
|
||||||
|
|
||||||
|
type PgVectorConfig = Extract<MemoryConfig, { type: 'pgvector' }>;
|
||||||
|
|
||||||
|
export class PgVectorAdapter implements MemoryAdapter {
|
||||||
|
readonly name = 'pgvector';
|
||||||
|
readonly embedder: EmbeddingProvider | null;
|
||||||
|
|
||||||
|
private handle: DbHandle;
|
||||||
|
private preferences: PreferencesRepo;
|
||||||
|
private insights: InsightsRepo;
|
||||||
|
|
||||||
|
constructor(config: PgVectorConfig) {
|
||||||
|
this.handle = createDb();
|
||||||
|
this.preferences = createPreferencesRepo(this.handle.db);
|
||||||
|
this.insights = createInsightsRepo(this.handle.db);
|
||||||
|
this.embedder = config.embedder ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Preferences */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
async getPreference(userId: string, key: string): Promise<unknown | null> {
|
||||||
|
const row = await this.preferences.findByUserAndKey(userId, key);
|
||||||
|
return row?.value ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async setPreference(
|
||||||
|
userId: string,
|
||||||
|
key: string,
|
||||||
|
value: unknown,
|
||||||
|
category?: string,
|
||||||
|
): Promise<void> {
|
||||||
|
await this.preferences.upsert({
|
||||||
|
userId,
|
||||||
|
key,
|
||||||
|
value,
|
||||||
|
...(category ? { category: category as NewPreference['category'] } : {}),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async deletePreference(userId: string, key: string): Promise<boolean> {
|
||||||
|
return this.preferences.remove(userId, key);
|
||||||
|
}
|
||||||
|
|
||||||
|
async listPreferences(
|
||||||
|
userId: string,
|
||||||
|
category?: string,
|
||||||
|
): Promise<Array<{ key: string; value: unknown; category: string }>> {
|
||||||
|
const rows = category
|
||||||
|
? await this.preferences.findByUserAndCategory(userId, category as Preference['category'])
|
||||||
|
: await this.preferences.findByUser(userId);
|
||||||
|
|
||||||
|
return rows.map((r) => ({ key: r.key, value: r.value, category: r.category }));
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Insights */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
async storeInsight(insight: AdapterNewInsight): Promise<AdapterInsight> {
|
||||||
|
const row = await this.insights.create({
|
||||||
|
userId: insight.userId,
|
||||||
|
content: insight.content,
|
||||||
|
source: insight.source as DbNewInsight['source'],
|
||||||
|
category: insight.category as DbNewInsight['category'],
|
||||||
|
relevanceScore: insight.relevanceScore,
|
||||||
|
metadata: insight.metadata ?? {},
|
||||||
|
embedding: insight.embedding ?? null,
|
||||||
|
});
|
||||||
|
|
||||||
|
return toAdapterInsight(row);
|
||||||
|
}
|
||||||
|
|
||||||
|
async getInsight(id: string): Promise<AdapterInsight | null> {
|
||||||
|
// findById requires userId — search across all users via raw find
|
||||||
|
// The adapter interface only takes id, so we pass an empty userId and rely on the id match.
|
||||||
|
// Since the repo requires userId, we use a two-step approach.
|
||||||
|
const row = await this.insights.findById(id, '');
|
||||||
|
if (!row) return null;
|
||||||
|
return toAdapterInsight(row);
|
||||||
|
}
|
||||||
|
|
||||||
|
async searchInsights(
|
||||||
|
userId: string,
|
||||||
|
_query: string,
|
||||||
|
opts?: { limit?: number; embedding?: number[] },
|
||||||
|
): Promise<InsightSearchResult[]> {
|
||||||
|
if (opts?.embedding) {
|
||||||
|
const results = await this.insights.searchByEmbedding(
|
||||||
|
userId,
|
||||||
|
opts.embedding,
|
||||||
|
opts.limit ?? 10,
|
||||||
|
);
|
||||||
|
return results.map((r) => ({
|
||||||
|
id: r.insight.id,
|
||||||
|
content: r.insight.content,
|
||||||
|
score: 1 - r.distance,
|
||||||
|
metadata: (r.insight.metadata as Record<string, unknown>) ?? undefined,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback: return recent insights for the user
|
||||||
|
const rows = await this.insights.findByUser(userId, opts?.limit ?? 10);
|
||||||
|
return rows.map((r) => ({
|
||||||
|
id: r.id,
|
||||||
|
content: r.content,
|
||||||
|
score: Number(r.relevanceScore),
|
||||||
|
metadata: (r.metadata as Record<string, unknown>) ?? undefined,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
async deleteInsight(id: string): Promise<boolean> {
|
||||||
|
// The repo requires userId — pass empty string since adapter interface only has id
|
||||||
|
return this.insights.remove(id, '');
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Lifecycle */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
async close(): Promise<void> {
|
||||||
|
await this.handle.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Helpers */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
function toAdapterInsight(row: {
|
||||||
|
id: string;
|
||||||
|
userId: string;
|
||||||
|
content: string;
|
||||||
|
source: string;
|
||||||
|
category: string;
|
||||||
|
relevanceScore: number;
|
||||||
|
metadata: unknown;
|
||||||
|
embedding: unknown;
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt: Date | null;
|
||||||
|
decayedAt: Date | null;
|
||||||
|
}): AdapterInsight {
|
||||||
|
return {
|
||||||
|
id: row.id,
|
||||||
|
userId: row.userId,
|
||||||
|
content: row.content,
|
||||||
|
source: row.source,
|
||||||
|
category: row.category,
|
||||||
|
relevanceScore: row.relevanceScore,
|
||||||
|
metadata: (row.metadata as Record<string, unknown>) ?? undefined,
|
||||||
|
embedding: (row.embedding as number[]) ?? undefined,
|
||||||
|
createdAt: row.createdAt,
|
||||||
|
updatedAt: row.updatedAt ?? undefined,
|
||||||
|
decayedAt: row.decayedAt ?? undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
18
packages/memory/src/factory.ts
Normal file
18
packages/memory/src/factory.ts
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
import type { MemoryAdapter, MemoryConfig } from './types.js';
|
||||||
|
|
||||||
|
type MemoryType = MemoryConfig['type'];
|
||||||
|
|
||||||
|
const registry = new Map<MemoryType, (config: MemoryConfig) => MemoryAdapter>();
|
||||||
|
|
||||||
|
export function registerMemoryAdapter(
|
||||||
|
type: MemoryType,
|
||||||
|
factory: (config: MemoryConfig) => MemoryAdapter,
|
||||||
|
): void {
|
||||||
|
registry.set(type, factory);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createMemoryAdapter(config: MemoryConfig): MemoryAdapter {
|
||||||
|
const factory = registry.get(config.type);
|
||||||
|
if (!factory) throw new Error(`No adapter registered for type: ${config.type}`);
|
||||||
|
return factory(config);
|
||||||
|
}
|
||||||
@@ -13,3 +13,27 @@ export {
|
|||||||
type SearchResult,
|
type SearchResult,
|
||||||
} from './insights.js';
|
} from './insights.js';
|
||||||
export type { VectorStore, VectorSearchResult, EmbeddingProvider } from './vector-store.js';
|
export type { VectorStore, VectorSearchResult, EmbeddingProvider } from './vector-store.js';
|
||||||
|
export type {
|
||||||
|
MemoryAdapter,
|
||||||
|
MemoryConfig,
|
||||||
|
NewInsight as AdapterNewInsight,
|
||||||
|
Insight as AdapterInsight,
|
||||||
|
InsightSearchResult,
|
||||||
|
} from './types.js';
|
||||||
|
export { createMemoryAdapter, registerMemoryAdapter } from './factory.js';
|
||||||
|
export { PgVectorAdapter } from './adapters/pgvector.js';
|
||||||
|
export { KeywordAdapter } from './adapters/keyword.js';
|
||||||
|
|
||||||
|
// Auto-register adapters at module load time
|
||||||
|
import { registerMemoryAdapter } from './factory.js';
|
||||||
|
import { PgVectorAdapter } from './adapters/pgvector.js';
|
||||||
|
import { KeywordAdapter } from './adapters/keyword.js';
|
||||||
|
import type { MemoryConfig } from './types.js';
|
||||||
|
|
||||||
|
registerMemoryAdapter('pgvector', (config: MemoryConfig) => {
|
||||||
|
return new PgVectorAdapter(config as Extract<MemoryConfig, { type: 'pgvector' }>);
|
||||||
|
});
|
||||||
|
|
||||||
|
registerMemoryAdapter('keyword', (config: MemoryConfig) => {
|
||||||
|
return new KeywordAdapter(config as Extract<MemoryConfig, { type: 'keyword' }>);
|
||||||
|
});
|
||||||
|
|||||||
73
packages/memory/src/types.ts
Normal file
73
packages/memory/src/types.ts
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
export type { EmbeddingProvider, VectorSearchResult } from './vector-store.js';
|
||||||
|
import type { EmbeddingProvider } from './vector-store.js';
|
||||||
|
import type { StorageAdapter } from '@mosaic/storage';
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Insight types (adapter-level, decoupled from Drizzle schema) */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
export interface NewInsight {
|
||||||
|
userId: string;
|
||||||
|
content: string;
|
||||||
|
source: string;
|
||||||
|
category: string;
|
||||||
|
relevanceScore: number;
|
||||||
|
metadata?: Record<string, unknown>;
|
||||||
|
embedding?: number[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Insight extends NewInsight {
|
||||||
|
id: string;
|
||||||
|
createdAt: Date;
|
||||||
|
updatedAt?: Date;
|
||||||
|
decayedAt?: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface InsightSearchResult {
|
||||||
|
id: string;
|
||||||
|
content: string;
|
||||||
|
score: number;
|
||||||
|
metadata?: Record<string, unknown>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* MemoryAdapter interface */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
export interface MemoryAdapter {
|
||||||
|
readonly name: string;
|
||||||
|
|
||||||
|
// Preferences
|
||||||
|
getPreference(userId: string, key: string): Promise<unknown | null>;
|
||||||
|
setPreference(userId: string, key: string, value: unknown, category?: string): Promise<void>;
|
||||||
|
deletePreference(userId: string, key: string): Promise<boolean>;
|
||||||
|
listPreferences(
|
||||||
|
userId: string,
|
||||||
|
category?: string,
|
||||||
|
): Promise<Array<{ key: string; value: unknown; category: string }>>;
|
||||||
|
|
||||||
|
// Insights
|
||||||
|
storeInsight(insight: NewInsight): Promise<Insight>;
|
||||||
|
getInsight(id: string): Promise<Insight | null>;
|
||||||
|
searchInsights(
|
||||||
|
userId: string,
|
||||||
|
query: string,
|
||||||
|
opts?: { limit?: number; embedding?: number[] },
|
||||||
|
): Promise<InsightSearchResult[]>;
|
||||||
|
deleteInsight(id: string): Promise<boolean>;
|
||||||
|
|
||||||
|
// Embedding
|
||||||
|
readonly embedder: EmbeddingProvider | null;
|
||||||
|
|
||||||
|
// Lifecycle
|
||||||
|
close(): Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* MemoryConfig */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
export type MemoryConfig =
|
||||||
|
| { type: 'pgvector'; embedder?: EmbeddingProvider }
|
||||||
|
| { type: 'sqlite-vec'; embedder?: EmbeddingProvider }
|
||||||
|
| { type: 'keyword'; storage: StorageAdapter };
|
||||||
88
packages/mosaic/__tests__/platform/file-ops.test.ts
Normal file
88
packages/mosaic/__tests__/platform/file-ops.test.ts
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||||
|
import {
|
||||||
|
mkdtempSync,
|
||||||
|
mkdirSync,
|
||||||
|
writeFileSync,
|
||||||
|
readFileSync,
|
||||||
|
existsSync,
|
||||||
|
chmodSync,
|
||||||
|
rmSync,
|
||||||
|
} from 'node:fs';
|
||||||
|
import { join } from 'node:path';
|
||||||
|
import { tmpdir } from 'node:os';
|
||||||
|
import { syncDirectory } from '../../src/platform/file-ops.js';
|
||||||
|
|
||||||
|
describe('syncDirectory', () => {
|
||||||
|
let tmpDir: string;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tmpDir = mkdtempSync(join(tmpdir(), 'mosaic-file-ops-'));
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
rmSync(tmpDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('is a no-op when source and target are the same path', () => {
|
||||||
|
const dir = join(tmpDir, 'same');
|
||||||
|
mkdirSync(dir, { recursive: true });
|
||||||
|
writeFileSync(join(dir, 'file.txt'), 'hello');
|
||||||
|
// Should not throw even with read-only files
|
||||||
|
const gitDir = join(dir, '.git', 'objects', 'pack');
|
||||||
|
mkdirSync(gitDir, { recursive: true });
|
||||||
|
const packFile = join(gitDir, 'pack-abc.idx');
|
||||||
|
writeFileSync(packFile, 'data');
|
||||||
|
chmodSync(packFile, 0o444);
|
||||||
|
|
||||||
|
expect(() => syncDirectory(dir, dir)).not.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('skips nested .git directories when excludeGit is true', () => {
|
||||||
|
const src = join(tmpDir, 'src');
|
||||||
|
const dest = join(tmpDir, 'dest');
|
||||||
|
|
||||||
|
// Create source with a nested .git
|
||||||
|
mkdirSync(join(src, 'sources', 'skills', '.git', 'objects'), { recursive: true });
|
||||||
|
writeFileSync(join(src, 'sources', 'skills', '.git', 'objects', 'pack.idx'), 'git-data');
|
||||||
|
writeFileSync(join(src, 'sources', 'skills', 'SKILL.md'), 'skill content');
|
||||||
|
writeFileSync(join(src, 'README.md'), 'readme');
|
||||||
|
|
||||||
|
syncDirectory(src, dest, { excludeGit: true });
|
||||||
|
|
||||||
|
// .git contents should NOT be copied
|
||||||
|
expect(existsSync(join(dest, 'sources', 'skills', '.git'))).toBe(false);
|
||||||
|
// Normal files should be copied
|
||||||
|
expect(readFileSync(join(dest, 'sources', 'skills', 'SKILL.md'), 'utf-8')).toBe(
|
||||||
|
'skill content',
|
||||||
|
);
|
||||||
|
expect(readFileSync(join(dest, 'README.md'), 'utf-8')).toBe('readme');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('copies nested .git directories when excludeGit is false', () => {
|
||||||
|
const src = join(tmpDir, 'src');
|
||||||
|
const dest = join(tmpDir, 'dest');
|
||||||
|
|
||||||
|
mkdirSync(join(src, 'sub', '.git'), { recursive: true });
|
||||||
|
writeFileSync(join(src, 'sub', '.git', 'HEAD'), 'ref: refs/heads/main');
|
||||||
|
|
||||||
|
syncDirectory(src, dest, { excludeGit: false });
|
||||||
|
|
||||||
|
expect(readFileSync(join(dest, 'sub', '.git', 'HEAD'), 'utf-8')).toBe('ref: refs/heads/main');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('respects preserve option', () => {
|
||||||
|
const src = join(tmpDir, 'src');
|
||||||
|
const dest = join(tmpDir, 'dest');
|
||||||
|
|
||||||
|
mkdirSync(src, { recursive: true });
|
||||||
|
mkdirSync(dest, { recursive: true });
|
||||||
|
writeFileSync(join(src, 'SOUL.md'), 'new soul');
|
||||||
|
writeFileSync(join(dest, 'SOUL.md'), 'old soul');
|
||||||
|
writeFileSync(join(src, 'README.md'), 'new readme');
|
||||||
|
|
||||||
|
syncDirectory(src, dest, { preserve: ['SOUL.md'] });
|
||||||
|
|
||||||
|
expect(readFileSync(join(dest, 'SOUL.md'), 'utf-8')).toBe('old soul');
|
||||||
|
expect(readFileSync(join(dest, 'README.md'), 'utf-8')).toBe('new readme');
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -65,4 +65,36 @@ describe('detectInstallStage', () => {
|
|||||||
expect(state.installAction).toBe('keep');
|
expect(state.installAction).toBe('keep');
|
||||||
expect(state.soul.agentName).toBe('TestAgent');
|
expect(state.soul.agentName).toBe('TestAgent');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('pre-populates state when reconfiguring', async () => {
|
||||||
|
mkdirSync(join(tmpDir, 'bin'), { recursive: true });
|
||||||
|
writeFileSync(join(tmpDir, 'SOUL.md'), 'You are **Jarvis** in this session.');
|
||||||
|
writeFileSync(join(tmpDir, 'USER.md'), '**Name:** TestUser');
|
||||||
|
|
||||||
|
const p = new HeadlessPrompter({
|
||||||
|
'What would you like to do?': 'reconfigure',
|
||||||
|
});
|
||||||
|
const state = createState(tmpDir);
|
||||||
|
await detectInstallStage(p, state, mockConfig);
|
||||||
|
|
||||||
|
expect(state.installAction).toBe('reconfigure');
|
||||||
|
// Existing values loaded as defaults for reconfiguration
|
||||||
|
expect(state.soul.agentName).toBe('TestAgent');
|
||||||
|
expect(state.user.userName).toBe('TestUser');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('does not pre-populate state on fresh reset', async () => {
|
||||||
|
mkdirSync(join(tmpDir, 'bin'), { recursive: true });
|
||||||
|
writeFileSync(join(tmpDir, 'SOUL.md'), 'You are **Jarvis** in this session.');
|
||||||
|
|
||||||
|
const p = new HeadlessPrompter({
|
||||||
|
'What would you like to do?': 'reset',
|
||||||
|
});
|
||||||
|
const state = createState(tmpDir);
|
||||||
|
await detectInstallStage(p, state, mockConfig);
|
||||||
|
|
||||||
|
expect(state.installAction).toBe('reset');
|
||||||
|
// Reset should NOT load existing values
|
||||||
|
expect(state.soul.agentName).toBeUndefined();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,849 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
# mosaic — Unified agent launcher and management CLI
|
|
||||||
#
|
|
||||||
# AGENTS.md is the global policy source for all agent sessions.
|
|
||||||
# The launcher injects a composed runtime contract (AGENTS + runtime reference).
|
|
||||||
#
|
|
||||||
# Usage:
|
|
||||||
# mosaic claude [args...] Launch Claude Code with runtime contract injected
|
|
||||||
# mosaic opencode [args...] Launch OpenCode with runtime contract injected
|
|
||||||
# mosaic codex [args...] Launch Codex with runtime contract injected
|
|
||||||
# mosaic yolo <runtime> [args...] Launch runtime in dangerous-permissions mode
|
|
||||||
# mosaic --yolo <runtime> [args...] Alias for yolo
|
|
||||||
# mosaic init [args...] Generate SOUL.md interactively
|
|
||||||
# mosaic doctor [args...] Health audit
|
|
||||||
# mosaic sync [args...] Sync skills
|
|
||||||
# mosaic seq [subcommand] sequential-thinking MCP management (check/fix/start)
|
|
||||||
# mosaic bootstrap <path> Bootstrap a repo
|
|
||||||
# mosaic upgrade release Upgrade installed Mosaic release
|
|
||||||
# mosaic upgrade check Check release upgrade status (no changes)
|
|
||||||
# mosaic upgrade project [args] Upgrade project-local stale files
|
|
||||||
|
|
||||||
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
|
|
||||||
VERSION="0.1.0"
|
|
||||||
|
|
||||||
usage() {
|
|
||||||
cat <<USAGE
|
|
||||||
mosaic $VERSION — Unified agent launcher
|
|
||||||
|
|
||||||
Usage: mosaic <command> [args...]
|
|
||||||
|
|
||||||
Agent Launchers:
|
|
||||||
pi [args...] Launch Pi with runtime contract injected (recommended)
|
|
||||||
claude [args...] Launch Claude Code with runtime contract injected
|
|
||||||
opencode [args...] Launch OpenCode with runtime contract injected
|
|
||||||
codex [args...] Launch Codex with runtime contract injected
|
|
||||||
yolo <runtime> [args...] Dangerous mode for claude|codex|opencode|pi
|
|
||||||
--yolo <runtime> [args...] Alias for yolo
|
|
||||||
|
|
||||||
Management:
|
|
||||||
init [args...] Generate SOUL.md (agent identity contract)
|
|
||||||
doctor [args...] Audit runtime state and detect drift
|
|
||||||
sync [args...] Sync skills from canonical source
|
|
||||||
seq [subcommand] sequential-thinking MCP management:
|
|
||||||
check [--runtime <r>] [--strict]
|
|
||||||
fix [--runtime <r>]
|
|
||||||
start
|
|
||||||
bootstrap <path> Bootstrap a repo with Mosaic standards
|
|
||||||
upgrade [mode] [args] Upgrade release (default) or project files
|
|
||||||
upgrade check Check release upgrade status (no changes)
|
|
||||||
release-upgrade [...] Upgrade installed Mosaic release
|
|
||||||
project-upgrade [...] Clean up stale SOUL.md/CLAUDE.md in a project
|
|
||||||
|
|
||||||
PRD:
|
|
||||||
prdy <subcommand> PRD creation and validation
|
|
||||||
init Create docs/PRD.md via guided runtime session
|
|
||||||
update Update existing PRD via guided runtime session
|
|
||||||
validate Check PRD completeness (bash-only)
|
|
||||||
status Quick PRD health check (one-liner)
|
|
||||||
|
|
||||||
Coordinator (r0):
|
|
||||||
coord <subcommand> Manual coordinator tools
|
|
||||||
init Initialize a new mission
|
|
||||||
mission Show mission progress dashboard
|
|
||||||
status Check agent session health
|
|
||||||
continue Generate continuation prompt
|
|
||||||
run Generate context and launch selected runtime
|
|
||||||
resume Crash recovery
|
|
||||||
|
|
||||||
Options:
|
|
||||||
-h, --help Show this help
|
|
||||||
-v, --version Show version
|
|
||||||
|
|
||||||
All arguments after the command are forwarded to the target CLI.
|
|
||||||
USAGE
|
|
||||||
}
|
|
||||||
|
|
||||||
# Pre-flight checks
|
|
||||||
check_mosaic_home() {
|
|
||||||
if [[ ! -d "$MOSAIC_HOME" ]]; then
|
|
||||||
echo "[mosaic] ERROR: ~/.config/mosaic not found." >&2
|
|
||||||
echo "[mosaic] Install with: bash <(curl -fsSL https://git.mosaicstack.dev/mosaic/mosaic-stack/raw/branch/main/tools/install.sh)" >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
check_agents_md() {
|
|
||||||
if [[ ! -f "$MOSAIC_HOME/AGENTS.md" ]]; then
|
|
||||||
echo "[mosaic] ERROR: ~/.config/mosaic/AGENTS.md not found." >&2
|
|
||||||
echo "[mosaic] Re-run the installer: bash <(curl -fsSL https://git.mosaicstack.dev/mosaic/mosaic-stack/raw/branch/main/tools/install.sh)" >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
check_soul() {
|
|
||||||
if [[ ! -f "$MOSAIC_HOME/SOUL.md" ]]; then
|
|
||||||
echo "[mosaic] SOUL.md not found. Running mosaic init..."
|
|
||||||
"$MOSAIC_HOME/bin/mosaic-init"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
check_runtime() {
|
|
||||||
local cmd="$1"
|
|
||||||
if ! command -v "$cmd" >/dev/null 2>&1; then
|
|
||||||
echo "[mosaic] ERROR: '$cmd' not found in PATH." >&2
|
|
||||||
echo "[mosaic] Install $cmd before launching." >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
check_sequential_thinking() {
|
|
||||||
local runtime="${1:-all}"
|
|
||||||
local checker="$MOSAIC_HOME/bin/mosaic-ensure-sequential-thinking"
|
|
||||||
if [[ ! -x "$checker" ]]; then
|
|
||||||
echo "[mosaic] ERROR: sequential-thinking checker missing: $checker" >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
if ! "$checker" --check --runtime "$runtime" >/dev/null 2>&1; then
|
|
||||||
echo "[mosaic] ERROR: sequential-thinking MCP is required but not configured." >&2
|
|
||||||
echo "[mosaic] Fix config: $checker --runtime $runtime" >&2
|
|
||||||
echo "[mosaic] Or run: mosaic seq fix --runtime $runtime" >&2
|
|
||||||
echo "[mosaic] Manual server start: mosaic seq start" >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
runtime_contract_path() {
|
|
||||||
local runtime="$1"
|
|
||||||
case "$runtime" in
|
|
||||||
claude) echo "$MOSAIC_HOME/runtime/claude/RUNTIME.md" ;;
|
|
||||||
codex) echo "$MOSAIC_HOME/runtime/codex/RUNTIME.md" ;;
|
|
||||||
opencode) echo "$MOSAIC_HOME/runtime/opencode/RUNTIME.md" ;;
|
|
||||||
pi) echo "$MOSAIC_HOME/runtime/pi/RUNTIME.md" ;;
|
|
||||||
*)
|
|
||||||
echo "[mosaic] ERROR: unsupported runtime '$runtime' for runtime contract." >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
}
|
|
||||||
|
|
||||||
build_runtime_prompt() {
|
|
||||||
local runtime="$1"
|
|
||||||
local runtime_file
|
|
||||||
runtime_file="$(runtime_contract_path "$runtime")"
|
|
||||||
if [[ ! -f "$runtime_file" ]]; then
|
|
||||||
echo "[mosaic] ERROR: runtime contract not found: $runtime_file" >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Inject active mission context FIRST so the agent sees it immediately
|
|
||||||
local mission_file=".mosaic/orchestrator/mission.json"
|
|
||||||
if [[ -f "$mission_file" ]] && command -v jq &>/dev/null; then
|
|
||||||
local m_status
|
|
||||||
m_status="$(jq -r '.status // "inactive"' "$mission_file" 2>/dev/null)"
|
|
||||||
if [[ "$m_status" == "active" || "$m_status" == "paused" ]]; then
|
|
||||||
local m_name m_id m_count m_completed
|
|
||||||
m_name="$(jq -r '.name // "unnamed"' "$mission_file")"
|
|
||||||
m_id="$(jq -r '.mission_id // ""' "$mission_file")"
|
|
||||||
m_count="$(jq '.milestones | length' "$mission_file")"
|
|
||||||
m_completed="$(jq '[.milestones[] | select(.status == "completed")] | length' "$mission_file")"
|
|
||||||
|
|
||||||
cat <<MISSION_EOF
|
|
||||||
# ACTIVE MISSION — HARD GATE (Read Before Anything Else)
|
|
||||||
|
|
||||||
An active orchestration mission exists in this project. This is a BLOCKING requirement.
|
|
||||||
|
|
||||||
**Mission:** $m_name
|
|
||||||
**ID:** $m_id
|
|
||||||
**Status:** $m_status
|
|
||||||
**Milestones:** $m_completed / $m_count completed
|
|
||||||
|
|
||||||
## MANDATORY — Before ANY Response to the User
|
|
||||||
|
|
||||||
You MUST complete these steps before responding to any user message, including simple greetings:
|
|
||||||
|
|
||||||
1. Read \`~/.config/mosaic/guides/ORCHESTRATOR-PROTOCOL.md\` (mission lifecycle protocol)
|
|
||||||
2. Read \`docs/MISSION-MANIFEST.md\` for full mission scope, milestones, and success criteria
|
|
||||||
3. Read the latest scratchpad in \`docs/scratchpads/\` for session history, decisions, and corrections
|
|
||||||
4. Read \`docs/TASKS.md\` for current task state (what is done, what is next)
|
|
||||||
5. After reading all four, acknowledge the mission state to the user before proceeding
|
|
||||||
|
|
||||||
If the user gives a task, execute it within the mission context. If no task is given, present mission status and ask how to proceed.
|
|
||||||
|
|
||||||
MISSION_EOF
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Inject PRD status so the agent knows requirements state
|
|
||||||
local prd_file="docs/PRD.md"
|
|
||||||
if [[ -f "$prd_file" ]]; then
|
|
||||||
local prd_sections=0
|
|
||||||
local prd_assumptions=0
|
|
||||||
for entry in "Problem Statement|^#{2,3} .*(problem statement|objective)" \
|
|
||||||
"Scope / Non-Goals|^#{2,3} .*(scope|non.goal|out of scope|in.scope)" \
|
|
||||||
"User Stories / Requirements|^#{2,3} .*(user stor|stakeholder|user.*requirement)" \
|
|
||||||
"Functional Requirements|^#{2,3} .*functional requirement" \
|
|
||||||
"Non-Functional Requirements|^#{2,3} .*non.functional" \
|
|
||||||
"Acceptance Criteria|^#{2,3} .*acceptance criteria" \
|
|
||||||
"Technical Considerations|^#{2,3} .*(technical consideration|constraint|dependenc)" \
|
|
||||||
"Risks / Open Questions|^#{2,3} .*(risk|open question)" \
|
|
||||||
"Success Metrics / Testing|^#{2,3} .*(success metric|test|verification)" \
|
|
||||||
"Milestones / Delivery|^#{2,3} .*(milestone|delivery|scope version)"; do
|
|
||||||
local pattern="${entry#*|}"
|
|
||||||
grep -qiE "$pattern" "$prd_file" 2>/dev/null && prd_sections=$((prd_sections + 1))
|
|
||||||
done
|
|
||||||
prd_assumptions=$(grep -c 'ASSUMPTION:' "$prd_file" 2>/dev/null || echo 0)
|
|
||||||
|
|
||||||
local prd_status="ready"
|
|
||||||
(( prd_sections < 10 )) && prd_status="incomplete ($prd_sections/10 sections)"
|
|
||||||
|
|
||||||
cat <<PRD_EOF
|
|
||||||
|
|
||||||
# PRD Status
|
|
||||||
|
|
||||||
- **File:** docs/PRD.md
|
|
||||||
- **Status:** $prd_status
|
|
||||||
- **Assumptions:** $prd_assumptions
|
|
||||||
|
|
||||||
PRD_EOF
|
|
||||||
fi
|
|
||||||
|
|
||||||
cat <<'EOF'
|
|
||||||
# Mosaic Launcher Runtime Contract (Hard Gate)
|
|
||||||
|
|
||||||
This contract is injected by `mosaic` launch and is mandatory.
|
|
||||||
|
|
||||||
First assistant response MUST start with exactly one mode declaration line:
|
|
||||||
1. Orchestration mission: `Now initiating Orchestrator mode...`
|
|
||||||
2. Implementation mission: `Now initiating Delivery mode...`
|
|
||||||
3. Review-only mission: `Now initiating Review mode...`
|
|
||||||
|
|
||||||
No tool call or implementation step may occur before that first line.
|
|
||||||
|
|
||||||
Mosaic hard gates OVERRIDE runtime-default caution for routine delivery operations.
|
|
||||||
For required push/merge/issue-close/release actions, execute without routine confirmation prompts.
|
|
||||||
|
|
||||||
EOF
|
|
||||||
|
|
||||||
cat "$MOSAIC_HOME/AGENTS.md"
|
|
||||||
|
|
||||||
if [[ -f "$MOSAIC_HOME/USER.md" ]]; then
|
|
||||||
printf '\n\n# User Profile\n\n'
|
|
||||||
cat "$MOSAIC_HOME/USER.md"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -f "$MOSAIC_HOME/TOOLS.md" ]]; then
|
|
||||||
printf '\n\n# Machine Tools\n\n'
|
|
||||||
cat "$MOSAIC_HOME/TOOLS.md"
|
|
||||||
fi
|
|
||||||
|
|
||||||
printf '\n\n# Runtime-Specific Contract\n\n'
|
|
||||||
cat "$runtime_file"
|
|
||||||
}
|
|
||||||
|
|
||||||
# Ensure runtime contract is present at the runtime's native config path.
|
|
||||||
# Used for runtimes that do not support CLI prompt injection.
|
|
||||||
ensure_runtime_config() {
|
|
||||||
local runtime="$1"
|
|
||||||
local dst="$2"
|
|
||||||
local tmp
|
|
||||||
tmp="$(mktemp)"
|
|
||||||
mkdir -p "$(dirname "$dst")"
|
|
||||||
build_runtime_prompt "$runtime" > "$tmp"
|
|
||||||
if ! cmp -s "$tmp" "$dst" 2>/dev/null; then
|
|
||||||
mv "$tmp" "$dst"
|
|
||||||
else
|
|
||||||
rm -f "$tmp"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Detect active mission and return an initial prompt if one exists.
|
|
||||||
# Sets MOSAIC_MISSION_PROMPT as a side effect.
|
|
||||||
_detect_mission_prompt() {
|
|
||||||
MOSAIC_MISSION_PROMPT=""
|
|
||||||
local mission_file=".mosaic/orchestrator/mission.json"
|
|
||||||
if [[ -f "$mission_file" ]] && command -v jq &>/dev/null; then
|
|
||||||
local m_status
|
|
||||||
m_status="$(jq -r '.status // "inactive"' "$mission_file" 2>/dev/null)"
|
|
||||||
if [[ "$m_status" == "active" || "$m_status" == "paused" ]]; then
|
|
||||||
local m_name
|
|
||||||
m_name="$(jq -r '.name // "unnamed"' "$mission_file")"
|
|
||||||
MOSAIC_MISSION_PROMPT="Active mission detected: ${m_name}. Read the mission state files and report status."
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# Write a session lock if an active mission exists in the current directory.
|
|
||||||
# Called before exec so $$ captures the PID that will become the agent process.
|
|
||||||
_write_launcher_session_lock() {
|
|
||||||
local runtime="$1"
|
|
||||||
local mission_file=".mosaic/orchestrator/mission.json"
|
|
||||||
local lock_file=".mosaic/orchestrator/session.lock"
|
|
||||||
|
|
||||||
# Only write lock if mission exists and is active
|
|
||||||
[[ -f "$mission_file" ]] || return 0
|
|
||||||
command -v jq &>/dev/null || return 0
|
|
||||||
|
|
||||||
local m_status
|
|
||||||
m_status="$(jq -r '.status // "inactive"' "$mission_file" 2>/dev/null)"
|
|
||||||
[[ "$m_status" == "active" || "$m_status" == "paused" ]] || return 0
|
|
||||||
|
|
||||||
local session_id
|
|
||||||
session_id="${runtime}-$(date +%Y%m%d-%H%M%S)-$$"
|
|
||||||
|
|
||||||
jq -n \
|
|
||||||
--arg sid "$session_id" \
|
|
||||||
--arg rt "$runtime" \
|
|
||||||
--arg pid "$$" \
|
|
||||||
--arg ts "$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
|
|
||||||
--arg pp "$(pwd)" \
|
|
||||||
--arg mid "" \
|
|
||||||
'{
|
|
||||||
session_id: $sid,
|
|
||||||
runtime: $rt,
|
|
||||||
pid: ($pid | tonumber),
|
|
||||||
started_at: $ts,
|
|
||||||
project_path: $pp,
|
|
||||||
milestone_id: $mid
|
|
||||||
}' > "$lock_file"
|
|
||||||
}
|
|
||||||
|
|
||||||
# Clean up session lock on exit (covers normal exit + signals).
|
|
||||||
# Registered via trap after _write_launcher_session_lock succeeds.
|
|
||||||
_cleanup_session_lock() {
|
|
||||||
rm -f ".mosaic/orchestrator/session.lock" 2>/dev/null
|
|
||||||
}
|
|
||||||
|
|
||||||
# Launcher functions
|
|
||||||
launch_claude() {
|
|
||||||
check_mosaic_home
|
|
||||||
check_agents_md
|
|
||||||
check_soul
|
|
||||||
check_runtime "claude"
|
|
||||||
check_sequential_thinking "claude"
|
|
||||||
|
|
||||||
_check_resumable_session
|
|
||||||
|
|
||||||
# Claude supports --append-system-prompt for direct injection
|
|
||||||
local runtime_prompt
|
|
||||||
runtime_prompt="$(build_runtime_prompt "claude")"
|
|
||||||
|
|
||||||
# If active mission exists and no user prompt was given, inject initial prompt
|
|
||||||
_detect_mission_prompt
|
|
||||||
_write_launcher_session_lock "claude"
|
|
||||||
trap _cleanup_session_lock EXIT INT TERM
|
|
||||||
if [[ -n "$MOSAIC_MISSION_PROMPT" && $# -eq 0 ]]; then
|
|
||||||
echo "[mosaic] Launching Claude Code (active mission detected)..."
|
|
||||||
exec claude --append-system-prompt "$runtime_prompt" "$MOSAIC_MISSION_PROMPT"
|
|
||||||
else
|
|
||||||
echo "[mosaic] Launching Claude Code..."
|
|
||||||
exec claude --append-system-prompt "$runtime_prompt" "$@"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
launch_opencode() {
|
|
||||||
check_mosaic_home
|
|
||||||
check_agents_md
|
|
||||||
check_soul
|
|
||||||
check_runtime "opencode"
|
|
||||||
check_sequential_thinking "opencode"
|
|
||||||
|
|
||||||
_check_resumable_session
|
|
||||||
|
|
||||||
# OpenCode reads from ~/.config/opencode/AGENTS.md
|
|
||||||
ensure_runtime_config "opencode" "$HOME/.config/opencode/AGENTS.md"
|
|
||||||
_write_launcher_session_lock "opencode"
|
|
||||||
trap _cleanup_session_lock EXIT INT TERM
|
|
||||||
echo "[mosaic] Launching OpenCode..."
|
|
||||||
exec opencode "$@"
|
|
||||||
}
|
|
||||||
|
|
||||||
launch_codex() {
|
|
||||||
check_mosaic_home
|
|
||||||
check_agents_md
|
|
||||||
check_soul
|
|
||||||
check_runtime "codex"
|
|
||||||
check_sequential_thinking "codex"
|
|
||||||
|
|
||||||
_check_resumable_session
|
|
||||||
|
|
||||||
# Codex reads from ~/.codex/instructions.md
|
|
||||||
ensure_runtime_config "codex" "$HOME/.codex/instructions.md"
|
|
||||||
_detect_mission_prompt
|
|
||||||
_write_launcher_session_lock "codex"
|
|
||||||
trap _cleanup_session_lock EXIT INT TERM
|
|
||||||
if [[ -n "$MOSAIC_MISSION_PROMPT" && $# -eq 0 ]]; then
|
|
||||||
echo "[mosaic] Launching Codex (active mission detected)..."
|
|
||||||
exec codex "$MOSAIC_MISSION_PROMPT"
|
|
||||||
else
|
|
||||||
echo "[mosaic] Launching Codex..."
|
|
||||||
exec codex "$@"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
launch_pi() {
|
|
||||||
check_mosaic_home
|
|
||||||
check_agents_md
|
|
||||||
check_soul
|
|
||||||
check_runtime "pi"
|
|
||||||
# Pi has native thinking levels — no sequential-thinking gate required
|
|
||||||
|
|
||||||
_check_resumable_session
|
|
||||||
|
|
||||||
local runtime_prompt
|
|
||||||
runtime_prompt="$(build_runtime_prompt "pi")"
|
|
||||||
|
|
||||||
# Build skill args from Mosaic skills directories (canonical + local)
|
|
||||||
local -a skill_args=()
|
|
||||||
for skills_root in "$MOSAIC_HOME/skills" "$MOSAIC_HOME/skills-local"; do
|
|
||||||
[[ -d "$skills_root" ]] || continue
|
|
||||||
for skill_dir in "$skills_root"/*/; do
|
|
||||||
[[ -f "${skill_dir}SKILL.md" ]] && skill_args+=(--skill "$skill_dir")
|
|
||||||
done
|
|
||||||
done
|
|
||||||
|
|
||||||
# Load Mosaic extension if present
|
|
||||||
local -a ext_args=()
|
|
||||||
local mosaic_ext="$MOSAIC_HOME/runtime/pi/mosaic-extension.ts"
|
|
||||||
[[ -f "$mosaic_ext" ]] && ext_args=(--extension "$mosaic_ext")
|
|
||||||
|
|
||||||
_detect_mission_prompt
|
|
||||||
_write_launcher_session_lock "pi"
|
|
||||||
trap _cleanup_session_lock EXIT INT TERM
|
|
||||||
if [[ -n "$MOSAIC_MISSION_PROMPT" && $# -eq 0 ]]; then
|
|
||||||
echo "[mosaic] Launching Pi (active mission detected)..."
|
|
||||||
exec pi --append-system-prompt "$runtime_prompt" \
|
|
||||||
"${skill_args[@]}" "${ext_args[@]}" "$MOSAIC_MISSION_PROMPT"
|
|
||||||
else
|
|
||||||
echo "[mosaic] Launching Pi..."
|
|
||||||
exec pi --append-system-prompt "$runtime_prompt" \
|
|
||||||
"${skill_args[@]}" "${ext_args[@]}" "$@"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
launch_yolo() {
|
|
||||||
if [[ $# -eq 0 ]]; then
|
|
||||||
echo "[mosaic] ERROR: yolo requires a runtime (claude|codex|opencode|pi)." >&2
|
|
||||||
echo "[mosaic] Example: mosaic yolo claude" >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
local runtime="$1"
|
|
||||||
shift
|
|
||||||
|
|
||||||
case "$runtime" in
|
|
||||||
claude)
|
|
||||||
check_mosaic_home
|
|
||||||
check_agents_md
|
|
||||||
check_soul
|
|
||||||
check_runtime "claude"
|
|
||||||
check_sequential_thinking "claude"
|
|
||||||
|
|
||||||
# Claude uses an explicit dangerous permissions flag.
|
|
||||||
local runtime_prompt
|
|
||||||
runtime_prompt="$(build_runtime_prompt "claude")"
|
|
||||||
|
|
||||||
_detect_mission_prompt
|
|
||||||
_write_launcher_session_lock "claude"
|
|
||||||
trap _cleanup_session_lock EXIT INT TERM
|
|
||||||
if [[ -n "$MOSAIC_MISSION_PROMPT" && $# -eq 0 ]]; then
|
|
||||||
echo "[mosaic] Launching Claude Code in YOLO mode (active mission detected)..."
|
|
||||||
exec claude --dangerously-skip-permissions --append-system-prompt "$runtime_prompt" "$MOSAIC_MISSION_PROMPT"
|
|
||||||
else
|
|
||||||
echo "[mosaic] Launching Claude Code in YOLO mode (dangerous permissions enabled)..."
|
|
||||||
exec claude --dangerously-skip-permissions --append-system-prompt "$runtime_prompt" "$@"
|
|
||||||
fi
|
|
||||||
;;
|
|
||||||
codex)
|
|
||||||
check_mosaic_home
|
|
||||||
check_agents_md
|
|
||||||
check_soul
|
|
||||||
check_runtime "codex"
|
|
||||||
check_sequential_thinking "codex"
|
|
||||||
|
|
||||||
# Codex reads instructions.md from ~/.codex and supports a direct dangerous flag.
|
|
||||||
ensure_runtime_config "codex" "$HOME/.codex/instructions.md"
|
|
||||||
_detect_mission_prompt
|
|
||||||
_write_launcher_session_lock "codex"
|
|
||||||
trap _cleanup_session_lock EXIT INT TERM
|
|
||||||
if [[ -n "$MOSAIC_MISSION_PROMPT" && $# -eq 0 ]]; then
|
|
||||||
echo "[mosaic] Launching Codex in YOLO mode (active mission detected)..."
|
|
||||||
exec codex --dangerously-bypass-approvals-and-sandbox "$MOSAIC_MISSION_PROMPT"
|
|
||||||
else
|
|
||||||
echo "[mosaic] Launching Codex in YOLO mode (dangerous permissions enabled)..."
|
|
||||||
exec codex --dangerously-bypass-approvals-and-sandbox "$@"
|
|
||||||
fi
|
|
||||||
;;
|
|
||||||
opencode)
|
|
||||||
check_mosaic_home
|
|
||||||
check_agents_md
|
|
||||||
check_soul
|
|
||||||
check_runtime "opencode"
|
|
||||||
check_sequential_thinking "opencode"
|
|
||||||
|
|
||||||
# OpenCode defaults to allow-all permissions unless user config restricts them.
|
|
||||||
ensure_runtime_config "opencode" "$HOME/.config/opencode/AGENTS.md"
|
|
||||||
_write_launcher_session_lock "opencode"
|
|
||||||
trap _cleanup_session_lock EXIT INT TERM
|
|
||||||
echo "[mosaic] Launching OpenCode in YOLO mode..."
|
|
||||||
exec opencode "$@"
|
|
||||||
;;
|
|
||||||
pi)
|
|
||||||
# Pi has no permission restrictions — yolo is identical to normal launch
|
|
||||||
launch_pi "$@"
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
echo "[mosaic] ERROR: Unsupported yolo runtime '$runtime'. Use claude|codex|opencode|pi." >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
}
|
|
||||||
|
|
||||||
# Delegate to existing scripts
|
|
||||||
run_init() {
|
|
||||||
# Prefer wizard if Node.js and bundle are available
|
|
||||||
local wizard_bin="$MOSAIC_HOME/dist/mosaic-wizard.mjs"
|
|
||||||
if command -v node >/dev/null 2>&1 && [[ -f "$wizard_bin" ]]; then
|
|
||||||
exec node "$wizard_bin" "$@"
|
|
||||||
fi
|
|
||||||
# Fallback to legacy bash wizard
|
|
||||||
check_mosaic_home
|
|
||||||
exec "$MOSAIC_HOME/bin/mosaic-init" "$@"
|
|
||||||
}
|
|
||||||
|
|
||||||
run_doctor() {
|
|
||||||
check_mosaic_home
|
|
||||||
exec "$MOSAIC_HOME/bin/mosaic-doctor" "$@"
|
|
||||||
}
|
|
||||||
|
|
||||||
run_sync() {
|
|
||||||
check_mosaic_home
|
|
||||||
exec "$MOSAIC_HOME/bin/mosaic-sync-skills" "$@"
|
|
||||||
}
|
|
||||||
|
|
||||||
run_seq() {
|
|
||||||
check_mosaic_home
|
|
||||||
local checker="$MOSAIC_HOME/bin/mosaic-ensure-sequential-thinking"
|
|
||||||
local action="${1:-check}"
|
|
||||||
|
|
||||||
case "$action" in
|
|
||||||
check)
|
|
||||||
shift || true
|
|
||||||
exec "$checker" --check "$@"
|
|
||||||
;;
|
|
||||||
fix|apply)
|
|
||||||
shift || true
|
|
||||||
exec "$checker" "$@"
|
|
||||||
;;
|
|
||||||
start)
|
|
||||||
shift || true
|
|
||||||
check_runtime "npx"
|
|
||||||
echo "[mosaic] Starting sequential-thinking MCP server..."
|
|
||||||
exec npx -y @modelcontextprotocol/server-sequential-thinking "$@"
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
echo "[mosaic] ERROR: Unknown seq subcommand '$action'." >&2
|
|
||||||
echo "[mosaic] Use: mosaic seq check|fix|start" >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
}
|
|
||||||
|
|
||||||
run_coord() {
|
|
||||||
check_mosaic_home
|
|
||||||
local runtime="claude"
|
|
||||||
local runtime_flag=""
|
|
||||||
local yolo_flag=""
|
|
||||||
local -a coord_args=()
|
|
||||||
|
|
||||||
while [[ $# -gt 0 ]]; do
|
|
||||||
case "$1" in
|
|
||||||
--claude|--codex|--pi)
|
|
||||||
local selected_runtime="${1#--}"
|
|
||||||
if [[ -n "$runtime_flag" ]] && [[ "$runtime" != "$selected_runtime" ]]; then
|
|
||||||
echo "[mosaic] ERROR: --claude, --codex, and --pi are mutually exclusive for 'mosaic coord'." >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
runtime="$selected_runtime"
|
|
||||||
runtime_flag="$1"
|
|
||||||
shift
|
|
||||||
;;
|
|
||||||
--yolo)
|
|
||||||
yolo_flag="--yolo"
|
|
||||||
shift
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
coord_args+=("$1")
|
|
||||||
shift
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
done
|
|
||||||
|
|
||||||
local subcmd="${coord_args[0]:-help}"
|
|
||||||
if (( ${#coord_args[@]} > 1 )); then
|
|
||||||
set -- "${coord_args[@]:1}"
|
|
||||||
else
|
|
||||||
set --
|
|
||||||
fi
|
|
||||||
|
|
||||||
local tool_dir="$MOSAIC_HOME/tools/orchestrator"
|
|
||||||
|
|
||||||
case "$subcmd" in
|
|
||||||
status|session)
|
|
||||||
MOSAIC_COORD_RUNTIME="$runtime" exec bash "$tool_dir/session-status.sh" "$@"
|
|
||||||
;;
|
|
||||||
init)
|
|
||||||
MOSAIC_COORD_RUNTIME="$runtime" exec bash "$tool_dir/mission-init.sh" "$@"
|
|
||||||
;;
|
|
||||||
mission|progress)
|
|
||||||
MOSAIC_COORD_RUNTIME="$runtime" exec bash "$tool_dir/mission-status.sh" "$@"
|
|
||||||
;;
|
|
||||||
continue|next)
|
|
||||||
MOSAIC_COORD_RUNTIME="$runtime" exec bash "$tool_dir/continue-prompt.sh" "$@"
|
|
||||||
;;
|
|
||||||
run|start)
|
|
||||||
MOSAIC_COORD_RUNTIME="$runtime" exec bash "$tool_dir/session-run.sh" ${yolo_flag:+"$yolo_flag"} "$@"
|
|
||||||
;;
|
|
||||||
smoke|test)
|
|
||||||
MOSAIC_COORD_RUNTIME="$runtime" exec bash "$tool_dir/smoke-test.sh" "$@"
|
|
||||||
;;
|
|
||||||
resume|recover)
|
|
||||||
MOSAIC_COORD_RUNTIME="$runtime" exec bash "$tool_dir/session-resume.sh" "$@"
|
|
||||||
;;
|
|
||||||
help|*)
|
|
||||||
cat <<COORD_USAGE
|
|
||||||
mosaic coord — r0 manual coordinator tools
|
|
||||||
|
|
||||||
Commands:
|
|
||||||
init --name <name> [opts] Initialize a new mission
|
|
||||||
mission [--project <path>] Show mission progress dashboard
|
|
||||||
status [--project <path>] Check agent session health
|
|
||||||
continue [--project <path>] Generate continuation prompt for next session
|
|
||||||
run [--project <path>] Generate context and launch selected runtime
|
|
||||||
smoke Run orchestration behavior smoke checks
|
|
||||||
resume [--project <path>] Crash recovery (detect dirty state, generate fix)
|
|
||||||
|
|
||||||
Runtime:
|
|
||||||
--claude Use Claude runtime hints/prompts (default)
|
|
||||||
--codex Use Codex runtime hints/prompts
|
|
||||||
--pi Use Pi runtime hints/prompts
|
|
||||||
--yolo Launch runtime in dangerous/skip-permissions mode (run only)
|
|
||||||
|
|
||||||
Examples:
|
|
||||||
mosaic coord init --name "Security Fix" --milestones "Critical,High,Medium"
|
|
||||||
mosaic coord mission
|
|
||||||
mosaic coord --codex mission
|
|
||||||
mosaic coord --pi run
|
|
||||||
mosaic coord continue --copy
|
|
||||||
mosaic coord run
|
|
||||||
mosaic coord run --codex
|
|
||||||
mosaic coord --yolo run
|
|
||||||
mosaic coord smoke
|
|
||||||
mosaic coord continue --codex --copy
|
|
||||||
|
|
||||||
COORD_USAGE
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
}
|
|
||||||
|
|
||||||
# Resume advisory — prints warning if active mission or stale session detected
|
|
||||||
_check_resumable_session() {
|
|
||||||
local mission_file=".mosaic/orchestrator/mission.json"
|
|
||||||
local lock_file=".mosaic/orchestrator/session.lock"
|
|
||||||
|
|
||||||
command -v jq &>/dev/null || return 0
|
|
||||||
|
|
||||||
if [[ -f "$lock_file" ]]; then
|
|
||||||
local pid
|
|
||||||
pid="$(jq -r '.pid // 0' "$lock_file" 2>/dev/null)"
|
|
||||||
if [[ -n "$pid" ]] && [[ "$pid" != "0" ]] && ! kill -0 "$pid" 2>/dev/null; then
|
|
||||||
# Stale lock from a dead session — clean it up
|
|
||||||
rm -f "$lock_file"
|
|
||||||
echo "[mosaic] Cleaned up stale session lock (PID $pid no longer running)."
|
|
||||||
echo ""
|
|
||||||
fi
|
|
||||||
elif [[ -f "$mission_file" ]]; then
|
|
||||||
local status
|
|
||||||
status="$(jq -r '.status // "inactive"' "$mission_file" 2>/dev/null)"
|
|
||||||
if [[ "$status" == "active" ]]; then
|
|
||||||
echo "[mosaic] Active mission detected. Generate continuation prompt with:"
|
|
||||||
echo "[mosaic] mosaic coord continue"
|
|
||||||
echo ""
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
run_prdy() {
|
|
||||||
check_mosaic_home
|
|
||||||
local runtime="claude"
|
|
||||||
local runtime_flag=""
|
|
||||||
local -a prdy_args=()
|
|
||||||
|
|
||||||
while [[ $# -gt 0 ]]; do
|
|
||||||
case "$1" in
|
|
||||||
--claude|--codex|--pi)
|
|
||||||
local selected_runtime="${1#--}"
|
|
||||||
if [[ -n "$runtime_flag" ]] && [[ "$runtime" != "$selected_runtime" ]]; then
|
|
||||||
echo "[mosaic] ERROR: --claude, --codex, and --pi are mutually exclusive for 'mosaic prdy'." >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
runtime="$selected_runtime"
|
|
||||||
runtime_flag="$1"
|
|
||||||
shift
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
prdy_args+=("$1")
|
|
||||||
shift
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
done
|
|
||||||
|
|
||||||
local subcmd="${prdy_args[0]:-help}"
|
|
||||||
if (( ${#prdy_args[@]} > 1 )); then
|
|
||||||
set -- "${prdy_args[@]:1}"
|
|
||||||
else
|
|
||||||
set --
|
|
||||||
fi
|
|
||||||
|
|
||||||
local tool_dir="$MOSAIC_HOME/tools/prdy"
|
|
||||||
|
|
||||||
case "$subcmd" in
|
|
||||||
init)
|
|
||||||
MOSAIC_PRDY_RUNTIME="$runtime" exec bash "$tool_dir/prdy-init.sh" "$@"
|
|
||||||
;;
|
|
||||||
update)
|
|
||||||
MOSAIC_PRDY_RUNTIME="$runtime" exec bash "$tool_dir/prdy-update.sh" "$@"
|
|
||||||
;;
|
|
||||||
validate|check)
|
|
||||||
MOSAIC_PRDY_RUNTIME="$runtime" exec bash "$tool_dir/prdy-validate.sh" "$@"
|
|
||||||
;;
|
|
||||||
status)
|
|
||||||
exec bash "$tool_dir/prdy-status.sh" "$@"
|
|
||||||
;;
|
|
||||||
help|*)
|
|
||||||
cat <<PRDY_USAGE
|
|
||||||
mosaic prdy — PRD creation and validation tools
|
|
||||||
|
|
||||||
Commands:
|
|
||||||
init [--project <path>] [--name <feature>] Create docs/PRD.md via guided runtime session
|
|
||||||
update [--project <path>] Update existing docs/PRD.md via guided runtime session
|
|
||||||
validate [--project <path>] Check PRD completeness against Mosaic guide (bash-only)
|
|
||||||
status [--project <path>] [--format short|json] Quick PRD health check (one-liner)
|
|
||||||
|
|
||||||
Runtime:
|
|
||||||
--claude Use Claude runtime (default)
|
|
||||||
--codex Use Codex runtime
|
|
||||||
--pi Use Pi runtime
|
|
||||||
|
|
||||||
Examples:
|
|
||||||
mosaic prdy init --name "User Authentication"
|
|
||||||
mosaic prdy update
|
|
||||||
mosaic prdy --pi init --name "User Authentication"
|
|
||||||
mosaic prdy --codex init --name "User Authentication"
|
|
||||||
mosaic prdy validate
|
|
||||||
|
|
||||||
Output location: docs/PRD.md (per Mosaic PRD guide)
|
|
||||||
|
|
||||||
PRDY_USAGE
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
}
|
|
||||||
|
|
||||||
run_bootstrap() {
|
|
||||||
check_mosaic_home
|
|
||||||
exec "$MOSAIC_HOME/bin/mosaic-bootstrap-repo" "$@"
|
|
||||||
}
|
|
||||||
|
|
||||||
run_release_upgrade() {
|
|
||||||
check_mosaic_home
|
|
||||||
exec "$MOSAIC_HOME/bin/mosaic-release-upgrade" "$@"
|
|
||||||
}
|
|
||||||
|
|
||||||
run_project_upgrade() {
|
|
||||||
check_mosaic_home
|
|
||||||
exec "$MOSAIC_HOME/bin/mosaic-upgrade" "$@"
|
|
||||||
}
|
|
||||||
|
|
||||||
run_upgrade() {
|
|
||||||
check_mosaic_home
|
|
||||||
|
|
||||||
# Default: upgrade installed release
|
|
||||||
if [[ $# -eq 0 ]]; then
|
|
||||||
run_release_upgrade
|
|
||||||
fi
|
|
||||||
|
|
||||||
case "$1" in
|
|
||||||
release)
|
|
||||||
shift
|
|
||||||
run_release_upgrade "$@"
|
|
||||||
;;
|
|
||||||
check)
|
|
||||||
shift
|
|
||||||
run_release_upgrade --dry-run "$@"
|
|
||||||
;;
|
|
||||||
project)
|
|
||||||
shift
|
|
||||||
run_project_upgrade "$@"
|
|
||||||
;;
|
|
||||||
|
|
||||||
# Backward compatibility for historical project-upgrade usage.
|
|
||||||
--all|--root)
|
|
||||||
run_project_upgrade "$@"
|
|
||||||
;;
|
|
||||||
--dry-run|--ref|--keep|--overwrite|-y|--yes)
|
|
||||||
run_release_upgrade "$@"
|
|
||||||
;;
|
|
||||||
-*)
|
|
||||||
run_release_upgrade "$@"
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
run_project_upgrade "$@"
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
}
|
|
||||||
|
|
||||||
# Main router
|
|
||||||
if [[ $# -eq 0 ]]; then
|
|
||||||
usage
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
command="$1"
|
|
||||||
shift
|
|
||||||
|
|
||||||
case "$command" in
|
|
||||||
pi) launch_pi "$@" ;;
|
|
||||||
claude) launch_claude "$@" ;;
|
|
||||||
opencode) launch_opencode "$@" ;;
|
|
||||||
codex) launch_codex "$@" ;;
|
|
||||||
yolo|--yolo) launch_yolo "$@" ;;
|
|
||||||
init) run_init "$@" ;;
|
|
||||||
doctor) run_doctor "$@" ;;
|
|
||||||
sync) run_sync "$@" ;;
|
|
||||||
seq) run_seq "$@" ;;
|
|
||||||
bootstrap) run_bootstrap "$@" ;;
|
|
||||||
prdy) run_prdy "$@" ;;
|
|
||||||
coord) run_coord "$@" ;;
|
|
||||||
upgrade) run_upgrade "$@" ;;
|
|
||||||
release-upgrade) run_release_upgrade "$@" ;;
|
|
||||||
project-upgrade) run_project_upgrade "$@" ;;
|
|
||||||
help|-h|--help) usage ;;
|
|
||||||
version|-v|--version) echo "mosaic $VERSION" ;;
|
|
||||||
*)
|
|
||||||
echo "[mosaic] Unknown command: $command" >&2
|
|
||||||
echo "[mosaic] Run 'mosaic --help' for usage." >&2
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
@@ -1,437 +0,0 @@
|
|||||||
# mosaic.ps1 — Unified agent launcher and management CLI (Windows)
|
|
||||||
#
|
|
||||||
# AGENTS.md is the global policy source for all agent sessions.
|
|
||||||
# The launcher injects a composed runtime contract (AGENTS + runtime reference).
|
|
||||||
#
|
|
||||||
# Usage:
|
|
||||||
# mosaic claude [args...] Launch Claude Code with runtime contract injected
|
|
||||||
# mosaic opencode [args...] Launch OpenCode with runtime contract injected
|
|
||||||
# mosaic codex [args...] Launch Codex with runtime contract injected
|
|
||||||
# mosaic yolo <runtime> [args...] Launch runtime in dangerous-permissions mode
|
|
||||||
# mosaic --yolo <runtime> [args...] Alias for yolo
|
|
||||||
# mosaic init [args...] Generate SOUL.md interactively
|
|
||||||
# mosaic doctor [args...] Health audit
|
|
||||||
# mosaic sync [args...] Sync skills
|
|
||||||
$ErrorActionPreference = "Stop"
|
|
||||||
|
|
||||||
$MosaicHome = if ($env:MOSAIC_HOME) { $env:MOSAIC_HOME } else { Join-Path $env:USERPROFILE ".config\mosaic" }
|
|
||||||
$Version = "0.1.0"
|
|
||||||
|
|
||||||
function Show-Usage {
|
|
||||||
Write-Host @"
|
|
||||||
mosaic $Version - Unified agent launcher
|
|
||||||
|
|
||||||
Usage: mosaic <command> [args...]
|
|
||||||
|
|
||||||
Agent Launchers:
|
|
||||||
claude [args...] Launch Claude Code with runtime contract injected
|
|
||||||
opencode [args...] Launch OpenCode with runtime contract injected
|
|
||||||
codex [args...] Launch Codex with runtime contract injected
|
|
||||||
yolo <runtime> [args...] Dangerous mode for claude|codex|opencode
|
|
||||||
--yolo <runtime> [args...] Alias for yolo
|
|
||||||
|
|
||||||
Management:
|
|
||||||
init [args...] Generate SOUL.md (agent identity contract)
|
|
||||||
doctor [args...] Audit runtime state and detect drift
|
|
||||||
sync [args...] Sync skills from canonical source
|
|
||||||
bootstrap <path> Bootstrap a repo with Mosaic standards
|
|
||||||
upgrade [mode] [args] Upgrade release (default) or project files
|
|
||||||
upgrade check Check release upgrade status (no changes)
|
|
||||||
release-upgrade [...] Upgrade installed Mosaic release
|
|
||||||
project-upgrade [...] Clean up stale SOUL.md/CLAUDE.md in a project
|
|
||||||
|
|
||||||
Options:
|
|
||||||
-h, --help Show this help
|
|
||||||
-v, --version Show version
|
|
||||||
"@
|
|
||||||
}
|
|
||||||
|
|
||||||
function Assert-MosaicHome {
|
|
||||||
if (-not (Test-Path $MosaicHome)) {
|
|
||||||
Write-Host "[mosaic] ERROR: ~/.config/mosaic not found." -ForegroundColor Red
|
|
||||||
Write-Host "[mosaic] Install with: bash <(curl -fsSL https://git.mosaicstack.dev/mosaic/mosaic-stack/raw/branch/main/tools/install.sh)"
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function Assert-AgentsMd {
|
|
||||||
$agentsPath = Join-Path $MosaicHome "AGENTS.md"
|
|
||||||
if (-not (Test-Path $agentsPath)) {
|
|
||||||
Write-Host "[mosaic] ERROR: ~/.config/mosaic/AGENTS.md not found." -ForegroundColor Red
|
|
||||||
Write-Host "[mosaic] Re-run the installer."
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function Assert-Soul {
|
|
||||||
$soulPath = Join-Path $MosaicHome "SOUL.md"
|
|
||||||
if (-not (Test-Path $soulPath)) {
|
|
||||||
Write-Host "[mosaic] SOUL.md not found. Running mosaic init..."
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-init.ps1")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function Assert-Runtime {
|
|
||||||
param([string]$Cmd)
|
|
||||||
if (-not (Get-Command $Cmd -ErrorAction SilentlyContinue)) {
|
|
||||||
Write-Host "[mosaic] ERROR: '$Cmd' not found in PATH." -ForegroundColor Red
|
|
||||||
Write-Host "[mosaic] Install $Cmd before launching."
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function Assert-SequentialThinking {
|
|
||||||
$checker = Join-Path $MosaicHome "bin\mosaic-ensure-sequential-thinking.ps1"
|
|
||||||
if (-not (Test-Path $checker)) {
|
|
||||||
Write-Host "[mosaic] ERROR: sequential-thinking checker missing: $checker" -ForegroundColor Red
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
& $checker -Check *>$null
|
|
||||||
}
|
|
||||||
catch {
|
|
||||||
Write-Host "[mosaic] ERROR: sequential-thinking MCP is required but not configured." -ForegroundColor Red
|
|
||||||
Write-Host "[mosaic] Run: $checker"
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function Get-ActiveMission {
|
|
||||||
$missionFile = Join-Path (Get-Location) ".mosaic\orchestrator\mission.json"
|
|
||||||
if (-not (Test-Path $missionFile)) {
|
|
||||||
return $null
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
$mission = Get-Content $missionFile -Raw | ConvertFrom-Json
|
|
||||||
}
|
|
||||||
catch {
|
|
||||||
return $null
|
|
||||||
}
|
|
||||||
|
|
||||||
$status = [string]$mission.status
|
|
||||||
if ([string]::IsNullOrWhiteSpace($status)) {
|
|
||||||
$status = "inactive"
|
|
||||||
}
|
|
||||||
if ($status -ne "active" -and $status -ne "paused") {
|
|
||||||
return $null
|
|
||||||
}
|
|
||||||
|
|
||||||
$name = [string]$mission.name
|
|
||||||
if ([string]::IsNullOrWhiteSpace($name)) {
|
|
||||||
$name = "unnamed"
|
|
||||||
}
|
|
||||||
|
|
||||||
$id = [string]$mission.mission_id
|
|
||||||
if ([string]::IsNullOrWhiteSpace($id)) {
|
|
||||||
$id = ""
|
|
||||||
}
|
|
||||||
|
|
||||||
$milestones = @($mission.milestones)
|
|
||||||
$milestoneCount = $milestones.Count
|
|
||||||
$milestoneCompleted = @($milestones | Where-Object { $_.status -eq "completed" }).Count
|
|
||||||
|
|
||||||
return [PSCustomObject]@{
|
|
||||||
Name = $name
|
|
||||||
Id = $id
|
|
||||||
Status = $status
|
|
||||||
MilestoneCount = $milestoneCount
|
|
||||||
MilestoneCompleted = $milestoneCompleted
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function Get-MissionContractBlock {
|
|
||||||
$mission = Get-ActiveMission
|
|
||||||
if ($null -eq $mission) {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
return @"
|
|
||||||
# ACTIVE MISSION — HARD GATE (Read Before Anything Else)
|
|
||||||
|
|
||||||
An active orchestration mission exists in this project. This is a BLOCKING requirement.
|
|
||||||
|
|
||||||
**Mission:** $($mission.Name)
|
|
||||||
**ID:** $($mission.Id)
|
|
||||||
**Status:** $($mission.Status)
|
|
||||||
**Milestones:** $($mission.MilestoneCompleted) / $($mission.MilestoneCount) completed
|
|
||||||
|
|
||||||
## MANDATORY — Before ANY Response to the User
|
|
||||||
|
|
||||||
You MUST complete these steps before responding to any user message, including simple greetings:
|
|
||||||
|
|
||||||
1. Read `~/.config/mosaic/guides/ORCHESTRATOR-PROTOCOL.md` (mission lifecycle protocol)
|
|
||||||
2. Read `docs/MISSION-MANIFEST.md` for full mission scope, milestones, and success criteria
|
|
||||||
3. Read the latest scratchpad in `docs/scratchpads/` for session history, decisions, and corrections
|
|
||||||
4. Read `docs/TASKS.md` for current task state (what is done, what is next)
|
|
||||||
5. After reading all four, acknowledge the mission state to the user before proceeding
|
|
||||||
|
|
||||||
If the user gives a task, execute it within the mission context. If no task is given, present mission status and ask how to proceed.
|
|
||||||
"@
|
|
||||||
}
|
|
||||||
|
|
||||||
function Get-MissionPrompt {
|
|
||||||
$mission = Get-ActiveMission
|
|
||||||
if ($null -eq $mission) {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
return "Active mission detected: $($mission.Name). Read the mission state files and report status."
|
|
||||||
}
|
|
||||||
|
|
||||||
function Get-RuntimePrompt {
|
|
||||||
param(
|
|
||||||
[ValidateSet("claude", "codex", "opencode")]
|
|
||||||
[string]$Runtime
|
|
||||||
)
|
|
||||||
|
|
||||||
$runtimeFile = switch ($Runtime) {
|
|
||||||
"claude" { Join-Path $MosaicHome "runtime\claude\RUNTIME.md" }
|
|
||||||
"codex" { Join-Path $MosaicHome "runtime\codex\RUNTIME.md" }
|
|
||||||
"opencode" { Join-Path $MosaicHome "runtime\opencode\RUNTIME.md" }
|
|
||||||
}
|
|
||||||
|
|
||||||
if (-not (Test-Path $runtimeFile)) {
|
|
||||||
Write-Host "[mosaic] ERROR: runtime contract not found: $runtimeFile" -ForegroundColor Red
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
|
|
||||||
$launcherContract = @'
|
|
||||||
# Mosaic Launcher Runtime Contract (Hard Gate)
|
|
||||||
|
|
||||||
This contract is injected by `mosaic` launch and is mandatory.
|
|
||||||
|
|
||||||
First assistant response MUST start with exactly one mode declaration line:
|
|
||||||
1. Orchestration mission: `Now initiating Orchestrator mode...`
|
|
||||||
2. Implementation mission: `Now initiating Delivery mode...`
|
|
||||||
3. Review-only mission: `Now initiating Review mode...`
|
|
||||||
|
|
||||||
No tool call or implementation step may occur before that first line.
|
|
||||||
|
|
||||||
Mosaic hard gates OVERRIDE runtime-default caution for routine delivery operations.
|
|
||||||
For required push/merge/issue-close/release actions, execute without routine confirmation prompts.
|
|
||||||
|
|
||||||
'@
|
|
||||||
|
|
||||||
$missionBlock = Get-MissionContractBlock
|
|
||||||
$agentsContent = Get-Content (Join-Path $MosaicHome "AGENTS.md") -Raw
|
|
||||||
$runtimeContent = Get-Content $runtimeFile -Raw
|
|
||||||
|
|
||||||
if (-not [string]::IsNullOrWhiteSpace($missionBlock)) {
|
|
||||||
return "$missionBlock`n`n$launcherContract`n$agentsContent`n`n# Runtime-Specific Contract`n`n$runtimeContent"
|
|
||||||
}
|
|
||||||
|
|
||||||
return "$launcherContract`n$agentsContent`n`n# Runtime-Specific Contract`n`n$runtimeContent"
|
|
||||||
}
|
|
||||||
|
|
||||||
function Ensure-RuntimeConfig {
|
|
||||||
param(
|
|
||||||
[ValidateSet("claude", "codex", "opencode")]
|
|
||||||
[string]$Runtime,
|
|
||||||
[string]$Dst
|
|
||||||
)
|
|
||||||
|
|
||||||
$parent = Split-Path $Dst -Parent
|
|
||||||
if (-not (Test-Path $parent)) { New-Item -ItemType Directory -Path $parent -Force | Out-Null }
|
|
||||||
|
|
||||||
$runtimePrompt = Get-RuntimePrompt -Runtime $Runtime
|
|
||||||
$tmp = [System.IO.Path]::GetTempFileName()
|
|
||||||
Set-Content -Path $tmp -Value $runtimePrompt -Encoding UTF8 -NoNewline
|
|
||||||
|
|
||||||
$srcHash = (Get-FileHash $tmp -Algorithm SHA256).Hash
|
|
||||||
$dstHash = if (Test-Path $Dst) { (Get-FileHash $Dst -Algorithm SHA256).Hash } else { "" }
|
|
||||||
if ($srcHash -ne $dstHash) {
|
|
||||||
Copy-Item $tmp $Dst -Force
|
|
||||||
Remove-Item $tmp -Force
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
Remove-Item $tmp -Force
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function Invoke-Yolo {
|
|
||||||
param([string[]]$YoloArgs)
|
|
||||||
|
|
||||||
if ($YoloArgs.Count -lt 1) {
|
|
||||||
Write-Host "[mosaic] ERROR: yolo requires a runtime (claude|codex|opencode)." -ForegroundColor Red
|
|
||||||
Write-Host "[mosaic] Example: mosaic yolo claude"
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
|
|
||||||
$runtime = $YoloArgs[0]
|
|
||||||
$tail = if ($YoloArgs.Count -gt 1) { @($YoloArgs[1..($YoloArgs.Count - 1)]) } else { @() }
|
|
||||||
|
|
||||||
switch ($runtime) {
|
|
||||||
"claude" {
|
|
||||||
Assert-MosaicHome
|
|
||||||
Assert-AgentsMd
|
|
||||||
Assert-Soul
|
|
||||||
Assert-Runtime "claude"
|
|
||||||
Assert-SequentialThinking
|
|
||||||
$agentsContent = Get-RuntimePrompt -Runtime "claude"
|
|
||||||
Write-Host "[mosaic] Launching Claude Code in YOLO mode (dangerous permissions enabled)..."
|
|
||||||
& claude --dangerously-skip-permissions --append-system-prompt $agentsContent @tail
|
|
||||||
return
|
|
||||||
}
|
|
||||||
"codex" {
|
|
||||||
Assert-MosaicHome
|
|
||||||
Assert-AgentsMd
|
|
||||||
Assert-Soul
|
|
||||||
Assert-Runtime "codex"
|
|
||||||
Assert-SequentialThinking
|
|
||||||
Ensure-RuntimeConfig -Runtime "codex" -Dst (Join-Path $env:USERPROFILE ".codex\instructions.md")
|
|
||||||
$missionPrompt = Get-MissionPrompt
|
|
||||||
if (-not [string]::IsNullOrWhiteSpace($missionPrompt) -and $tail.Count -eq 0) {
|
|
||||||
Write-Host "[mosaic] Launching Codex in YOLO mode (active mission detected)..."
|
|
||||||
& codex --dangerously-bypass-approvals-and-sandbox $missionPrompt
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
Write-Host "[mosaic] Launching Codex in YOLO mode (dangerous permissions enabled)..."
|
|
||||||
& codex --dangerously-bypass-approvals-and-sandbox @tail
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
"opencode" {
|
|
||||||
Assert-MosaicHome
|
|
||||||
Assert-AgentsMd
|
|
||||||
Assert-Soul
|
|
||||||
Assert-Runtime "opencode"
|
|
||||||
Assert-SequentialThinking
|
|
||||||
Ensure-RuntimeConfig -Runtime "opencode" -Dst (Join-Path $env:USERPROFILE ".config\opencode\AGENTS.md")
|
|
||||||
Write-Host "[mosaic] Launching OpenCode in YOLO mode..."
|
|
||||||
& opencode @tail
|
|
||||||
return
|
|
||||||
}
|
|
||||||
default {
|
|
||||||
Write-Host "[mosaic] ERROR: Unsupported yolo runtime '$runtime'. Use claude|codex|opencode." -ForegroundColor Red
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if ($args.Count -eq 0) {
|
|
||||||
Show-Usage
|
|
||||||
exit 0
|
|
||||||
}
|
|
||||||
|
|
||||||
$command = $args[0]
|
|
||||||
$remaining = if ($args.Count -gt 1) { @($args[1..($args.Count - 1)]) } else { @() }
|
|
||||||
|
|
||||||
switch ($command) {
|
|
||||||
"claude" {
|
|
||||||
Assert-MosaicHome
|
|
||||||
Assert-AgentsMd
|
|
||||||
Assert-Soul
|
|
||||||
Assert-Runtime "claude"
|
|
||||||
Assert-SequentialThinking
|
|
||||||
# Claude supports --append-system-prompt for direct injection
|
|
||||||
$agentsContent = Get-RuntimePrompt -Runtime "claude"
|
|
||||||
Write-Host "[mosaic] Launching Claude Code..."
|
|
||||||
& claude --append-system-prompt $agentsContent @remaining
|
|
||||||
}
|
|
||||||
"opencode" {
|
|
||||||
Assert-MosaicHome
|
|
||||||
Assert-AgentsMd
|
|
||||||
Assert-Soul
|
|
||||||
Assert-Runtime "opencode"
|
|
||||||
Assert-SequentialThinking
|
|
||||||
# OpenCode reads from ~/.config/opencode/AGENTS.md
|
|
||||||
Ensure-RuntimeConfig -Runtime "opencode" -Dst (Join-Path $env:USERPROFILE ".config\opencode\AGENTS.md")
|
|
||||||
Write-Host "[mosaic] Launching OpenCode..."
|
|
||||||
& opencode @remaining
|
|
||||||
}
|
|
||||||
"codex" {
|
|
||||||
Assert-MosaicHome
|
|
||||||
Assert-AgentsMd
|
|
||||||
Assert-Soul
|
|
||||||
Assert-Runtime "codex"
|
|
||||||
Assert-SequentialThinking
|
|
||||||
# Codex reads from ~/.codex/instructions.md
|
|
||||||
Ensure-RuntimeConfig -Runtime "codex" -Dst (Join-Path $env:USERPROFILE ".codex\instructions.md")
|
|
||||||
$missionPrompt = Get-MissionPrompt
|
|
||||||
if (-not [string]::IsNullOrWhiteSpace($missionPrompt) -and $remaining.Count -eq 0) {
|
|
||||||
Write-Host "[mosaic] Launching Codex (active mission detected)..."
|
|
||||||
& codex $missionPrompt
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
Write-Host "[mosaic] Launching Codex..."
|
|
||||||
& codex @remaining
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"yolo" {
|
|
||||||
Invoke-Yolo -YoloArgs $remaining
|
|
||||||
}
|
|
||||||
"--yolo" {
|
|
||||||
Invoke-Yolo -YoloArgs $remaining
|
|
||||||
}
|
|
||||||
"init" {
|
|
||||||
Assert-MosaicHome
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-init.ps1") @remaining
|
|
||||||
}
|
|
||||||
"doctor" {
|
|
||||||
Assert-MosaicHome
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-doctor.ps1") @remaining
|
|
||||||
}
|
|
||||||
"sync" {
|
|
||||||
Assert-MosaicHome
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-sync-skills.ps1") @remaining
|
|
||||||
}
|
|
||||||
"bootstrap" {
|
|
||||||
Assert-MosaicHome
|
|
||||||
Write-Host "[mosaic] NOTE: mosaic-bootstrap-repo requires bash. Use Git Bash or WSL." -ForegroundColor Yellow
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-bootstrap-repo") @remaining
|
|
||||||
}
|
|
||||||
"upgrade" {
|
|
||||||
Assert-MosaicHome
|
|
||||||
if ($remaining.Count -eq 0) {
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-release-upgrade.ps1")
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
$mode = $remaining[0]
|
|
||||||
$tail = if ($remaining.Count -gt 1) { $remaining[1..($remaining.Count - 1)] } else { @() }
|
|
||||||
|
|
||||||
switch -Regex ($mode) {
|
|
||||||
"^release$" {
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-release-upgrade.ps1") @tail
|
|
||||||
}
|
|
||||||
"^check$" {
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-release-upgrade.ps1") -DryRun @tail
|
|
||||||
}
|
|
||||||
"^project$" {
|
|
||||||
Write-Host "[mosaic] NOTE: mosaic-upgrade requires bash. Use Git Bash or WSL." -ForegroundColor Yellow
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-upgrade") @tail
|
|
||||||
}
|
|
||||||
"^(--all|--root)$" {
|
|
||||||
Write-Host "[mosaic] NOTE: mosaic-upgrade requires bash. Use Git Bash or WSL." -ForegroundColor Yellow
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-upgrade") @remaining
|
|
||||||
}
|
|
||||||
"^(--dry-run|--ref|--keep|--overwrite|-y|--yes)$" {
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-release-upgrade.ps1") @remaining
|
|
||||||
}
|
|
||||||
"^-.*" {
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-release-upgrade.ps1") @remaining
|
|
||||||
}
|
|
||||||
default {
|
|
||||||
Write-Host "[mosaic] NOTE: treating positional argument as project path." -ForegroundColor Yellow
|
|
||||||
Write-Host "[mosaic] NOTE: mosaic-upgrade requires bash. Use Git Bash or WSL." -ForegroundColor Yellow
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-upgrade") @remaining
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"release-upgrade" {
|
|
||||||
Assert-MosaicHome
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-release-upgrade.ps1") @remaining
|
|
||||||
}
|
|
||||||
"project-upgrade" {
|
|
||||||
Assert-MosaicHome
|
|
||||||
Write-Host "[mosaic] NOTE: mosaic-upgrade requires bash. Use Git Bash or WSL." -ForegroundColor Yellow
|
|
||||||
& (Join-Path $MosaicHome "bin\mosaic-upgrade") @remaining
|
|
||||||
}
|
|
||||||
{ $_ -in "help", "-h", "--help" } { Show-Usage }
|
|
||||||
{ $_ -in "version", "-v", "--version" } { Write-Host "mosaic $Version" }
|
|
||||||
default {
|
|
||||||
Write-Host "[mosaic] Unknown command: $command" -ForegroundColor Red
|
|
||||||
Write-Host "[mosaic] Run 'mosaic --help' for usage."
|
|
||||||
exit 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,12 +1,32 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
|
# ─── Mosaic Framework Installer ──────────────────────────────────────────────
|
||||||
|
#
|
||||||
|
# Installs/upgrades the framework DATA to ~/.config/mosaic/.
|
||||||
|
# No executables are placed on PATH — the mosaic npm CLI is the only binary.
|
||||||
|
#
|
||||||
|
# Called by tools/install.sh (the unified installer). Can also be run directly.
|
||||||
|
#
|
||||||
|
# Environment:
|
||||||
|
# MOSAIC_HOME — target directory (default: ~/.config/mosaic)
|
||||||
|
# MOSAIC_INSTALL_MODE — prompt|keep|overwrite (default: prompt)
|
||||||
|
# MOSAIC_ALLOW_MISSING_SEQUENTIAL_THINKING — 1 to bypass MCP check
|
||||||
|
# MOSAIC_SKIP_SKILLS_SYNC — 1 to skip skill sync
|
||||||
|
# ──────────────────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
SOURCE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
TARGET_DIR="${MOSAIC_HOME:-$HOME/.config/mosaic}"
|
TARGET_DIR="${MOSAIC_HOME:-$HOME/.config/mosaic}"
|
||||||
INSTALL_MODE="${MOSAIC_INSTALL_MODE:-prompt}" # prompt|keep|overwrite
|
INSTALL_MODE="${MOSAIC_INSTALL_MODE:-prompt}"
|
||||||
PRESERVE_PATHS=("SOUL.md" "USER.md" "TOOLS.md" "memory")
|
|
||||||
|
|
||||||
# Colors (disabled if not a terminal)
|
# Files preserved across upgrades (never overwritten)
|
||||||
|
PRESERVE_PATHS=("SOUL.md" "USER.md" "TOOLS.md" "memory" "sources")
|
||||||
|
|
||||||
|
# Current framework schema version — bump this when the layout changes.
|
||||||
|
# The migration system uses this to run upgrade steps.
|
||||||
|
FRAMEWORK_VERSION=2
|
||||||
|
|
||||||
|
# ─── colours ──────────────────────────────────────────────────────────────────
|
||||||
if [[ -t 1 ]]; then
|
if [[ -t 1 ]]; then
|
||||||
GREEN='\033[0;32m' YELLOW='\033[0;33m' RED='\033[0;31m'
|
GREEN='\033[0;32m' YELLOW='\033[0;33m' RED='\033[0;31m'
|
||||||
CYAN='\033[0;36m' BOLD='\033[1m' RESET='\033[0m'
|
CYAN='\033[0;36m' BOLD='\033[1m' RESET='\033[0m'
|
||||||
@@ -19,9 +39,29 @@ warn() { echo -e " ${YELLOW}⚠${RESET} $1" >&2; }
|
|||||||
fail() { echo -e " ${RED}✗${RESET} $1" >&2; }
|
fail() { echo -e " ${RED}✗${RESET} $1" >&2; }
|
||||||
step() { echo -e "\n${BOLD}$1${RESET}"; }
|
step() { echo -e "\n${BOLD}$1${RESET}"; }
|
||||||
|
|
||||||
|
# ─── helpers ──────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
is_existing_install() {
|
is_existing_install() {
|
||||||
[[ -d "$TARGET_DIR" ]] || return 1
|
[[ -d "$TARGET_DIR" ]] || return 1
|
||||||
[[ -f "$TARGET_DIR/bin/mosaic" || -f "$TARGET_DIR/AGENTS.md" || -f "$TARGET_DIR/SOUL.md" ]]
|
[[ -f "$TARGET_DIR/AGENTS.md" || -f "$TARGET_DIR/SOUL.md" ]]
|
||||||
|
}
|
||||||
|
|
||||||
|
installed_framework_version() {
|
||||||
|
local vf="$TARGET_DIR/.framework-version"
|
||||||
|
if [[ -f "$vf" ]]; then
|
||||||
|
cat "$vf" 2>/dev/null || echo "0"
|
||||||
|
else
|
||||||
|
# No version file = legacy install (version 0 or 1)
|
||||||
|
if [[ -d "$TARGET_DIR/bin" ]]; then
|
||||||
|
echo "1" # Has bin/ → pre-migration legacy
|
||||||
|
else
|
||||||
|
echo "0" # Fresh or unknown
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
write_framework_version() {
|
||||||
|
echo "$FRAMEWORK_VERSION" > "$TARGET_DIR/.framework-version"
|
||||||
}
|
}
|
||||||
|
|
||||||
select_install_mode() {
|
select_install_mode() {
|
||||||
@@ -39,33 +79,22 @@ select_install_mode() {
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
case "$INSTALL_MODE" in
|
case "$INSTALL_MODE" in
|
||||||
keep|overwrite)
|
keep|overwrite) ;;
|
||||||
;;
|
|
||||||
prompt)
|
prompt)
|
||||||
if [[ -t 0 ]]; then
|
if [[ -t 0 ]]; then
|
||||||
echo ""
|
echo ""
|
||||||
echo "Existing Mosaic install detected at: $TARGET_DIR"
|
echo "Existing Mosaic install detected at: $TARGET_DIR"
|
||||||
echo "Choose reinstall mode:"
|
echo " 1) keep Update framework, preserve local files (SOUL.md, USER.md, etc.)"
|
||||||
echo " 1) keep Keep local files (SOUL.md, USER.md, TOOLS.md, memory/) while updating framework"
|
echo " 2) overwrite Replace everything"
|
||||||
echo " 2) overwrite Replace everything in $TARGET_DIR"
|
echo " 3) cancel Abort"
|
||||||
echo " 3) cancel Abort install"
|
|
||||||
printf "Selection [1/2/3] (default: 1): "
|
printf "Selection [1/2/3] (default: 1): "
|
||||||
read -r selection
|
read -r selection
|
||||||
|
|
||||||
case "${selection:-1}" in
|
case "${selection:-1}" in
|
||||||
1|k|K|keep|KEEP) INSTALL_MODE="keep" ;;
|
1|k|K|keep) INSTALL_MODE="keep" ;;
|
||||||
2|o|O|overwrite|OVERWRITE) INSTALL_MODE="overwrite" ;;
|
2|o|O|overwrite) INSTALL_MODE="overwrite" ;;
|
||||||
3|c|C|cancel|CANCEL|n|N|no|NO)
|
*) fail "Install cancelled."; exit 1 ;;
|
||||||
fail "Install cancelled."
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
warn "Unrecognized selection '$selection'; defaulting to keep."
|
|
||||||
INSTALL_MODE="keep"
|
|
||||||
;;
|
|
||||||
esac
|
esac
|
||||||
else
|
else
|
||||||
warn "Existing install detected without interactive input; defaulting to keep local files."
|
|
||||||
INSTALL_MODE="keep"
|
INSTALL_MODE="keep"
|
||||||
fi
|
fi
|
||||||
;;
|
;;
|
||||||
@@ -83,10 +112,9 @@ sync_framework() {
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
if command -v rsync >/dev/null 2>&1; then
|
if command -v rsync >/dev/null 2>&1; then
|
||||||
local rsync_args=(-a --delete --exclude ".git")
|
local rsync_args=(-a --delete --exclude ".git" --exclude ".framework-version")
|
||||||
|
|
||||||
if [[ "$INSTALL_MODE" == "keep" ]]; then
|
if [[ "$INSTALL_MODE" == "keep" ]]; then
|
||||||
local path
|
|
||||||
for path in "${PRESERVE_PATHS[@]}"; do
|
for path in "${PRESERVE_PATHS[@]}"; do
|
||||||
rsync_args+=(--exclude "$path")
|
rsync_args+=(--exclude "$path")
|
||||||
done
|
done
|
||||||
@@ -96,10 +124,10 @@ sync_framework() {
|
|||||||
return
|
return
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Fallback: cp-based sync
|
||||||
local preserve_tmp=""
|
local preserve_tmp=""
|
||||||
if [[ "$INSTALL_MODE" == "keep" ]]; then
|
if [[ "$INSTALL_MODE" == "keep" ]]; then
|
||||||
preserve_tmp="$(mktemp -d "${TMPDIR:-/tmp}/mosaic-preserve-XXXXXX")"
|
preserve_tmp="$(mktemp -d "${TMPDIR:-/tmp}/mosaic-preserve-XXXXXX")"
|
||||||
local path
|
|
||||||
for path in "${PRESERVE_PATHS[@]}"; do
|
for path in "${PRESERVE_PATHS[@]}"; do
|
||||||
if [[ -e "$TARGET_DIR/$path" ]]; then
|
if [[ -e "$TARGET_DIR/$path" ]]; then
|
||||||
mkdir -p "$preserve_tmp/$(dirname "$path")"
|
mkdir -p "$preserve_tmp/$(dirname "$path")"
|
||||||
@@ -108,12 +136,11 @@ sync_framework() {
|
|||||||
done
|
done
|
||||||
fi
|
fi
|
||||||
|
|
||||||
find "$TARGET_DIR" -mindepth 1 -maxdepth 1 ! -name ".git" -exec rm -rf {} +
|
find "$TARGET_DIR" -mindepth 1 -maxdepth 1 ! -name ".git" ! -name ".framework-version" -exec rm -rf {} +
|
||||||
cp -R "$SOURCE_DIR"/. "$TARGET_DIR"/
|
cp -R "$SOURCE_DIR"/. "$TARGET_DIR"/
|
||||||
rm -rf "$TARGET_DIR/.git"
|
rm -rf "$TARGET_DIR/.git"
|
||||||
|
|
||||||
if [[ -n "$preserve_tmp" ]]; then
|
if [[ -n "$preserve_tmp" ]]; then
|
||||||
local path
|
|
||||||
for path in "${PRESERVE_PATHS[@]}"; do
|
for path in "${PRESERVE_PATHS[@]}"; do
|
||||||
if [[ -e "$preserve_tmp/$path" ]]; then
|
if [[ -e "$preserve_tmp/$path" ]]; then
|
||||||
rm -rf "$TARGET_DIR/$path"
|
rm -rf "$TARGET_DIR/$path"
|
||||||
@@ -125,136 +152,133 @@ sync_framework() {
|
|||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
# Migrations — run sequentially from the installed version to FRAMEWORK_VERSION
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
|
||||||
|
run_migrations() {
|
||||||
|
local from_version
|
||||||
|
from_version="$(installed_framework_version)"
|
||||||
|
|
||||||
|
if [[ "$from_version" -ge "$FRAMEWORK_VERSION" ]]; then
|
||||||
|
return # Already current
|
||||||
|
fi
|
||||||
|
|
||||||
|
step "Running migrations (v${from_version} → v${FRAMEWORK_VERSION})"
|
||||||
|
|
||||||
|
# ── Migration: v0/v1 → v2 ─────────────────────────────────────────────────
|
||||||
|
# Remove bin/ directory — all executables now live in the npm CLI.
|
||||||
|
# Scripts that were in bin/ are now in tools/_scripts/.
|
||||||
|
if [[ "$from_version" -lt 2 ]]; then
|
||||||
|
if [[ -d "$TARGET_DIR/bin" ]]; then
|
||||||
|
ok "Removing legacy bin/ directory (executables now in npm CLI)"
|
||||||
|
rm -rf "$TARGET_DIR/bin"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Remove old mosaic PATH entry from shell profiles
|
||||||
|
for profile in "$HOME/.bashrc" "$HOME/.zshrc" "$HOME/.profile"; do
|
||||||
|
if [[ -f "$profile" ]] && grep -qF "$TARGET_DIR/bin" "$profile"; then
|
||||||
|
# Remove the PATH line and the comment above it
|
||||||
|
sed -i.mosaic-migration-bak \
|
||||||
|
-e "\|# Mosaic agent framework|d" \
|
||||||
|
-e "\|$TARGET_DIR/bin|d" \
|
||||||
|
"$profile"
|
||||||
|
ok "Cleaned up old PATH entry from $(basename "$profile")"
|
||||||
|
rm -f "${profile}.mosaic-migration-bak"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# Remove stale rails/ symlink
|
||||||
|
if [[ -L "$TARGET_DIR/rails" ]]; then
|
||||||
|
rm -f "$TARGET_DIR/rails"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# ── Future migrations go here ──────────────────────────────────────────────
|
||||||
|
# if [[ "$from_version" -lt 3 ]]; then
|
||||||
|
# ...
|
||||||
|
# fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
# Main
|
||||||
|
# ═══════════════════════════════════════════════════════════════════════════════
|
||||||
|
|
||||||
step "Installing Mosaic framework"
|
step "Installing Mosaic framework"
|
||||||
|
|
||||||
mkdir -p "$TARGET_DIR"
|
mkdir -p "$TARGET_DIR"
|
||||||
select_install_mode
|
select_install_mode
|
||||||
|
|
||||||
if [[ "$INSTALL_MODE" == "keep" ]]; then
|
if [[ "$INSTALL_MODE" == "keep" ]]; then
|
||||||
ok "Install mode: keep local SOUL.md/USER.md/TOOLS.md/memory while updating framework"
|
ok "Install mode: keep local files (SOUL.md, USER.md, TOOLS.md, memory/)"
|
||||||
else
|
else
|
||||||
ok "Install mode: overwrite existing files"
|
ok "Install mode: overwrite"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
sync_framework
|
sync_framework
|
||||||
|
|
||||||
# Ensure memory directory exists (preserved across upgrades, may not exist on fresh install)
|
# Ensure memory directory exists
|
||||||
mkdir -p "$TARGET_DIR/memory"
|
mkdir -p "$TARGET_DIR/memory"
|
||||||
|
|
||||||
chmod +x "$TARGET_DIR"/bin/*
|
|
||||||
chmod +x "$TARGET_DIR"/install.sh
|
|
||||||
|
|
||||||
# Ensure tool scripts are executable
|
# Ensure tool scripts are executable
|
||||||
find "$TARGET_DIR/tools" -name "*.sh" -exec chmod +x {} + 2>/dev/null || true
|
find "$TARGET_DIR/tools" -name "*.sh" -exec chmod +x {} + 2>/dev/null || true
|
||||||
|
find "$TARGET_DIR/tools/_scripts" -type f -exec chmod +x {} + 2>/dev/null || true
|
||||||
|
|
||||||
# Create backward-compat symlink: rails/ → tools/
|
ok "Framework synced to $TARGET_DIR"
|
||||||
if [[ -d "$TARGET_DIR/tools" ]]; then
|
|
||||||
if [[ -d "$TARGET_DIR/rails" ]] && [[ ! -L "$TARGET_DIR/rails" ]]; then
|
|
||||||
rm -rf "$TARGET_DIR/rails"
|
|
||||||
fi
|
|
||||||
ln -sfn "tools" "$TARGET_DIR/rails"
|
|
||||||
fi
|
|
||||||
|
|
||||||
ok "Framework installed to $TARGET_DIR"
|
# Run migrations before post-install (migrations may remove old bin/ etc.)
|
||||||
|
run_migrations
|
||||||
|
|
||||||
step "Post-install tasks"
|
step "Post-install tasks"
|
||||||
|
|
||||||
if "$TARGET_DIR/bin/mosaic-link-runtime-assets" >/dev/null 2>&1; then
|
SCRIPTS="$TARGET_DIR/tools/_scripts"
|
||||||
|
|
||||||
|
if [[ -x "$SCRIPTS/mosaic-link-runtime-assets" ]]; then
|
||||||
|
if "$SCRIPTS/mosaic-link-runtime-assets" >/dev/null 2>&1; then
|
||||||
ok "Runtime assets linked"
|
ok "Runtime assets linked"
|
||||||
else
|
else
|
||||||
warn "Runtime asset linking failed (non-fatal)"
|
warn "Runtime asset linking failed (non-fatal)"
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if "$TARGET_DIR/bin/mosaic-ensure-sequential-thinking" >/dev/null 2>&1; then
|
if [[ -x "$SCRIPTS/mosaic-ensure-sequential-thinking" ]]; then
|
||||||
|
if "$SCRIPTS/mosaic-ensure-sequential-thinking" >/dev/null 2>&1; then
|
||||||
ok "sequential-thinking MCP configured"
|
ok "sequential-thinking MCP configured"
|
||||||
else
|
else
|
||||||
if [[ "${MOSAIC_ALLOW_MISSING_SEQUENTIAL_THINKING:-0}" == "1" ]]; then
|
if [[ "${MOSAIC_ALLOW_MISSING_SEQUENTIAL_THINKING:-0}" == "1" ]]; then
|
||||||
warn "sequential-thinking MCP setup failed but bypassed (MOSAIC_ALLOW_MISSING_SEQUENTIAL_THINKING=1)"
|
warn "sequential-thinking MCP setup bypassed (MOSAIC_ALLOW_MISSING_SEQUENTIAL_THINKING=1)"
|
||||||
else
|
else
|
||||||
fail "sequential-thinking MCP setup failed (hard requirement)."
|
fail "sequential-thinking MCP setup failed (hard requirement)."
|
||||||
fail "Set MOSAIC_ALLOW_MISSING_SEQUENTIAL_THINKING=1 only for temporary bypass scenarios."
|
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
fi
|
|
||||||
|
|
||||||
if "$TARGET_DIR/bin/mosaic-ensure-excalidraw" >/dev/null 2>&1; then
|
|
||||||
ok "excalidraw MCP configured"
|
|
||||||
else
|
|
||||||
warn "excalidraw MCP setup failed (non-fatal) — run 'mosaic-ensure-excalidraw' to retry"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "${MOSAIC_SKIP_SKILLS_SYNC:-0}" == "1" ]]; then
|
|
||||||
ok "Skills sync skipped (MOSAIC_SKIP_SKILLS_SYNC=1)"
|
|
||||||
else
|
|
||||||
if "$TARGET_DIR/bin/mosaic-sync-skills" >/dev/null 2>&1; then
|
|
||||||
ok "Skills synced"
|
|
||||||
else
|
|
||||||
warn "Skills sync failed (non-fatal)"
|
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if "$TARGET_DIR/bin/mosaic-migrate-local-skills" --apply >/dev/null 2>&1; then
|
if [[ -x "$SCRIPTS/mosaic-ensure-excalidraw" ]]; then
|
||||||
ok "Local skills migrated"
|
"$SCRIPTS/mosaic-ensure-excalidraw" >/dev/null 2>&1 && ok "excalidraw MCP configured" || warn "excalidraw MCP setup failed (non-fatal)"
|
||||||
else
|
|
||||||
warn "Local skill migration failed (non-fatal)"
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if "$TARGET_DIR/bin/mosaic-doctor" >/dev/null 2>&1; then
|
if [[ "${MOSAIC_SKIP_SKILLS_SYNC:-0}" != "1" ]] && [[ -x "$SCRIPTS/mosaic-sync-skills" ]]; then
|
||||||
ok "Health audit passed"
|
"$SCRIPTS/mosaic-sync-skills" >/dev/null 2>&1 && ok "Skills synced" || warn "Skills sync failed (non-fatal)"
|
||||||
else
|
|
||||||
warn "Health audit reported issues — run 'mosaic doctor' for details"
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
step "PATH configuration"
|
if [[ -x "$SCRIPTS/mosaic-migrate-local-skills" ]]; then
|
||||||
|
"$SCRIPTS/mosaic-migrate-local-skills" --apply >/dev/null 2>&1 && ok "Local skills migrated" || warn "Local skill migration failed (non-fatal)"
|
||||||
PATH_LINE="export PATH=\"$TARGET_DIR/bin:\$PATH\""
|
|
||||||
|
|
||||||
# Find the right shell profile
|
|
||||||
if [[ -n "${ZSH_VERSION:-}" ]] || [[ "$(basename "${SHELL:-}")" == "zsh" ]]; then
|
|
||||||
SHELL_PROFILE="$HOME/.zshrc"
|
|
||||||
elif [[ -f "$HOME/.bashrc" ]]; then
|
|
||||||
SHELL_PROFILE="$HOME/.bashrc"
|
|
||||||
elif [[ -f "$HOME/.profile" ]]; then
|
|
||||||
SHELL_PROFILE="$HOME/.profile"
|
|
||||||
else
|
|
||||||
SHELL_PROFILE="$HOME/.profile"
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
PATH_CHANGED=false
|
if [[ -x "$SCRIPTS/mosaic-doctor" ]]; then
|
||||||
if grep -qF "$TARGET_DIR/bin" "$SHELL_PROFILE" 2>/dev/null; then
|
"$SCRIPTS/mosaic-doctor" >/dev/null 2>&1 && ok "Health audit passed" || warn "Health audit reported issues — run 'mosaic doctor' for details"
|
||||||
ok "Already in PATH via $SHELL_PROFILE"
|
|
||||||
else
|
|
||||||
{
|
|
||||||
echo ""
|
|
||||||
echo "# Mosaic agent framework"
|
|
||||||
echo "$PATH_LINE"
|
|
||||||
} >> "$SHELL_PROFILE"
|
|
||||||
ok "Added to PATH in $SHELL_PROFILE"
|
|
||||||
PATH_CHANGED=true
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Write version stamp AFTER everything succeeds
|
||||||
|
write_framework_version
|
||||||
|
|
||||||
# ── Summary ──────────────────────────────────────────────────
|
# ── Summary ──────────────────────────────────────────────────
|
||||||
echo ""
|
echo ""
|
||||||
echo -e "${GREEN}${BOLD} Mosaic installed successfully.${RESET}"
|
echo -e "${GREEN}${BOLD} Mosaic framework installed.${RESET}"
|
||||||
echo ""
|
echo ""
|
||||||
|
|
||||||
# Collect next steps
|
|
||||||
NEXT_STEPS=()
|
|
||||||
|
|
||||||
if [[ "$PATH_CHANGED" == "true" ]]; then
|
|
||||||
NEXT_STEPS+=("Run ${CYAN}source $SHELL_PROFILE${RESET} or log out and back in to activate PATH.")
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ ! -f "$TARGET_DIR/SOUL.md" ]]; then
|
if [[ ! -f "$TARGET_DIR/SOUL.md" ]]; then
|
||||||
NEXT_STEPS+=("Run ${CYAN}mosaic init${RESET} to set up your agent identity (SOUL.md), user profile (USER.md), and tool config (TOOLS.md).")
|
echo -e " Run ${CYAN}mosaic init${RESET} to set up your agent identity."
|
||||||
elif grep -q "not configured" "$TARGET_DIR/USER.md" 2>/dev/null; then
|
|
||||||
NEXT_STEPS+=("Run ${CYAN}mosaic init${RESET} to personalize your user profile (USER.md) and tool config (TOOLS.md).")
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ ${#NEXT_STEPS[@]} -gt 0 ]]; then
|
|
||||||
echo -e " ${BOLD}Next steps:${RESET}"
|
|
||||||
for i in "${!NEXT_STEPS[@]}"; do
|
|
||||||
echo -e " $((i+1)). ${NEXT_STEPS[$i]}"
|
|
||||||
done
|
|
||||||
echo ""
|
echo ""
|
||||||
fi
|
fi
|
||||||
|
|||||||
@@ -69,12 +69,12 @@ case "$cmd" in
|
|||||||
echo "[agent-framework] orchestrator already running (pid=$(cat "$PID_FILE"))"
|
echo "[agent-framework] orchestrator already running (pid=$(cat "$PID_FILE"))"
|
||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
nohup "$MOSAIC_HOME/bin/mosaic-orchestrator-drain" --poll-sec "$poll_sec" $sync_arg >"$LOG_FILE" 2>&1 &
|
nohup "$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-drain" --poll-sec "$poll_sec" $sync_arg >"$LOG_FILE" 2>&1 &
|
||||||
echo "$!" > "$PID_FILE"
|
echo "$!" > "$PID_FILE"
|
||||||
echo "[agent-framework] orchestrator started (pid=$!, log=$LOG_FILE)"
|
echo "[agent-framework] orchestrator started (pid=$!, log=$LOG_FILE)"
|
||||||
;;
|
;;
|
||||||
drain)
|
drain)
|
||||||
exec "$MOSAIC_HOME/bin/mosaic-orchestrator-drain" --poll-sec "$poll_sec" $sync_arg
|
exec "$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-drain" --poll-sec "$poll_sec" $sync_arg
|
||||||
;;
|
;;
|
||||||
stop)
|
stop)
|
||||||
if ! is_running; then
|
if ! is_running; then
|
||||||
|
|||||||
@@ -113,8 +113,8 @@ echo "[mosaic] Optional: run orchestrator rail via ~/.config/mosaic/bin/mosaic-o
|
|||||||
echo "[mosaic] Optional: run detached orchestrator via bash $TARGET_DIR/scripts/agent/orchestrator-daemon.sh start"
|
echo "[mosaic] Optional: run detached orchestrator via bash $TARGET_DIR/scripts/agent/orchestrator-daemon.sh start"
|
||||||
|
|
||||||
if [[ -n "$QUALITY_TEMPLATE" ]]; then
|
if [[ -n "$QUALITY_TEMPLATE" ]]; then
|
||||||
if [[ -x "$MOSAIC_HOME/bin/mosaic-quality-apply" ]]; then
|
if [[ -x "$MOSAIC_HOME/tools/_scripts/mosaic-quality-apply" ]]; then
|
||||||
"$MOSAIC_HOME/bin/mosaic-quality-apply" --template "$QUALITY_TEMPLATE" --target "$TARGET_DIR"
|
"$MOSAIC_HOME/tools/_scripts/mosaic-quality-apply" --template "$QUALITY_TEMPLATE" --target "$TARGET_DIR"
|
||||||
if [[ -f "$TARGET_DIR/.mosaic/quality-rails.yml" ]]; then
|
if [[ -f "$TARGET_DIR/.mosaic/quality-rails.yml" ]]; then
|
||||||
sed -i "s/^enabled:.*/enabled: true/" "$TARGET_DIR/.mosaic/quality-rails.yml"
|
sed -i "s/^enabled:.*/enabled: true/" "$TARGET_DIR/.mosaic/quality-rails.yml"
|
||||||
sed -i "s/^template:.*/template: \"$QUALITY_TEMPLATE\"/" "$TARGET_DIR/.mosaic/quality-rails.yml"
|
sed -i "s/^template:.*/template: \"$QUALITY_TEMPLATE\"/" "$TARGET_DIR/.mosaic/quality-rails.yml"
|
||||||
@@ -165,18 +165,18 @@ expect_dir "$MOSAIC_HOME/profiles"
|
|||||||
expect_dir "$MOSAIC_HOME/templates/agent"
|
expect_dir "$MOSAIC_HOME/templates/agent"
|
||||||
expect_dir "$MOSAIC_HOME/skills"
|
expect_dir "$MOSAIC_HOME/skills"
|
||||||
expect_dir "$MOSAIC_HOME/skills-local"
|
expect_dir "$MOSAIC_HOME/skills-local"
|
||||||
expect_file "$MOSAIC_HOME/bin/mosaic-link-runtime-assets"
|
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets"
|
||||||
expect_file "$MOSAIC_HOME/bin/mosaic-ensure-sequential-thinking"
|
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-ensure-sequential-thinking"
|
||||||
expect_file "$MOSAIC_HOME/bin/mosaic-sync-skills"
|
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-sync-skills"
|
||||||
expect_file "$MOSAIC_HOME/bin/mosaic-projects"
|
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-projects"
|
||||||
expect_file "$MOSAIC_HOME/bin/mosaic-quality-apply"
|
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-quality-apply"
|
||||||
expect_file "$MOSAIC_HOME/bin/mosaic-quality-verify"
|
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-quality-verify"
|
||||||
expect_file "$MOSAIC_HOME/bin/mosaic-orchestrator-run"
|
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-run"
|
||||||
expect_file "$MOSAIC_HOME/bin/mosaic-orchestrator-sync-tasks"
|
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-sync-tasks"
|
||||||
expect_file "$MOSAIC_HOME/bin/mosaic-orchestrator-drain"
|
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-drain"
|
||||||
expect_file "$MOSAIC_HOME/bin/mosaic-orchestrator-matrix-publish"
|
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-matrix-publish"
|
||||||
expect_file "$MOSAIC_HOME/bin/mosaic-orchestrator-matrix-consume"
|
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-matrix-consume"
|
||||||
expect_file "$MOSAIC_HOME/bin/mosaic-orchestrator-matrix-cycle"
|
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-matrix-cycle"
|
||||||
expect_file "$MOSAIC_HOME/tools/git/ci-queue-wait.sh"
|
expect_file "$MOSAIC_HOME/tools/git/ci-queue-wait.sh"
|
||||||
expect_file "$MOSAIC_HOME/tools/git/pr-ci-wait.sh"
|
expect_file "$MOSAIC_HOME/tools/git/pr-ci-wait.sh"
|
||||||
expect_file "$MOSAIC_HOME/tools/orchestrator-matrix/transport/matrix_transport.py"
|
expect_file "$MOSAIC_HOME/tools/orchestrator-matrix/transport/matrix_transport.py"
|
||||||
@@ -215,8 +215,8 @@ check_runtime_contract_file "$HOME/.config/opencode/AGENTS.md" "$MOSAIC_HOME/run
|
|||||||
check_runtime_contract_file "$HOME/.codex/instructions.md" "$MOSAIC_HOME/runtime/codex/instructions.md" "codex"
|
check_runtime_contract_file "$HOME/.codex/instructions.md" "$MOSAIC_HOME/runtime/codex/instructions.md" "codex"
|
||||||
|
|
||||||
# Sequential-thinking MCP hard requirement.
|
# Sequential-thinking MCP hard requirement.
|
||||||
if [[ -x "$MOSAIC_HOME/bin/mosaic-ensure-sequential-thinking" ]]; then
|
if [[ -x "$MOSAIC_HOME/tools/_scripts/mosaic-ensure-sequential-thinking" ]]; then
|
||||||
if "$MOSAIC_HOME/bin/mosaic-ensure-sequential-thinking" --check >/dev/null 2>&1; then
|
if "$MOSAIC_HOME/tools/_scripts/mosaic-ensure-sequential-thinking" --check >/dev/null 2>&1; then
|
||||||
pass "sequential-thinking MCP configured and available"
|
pass "sequential-thinking MCP configured and available"
|
||||||
else
|
else
|
||||||
warn "sequential-thinking MCP missing or misconfigured"
|
warn "sequential-thinking MCP missing or misconfigured"
|
||||||
@@ -422,8 +422,8 @@ with open('$pi_settings_file', 'w') as f:
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
# 4. Run link-runtime-assets if available
|
# 4. Run link-runtime-assets if available
|
||||||
if [[ -x "$MOSAIC_HOME/bin/mosaic-link-runtime-assets" ]]; then
|
if [[ -x "$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets" ]]; then
|
||||||
"$MOSAIC_HOME/bin/mosaic-link-runtime-assets" >/dev/null 2>&1 && fix "Re-ran mosaic-link-runtime-assets"
|
"$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets" >/dev/null 2>&1 && fix "Re-ran mosaic-link-runtime-assets"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo "[mosaic-doctor] fixes=$fix_count"
|
echo "[mosaic-doctor] fixes=$fix_count"
|
||||||
@@ -60,12 +60,14 @@ Options:
|
|||||||
--timezone <tz> Your timezone (e.g., "America/Chicago")
|
--timezone <tz> Your timezone (e.g., "America/Chicago")
|
||||||
--non-interactive Fail if any required value is missing (no prompts)
|
--non-interactive Fail if any required value is missing (no prompts)
|
||||||
--soul-only Only generate SOUL.md
|
--soul-only Only generate SOUL.md
|
||||||
|
--force Overwrite existing files without prompting
|
||||||
-h, --help Show help
|
-h, --help Show help
|
||||||
USAGE
|
USAGE
|
||||||
}
|
}
|
||||||
|
|
||||||
NON_INTERACTIVE=0
|
NON_INTERACTIVE=0
|
||||||
SOUL_ONLY=0
|
SOUL_ONLY=0
|
||||||
|
FORCE=0
|
||||||
|
|
||||||
while [[ $# -gt 0 ]]; do
|
while [[ $# -gt 0 ]]; do
|
||||||
case "$1" in
|
case "$1" in
|
||||||
@@ -79,6 +81,7 @@ while [[ $# -gt 0 ]]; do
|
|||||||
--timezone) TIMEZONE="$2"; shift 2 ;;
|
--timezone) TIMEZONE="$2"; shift 2 ;;
|
||||||
--non-interactive) NON_INTERACTIVE=1; shift ;;
|
--non-interactive) NON_INTERACTIVE=1; shift ;;
|
||||||
--soul-only) SOUL_ONLY=1; shift ;;
|
--soul-only) SOUL_ONLY=1; shift ;;
|
||||||
|
--force) FORCE=1; shift ;;
|
||||||
-h|--help) usage; exit 0 ;;
|
-h|--help) usage; exit 0 ;;
|
||||||
*) echo "Unknown argument: $1" >&2; usage >&2; exit 1 ;;
|
*) echo "Unknown argument: $1" >&2; usage >&2; exit 1 ;;
|
||||||
esac
|
esac
|
||||||
@@ -139,6 +142,134 @@ prompt_multiline() {
|
|||||||
eval "$var_name=\"$value\""
|
eval "$var_name=\"$value\""
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# ── Existing file detection ────────────────────────────────────
|
||||||
|
|
||||||
|
detect_existing_config() {
|
||||||
|
local found=0
|
||||||
|
local existing_files=()
|
||||||
|
|
||||||
|
[[ -f "$SOUL_OUTPUT" ]] && { found=1; existing_files+=("SOUL.md"); }
|
||||||
|
[[ -f "$USER_OUTPUT" ]] && { found=1; existing_files+=("USER.md"); }
|
||||||
|
[[ -f "$TOOLS_OUTPUT" ]] && { found=1; existing_files+=("TOOLS.md"); }
|
||||||
|
|
||||||
|
if [[ $found -eq 0 || $FORCE -eq 1 ]]; then
|
||||||
|
return 0 # No existing files or --force: proceed with fresh install
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "[mosaic-init] Existing configuration detected:"
|
||||||
|
for f in "${existing_files[@]}"; do
|
||||||
|
echo " ✓ $f"
|
||||||
|
done
|
||||||
|
|
||||||
|
# Show current agent name if SOUL.md exists
|
||||||
|
if [[ -f "$SOUL_OUTPUT" ]]; then
|
||||||
|
local current_name
|
||||||
|
current_name=$(grep -oP 'You are \*\*\K[^*]+' "$SOUL_OUTPUT" 2>/dev/null || true)
|
||||||
|
if [[ -n "$current_name" ]]; then
|
||||||
|
echo " Agent: $current_name"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
if [[ $NON_INTERACTIVE -eq 1 ]]; then
|
||||||
|
echo "[mosaic-init] Existing config found. Use --force to overwrite in non-interactive mode."
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "What would you like to do?"
|
||||||
|
echo " 1) keep — Keep existing files, skip init (default)"
|
||||||
|
echo " 2) import — Import values from existing files as defaults, then regenerate"
|
||||||
|
echo " 3) overwrite — Start fresh, overwrite all files"
|
||||||
|
printf "Choose [1/2/3]: "
|
||||||
|
read -r choice
|
||||||
|
|
||||||
|
case "${choice:-1}" in
|
||||||
|
1|keep)
|
||||||
|
echo "[mosaic-init] Keeping existing configuration."
|
||||||
|
# Still push to runtime adapters in case framework was updated
|
||||||
|
if [[ -x "$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets" ]]; then
|
||||||
|
echo "[mosaic-init] Updating runtime adapters..."
|
||||||
|
"$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets"
|
||||||
|
fi
|
||||||
|
echo "[mosaic-init] Done. Launch with: mosaic claude"
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
2|import)
|
||||||
|
echo "[mosaic-init] Importing values from existing files as defaults..."
|
||||||
|
import_existing_values
|
||||||
|
;;
|
||||||
|
3|overwrite)
|
||||||
|
echo "[mosaic-init] Starting fresh install..."
|
||||||
|
# Back up existing files
|
||||||
|
local ts
|
||||||
|
ts=$(date +%Y%m%d%H%M%S)
|
||||||
|
for f in "${existing_files[@]}"; do
|
||||||
|
local src="$MOSAIC_HOME/$f"
|
||||||
|
if [[ -f "$src" ]]; then
|
||||||
|
cp "$src" "${src}.bak.${ts}"
|
||||||
|
echo " Backed up $f → ${f}.bak.${ts}"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "[mosaic-init] Invalid choice. Keeping existing configuration."
|
||||||
|
exit 0
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
}
|
||||||
|
|
||||||
|
import_existing_values() {
|
||||||
|
# Import SOUL.md values
|
||||||
|
if [[ -f "$SOUL_OUTPUT" ]]; then
|
||||||
|
local content
|
||||||
|
content=$(cat "$SOUL_OUTPUT")
|
||||||
|
|
||||||
|
if [[ -z "$AGENT_NAME" ]]; then
|
||||||
|
AGENT_NAME=$(echo "$content" | grep -oP 'You are \*\*\K[^*]+' 2>/dev/null || true)
|
||||||
|
fi
|
||||||
|
if [[ -z "$ROLE_DESCRIPTION" ]]; then
|
||||||
|
ROLE_DESCRIPTION=$(echo "$content" | grep -oP 'Role identity: \K.+' 2>/dev/null || true)
|
||||||
|
fi
|
||||||
|
if [[ -z "$STYLE" ]]; then
|
||||||
|
if echo "$content" | grep -q 'Be direct, concise'; then
|
||||||
|
STYLE="direct"
|
||||||
|
elif echo "$content" | grep -q 'Be warm and conversational'; then
|
||||||
|
STYLE="friendly"
|
||||||
|
elif echo "$content" | grep -q 'Use professional, structured'; then
|
||||||
|
STYLE="formal"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Import USER.md values
|
||||||
|
if [[ -f "$USER_OUTPUT" ]]; then
|
||||||
|
local content
|
||||||
|
content=$(cat "$USER_OUTPUT")
|
||||||
|
|
||||||
|
if [[ -z "$USER_NAME" ]]; then
|
||||||
|
USER_NAME=$(echo "$content" | grep -oP '\*\*Name:\*\* \K.+' 2>/dev/null || true)
|
||||||
|
fi
|
||||||
|
if [[ -z "$PRONOUNS" ]]; then
|
||||||
|
PRONOUNS=$(echo "$content" | grep -oP '\*\*Pronouns:\*\* \K.+' 2>/dev/null || true)
|
||||||
|
fi
|
||||||
|
if [[ -z "$TIMEZONE" ]]; then
|
||||||
|
TIMEZONE=$(echo "$content" | grep -oP '\*\*Timezone:\*\* \K.+' 2>/dev/null || true)
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Import TOOLS.md values
|
||||||
|
if [[ -f "$TOOLS_OUTPUT" ]]; then
|
||||||
|
local content
|
||||||
|
content=$(cat "$TOOLS_OUTPUT")
|
||||||
|
|
||||||
|
if [[ -z "$CREDENTIALS_LOCATION" ]]; then
|
||||||
|
CREDENTIALS_LOCATION=$(echo "$content" | grep -oP '\*\*Location:\*\* \K.+' 2>/dev/null || true)
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
detect_existing_config
|
||||||
|
|
||||||
# ── SOUL.md Generation ────────────────────────────────────────
|
# ── SOUL.md Generation ────────────────────────────────────────
|
||||||
echo "[mosaic-init] Generating SOUL.md — agent identity contract"
|
echo "[mosaic-init] Generating SOUL.md — agent identity contract"
|
||||||
echo ""
|
echo ""
|
||||||
@@ -261,9 +392,9 @@ echo "[mosaic-init] Style: $STYLE"
|
|||||||
|
|
||||||
if [[ $SOUL_ONLY -eq 1 ]]; then
|
if [[ $SOUL_ONLY -eq 1 ]]; then
|
||||||
# Push to runtime adapters and exit
|
# Push to runtime adapters and exit
|
||||||
if [[ -x "$MOSAIC_HOME/bin/mosaic-link-runtime-assets" ]]; then
|
if [[ -x "$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets" ]]; then
|
||||||
echo "[mosaic-init] Updating runtime adapters..."
|
echo "[mosaic-init] Updating runtime adapters..."
|
||||||
"$MOSAIC_HOME/bin/mosaic-link-runtime-assets"
|
"$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets"
|
||||||
fi
|
fi
|
||||||
echo "[mosaic-init] Done. Launch with: mosaic claude"
|
echo "[mosaic-init] Done. Launch with: mosaic claude"
|
||||||
exit 0
|
exit 0
|
||||||
@@ -413,10 +544,10 @@ fi
|
|||||||
# ── Finalize ──────────────────────────────────────────────────
|
# ── Finalize ──────────────────────────────────────────────────
|
||||||
|
|
||||||
# Push to runtime adapters
|
# Push to runtime adapters
|
||||||
if [[ -x "$MOSAIC_HOME/bin/mosaic-link-runtime-assets" ]]; then
|
if [[ -x "$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets" ]]; then
|
||||||
echo ""
|
echo ""
|
||||||
echo "[mosaic-init] Updating runtime adapters..."
|
echo "[mosaic-init] Updating runtime adapters..."
|
||||||
"$MOSAIC_HOME/bin/mosaic-link-runtime-assets"
|
"$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo ""
|
echo ""
|
||||||
@@ -128,8 +128,8 @@ fi
|
|||||||
# Pi extension is loaded via --extension flag in the mosaic launcher.
|
# Pi extension is loaded via --extension flag in the mosaic launcher.
|
||||||
# Do NOT copy into ~/.pi/agent/extensions/ — that causes duplicate loading.
|
# Do NOT copy into ~/.pi/agent/extensions/ — that causes duplicate loading.
|
||||||
|
|
||||||
if [[ -x "$MOSAIC_HOME/bin/mosaic-ensure-sequential-thinking" ]]; then
|
if [[ -x "$MOSAIC_HOME/tools/_scripts/mosaic-ensure-sequential-thinking" ]]; then
|
||||||
"$MOSAIC_HOME/bin/mosaic-ensure-sequential-thinking"
|
"$MOSAIC_HOME/tools/_scripts/mosaic-ensure-sequential-thinking"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo "[mosaic-link] Runtime assets synced (non-symlink mode)"
|
echo "[mosaic-link] Runtime assets synced (non-symlink mode)"
|
||||||
@@ -2,8 +2,8 @@
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
|
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
|
||||||
sync_cmd="$MOSAIC_HOME/bin/mosaic-orchestrator-sync-tasks"
|
sync_cmd="$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-sync-tasks"
|
||||||
run_cmd="$MOSAIC_HOME/bin/mosaic-orchestrator-run"
|
run_cmd="$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-run"
|
||||||
|
|
||||||
do_sync=1
|
do_sync=1
|
||||||
poll_sec=15
|
poll_sec=15
|
||||||
@@ -3,9 +3,9 @@ set -euo pipefail
|
|||||||
|
|
||||||
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
|
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
|
||||||
|
|
||||||
consume="$MOSAIC_HOME/bin/mosaic-orchestrator-matrix-consume"
|
consume="$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-matrix-consume"
|
||||||
run="$MOSAIC_HOME/bin/mosaic-orchestrator-run"
|
run="$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-run"
|
||||||
publish="$MOSAIC_HOME/bin/mosaic-orchestrator-matrix-publish"
|
publish="$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-matrix-publish"
|
||||||
|
|
||||||
for cmd in "$consume" "$run" "$publish"; do
|
for cmd in "$consume" "$run" "$publish"; do
|
||||||
if [[ ! -x "$cmd" ]]; then
|
if [[ ! -x "$cmd" ]]; then
|
||||||
@@ -151,7 +151,7 @@ case "$cmd" in
|
|||||||
[[ -n "$quality_template" ]] && args+=(--quality-template "$quality_template")
|
[[ -n "$quality_template" ]] && args+=(--quality-template "$quality_template")
|
||||||
args+=("$repo")
|
args+=("$repo")
|
||||||
echo "[mosaic-projects] bootstrap: $repo"
|
echo "[mosaic-projects] bootstrap: $repo"
|
||||||
"$MOSAIC_HOME/bin/mosaic-bootstrap-repo" "${args[@]}"
|
"$MOSAIC_HOME/tools/_scripts/mosaic-bootstrap-repo" "${args[@]}"
|
||||||
add_repo "$repo" || true
|
add_repo "$repo" || true
|
||||||
done
|
done
|
||||||
;;
|
;;
|
||||||
@@ -193,7 +193,7 @@ case "$cmd" in
|
|||||||
drain)
|
drain)
|
||||||
args=(--poll-sec "$poll_sec")
|
args=(--poll-sec "$poll_sec")
|
||||||
[[ $no_sync -eq 1 ]] && args+=(--no-sync)
|
[[ $no_sync -eq 1 ]] && args+=(--no-sync)
|
||||||
"$MOSAIC_HOME/bin/mosaic-orchestrator-drain" "${args[@]}"
|
"$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-drain" "${args[@]}"
|
||||||
;;
|
;;
|
||||||
status)
|
status)
|
||||||
echo "[mosaic-projects] no daemon script in repo; run from bootstrapped repo or re-bootstrap"
|
echo "[mosaic-projects] no daemon script in repo; run from bootstrapped repo or re-bootstrap"
|
||||||
@@ -55,7 +55,68 @@ mkdir -p "$MOSAIC_HOME" "$MOSAIC_SKILLS_DIR" "$MOSAIC_LOCAL_SKILLS_DIR"
|
|||||||
if [[ $fetch -eq 1 ]]; then
|
if [[ $fetch -eq 1 ]]; then
|
||||||
if [[ -d "$SKILLS_REPO_DIR/.git" ]]; then
|
if [[ -d "$SKILLS_REPO_DIR/.git" ]]; then
|
||||||
echo "[mosaic-skills] Updating skills source: $SKILLS_REPO_DIR"
|
echo "[mosaic-skills] Updating skills source: $SKILLS_REPO_DIR"
|
||||||
git -C "$SKILLS_REPO_DIR" pull --rebase
|
|
||||||
|
# ── Detect dirty state ──────────────────────────────────────────────
|
||||||
|
dirty=""
|
||||||
|
dirty="$(git -C "$SKILLS_REPO_DIR" status --porcelain 2>/dev/null || true)"
|
||||||
|
|
||||||
|
if [[ -n "$dirty" ]]; then
|
||||||
|
# ── Auto-migrate customized skills to skills-local/ ─────────────
|
||||||
|
# Instead of stash/pop (fragile, merge conflicts), we:
|
||||||
|
# 1. Identify which skill dirs contain user edits
|
||||||
|
# 2. Copy those full skill dirs into skills-local/ (preserving edits)
|
||||||
|
# 3. Reset the repo clean so pull always succeeds
|
||||||
|
# 4. skills-local/ takes precedence during linking, so edits win
|
||||||
|
|
||||||
|
SOURCE_SKILLS_SUBDIR="$SKILLS_REPO_DIR/skills"
|
||||||
|
migrated=()
|
||||||
|
|
||||||
|
while IFS= read -r line; do
|
||||||
|
# porcelain format: XY <path> — extract the file path
|
||||||
|
file="${line:3}"
|
||||||
|
# Only migrate files under skills/ subdir in the repo
|
||||||
|
if [[ "$file" == skills/* ]]; then
|
||||||
|
# Extract the skill directory name (first path component after skills/)
|
||||||
|
skill_name="${file#skills/}"
|
||||||
|
skill_name="${skill_name%%/*}"
|
||||||
|
|
||||||
|
# Skip if already migrated this skill in this run
|
||||||
|
local_skill_dir="$MOSAIC_LOCAL_SKILLS_DIR/$skill_name"
|
||||||
|
if [[ -d "$local_skill_dir" ]]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Skip if skill_name is empty or hidden
|
||||||
|
if [[ -z "$skill_name" || "$skill_name" == .* ]]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Copy the skill (with user's edits) from repo working tree to skills-local/
|
||||||
|
if [[ -d "$SOURCE_SKILLS_SUBDIR/$skill_name" ]]; then
|
||||||
|
cp -R "$SOURCE_SKILLS_SUBDIR/$skill_name" "$local_skill_dir"
|
||||||
|
migrated+=("$skill_name")
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
done <<< "$dirty"
|
||||||
|
|
||||||
|
if [[ ${#migrated[@]} -gt 0 ]]; then
|
||||||
|
echo "[mosaic-skills] Migrated ${#migrated[@]} customized skill(s) to skills-local/:"
|
||||||
|
for s in "${migrated[@]}"; do
|
||||||
|
echo " → $MOSAIC_LOCAL_SKILLS_DIR/$s"
|
||||||
|
done
|
||||||
|
echo "[mosaic-skills] Your edits are preserved there and take precedence over canonical."
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Reset repo to clean state so pull always works
|
||||||
|
echo "[mosaic-skills] Resetting source repo to clean state..."
|
||||||
|
git -C "$SKILLS_REPO_DIR" checkout . 2>/dev/null || true
|
||||||
|
git -C "$SKILLS_REPO_DIR" clean -fd 2>/dev/null || true
|
||||||
|
fi
|
||||||
|
|
||||||
|
if ! git -C "$SKILLS_REPO_DIR" pull --rebase 2>/dev/null; then
|
||||||
|
echo "[mosaic-skills] WARN: pull failed — continuing with existing checkout" >&2
|
||||||
|
git -C "$SKILLS_REPO_DIR" rebase --abort 2>/dev/null || true
|
||||||
|
fi
|
||||||
else
|
else
|
||||||
echo "[mosaic-skills] Cloning skills source to: $SKILLS_REPO_DIR"
|
echo "[mosaic-skills] Cloning skills source to: $SKILLS_REPO_DIR"
|
||||||
mkdir -p "$(dirname "$SKILLS_REPO_DIR")"
|
mkdir -p "$(dirname "$SKILLS_REPO_DIR")"
|
||||||
@@ -2,7 +2,7 @@
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
|
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
|
||||||
BOOTSTRAP_CMD="$MOSAIC_HOME/bin/mosaic-bootstrap-repo"
|
BOOTSTRAP_CMD="$MOSAIC_HOME/tools/_scripts/mosaic-bootstrap-repo"
|
||||||
|
|
||||||
roots=("$HOME/src")
|
roots=("$HOME/src")
|
||||||
apply=0
|
apply=0
|
||||||
@@ -80,11 +80,11 @@ echo -e "${C_CYAN}Capsule:${C_RESET} $(next_task_capsule_path "$PROJECT")"
|
|||||||
|
|
||||||
cd "$PROJECT"
|
cd "$PROJECT"
|
||||||
if [[ "$YOLO" == true ]]; then
|
if [[ "$YOLO" == true ]]; then
|
||||||
exec "$MOSAIC_HOME/bin/mosaic" yolo "$runtime" "$launch_prompt"
|
exec mosaic yolo "$runtime" "$launch_prompt"
|
||||||
elif [[ "$runtime" == "claude" ]]; then
|
elif [[ "$runtime" == "claude" ]]; then
|
||||||
exec "$MOSAIC_HOME/bin/mosaic" claude "$launch_prompt"
|
exec mosaic claude "$launch_prompt"
|
||||||
elif [[ "$runtime" == "codex" ]]; then
|
elif [[ "$runtime" == "codex" ]]; then
|
||||||
exec "$MOSAIC_HOME/bin/mosaic" codex "$launch_prompt"
|
exec mosaic codex "$launch_prompt"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo -e "${C_RED}Unsupported coord runtime: $runtime${C_RESET}" >&2
|
echo -e "${C_RED}Unsupported coord runtime: $runtime${C_RESET}" >&2
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/mosaic",
|
"name": "@mosaic/mosaic",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.10",
|
||||||
"description": "Mosaic agent framework — installation wizard and meta package",
|
"description": "Mosaic agent framework — installation wizard and meta package",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { readFileSync, existsSync } from 'node:fs';
|
import { readFileSync, existsSync, readdirSync, statSync, copyFileSync } from 'node:fs';
|
||||||
import { join } from 'node:path';
|
import { join } from 'node:path';
|
||||||
import type { ConfigService } from './config-service.js';
|
import type { ConfigService } from './config-service.js';
|
||||||
import type { SoulConfig, UserConfig, ToolsConfig, InstallAction } from '../types.js';
|
import type { SoulConfig, UserConfig, ToolsConfig, InstallAction } from '../types.js';
|
||||||
@@ -140,6 +140,23 @@ export class FileConfigAdapter implements ConfigService {
|
|||||||
preserve: preservePaths,
|
preserve: preservePaths,
|
||||||
excludeGit: true,
|
excludeGit: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Copy default root-level .md files (AGENTS.md, STANDARDS.md, etc.)
|
||||||
|
// from framework/defaults/ into mosaicHome root if they don't exist yet.
|
||||||
|
// These are framework contracts — only written on first install, never
|
||||||
|
// overwritten (user may have customized them).
|
||||||
|
const defaultsDir = join(this.sourceDir, 'defaults');
|
||||||
|
if (existsSync(defaultsDir)) {
|
||||||
|
for (const entry of readdirSync(defaultsDir)) {
|
||||||
|
const dest = join(this.mosaicHome, entry);
|
||||||
|
if (!existsSync(dest)) {
|
||||||
|
const src = join(defaultsDir, entry);
|
||||||
|
if (statSync(src).isFile()) {
|
||||||
|
copyFileSync(src, dest);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import { homedir } from 'node:os';
|
import { homedir } from 'node:os';
|
||||||
import { join } from 'node:path';
|
import { join } from 'node:path';
|
||||||
|
|
||||||
export const VERSION = '0.1.0';
|
export const VERSION = '0.0.2';
|
||||||
|
|
||||||
export const DEFAULT_MOSAIC_HOME = join(homedir(), '.config', 'mosaic');
|
export const DEFAULT_MOSAIC_HOME = join(homedir(), '.config', 'mosaic');
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
#!/usr/bin/env node
|
#!/usr/bin/env node
|
||||||
import { resolve } from 'node:path';
|
import { existsSync } from 'node:fs';
|
||||||
|
import { dirname, resolve } from 'node:path';
|
||||||
import { fileURLToPath } from 'node:url';
|
import { fileURLToPath } from 'node:url';
|
||||||
|
|
||||||
import { Command } from 'commander';
|
import { Command } from 'commander';
|
||||||
@@ -49,7 +50,14 @@ program
|
|||||||
.action(async (opts: Record<string, string | boolean | undefined>) => {
|
.action(async (opts: Record<string, string | boolean | undefined>) => {
|
||||||
try {
|
try {
|
||||||
const mosaicHome = (opts['mosaicHome'] as string) ?? DEFAULT_MOSAIC_HOME;
|
const mosaicHome = (opts['mosaicHome'] as string) ?? DEFAULT_MOSAIC_HOME;
|
||||||
const sourceDir = (opts['sourceDir'] as string | undefined) ?? mosaicHome;
|
// Default source to the framework/ dir bundled in this npm package.
|
||||||
|
// This ensures syncFramework copies AGENTS.md, STANDARDS.md, guides/, etc.
|
||||||
|
// Falls back to mosaicHome if the bundled dir doesn't exist (shouldn't happen).
|
||||||
|
const pkgRoot = dirname(fileURLToPath(import.meta.url));
|
||||||
|
const bundledFramework = resolve(pkgRoot, '..', 'framework');
|
||||||
|
const sourceDir =
|
||||||
|
(opts['sourceDir'] as string | undefined) ??
|
||||||
|
(existsSync(bundledFramework) ? bundledFramework : mosaicHome);
|
||||||
|
|
||||||
const prompter = opts['nonInteractive'] ? new HeadlessPrompter() : new ClackPrompter();
|
const prompter = opts['nonInteractive'] ? new HeadlessPrompter() : new ClackPrompter();
|
||||||
|
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ import {
|
|||||||
unlinkSync,
|
unlinkSync,
|
||||||
statSync,
|
statSync,
|
||||||
} from 'node:fs';
|
} from 'node:fs';
|
||||||
import { dirname, join, relative } from 'node:path';
|
import { dirname, join, relative, resolve } from 'node:path';
|
||||||
|
|
||||||
const MAX_BACKUPS = 3;
|
const MAX_BACKUPS = 3;
|
||||||
|
|
||||||
@@ -68,6 +68,9 @@ export function syncDirectory(
|
|||||||
target: string,
|
target: string,
|
||||||
options: { preserve?: string[]; excludeGit?: boolean } = {},
|
options: { preserve?: string[]; excludeGit?: boolean } = {},
|
||||||
): void {
|
): void {
|
||||||
|
// Guard: source and target are the same directory — nothing to sync
|
||||||
|
if (resolve(source) === resolve(target)) return;
|
||||||
|
|
||||||
const preserveSet = new Set(options.preserve ?? []);
|
const preserveSet = new Set(options.preserve ?? []);
|
||||||
|
|
||||||
// Collect files from source
|
// Collect files from source
|
||||||
@@ -77,9 +80,10 @@ export function syncDirectory(
|
|||||||
const stat = statSync(src);
|
const stat = statSync(src);
|
||||||
if (stat.isDirectory()) {
|
if (stat.isDirectory()) {
|
||||||
const relPath = relative(relBase, src);
|
const relPath = relative(relBase, src);
|
||||||
|
const dirName = relPath.split('/').pop() ?? '';
|
||||||
|
|
||||||
// Skip .git
|
// Skip any .git directory (top-level or nested, e.g. sources/agent-skills/.git)
|
||||||
if (options.excludeGit && relPath === '.git') return;
|
if (options.excludeGit && (dirName === '.git' || relPath.includes('/.git'))) return;
|
||||||
|
|
||||||
// Skip preserved paths at top level
|
// Skip preserved paths at top level
|
||||||
if (preserveSet.has(relPath) && existsSync(dest)) return;
|
if (preserveSet.has(relPath) && existsSync(dest)) return;
|
||||||
@@ -91,6 +95,9 @@ export function syncDirectory(
|
|||||||
} else {
|
} else {
|
||||||
const relPath = relative(relBase, src);
|
const relPath = relative(relBase, src);
|
||||||
|
|
||||||
|
// Skip files inside .git directories
|
||||||
|
if (options.excludeGit && relPath.includes('/.git/')) return;
|
||||||
|
|
||||||
// Skip preserved files at top level
|
// Skip preserved files at top level
|
||||||
if (preserveSet.has(relPath) && existsSync(dest)) return;
|
if (preserveSet.has(relPath) && existsSync(dest)) return;
|
||||||
|
|
||||||
|
|||||||
@@ -122,10 +122,18 @@ export function semverLt(a: string, b: string): boolean {
|
|||||||
|
|
||||||
// ─── Cache ──────────────────────────────────────────────────────────────────
|
// ─── Cache ──────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
function readCache(): UpdateCheckResult | null {
|
/** Cache stores only the latest registry version (the expensive network call).
|
||||||
|
* The installed version is always checked fresh — it's a local `npm ls`. */
|
||||||
|
interface RegistryCache {
|
||||||
|
latest: string;
|
||||||
|
checkedAt: string;
|
||||||
|
registry: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
function readCache(): RegistryCache | null {
|
||||||
try {
|
try {
|
||||||
if (!existsSync(CACHE_FILE)) return null;
|
if (!existsSync(CACHE_FILE)) return null;
|
||||||
const raw = JSON.parse(readFileSync(CACHE_FILE, 'utf-8')) as UpdateCheckResult;
|
const raw = JSON.parse(readFileSync(CACHE_FILE, 'utf-8')) as RegistryCache;
|
||||||
const age = Date.now() - new Date(raw.checkedAt).getTime();
|
const age = Date.now() - new Date(raw.checkedAt).getTime();
|
||||||
if (age > CACHE_TTL_MS) return null;
|
if (age > CACHE_TTL_MS) return null;
|
||||||
return raw;
|
return raw;
|
||||||
@@ -134,10 +142,10 @@ function readCache(): UpdateCheckResult | null {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function writeCache(result: UpdateCheckResult): void {
|
function writeCache(entry: RegistryCache): void {
|
||||||
try {
|
try {
|
||||||
mkdirSync(CACHE_DIR, { recursive: true });
|
mkdirSync(CACHE_DIR, { recursive: true });
|
||||||
writeFileSync(CACHE_FILE, JSON.stringify(result, null, 2) + '\n', 'utf-8');
|
writeFileSync(CACHE_FILE, JSON.stringify(entry, null, 2) + '\n', 'utf-8');
|
||||||
} catch {
|
} catch {
|
||||||
// Best-effort — cache is not critical
|
// Best-effort — cache is not critical
|
||||||
}
|
}
|
||||||
@@ -174,29 +182,40 @@ export function getLatestVersion(): string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Perform an update check — uses cache when fresh, otherwise hits registry.
|
* Perform an update check — uses registry cache when fresh, always checks
|
||||||
|
* installed version fresh (local npm ls is cheap, caching it causes stale
|
||||||
|
* "update available" banners after an upgrade).
|
||||||
* Never throws.
|
* Never throws.
|
||||||
*/
|
*/
|
||||||
export function checkForUpdate(options?: { skipCache?: boolean }): UpdateCheckResult {
|
export function checkForUpdate(options?: { skipCache?: boolean }): UpdateCheckResult {
|
||||||
|
const current = getInstalledVersion();
|
||||||
|
|
||||||
|
let latest: string;
|
||||||
|
let checkedAt: string;
|
||||||
|
|
||||||
if (!options?.skipCache) {
|
if (!options?.skipCache) {
|
||||||
const cached = readCache();
|
const cached = readCache();
|
||||||
if (cached) return cached;
|
if (cached) {
|
||||||
|
latest = cached.latest;
|
||||||
|
checkedAt = cached.checkedAt;
|
||||||
|
} else {
|
||||||
|
latest = getLatestVersion();
|
||||||
|
checkedAt = new Date().toISOString();
|
||||||
|
writeCache({ latest, checkedAt, registry: REGISTRY });
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
latest = getLatestVersion();
|
||||||
|
checkedAt = new Date().toISOString();
|
||||||
|
writeCache({ latest, checkedAt, registry: REGISTRY });
|
||||||
}
|
}
|
||||||
|
|
||||||
const current = getInstalledVersion();
|
return {
|
||||||
const latest = getLatestVersion();
|
|
||||||
const updateAvailable = !!(current && latest && semverLt(current, latest));
|
|
||||||
|
|
||||||
const result: UpdateCheckResult = {
|
|
||||||
current,
|
current,
|
||||||
latest,
|
latest,
|
||||||
updateAvailable,
|
updateAvailable: !!(current && latest && semverLt(current, latest)),
|
||||||
checkedAt: new Date().toISOString(),
|
checkedAt,
|
||||||
registry: REGISTRY,
|
registry: REGISTRY,
|
||||||
};
|
};
|
||||||
|
|
||||||
writeCache(result);
|
|
||||||
return result;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -87,7 +87,9 @@ export async function detectInstallStage(
|
|||||||
],
|
],
|
||||||
});
|
});
|
||||||
|
|
||||||
if (state.installAction === 'keep') {
|
if (state.installAction === 'keep' || state.installAction === 'reconfigure') {
|
||||||
|
// Load existing values — for 'keep' they're final, for 'reconfigure'
|
||||||
|
// they become pre-populated defaults so the user can tweak them.
|
||||||
state.soul = await config.readSoul();
|
state.soul = await config.readSoul();
|
||||||
state.user = await config.readUser();
|
state.user = await config.readUser();
|
||||||
state.tools = await config.readTools();
|
state.tools = await config.readTools();
|
||||||
|
|||||||
@@ -24,6 +24,18 @@ export async function soulSetupStage(p: WizardPrompter, state: WizardState): Pro
|
|||||||
return undefined;
|
return undefined;
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
} else if (state.installAction === 'reconfigure') {
|
||||||
|
// Show existing value as default so the user can accept or change it
|
||||||
|
state.soul.agentName = await p.text({
|
||||||
|
message: 'What name should agents use?',
|
||||||
|
placeholder: state.soul.agentName,
|
||||||
|
defaultValue: state.soul.agentName,
|
||||||
|
validate: (v) => {
|
||||||
|
if (v.length === 0) return 'Name cannot be empty';
|
||||||
|
if (v.length > 50) return 'Name must be under 50 characters';
|
||||||
|
return undefined;
|
||||||
|
},
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
if (state.mode === 'advanced') {
|
if (state.mode === 'advanced') {
|
||||||
@@ -38,7 +50,7 @@ export async function soulSetupStage(p: WizardPrompter, state: WizardState): Pro
|
|||||||
state.soul.roleDescription ??= DEFAULTS.roleDescription;
|
state.soul.roleDescription ??= DEFAULTS.roleDescription;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!state.soul.communicationStyle) {
|
if (!state.soul.communicationStyle || state.installAction === 'reconfigure') {
|
||||||
state.soul.communicationStyle = await p.select<CommunicationStyle>({
|
state.soul.communicationStyle = await p.select<CommunicationStyle>({
|
||||||
message: 'Communication style',
|
message: 'Communication style',
|
||||||
options: [
|
options: [
|
||||||
@@ -46,7 +58,7 @@ export async function soulSetupStage(p: WizardPrompter, state: WizardState): Pro
|
|||||||
{ value: 'friendly', label: 'Friendly', hint: 'Warm but efficient, conversational' },
|
{ value: 'friendly', label: 'Friendly', hint: 'Warm but efficient, conversational' },
|
||||||
{ value: 'formal', label: 'Formal', hint: 'Professional, structured, thorough' },
|
{ value: 'formal', label: 'Formal', hint: 'Professional, structured, thorough' },
|
||||||
],
|
],
|
||||||
initialValue: 'direct',
|
initialValue: state.soul.communicationStyle ?? 'direct',
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/prdy",
|
"name": "@mosaic/prdy",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.2",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/quality-rails",
|
"name": "@mosaic/quality-rails",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.3",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
@@ -17,7 +17,7 @@
|
|||||||
"test": "vitest run --passWithNoTests"
|
"test": "vitest run --passWithNoTests"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"commander": "^12.0.0"
|
"commander": "^13.0.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/node": "^22.0.0",
|
"@types/node": "^22.0.0",
|
||||||
|
|||||||
@@ -106,12 +106,26 @@ function printScaffoldResult(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register quality-rails subcommands on an existing Commander program.
|
||||||
|
* This avoids cross-package Commander version mismatches by using the
|
||||||
|
* caller's Command instance directly.
|
||||||
|
*/
|
||||||
|
export function registerQualityRails(parent: Command): void {
|
||||||
|
buildQualityRailsCommand(
|
||||||
|
parent.command('quality-rails').description('Manage quality rails scaffolding'),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
export function createQualityRailsCli(): Command {
|
export function createQualityRailsCli(): Command {
|
||||||
const program = new Command('mosaic');
|
const program = new Command('mosaic');
|
||||||
const qualityRails = program
|
buildQualityRailsCommand(
|
||||||
.command('quality-rails')
|
program.command('quality-rails').description('Manage quality rails scaffolding'),
|
||||||
.description('Manage quality rails scaffolding');
|
);
|
||||||
|
return program;
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildQualityRailsCommand(qualityRails: Command): void {
|
||||||
qualityRails
|
qualityRails
|
||||||
.command('init')
|
.command('init')
|
||||||
.requiredOption('--project <path>', 'Project path')
|
.requiredOption('--project <path>', 'Project path')
|
||||||
@@ -184,8 +198,6 @@ export function createQualityRailsCli(): Command {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
return program;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function runQualityRailsCli(argv: string[] = process.argv): Promise<void> {
|
export async function runQualityRailsCli(argv: string[] = process.argv): Promise<void> {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/queue",
|
"name": "@mosaic/queue",
|
||||||
"version": "0.0.1-alpha.2",
|
"version": "0.0.2",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
|
|||||||
50
packages/queue/src/adapters/bullmq.ts
Normal file
50
packages/queue/src/adapters/bullmq.ts
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
import Redis from 'ioredis';
|
||||||
|
|
||||||
|
import type { QueueAdapter, QueueConfig, TaskPayload } from '../types.js';
|
||||||
|
|
||||||
|
const DEFAULT_VALKEY_URL = 'redis://localhost:6380';
|
||||||
|
|
||||||
|
export function createBullMQAdapter(config: QueueConfig): QueueAdapter {
|
||||||
|
if (config.type !== 'bullmq') {
|
||||||
|
throw new Error(`Expected config type "bullmq", got "${config.type}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const url = config.url ?? process.env['VALKEY_URL'] ?? DEFAULT_VALKEY_URL;
|
||||||
|
const redis = new Redis(url, { maxRetriesPerRequest: 3 });
|
||||||
|
|
||||||
|
return {
|
||||||
|
name: 'bullmq',
|
||||||
|
|
||||||
|
async enqueue(queueName: string, payload: TaskPayload): Promise<void> {
|
||||||
|
await redis.lpush(queueName, JSON.stringify(payload));
|
||||||
|
},
|
||||||
|
|
||||||
|
async dequeue(queueName: string): Promise<TaskPayload | null> {
|
||||||
|
const item = await redis.rpop(queueName);
|
||||||
|
if (!item) return null;
|
||||||
|
return JSON.parse(item) as TaskPayload;
|
||||||
|
},
|
||||||
|
|
||||||
|
async length(queueName: string): Promise<number> {
|
||||||
|
return redis.llen(queueName);
|
||||||
|
},
|
||||||
|
|
||||||
|
async publish(channel: string, message: string): Promise<void> {
|
||||||
|
await redis.publish(channel, message);
|
||||||
|
},
|
||||||
|
|
||||||
|
subscribe(channel: string, handler: (message: string) => void): () => void {
|
||||||
|
const sub = redis.duplicate();
|
||||||
|
sub.subscribe(channel).catch(() => {});
|
||||||
|
sub.on('message', (_ch: string, msg: string) => handler(msg));
|
||||||
|
return () => {
|
||||||
|
sub.unsubscribe(channel).catch(() => {});
|
||||||
|
sub.disconnect();
|
||||||
|
};
|
||||||
|
},
|
||||||
|
|
||||||
|
async close(): Promise<void> {
|
||||||
|
await redis.quit();
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
81
packages/queue/src/adapters/local.test.ts
Normal file
81
packages/queue/src/adapters/local.test.ts
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
import { mkdtempSync, rmSync } from 'node:fs';
|
||||||
|
import { join } from 'node:path';
|
||||||
|
import { tmpdir } from 'node:os';
|
||||||
|
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||||
|
|
||||||
|
import type { TaskPayload } from '../types.js';
|
||||||
|
import { createLocalAdapter } from './local.js';
|
||||||
|
|
||||||
|
function makePayload(id: string): TaskPayload {
|
||||||
|
return { id, type: 'test', data: { value: id }, createdAt: new Date().toISOString() };
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('LocalAdapter', () => {
|
||||||
|
let dataDir: string;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
dataDir = mkdtempSync(join(tmpdir(), 'mosaic-queue-test-'));
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
rmSync(dataDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('enqueue + dequeue in FIFO order', async () => {
|
||||||
|
const adapter = createLocalAdapter({ type: 'local', dataDir });
|
||||||
|
const a = makePayload('a');
|
||||||
|
const b = makePayload('b');
|
||||||
|
const c = makePayload('c');
|
||||||
|
|
||||||
|
await adapter.enqueue('tasks', a);
|
||||||
|
await adapter.enqueue('tasks', b);
|
||||||
|
await adapter.enqueue('tasks', c);
|
||||||
|
|
||||||
|
expect(await adapter.dequeue('tasks')).toEqual(a);
|
||||||
|
expect(await adapter.dequeue('tasks')).toEqual(b);
|
||||||
|
expect(await adapter.dequeue('tasks')).toEqual(c);
|
||||||
|
expect(await adapter.dequeue('tasks')).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('length accuracy', async () => {
|
||||||
|
const adapter = createLocalAdapter({ type: 'local', dataDir });
|
||||||
|
|
||||||
|
expect(await adapter.length('q')).toBe(0);
|
||||||
|
await adapter.enqueue('q', makePayload('1'));
|
||||||
|
await adapter.enqueue('q', makePayload('2'));
|
||||||
|
expect(await adapter.length('q')).toBe(2);
|
||||||
|
await adapter.dequeue('q');
|
||||||
|
expect(await adapter.length('q')).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('publish + subscribe delivery', async () => {
|
||||||
|
const adapter = createLocalAdapter({ type: 'local', dataDir });
|
||||||
|
const received: string[] = [];
|
||||||
|
|
||||||
|
const unsub = adapter.subscribe('chan', (msg) => received.push(msg));
|
||||||
|
await adapter.publish('chan', 'hello');
|
||||||
|
await adapter.publish('chan', 'world');
|
||||||
|
|
||||||
|
expect(received).toEqual(['hello', 'world']);
|
||||||
|
|
||||||
|
unsub();
|
||||||
|
await adapter.publish('chan', 'after-unsub');
|
||||||
|
expect(received).toEqual(['hello', 'world']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('persistence survives close and re-create', async () => {
|
||||||
|
const p1 = makePayload('x');
|
||||||
|
const p2 = makePayload('y');
|
||||||
|
|
||||||
|
const adapter1 = createLocalAdapter({ type: 'local', dataDir });
|
||||||
|
await adapter1.enqueue('persist-q', p1);
|
||||||
|
await adapter1.enqueue('persist-q', p2);
|
||||||
|
await adapter1.close();
|
||||||
|
|
||||||
|
const adapter2 = createLocalAdapter({ type: 'local', dataDir });
|
||||||
|
expect(await adapter2.length('persist-q')).toBe(2);
|
||||||
|
expect(await adapter2.dequeue('persist-q')).toEqual(p1);
|
||||||
|
expect(await adapter2.dequeue('persist-q')).toEqual(p2);
|
||||||
|
await adapter2.close();
|
||||||
|
});
|
||||||
|
});
|
||||||
87
packages/queue/src/adapters/local.ts
Normal file
87
packages/queue/src/adapters/local.ts
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
import { mkdirSync, readFileSync, readdirSync, writeFileSync } from 'node:fs';
|
||||||
|
import { join } from 'node:path';
|
||||||
|
import { EventEmitter } from 'node:events';
|
||||||
|
|
||||||
|
import type { QueueAdapter, QueueConfig, TaskPayload } from '../types.js';
|
||||||
|
|
||||||
|
const DEFAULT_DATA_DIR = '.mosaic/queue';
|
||||||
|
|
||||||
|
export function createLocalAdapter(config: QueueConfig): QueueAdapter {
|
||||||
|
if (config.type !== 'local') {
|
||||||
|
throw new Error(`Expected config type "local", got "${config.type}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const dataDir = config.dataDir ?? DEFAULT_DATA_DIR;
|
||||||
|
const queues = new Map<string, TaskPayload[]>();
|
||||||
|
const emitter = new EventEmitter();
|
||||||
|
|
||||||
|
mkdirSync(dataDir, { recursive: true });
|
||||||
|
|
||||||
|
// Load existing JSON files on startup
|
||||||
|
for (const file of readdirSync(dataDir)) {
|
||||||
|
if (!file.endsWith('.json')) continue;
|
||||||
|
const queueName = file.slice(0, -5);
|
||||||
|
try {
|
||||||
|
const raw = readFileSync(join(dataDir, file), 'utf-8');
|
||||||
|
const items = JSON.parse(raw) as TaskPayload[];
|
||||||
|
if (Array.isArray(items)) {
|
||||||
|
queues.set(queueName, items);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Ignore corrupt files
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function persist(queueName: string): void {
|
||||||
|
const items = queues.get(queueName) ?? [];
|
||||||
|
writeFileSync(join(dataDir, `${queueName}.json`), JSON.stringify(items), 'utf-8');
|
||||||
|
}
|
||||||
|
|
||||||
|
function getQueue(queueName: string): TaskPayload[] {
|
||||||
|
let q = queues.get(queueName);
|
||||||
|
if (!q) {
|
||||||
|
q = [];
|
||||||
|
queues.set(queueName, q);
|
||||||
|
}
|
||||||
|
return q;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
name: 'local',
|
||||||
|
|
||||||
|
async enqueue(queueName: string, payload: TaskPayload): Promise<void> {
|
||||||
|
getQueue(queueName).push(payload);
|
||||||
|
persist(queueName);
|
||||||
|
},
|
||||||
|
|
||||||
|
async dequeue(queueName: string): Promise<TaskPayload | null> {
|
||||||
|
const q = getQueue(queueName);
|
||||||
|
const item = q.shift() ?? null;
|
||||||
|
persist(queueName);
|
||||||
|
return item;
|
||||||
|
},
|
||||||
|
|
||||||
|
async length(queueName: string): Promise<number> {
|
||||||
|
return getQueue(queueName).length;
|
||||||
|
},
|
||||||
|
|
||||||
|
async publish(channel: string, message: string): Promise<void> {
|
||||||
|
emitter.emit(channel, message);
|
||||||
|
},
|
||||||
|
|
||||||
|
subscribe(channel: string, handler: (message: string) => void): () => void {
|
||||||
|
emitter.on(channel, handler);
|
||||||
|
return () => {
|
||||||
|
emitter.off(channel, handler);
|
||||||
|
};
|
||||||
|
},
|
||||||
|
|
||||||
|
async close(): Promise<void> {
|
||||||
|
for (const queueName of queues.keys()) {
|
||||||
|
persist(queueName);
|
||||||
|
}
|
||||||
|
queues.clear();
|
||||||
|
emitter.removeAllListeners();
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
18
packages/queue/src/factory.ts
Normal file
18
packages/queue/src/factory.ts
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
import type { QueueAdapter, QueueConfig } from './types.js';
|
||||||
|
|
||||||
|
type QueueType = QueueConfig['type'];
|
||||||
|
|
||||||
|
const registry = new Map<QueueType, (config: QueueConfig) => QueueAdapter>();
|
||||||
|
|
||||||
|
export function registerQueueAdapter(
|
||||||
|
type: QueueType,
|
||||||
|
factory: (config: QueueConfig) => QueueAdapter,
|
||||||
|
): void {
|
||||||
|
registry.set(type, factory);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createQueueAdapter(config: QueueConfig): QueueAdapter {
|
||||||
|
const factory = registry.get(config.type);
|
||||||
|
if (!factory) throw new Error(`No adapter registered for type: ${config.type}`);
|
||||||
|
return factory(config);
|
||||||
|
}
|
||||||
@@ -6,3 +6,15 @@ export {
|
|||||||
type QueueClient,
|
type QueueClient,
|
||||||
type TaskPayload,
|
type TaskPayload,
|
||||||
} from './queue.js';
|
} from './queue.js';
|
||||||
|
|
||||||
|
export { type QueueAdapter, type QueueConfig as QueueAdapterConfig } from './types.js';
|
||||||
|
export { createQueueAdapter, registerQueueAdapter } from './factory.js';
|
||||||
|
export { createBullMQAdapter } from './adapters/bullmq.js';
|
||||||
|
export { createLocalAdapter } from './adapters/local.js';
|
||||||
|
|
||||||
|
import { registerQueueAdapter } from './factory.js';
|
||||||
|
import { createBullMQAdapter } from './adapters/bullmq.js';
|
||||||
|
import { createLocalAdapter } from './adapters/local.js';
|
||||||
|
|
||||||
|
registerQueueAdapter('bullmq', createBullMQAdapter);
|
||||||
|
registerQueueAdapter('local', createLocalAdapter);
|
||||||
|
|||||||
18
packages/queue/src/types.ts
Normal file
18
packages/queue/src/types.ts
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
export interface TaskPayload {
|
||||||
|
id: string;
|
||||||
|
type: string;
|
||||||
|
data: Record<string, unknown>;
|
||||||
|
createdAt: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface QueueAdapter {
|
||||||
|
readonly name: string;
|
||||||
|
enqueue(queueName: string, payload: TaskPayload): Promise<void>;
|
||||||
|
dequeue(queueName: string): Promise<TaskPayload | null>;
|
||||||
|
length(queueName: string): Promise<number>;
|
||||||
|
publish(channel: string, message: string): Promise<void>;
|
||||||
|
subscribe(channel: string, handler: (message: string) => void): () => void;
|
||||||
|
close(): Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type QueueConfig = { type: 'bullmq'; url?: string } | { type: 'local'; dataDir?: string };
|
||||||
35
packages/storage/package.json
Normal file
35
packages/storage/package.json
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
{
|
||||||
|
"name": "@mosaic/storage",
|
||||||
|
"version": "0.0.2",
|
||||||
|
"main": "dist/index.js",
|
||||||
|
"types": "dist/index.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": {
|
||||||
|
"types": "./dist/index.d.ts",
|
||||||
|
"default": "./dist/index.js"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"build": "tsc",
|
||||||
|
"lint": "eslint src",
|
||||||
|
"typecheck": "tsc --noEmit",
|
||||||
|
"test": "vitest run --passWithNoTests"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@mosaic/db": "workspace:^",
|
||||||
|
"@mosaic/types": "workspace:*",
|
||||||
|
"better-sqlite3": "^12.8.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/better-sqlite3": "^7.6.13",
|
||||||
|
"typescript": "^5.8.0",
|
||||||
|
"vitest": "^2.0.0"
|
||||||
|
},
|
||||||
|
"publishConfig": {
|
||||||
|
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||||
|
"access": "public"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
]
|
||||||
|
}
|
||||||
252
packages/storage/src/adapters/postgres.ts
Normal file
252
packages/storage/src/adapters/postgres.ts
Normal file
@@ -0,0 +1,252 @@
|
|||||||
|
import {
|
||||||
|
createDb,
|
||||||
|
runMigrations,
|
||||||
|
eq,
|
||||||
|
and,
|
||||||
|
asc,
|
||||||
|
desc,
|
||||||
|
sql,
|
||||||
|
type Db,
|
||||||
|
type DbHandle,
|
||||||
|
} from '@mosaic/db';
|
||||||
|
import * as schema from '@mosaic/db';
|
||||||
|
import type { StorageAdapter, StorageConfig } from '../types.js';
|
||||||
|
|
||||||
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Maps collection name → Drizzle table object.
|
||||||
|
* Typed as `any` because the generic StorageAdapter interface erases table
|
||||||
|
* types — all runtime values are still strongly-typed Drizzle table objects.
|
||||||
|
*/
|
||||||
|
const TABLE_MAP: Record<string, any> = {
|
||||||
|
users: schema.users,
|
||||||
|
sessions: schema.sessions,
|
||||||
|
accounts: schema.accounts,
|
||||||
|
verifications: schema.verifications,
|
||||||
|
teams: schema.teams,
|
||||||
|
team_members: schema.teamMembers,
|
||||||
|
projects: schema.projects,
|
||||||
|
missions: schema.missions,
|
||||||
|
tasks: schema.tasks,
|
||||||
|
mission_tasks: schema.missionTasks,
|
||||||
|
events: schema.events,
|
||||||
|
agents: schema.agents,
|
||||||
|
tickets: schema.tickets,
|
||||||
|
appreciations: schema.appreciations,
|
||||||
|
conversations: schema.conversations,
|
||||||
|
messages: schema.messages,
|
||||||
|
preferences: schema.preferences,
|
||||||
|
insights: schema.insights,
|
||||||
|
agent_logs: schema.agentLogs,
|
||||||
|
skills: schema.skills,
|
||||||
|
routing_rules: schema.routingRules,
|
||||||
|
provider_credentials: schema.providerCredentials,
|
||||||
|
summarization_jobs: schema.summarizationJobs,
|
||||||
|
};
|
||||||
|
|
||||||
|
function getTable(collection: string): any {
|
||||||
|
const table = TABLE_MAP[collection];
|
||||||
|
if (!table) throw new Error(`Unknown collection: ${collection}`);
|
||||||
|
return table;
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildWhereClause(table: any, filter?: Record<string, unknown>) {
|
||||||
|
if (!filter || Object.keys(filter).length === 0) return undefined;
|
||||||
|
const conditions = Object.entries(filter).map(([key, value]) => {
|
||||||
|
const column = table[key];
|
||||||
|
if (!column) throw new Error(`Unknown column "${key}" on table`);
|
||||||
|
return eq(column, value);
|
||||||
|
});
|
||||||
|
return conditions.length === 1 ? conditions[0]! : and(...conditions);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Postgres-backed StorageAdapter built on Drizzle.
 *
 * Collection names are resolved to Drizzle table objects via getTable().
 * The `as any` casts exist because the generic StorageAdapter interface
 * erases per-table types; the underlying Drizzle calls remain type-safe at
 * the schema level.
 */
export class PostgresAdapter implements StorageAdapter {
  readonly name = 'postgres';
  // Handle returned by createDb(); owns the connection and is disposed in close().
  private handle: DbHandle;
  private db: Db;
  // Connection string, retained for runMigrations() and the tx wrapper.
  private url: string;

  constructor(config: Extract<StorageConfig, { type: 'postgres' }>) {
    this.url = config.url;
    this.handle = createDb(config.url);
    this.db = this.handle.db;
  }

  /** Inserts one row and returns it as stored (including generated fields such as id). */
  async create<T extends Record<string, unknown>>(
    collection: string,
    data: T,
  ): Promise<T & { id: string }> {
    const table = getTable(collection);
    const [row] = await (this.db as any).insert(table).values(data).returning();
    return row as T & { id: string };
  }

  /** Fetches a single row by primary key; null when no row matches. */
  async read<T extends Record<string, unknown>>(collection: string, id: string): Promise<T | null> {
    const table = getTable(collection);
    const [row] = await (this.db as any).select().from(table).where(eq(table.id, id));
    return (row as T) ?? null;
  }

  /**
   * Partially updates the row with the given id.
   * @returns true when a row was updated, false when the id did not exist
   *   (detected via the RETURNING row count).
   */
  async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
    const table = getTable(collection);
    const result = await (this.db as any)
      .update(table)
      .set(data)
      .where(eq(table.id, id))
      .returning();
    return result.length > 0;
  }

  /** Deletes by id; true when a row was actually removed. */
  async delete(collection: string, id: string): Promise<boolean> {
    const table = getTable(collection);
    const result = await (this.db as any).delete(table).where(eq(table.id, id)).returning();
    return result.length > 0;
  }

  /**
   * Lists rows matching an equality filter, with optional ordering/paging.
   * NOTE(review): an `orderBy` naming a non-existent column is silently
   * ignored (no throw) — confirm that is the intended contract.
   */
  async find<T extends Record<string, unknown>>(
    collection: string,
    filter?: Record<string, unknown>,
    opts?: { limit?: number; offset?: number; orderBy?: string; order?: 'asc' | 'desc' },
  ): Promise<T[]> {
    const table = getTable(collection);
    let query = (this.db as any).select().from(table);
    const where = buildWhereClause(table, filter);
    if (where) query = query.where(where);
    if (opts?.orderBy) {
      const col = table[opts.orderBy];
      if (col) {
        query = query.orderBy(opts.order === 'desc' ? desc(col) : asc(col));
      }
    }
    if (opts?.limit) query = query.limit(opts.limit);
    if (opts?.offset) query = query.offset(opts.offset);
    return (await query) as T[];
  }

  /** First row matching the filter, or null. Implemented as find(..., limit 1). */
  async findOne<T extends Record<string, unknown>>(
    collection: string,
    filter: Record<string, unknown>,
  ): Promise<T | null> {
    const results = await this.find<T>(collection, filter, { limit: 1 });
    return results[0] ?? null;
  }

  /** Counts rows matching the filter. `::int` keeps the result a JS number. */
  async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
    const table = getTable(collection);
    let query = (this.db as any).select({ count: sql<number>`count(*)::int` }).from(table);
    const where = buildWhereClause(table, filter);
    if (where) query = query.where(where);
    const [row] = await query;
    return (row as any)?.count ?? 0;
  }

  /**
   * Runs `fn` inside a Drizzle transaction; all operations performed through
   * the PostgresTxAdapter passed to `fn` share that transaction.
   */
  async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
    return (this.db as any).transaction(async (drizzleTx: any) => {
      const txAdapter = new PostgresTxAdapter(drizzleTx, this.url);
      return fn(txAdapter);
    });
  }

  /** Applies pending schema migrations for this database URL. */
  async migrate(): Promise<void> {
    await runMigrations(this.url);
  }

  /** Releases the underlying connection handle. */
  async close(): Promise<void> {
    await this.handle.close();
  }
}
|
||||||
|
|
||||||
|
/**
 * Thin transaction wrapper — delegates to the Drizzle transaction object
 * instead of the top-level db handle. Mirrors PostgresAdapter method for
 * method; only the execution target (`this.tx`) differs, so every operation
 * participates in the enclosing transaction.
 */
class PostgresTxAdapter implements StorageAdapter {
  readonly name = 'postgres';
  // Drizzle transaction object supplied by PostgresAdapter.transaction().
  private tx: any;
  // Connection string, kept only so migrate() can delegate to runMigrations().
  private url: string;

  constructor(tx: any, url: string) {
    this.tx = tx;
    this.url = url;
  }

  /** Inserts one row inside the transaction and returns it as stored. */
  async create<T extends Record<string, unknown>>(
    collection: string,
    data: T,
  ): Promise<T & { id: string }> {
    const table = getTable(collection);
    const [row] = await this.tx.insert(table).values(data).returning();
    return row as T & { id: string };
  }

  /** Fetches a single row by primary key; null when no row matches. */
  async read<T extends Record<string, unknown>>(collection: string, id: string): Promise<T | null> {
    const table = getTable(collection);
    const [row] = await this.tx.select().from(table).where(eq(table.id, id));
    return (row as T) ?? null;
  }

  /** Partially updates by id; true when a row was updated (RETURNING count). */
  async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
    const table = getTable(collection);
    const result = await this.tx.update(table).set(data).where(eq(table.id, id)).returning();
    return result.length > 0;
  }

  /** Deletes by id; true when a row was actually removed. */
  async delete(collection: string, id: string): Promise<boolean> {
    const table = getTable(collection);
    const result = await this.tx.delete(table).where(eq(table.id, id)).returning();
    return result.length > 0;
  }

  /**
   * Lists rows matching an equality filter, with optional ordering/paging.
   * An unknown `orderBy` column is silently ignored, matching PostgresAdapter.
   */
  async find<T extends Record<string, unknown>>(
    collection: string,
    filter?: Record<string, unknown>,
    opts?: { limit?: number; offset?: number; orderBy?: string; order?: 'asc' | 'desc' },
  ): Promise<T[]> {
    const table = getTable(collection);
    let query = this.tx.select().from(table);
    const where = buildWhereClause(table, filter);
    if (where) query = query.where(where);
    if (opts?.orderBy) {
      const col = table[opts.orderBy];
      if (col) {
        query = query.orderBy(opts.order === 'desc' ? desc(col) : asc(col));
      }
    }
    if (opts?.limit) query = query.limit(opts.limit);
    if (opts?.offset) query = query.offset(opts.offset);
    return (await query) as T[];
  }

  /** First row matching the filter, or null. */
  async findOne<T extends Record<string, unknown>>(
    collection: string,
    filter: Record<string, unknown>,
  ): Promise<T | null> {
    const results = await this.find<T>(collection, filter, { limit: 1 });
    return results[0] ?? null;
  }

  /** Counts rows matching the filter within the transaction. */
  async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
    const table = getTable(collection);
    let query = this.tx.select({ count: sql<number>`count(*)::int` }).from(table);
    const where = buildWhereClause(table, filter);
    if (where) query = query.where(where);
    const [row] = await query;
    return (row as any)?.count ?? 0;
  }

  /** Starts a nested transaction via Drizzle's tx.transaction(). */
  async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
    return this.tx.transaction(async (nestedTx: any) => {
      const nested = new PostgresTxAdapter(nestedTx, this.url);
      return fn(nested);
    });
  }

  /**
   * Runs migrations against the stored URL.
   * NOTE(review): this opens its own connection rather than using `this.tx`,
   * so it does NOT run inside the current transaction — confirm callers
   * never rely on transactional migration here.
   */
  async migrate(): Promise<void> {
    await runMigrations(this.url);
  }

  async close(): Promise<void> {
    // No-op inside a transaction
  }
}
|
||||||
201
packages/storage/src/adapters/sqlite.test.ts
Normal file
201
packages/storage/src/adapters/sqlite.test.ts
Normal file
@@ -0,0 +1,201 @@
|
|||||||
|
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||||
|
import { SqliteAdapter } from './sqlite.js';
|
||||||
|
|
||||||
|
// Integration tests for SqliteAdapter. Each test runs against a fresh
// in-memory database, so tests are fully isolated from one another.
describe('SqliteAdapter', () => {
  let adapter: SqliteAdapter;

  // New ':memory:' database with all collection tables created.
  beforeEach(async () => {
    adapter = new SqliteAdapter({ type: 'sqlite', path: ':memory:' });
    await adapter.migrate();
  });

  afterEach(async () => {
    await adapter.close();
  });

  describe('CRUD', () => {
    it('creates and reads a record', async () => {
      const created = await adapter.create('users', { name: 'Alice', email: 'alice@test.com' });
      // id is generated by the adapter when the caller does not supply one.
      expect(created.id).toBeDefined();
      expect(created.name).toBe('Alice');

      const read = await adapter.read('users', created.id);
      expect(read).not.toBeNull();
      expect(read!.name).toBe('Alice');
      expect(read!.email).toBe('alice@test.com');
    });

    it('returns null for non-existent record', async () => {
      const result = await adapter.read('users', 'does-not-exist');
      expect(result).toBeNull();
    });

    it('updates a record', async () => {
      const created = await adapter.create('users', { name: 'Alice' });
      const updated = await adapter.update('users', created.id, { name: 'Bob' });
      expect(updated).toBe(true);

      const read = await adapter.read('users', created.id);
      expect(read!.name).toBe('Bob');
    });

    it('update returns false for non-existent record', async () => {
      const result = await adapter.update('users', 'does-not-exist', { name: 'X' });
      expect(result).toBe(false);
    });

    it('deletes a record', async () => {
      const created = await adapter.create('users', { name: 'Alice' });
      const deleted = await adapter.delete('users', created.id);
      expect(deleted).toBe(true);

      const read = await adapter.read('users', created.id);
      expect(read).toBeNull();
    });

    it('delete returns false for non-existent record', async () => {
      const result = await adapter.delete('users', 'does-not-exist');
      expect(result).toBe(false);
    });
  });

  describe('find', () => {
    it('finds records with filter', async () => {
      await adapter.create('users', { name: 'Alice', role: 'admin' });
      await adapter.create('users', { name: 'Bob', role: 'user' });
      await adapter.create('users', { name: 'Charlie', role: 'admin' });

      const admins = await adapter.find('users', { role: 'admin' });
      expect(admins).toHaveLength(2);
      // Sorted copy so insertion order cannot make the assertion flaky.
      expect(admins.map((u) => u.name).sort()).toEqual(['Alice', 'Charlie']);
    });

    it('finds all records without filter', async () => {
      await adapter.create('users', { name: 'Alice' });
      await adapter.create('users', { name: 'Bob' });

      const all = await adapter.find('users');
      expect(all).toHaveLength(2);
    });

    it('supports limit and offset', async () => {
      for (let i = 0; i < 5; i++) {
        await adapter.create('users', { name: `User${i}`, idx: i });
      }

      // NOTE(review): created_at values may tie for records created within the
      // same millisecond; only the page length is asserted, so ties are safe.
      const page = await adapter.find('users', undefined, {
        limit: 2,
        offset: 1,
        orderBy: 'created_at',
      });
      expect(page).toHaveLength(2);
    });

    it('findOne returns first match', async () => {
      await adapter.create('users', { name: 'Alice', role: 'admin' });
      await adapter.create('users', { name: 'Bob', role: 'user' });

      const found = await adapter.findOne('users', { role: 'user' });
      expect(found).not.toBeNull();
      expect(found!.name).toBe('Bob');
    });

    it('findOne returns null when no match', async () => {
      const result = await adapter.findOne('users', { role: 'nonexistent' });
      expect(result).toBeNull();
    });
  });

  describe('count', () => {
    it('counts all records', async () => {
      await adapter.create('users', { name: 'Alice' });
      await adapter.create('users', { name: 'Bob' });

      const total = await adapter.count('users');
      expect(total).toBe(2);
    });

    it('counts with filter', async () => {
      await adapter.create('users', { name: 'Alice', role: 'admin' });
      await adapter.create('users', { name: 'Bob', role: 'user' });
      await adapter.create('users', { name: 'Charlie', role: 'admin' });

      const adminCount = await adapter.count('users', { role: 'admin' });
      expect(adminCount).toBe(2);
    });

    it('returns 0 for empty collection', async () => {
      const count = await adapter.count('users');
      expect(count).toBe(0);
    });
  });

  describe('transaction', () => {
    it('commits on success', async () => {
      await adapter.transaction(async (tx) => {
        await tx.create('users', { name: 'Alice' });
        await tx.create('users', { name: 'Bob' });
      });

      const count = await adapter.count('users');
      expect(count).toBe(2);
    });

    it('rolls back on error', async () => {
      await expect(
        adapter.transaction(async (tx) => {
          await tx.create('users', { name: 'Alice' });
          throw new Error('rollback test');
        }),
      ).rejects.toThrow('rollback test');

      // The create inside the failed transaction must not be visible.
      const count = await adapter.count('users');
      expect(count).toBe(0);
    });
  });

  describe('migrate', () => {
    it('creates all tables', async () => {
      // migrate() was already called in beforeEach — verify tables exist
      const collections = [
        'users',
        'sessions',
        'accounts',
        'projects',
        'missions',
        'tasks',
        'agents',
        'conversations',
        'messages',
        'preferences',
        'insights',
        'skills',
        'events',
        'routing_rules',
        'provider_credentials',
        'agent_logs',
        'teams',
        'team_members',
        'mission_tasks',
        'tickets',
        'summarization_jobs',
        'appreciations',
        'verifications',
      ];

      for (const collection of collections) {
        // Should not throw
        const count = await adapter.count(collection);
        expect(count).toBe(0);
      }
    });

    it('is idempotent', async () => {
      await adapter.migrate();
      await adapter.migrate();
      // Should not throw
      const count = await adapter.count('users');
      expect(count).toBe(0);
    });
  });
});
|
||||||
283
packages/storage/src/adapters/sqlite.ts
Normal file
283
packages/storage/src/adapters/sqlite.ts
Normal file
@@ -0,0 +1,283 @@
|
|||||||
|
import Database from 'better-sqlite3';
|
||||||
|
import { randomUUID } from 'node:crypto';
|
||||||
|
import type { StorageAdapter, StorageConfig } from '../types.js';
|
||||||
|
|
||||||
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||||
|
|
||||||
|
/**
 * Every collection the SQLite adapter knows about; migrate() creates one
 * table per entry.
 * NOTE(review): should stay in sync with TABLE_MAP in the Postgres adapter
 * so both backends expose the same collections — verify when adding tables.
 */
const COLLECTIONS = [
  'users',
  'sessions',
  'accounts',
  'projects',
  'missions',
  'tasks',
  'agents',
  'conversations',
  'messages',
  'preferences',
  'insights',
  'skills',
  'events',
  'routing_rules',
  'provider_credentials',
  'agent_logs',
  'teams',
  'team_members',
  'mission_tasks',
  'tickets',
  'summarization_jobs',
  'appreciations',
  'verifications',
] as const;
|
||||||
|
|
||||||
|
function buildFilterClause(filter?: Record<string, unknown>): {
|
||||||
|
clause: string;
|
||||||
|
params: unknown[];
|
||||||
|
} {
|
||||||
|
if (!filter || Object.keys(filter).length === 0) return { clause: '', params: [] };
|
||||||
|
const conditions: string[] = [];
|
||||||
|
const params: unknown[] = [];
|
||||||
|
for (const [key, value] of Object.entries(filter)) {
|
||||||
|
if (key === 'id') {
|
||||||
|
conditions.push('id = ?');
|
||||||
|
params.push(value);
|
||||||
|
} else {
|
||||||
|
conditions.push(`json_extract(data_json, '$.${key}') = ?`);
|
||||||
|
params.push(typeof value === 'object' ? JSON.stringify(value) : value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return { clause: ` WHERE ${conditions.join(' AND ')}`, params };
|
||||||
|
}
|
||||||
|
|
||||||
|
export class SqliteAdapter implements StorageAdapter {
|
||||||
|
readonly name = 'sqlite';
|
||||||
|
private db: Database.Database;
|
||||||
|
|
||||||
|
constructor(config: Extract<StorageConfig, { type: 'sqlite' }>) {
|
||||||
|
this.db = new Database(config.path);
|
||||||
|
this.db.pragma('journal_mode = WAL');
|
||||||
|
this.db.pragma('foreign_keys = ON');
|
||||||
|
}
|
||||||
|
|
||||||
|
async create<T extends Record<string, unknown>>(
|
||||||
|
collection: string,
|
||||||
|
data: T,
|
||||||
|
): Promise<T & { id: string }> {
|
||||||
|
const id = (data as any).id ?? randomUUID();
|
||||||
|
const now = new Date().toISOString();
|
||||||
|
const rest = Object.fromEntries(Object.entries(data).filter(([k]) => k !== 'id'));
|
||||||
|
this.db
|
||||||
|
.prepare(
|
||||||
|
`INSERT INTO ${collection} (id, data_json, created_at, updated_at) VALUES (?, ?, ?, ?)`,
|
||||||
|
)
|
||||||
|
.run(id, JSON.stringify(rest), now, now);
|
||||||
|
return { ...data, id } as T & { id: string };
|
||||||
|
}
|
||||||
|
|
||||||
|
async read<T extends Record<string, unknown>>(collection: string, id: string): Promise<T | null> {
|
||||||
|
const row = this.db.prepare(`SELECT * FROM ${collection} WHERE id = ?`).get(id) as any;
|
||||||
|
if (!row) return null;
|
||||||
|
return { id: row.id, ...JSON.parse(row.data_json as string) } as T;
|
||||||
|
}
|
||||||
|
|
||||||
|
async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
|
||||||
|
const existing = this.db
|
||||||
|
.prepare(`SELECT data_json FROM ${collection} WHERE id = ?`)
|
||||||
|
.get(id) as any;
|
||||||
|
if (!existing) return false;
|
||||||
|
const merged = { ...JSON.parse(existing.data_json as string), ...data };
|
||||||
|
const now = new Date().toISOString();
|
||||||
|
const result = this.db
|
||||||
|
.prepare(`UPDATE ${collection} SET data_json = ?, updated_at = ? WHERE id = ?`)
|
||||||
|
.run(JSON.stringify(merged), now, id);
|
||||||
|
return result.changes > 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(collection: string, id: string): Promise<boolean> {
|
||||||
|
const result = this.db.prepare(`DELETE FROM ${collection} WHERE id = ?`).run(id);
|
||||||
|
return result.changes > 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
async find<T extends Record<string, unknown>>(
|
||||||
|
collection: string,
|
||||||
|
filter?: Record<string, unknown>,
|
||||||
|
opts?: { limit?: number; offset?: number; orderBy?: string; order?: 'asc' | 'desc' },
|
||||||
|
): Promise<T[]> {
|
||||||
|
const { clause, params } = buildFilterClause(filter);
|
||||||
|
let query = `SELECT * FROM ${collection}${clause}`;
|
||||||
|
if (opts?.orderBy) {
|
||||||
|
const dir = opts.order === 'desc' ? 'DESC' : 'ASC';
|
||||||
|
const col =
|
||||||
|
opts.orderBy === 'id' || opts.orderBy === 'created_at' || opts.orderBy === 'updated_at'
|
||||||
|
? opts.orderBy
|
||||||
|
: `json_extract(data_json, '$.${opts.orderBy}')`;
|
||||||
|
query += ` ORDER BY ${col} ${dir}`;
|
||||||
|
}
|
||||||
|
if (opts?.limit) {
|
||||||
|
query += ` LIMIT ?`;
|
||||||
|
params.push(opts.limit);
|
||||||
|
}
|
||||||
|
if (opts?.offset) {
|
||||||
|
query += ` OFFSET ?`;
|
||||||
|
params.push(opts.offset);
|
||||||
|
}
|
||||||
|
const rows = this.db.prepare(query).all(...params) as any[];
|
||||||
|
return rows.map((row) => ({ id: row.id, ...JSON.parse(row.data_json as string) }) as T);
|
||||||
|
}
|
||||||
|
|
||||||
|
async findOne<T extends Record<string, unknown>>(
|
||||||
|
collection: string,
|
||||||
|
filter: Record<string, unknown>,
|
||||||
|
): Promise<T | null> {
|
||||||
|
const results = await this.find<T>(collection, filter, { limit: 1 });
|
||||||
|
return results[0] ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
|
||||||
|
const { clause, params } = buildFilterClause(filter);
|
||||||
|
const row = this.db
|
||||||
|
.prepare(`SELECT COUNT(*) as count FROM ${collection}${clause}`)
|
||||||
|
.get(...params) as any;
|
||||||
|
return row?.count ?? 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
|
||||||
|
const txAdapter = new SqliteTxAdapter(this.db);
|
||||||
|
this.db.exec('BEGIN');
|
||||||
|
try {
|
||||||
|
const result = await fn(txAdapter);
|
||||||
|
this.db.exec('COMMIT');
|
||||||
|
return result;
|
||||||
|
} catch (err) {
|
||||||
|
this.db.exec('ROLLBACK');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async migrate(): Promise<void> {
|
||||||
|
const createTable = (name: string) =>
|
||||||
|
this.db.exec(`
|
||||||
|
CREATE TABLE IF NOT EXISTS ${name} (
|
||||||
|
id TEXT PRIMARY KEY,
|
||||||
|
data_json TEXT NOT NULL DEFAULT '{}',
|
||||||
|
created_at TEXT NOT NULL DEFAULT (datetime('now')),
|
||||||
|
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||||
|
)
|
||||||
|
`);
|
||||||
|
for (const collection of COLLECTIONS) {
|
||||||
|
createTable(collection);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async close(): Promise<void> {
|
||||||
|
this.db.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transaction wrapper that uses the same db handle — better-sqlite3 transactions
|
||||||
|
* are connection-level, so all statements on the same Database instance within
|
||||||
|
* a db.transaction() callback participate in the transaction.
|
||||||
|
*/
|
||||||
|
class SqliteTxAdapter implements StorageAdapter {
|
||||||
|
readonly name = 'sqlite';
|
||||||
|
private db: Database.Database;
|
||||||
|
|
||||||
|
constructor(db: Database.Database) {
|
||||||
|
this.db = db;
|
||||||
|
}
|
||||||
|
|
||||||
|
async create<T extends Record<string, unknown>>(
|
||||||
|
collection: string,
|
||||||
|
data: T,
|
||||||
|
): Promise<T & { id: string }> {
|
||||||
|
const id = (data as any).id ?? randomUUID();
|
||||||
|
const now = new Date().toISOString();
|
||||||
|
const rest = Object.fromEntries(Object.entries(data).filter(([k]) => k !== 'id'));
|
||||||
|
this.db
|
||||||
|
.prepare(
|
||||||
|
`INSERT INTO ${collection} (id, data_json, created_at, updated_at) VALUES (?, ?, ?, ?)`,
|
||||||
|
)
|
||||||
|
.run(id, JSON.stringify(rest), now, now);
|
||||||
|
return { ...data, id } as T & { id: string };
|
||||||
|
}
|
||||||
|
|
||||||
|
async read<T extends Record<string, unknown>>(collection: string, id: string): Promise<T | null> {
|
||||||
|
const row = this.db.prepare(`SELECT * FROM ${collection} WHERE id = ?`).get(id) as any;
|
||||||
|
if (!row) return null;
|
||||||
|
return { id: row.id, ...JSON.parse(row.data_json as string) } as T;
|
||||||
|
}
|
||||||
|
|
||||||
|
async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
|
||||||
|
const existing = this.db
|
||||||
|
.prepare(`SELECT data_json FROM ${collection} WHERE id = ?`)
|
||||||
|
.get(id) as any;
|
||||||
|
if (!existing) return false;
|
||||||
|
const merged = { ...JSON.parse(existing.data_json as string), ...data };
|
||||||
|
const now = new Date().toISOString();
|
||||||
|
const result = this.db
|
||||||
|
.prepare(`UPDATE ${collection} SET data_json = ?, updated_at = ? WHERE id = ?`)
|
||||||
|
.run(JSON.stringify(merged), now, id);
|
||||||
|
return result.changes > 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(collection: string, id: string): Promise<boolean> {
|
||||||
|
const result = this.db.prepare(`DELETE FROM ${collection} WHERE id = ?`).run(id);
|
||||||
|
return result.changes > 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
async find<T extends Record<string, unknown>>(
|
||||||
|
collection: string,
|
||||||
|
filter?: Record<string, unknown>,
|
||||||
|
opts?: { limit?: number; offset?: number; orderBy?: string; order?: 'asc' | 'desc' },
|
||||||
|
): Promise<T[]> {
|
||||||
|
const { clause, params } = buildFilterClause(filter);
|
||||||
|
let query = `SELECT * FROM ${collection}${clause}`;
|
||||||
|
if (opts?.orderBy) {
|
||||||
|
const dir = opts.order === 'desc' ? 'DESC' : 'ASC';
|
||||||
|
const col =
|
||||||
|
opts.orderBy === 'id' || opts.orderBy === 'created_at' || opts.orderBy === 'updated_at'
|
||||||
|
? opts.orderBy
|
||||||
|
: `json_extract(data_json, '$.${opts.orderBy}')`;
|
||||||
|
query += ` ORDER BY ${col} ${dir}`;
|
||||||
|
}
|
||||||
|
if (opts?.limit) {
|
||||||
|
query += ` LIMIT ?`;
|
||||||
|
params.push(opts.limit);
|
||||||
|
}
|
||||||
|
if (opts?.offset) {
|
||||||
|
query += ` OFFSET ?`;
|
||||||
|
params.push(opts.offset);
|
||||||
|
}
|
||||||
|
const rows = this.db.prepare(query).all(...params) as any[];
|
||||||
|
return rows.map((row) => ({ id: row.id, ...JSON.parse(row.data_json as string) }) as T);
|
||||||
|
}
|
||||||
|
|
||||||
|
async findOne<T extends Record<string, unknown>>(
|
||||||
|
collection: string,
|
||||||
|
filter: Record<string, unknown>,
|
||||||
|
): Promise<T | null> {
|
||||||
|
const results = await this.find<T>(collection, filter, { limit: 1 });
|
||||||
|
return results[0] ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
|
||||||
|
const { clause, params } = buildFilterClause(filter);
|
||||||
|
const row = this.db
|
||||||
|
.prepare(`SELECT COUNT(*) as count FROM ${collection}${clause}`)
|
||||||
|
.get(...params) as any;
|
||||||
|
return row?.count ?? 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
|
||||||
|
return fn(this);
|
||||||
|
}
|
||||||
|
|
||||||
|
async migrate(): Promise<void> {
|
||||||
|
// No-op inside transaction
|
||||||
|
}
|
||||||
|
|
||||||
|
async close(): Promise<void> {
|
||||||
|
// No-op inside transaction
|
||||||
|
}
|
||||||
|
}
|
||||||
18
packages/storage/src/factory.ts
Normal file
18
packages/storage/src/factory.ts
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
import type { StorageAdapter, StorageConfig } from './types.js';
|
||||||
|
|
||||||
|
type StorageType = StorageConfig['type'];
|
||||||
|
|
||||||
|
const registry = new Map<StorageType, (config: StorageConfig) => StorageAdapter>();
|
||||||
|
|
||||||
|
export function registerStorageAdapter(
|
||||||
|
type: StorageType,
|
||||||
|
factory: (config: StorageConfig) => StorageAdapter,
|
||||||
|
): void {
|
||||||
|
registry.set(type, factory);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createStorageAdapter(config: StorageConfig): StorageAdapter {
|
||||||
|
const factory = registry.get(config.type);
|
||||||
|
if (!factory) throw new Error(`No adapter registered for type: ${config.type}`);
|
||||||
|
return factory(config);
|
||||||
|
}
|
||||||
17
packages/storage/src/index.ts
Normal file
17
packages/storage/src/index.ts
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
export type { StorageAdapter, StorageConfig } from './types.js';
|
||||||
|
export { createStorageAdapter, registerStorageAdapter } from './factory.js';
|
||||||
|
export { PostgresAdapter } from './adapters/postgres.js';
|
||||||
|
export { SqliteAdapter } from './adapters/sqlite.js';
|
||||||
|
|
||||||
|
import { registerStorageAdapter } from './factory.js';
|
||||||
|
import { PostgresAdapter } from './adapters/postgres.js';
|
||||||
|
import { SqliteAdapter } from './adapters/sqlite.js';
|
||||||
|
import type { StorageConfig } from './types.js';
|
||||||
|
|
||||||
|
// Register the built-in adapters as a module-load side effect, so merely
// importing this package makes createStorageAdapter() work for both types.
registerStorageAdapter('postgres', (config: StorageConfig) => {
  return new PostgresAdapter(config as Extract<StorageConfig, { type: 'postgres' }>);
});

registerStorageAdapter('sqlite', (config: StorageConfig) => {
  return new SqliteAdapter(config as Extract<StorageConfig, { type: 'sqlite' }>);
});
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user