Compare commits
1 Commits
fix/metapa
...
2b99908de4
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2b99908de4 |
12
.env.example
12
.env.example
@@ -23,8 +23,8 @@ VALKEY_URL=redis://localhost:6380
|
|||||||
|
|
||||||
|
|
||||||
# ─── Gateway ─────────────────────────────────────────────────────────────────
|
# ─── Gateway ─────────────────────────────────────────────────────────────────
|
||||||
# TCP port the NestJS/Fastify gateway listens on (default: 14242)
|
# TCP port the NestJS/Fastify gateway listens on (default: 4000)
|
||||||
GATEWAY_PORT=14242
|
GATEWAY_PORT=4000
|
||||||
|
|
||||||
# Comma-separated list of allowed CORS origins.
|
# Comma-separated list of allowed CORS origins.
|
||||||
# Must include the web app origin in production.
|
# Must include the web app origin in production.
|
||||||
@@ -37,12 +37,12 @@ GATEWAY_CORS_ORIGIN=http://localhost:3000
|
|||||||
BETTER_AUTH_SECRET=change-me-to-a-random-32-char-string
|
BETTER_AUTH_SECRET=change-me-to-a-random-32-char-string
|
||||||
|
|
||||||
# Public base URL of the gateway (used by BetterAuth for callback URLs)
|
# Public base URL of the gateway (used by BetterAuth for callback URLs)
|
||||||
BETTER_AUTH_URL=http://localhost:14242
|
BETTER_AUTH_URL=http://localhost:4000
|
||||||
|
|
||||||
|
|
||||||
# ─── Web App (Next.js) ───────────────────────────────────────────────────────
|
# ─── Web App (Next.js) ───────────────────────────────────────────────────────
|
||||||
# Public gateway URL — accessible from the browser, not just the server.
|
# Public gateway URL — accessible from the browser, not just the server.
|
||||||
NEXT_PUBLIC_GATEWAY_URL=http://localhost:14242
|
NEXT_PUBLIC_GATEWAY_URL=http://localhost:4000
|
||||||
|
|
||||||
|
|
||||||
# ─── OpenTelemetry ───────────────────────────────────────────────────────────
|
# ─── OpenTelemetry ───────────────────────────────────────────────────────────
|
||||||
@@ -121,12 +121,12 @@ OTEL_SERVICE_NAME=mosaic-gateway
|
|||||||
# ─── Discord Plugin (optional — set DISCORD_BOT_TOKEN to enable) ─────────────
|
# ─── Discord Plugin (optional — set DISCORD_BOT_TOKEN to enable) ─────────────
|
||||||
# DISCORD_BOT_TOKEN=
|
# DISCORD_BOT_TOKEN=
|
||||||
# DISCORD_GUILD_ID=
|
# DISCORD_GUILD_ID=
|
||||||
# DISCORD_GATEWAY_URL=http://localhost:14242
|
# DISCORD_GATEWAY_URL=http://localhost:4000
|
||||||
|
|
||||||
|
|
||||||
# ─── Telegram Plugin (optional — set TELEGRAM_BOT_TOKEN to enable) ───────────
|
# ─── Telegram Plugin (optional — set TELEGRAM_BOT_TOKEN to enable) ───────────
|
||||||
# TELEGRAM_BOT_TOKEN=
|
# TELEGRAM_BOT_TOKEN=
|
||||||
# TELEGRAM_GATEWAY_URL=http://localhost:14242
|
# TELEGRAM_GATEWAY_URL=http://localhost:4000
|
||||||
|
|
||||||
|
|
||||||
# ─── SSO Providers (add credentials to enable) ───────────────────────────────
|
# ─── SSO Providers (add credentials to enable) ───────────────────────────────
|
||||||
|
|||||||
@@ -15,7 +15,6 @@ steps:
|
|||||||
image: *node_image
|
image: *node_image
|
||||||
commands:
|
commands:
|
||||||
- corepack enable
|
- corepack enable
|
||||||
- apk add --no-cache python3 make g++
|
|
||||||
- pnpm install --frozen-lockfile
|
- pnpm install --frozen-lockfile
|
||||||
|
|
||||||
typecheck:
|
typecheck:
|
||||||
|
|||||||
@@ -35,31 +35,17 @@ steps:
|
|||||||
- |
|
- |
|
||||||
echo "//git.mosaicstack.dev/api/packages/mosaic/npm/:_authToken=$NPM_TOKEN" > ~/.npmrc
|
echo "//git.mosaicstack.dev/api/packages/mosaic/npm/:_authToken=$NPM_TOKEN" > ~/.npmrc
|
||||||
echo "@mosaic:registry=https://git.mosaicstack.dev/api/packages/mosaic/npm/" >> ~/.npmrc
|
echo "@mosaic:registry=https://git.mosaicstack.dev/api/packages/mosaic/npm/" >> ~/.npmrc
|
||||||
# Publish non-private packages to Gitea (--no-git-checks skips dirty/branch checks in CI)
|
# Publish all non-private packages (--no-git-checks skips dirty/branch checks in CI)
|
||||||
# --filter excludes web (private)
|
# --filter excludes private apps (gateway, web) and the root
|
||||||
- >
|
- >
|
||||||
pnpm --filter "@mosaic/*"
|
pnpm --filter "@mosaic/*"
|
||||||
|
--filter "!@mosaic/gateway"
|
||||||
--filter "!@mosaic/web"
|
--filter "!@mosaic/web"
|
||||||
publish --no-git-checks --access public
|
publish --no-git-checks --access public
|
||||||
|| echo "[publish] Some packages may already exist at this version — continuing"
|
|| echo "[publish] Some packages may already exist at this version — continuing"
|
||||||
depends_on:
|
depends_on:
|
||||||
- build
|
- build
|
||||||
|
|
||||||
# TODO: Uncomment when ready to publish to npmjs.org
|
|
||||||
# publish-npmjs:
|
|
||||||
# image: *node_image
|
|
||||||
# environment:
|
|
||||||
# NPM_TOKEN:
|
|
||||||
# from_secret: npmjs_token
|
|
||||||
# commands:
|
|
||||||
# - *enable_pnpm
|
|
||||||
# - apk add --no-cache jq bash
|
|
||||||
# - bash scripts/publish-npmjs.sh
|
|
||||||
# depends_on:
|
|
||||||
# - build
|
|
||||||
# when:
|
|
||||||
# - event: [tag]
|
|
||||||
|
|
||||||
build-gateway:
|
build-gateway:
|
||||||
image: gcr.io/kaniko-project/executor:debug
|
image: gcr.io/kaniko-project/executor:debug
|
||||||
environment:
|
environment:
|
||||||
|
|||||||
@@ -1,23 +1,9 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/gateway",
|
"name": "@mosaic/gateway",
|
||||||
"version": "0.1.0",
|
"version": "0.0.0",
|
||||||
"repository": {
|
"private": true,
|
||||||
"type": "git",
|
|
||||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
|
||||||
"directory": "apps/gateway"
|
|
||||||
},
|
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/main.js",
|
"main": "dist/main.js",
|
||||||
"bin": {
|
|
||||||
"mosaic-gateway": "dist/main.js"
|
|
||||||
},
|
|
||||||
"files": [
|
|
||||||
"dist"
|
|
||||||
],
|
|
||||||
"publishConfig": {
|
|
||||||
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
|
||||||
"access": "public"
|
|
||||||
},
|
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "tsc",
|
"build": "tsc",
|
||||||
"dev": "tsx watch src/main.ts",
|
"dev": "tsx watch src/main.ts",
|
||||||
@@ -28,19 +14,17 @@
|
|||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@anthropic-ai/sdk": "^0.80.0",
|
"@anthropic-ai/sdk": "^0.80.0",
|
||||||
"@fastify/helmet": "^13.0.2",
|
"@fastify/helmet": "^13.0.2",
|
||||||
"@mariozechner/pi-ai": "^0.65.0",
|
"@mariozechner/pi-ai": "~0.57.1",
|
||||||
"@mariozechner/pi-coding-agent": "^0.65.0",
|
"@mariozechner/pi-coding-agent": "~0.57.1",
|
||||||
"@modelcontextprotocol/sdk": "^1.27.1",
|
"@modelcontextprotocol/sdk": "^1.27.1",
|
||||||
"@mosaic/auth": "workspace:^",
|
"@mosaic/auth": "workspace:^",
|
||||||
"@mosaic/brain": "workspace:^",
|
"@mosaic/brain": "workspace:^",
|
||||||
"@mosaic/config": "workspace:^",
|
|
||||||
"@mosaic/coord": "workspace:^",
|
"@mosaic/coord": "workspace:^",
|
||||||
"@mosaic/db": "workspace:^",
|
"@mosaic/db": "workspace:^",
|
||||||
"@mosaic/discord-plugin": "workspace:^",
|
"@mosaic/discord-plugin": "workspace:^",
|
||||||
"@mosaic/log": "workspace:^",
|
"@mosaic/log": "workspace:^",
|
||||||
"@mosaic/memory": "workspace:^",
|
"@mosaic/memory": "workspace:^",
|
||||||
"@mosaic/queue": "workspace:^",
|
"@mosaic/queue": "workspace:^",
|
||||||
"@mosaic/storage": "workspace:^",
|
|
||||||
"@mosaic/telegram-plugin": "workspace:^",
|
"@mosaic/telegram-plugin": "workspace:^",
|
||||||
"@mosaic/types": "workspace:^",
|
"@mosaic/types": "workspace:^",
|
||||||
"@nestjs/common": "^11.0.0",
|
"@nestjs/common": "^11.0.0",
|
||||||
@@ -49,7 +33,7 @@
|
|||||||
"@nestjs/platform-socket.io": "^11.0.0",
|
"@nestjs/platform-socket.io": "^11.0.0",
|
||||||
"@nestjs/throttler": "^6.5.0",
|
"@nestjs/throttler": "^6.5.0",
|
||||||
"@nestjs/websockets": "^11.0.0",
|
"@nestjs/websockets": "^11.0.0",
|
||||||
"@opentelemetry/auto-instrumentations-node": "^0.72.0",
|
"@opentelemetry/auto-instrumentations-node": "^0.71.0",
|
||||||
"@opentelemetry/exporter-metrics-otlp-http": "^0.213.0",
|
"@opentelemetry/exporter-metrics-otlp-http": "^0.213.0",
|
||||||
"@opentelemetry/exporter-trace-otlp-http": "^0.213.0",
|
"@opentelemetry/exporter-trace-otlp-http": "^0.213.0",
|
||||||
"@opentelemetry/resources": "^2.6.0",
|
"@opentelemetry/resources": "^2.6.0",
|
||||||
|
|||||||
@@ -1,90 +0,0 @@
|
|||||||
import {
|
|
||||||
Body,
|
|
||||||
Controller,
|
|
||||||
Delete,
|
|
||||||
Get,
|
|
||||||
HttpCode,
|
|
||||||
HttpStatus,
|
|
||||||
Inject,
|
|
||||||
Param,
|
|
||||||
Post,
|
|
||||||
UseGuards,
|
|
||||||
} from '@nestjs/common';
|
|
||||||
import { randomBytes, createHash } from 'node:crypto';
|
|
||||||
import { eq, type Db, adminTokens } from '@mosaic/db';
|
|
||||||
import { v4 as uuid } from 'uuid';
|
|
||||||
import { DB } from '../database/database.module.js';
|
|
||||||
import { AdminGuard } from './admin.guard.js';
|
|
||||||
import { CurrentUser } from '../auth/current-user.decorator.js';
|
|
||||||
import type {
|
|
||||||
CreateTokenDto,
|
|
||||||
TokenCreatedDto,
|
|
||||||
TokenDto,
|
|
||||||
TokenListDto,
|
|
||||||
} from './admin-tokens.dto.js';
|
|
||||||
|
|
||||||
function hashToken(plaintext: string): string {
|
|
||||||
return createHash('sha256').update(plaintext).digest('hex');
|
|
||||||
}
|
|
||||||
|
|
||||||
function toTokenDto(row: typeof adminTokens.$inferSelect): TokenDto {
|
|
||||||
return {
|
|
||||||
id: row.id,
|
|
||||||
label: row.label,
|
|
||||||
scope: row.scope,
|
|
||||||
expiresAt: row.expiresAt?.toISOString() ?? null,
|
|
||||||
lastUsedAt: row.lastUsedAt?.toISOString() ?? null,
|
|
||||||
createdAt: row.createdAt.toISOString(),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
@Controller('api/admin/tokens')
|
|
||||||
@UseGuards(AdminGuard)
|
|
||||||
export class AdminTokensController {
|
|
||||||
constructor(@Inject(DB) private readonly db: Db) {}
|
|
||||||
|
|
||||||
@Post()
|
|
||||||
async create(
|
|
||||||
@Body() dto: CreateTokenDto,
|
|
||||||
@CurrentUser() user: { id: string },
|
|
||||||
): Promise<TokenCreatedDto> {
|
|
||||||
const plaintext = randomBytes(32).toString('hex');
|
|
||||||
const tokenHash = hashToken(plaintext);
|
|
||||||
const id = uuid();
|
|
||||||
|
|
||||||
const expiresAt = dto.expiresInDays
|
|
||||||
? new Date(Date.now() + dto.expiresInDays * 24 * 60 * 60 * 1000)
|
|
||||||
: null;
|
|
||||||
|
|
||||||
const [row] = await this.db
|
|
||||||
.insert(adminTokens)
|
|
||||||
.values({
|
|
||||||
id,
|
|
||||||
userId: user.id,
|
|
||||||
tokenHash,
|
|
||||||
label: dto.label ?? 'CLI token',
|
|
||||||
scope: dto.scope ?? 'admin',
|
|
||||||
expiresAt,
|
|
||||||
})
|
|
||||||
.returning();
|
|
||||||
|
|
||||||
return { ...toTokenDto(row!), plaintext };
|
|
||||||
}
|
|
||||||
|
|
||||||
@Get()
|
|
||||||
async list(@CurrentUser() user: { id: string }): Promise<TokenListDto> {
|
|
||||||
const rows = await this.db
|
|
||||||
.select()
|
|
||||||
.from(adminTokens)
|
|
||||||
.where(eq(adminTokens.userId, user.id))
|
|
||||||
.orderBy(adminTokens.createdAt);
|
|
||||||
|
|
||||||
return { tokens: rows.map(toTokenDto), total: rows.length };
|
|
||||||
}
|
|
||||||
|
|
||||||
@Delete(':id')
|
|
||||||
@HttpCode(HttpStatus.NO_CONTENT)
|
|
||||||
async revoke(@Param('id') id: string, @CurrentUser() _user: { id: string }): Promise<void> {
|
|
||||||
await this.db.delete(adminTokens).where(eq(adminTokens.id, id));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
import { IsString, IsOptional, IsInt, Min } from 'class-validator';
|
|
||||||
|
|
||||||
export class CreateTokenDto {
|
|
||||||
@IsString()
|
|
||||||
label!: string;
|
|
||||||
|
|
||||||
@IsOptional()
|
|
||||||
@IsString()
|
|
||||||
scope?: string;
|
|
||||||
|
|
||||||
@IsOptional()
|
|
||||||
@IsInt()
|
|
||||||
@Min(1)
|
|
||||||
expiresInDays?: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface TokenDto {
|
|
||||||
id: string;
|
|
||||||
label: string;
|
|
||||||
scope: string;
|
|
||||||
expiresAt: string | null;
|
|
||||||
lastUsedAt: string | null;
|
|
||||||
createdAt: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface TokenCreatedDto extends TokenDto {
|
|
||||||
plaintext: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface TokenListDto {
|
|
||||||
tokens: TokenDto[];
|
|
||||||
total: number;
|
|
||||||
}
|
|
||||||
@@ -6,11 +6,10 @@ import {
|
|||||||
Injectable,
|
Injectable,
|
||||||
UnauthorizedException,
|
UnauthorizedException,
|
||||||
} from '@nestjs/common';
|
} from '@nestjs/common';
|
||||||
import { createHash } from 'node:crypto';
|
|
||||||
import { fromNodeHeaders } from 'better-auth/node';
|
import { fromNodeHeaders } from 'better-auth/node';
|
||||||
import type { Auth } from '@mosaic/auth';
|
import type { Auth } from '@mosaic/auth';
|
||||||
import type { Db } from '@mosaic/db';
|
import type { Db } from '@mosaic/db';
|
||||||
import { eq, adminTokens, users as usersTable } from '@mosaic/db';
|
import { eq, users as usersTable } from '@mosaic/db';
|
||||||
import type { FastifyRequest } from 'fastify';
|
import type { FastifyRequest } from 'fastify';
|
||||||
import { AUTH } from '../auth/auth.tokens.js';
|
import { AUTH } from '../auth/auth.tokens.js';
|
||||||
import { DB } from '../database/database.module.js';
|
import { DB } from '../database/database.module.js';
|
||||||
@@ -20,8 +19,6 @@ interface UserWithRole {
|
|||||||
role?: string;
|
role?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
type AuthenticatedRequest = FastifyRequest & { user: unknown; session: unknown };
|
|
||||||
|
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class AdminGuard implements CanActivate {
|
export class AdminGuard implements CanActivate {
|
||||||
constructor(
|
constructor(
|
||||||
@@ -31,64 +28,8 @@ export class AdminGuard implements CanActivate {
|
|||||||
|
|
||||||
async canActivate(context: ExecutionContext): Promise<boolean> {
|
async canActivate(context: ExecutionContext): Promise<boolean> {
|
||||||
const request = context.switchToHttp().getRequest<FastifyRequest>();
|
const request = context.switchToHttp().getRequest<FastifyRequest>();
|
||||||
|
|
||||||
// Try bearer token auth first
|
|
||||||
const authHeader = request.raw.headers['authorization'];
|
|
||||||
if (authHeader?.startsWith('Bearer ')) {
|
|
||||||
return this.validateBearerToken(request, authHeader.slice(7));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fall back to BetterAuth session
|
|
||||||
return this.validateSession(request);
|
|
||||||
}
|
|
||||||
|
|
||||||
private async validateBearerToken(request: FastifyRequest, plaintext: string): Promise<boolean> {
|
|
||||||
const tokenHash = createHash('sha256').update(plaintext).digest('hex');
|
|
||||||
|
|
||||||
const [row] = await this.db
|
|
||||||
.select({
|
|
||||||
tokenId: adminTokens.id,
|
|
||||||
userId: adminTokens.userId,
|
|
||||||
scope: adminTokens.scope,
|
|
||||||
expiresAt: adminTokens.expiresAt,
|
|
||||||
userName: usersTable.name,
|
|
||||||
userEmail: usersTable.email,
|
|
||||||
userRole: usersTable.role,
|
|
||||||
})
|
|
||||||
.from(adminTokens)
|
|
||||||
.innerJoin(usersTable, eq(adminTokens.userId, usersTable.id))
|
|
||||||
.where(eq(adminTokens.tokenHash, tokenHash))
|
|
||||||
.limit(1);
|
|
||||||
|
|
||||||
if (!row) {
|
|
||||||
throw new UnauthorizedException('Invalid API token');
|
|
||||||
}
|
|
||||||
|
|
||||||
if (row.expiresAt && row.expiresAt < new Date()) {
|
|
||||||
throw new UnauthorizedException('API token expired');
|
|
||||||
}
|
|
||||||
|
|
||||||
if (row.userRole !== 'admin') {
|
|
||||||
throw new ForbiddenException('Admin access required');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update last-used timestamp (fire-and-forget)
|
|
||||||
this.db
|
|
||||||
.update(adminTokens)
|
|
||||||
.set({ lastUsedAt: new Date() })
|
|
||||||
.where(eq(adminTokens.id, row.tokenId))
|
|
||||||
.then(() => {})
|
|
||||||
.catch(() => {});
|
|
||||||
|
|
||||||
const req = request as AuthenticatedRequest;
|
|
||||||
req.user = { id: row.userId, name: row.userName, email: row.userEmail, role: row.userRole };
|
|
||||||
req.session = { id: `token:${row.tokenId}`, userId: row.userId };
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async validateSession(request: FastifyRequest): Promise<boolean> {
|
|
||||||
const headers = fromNodeHeaders(request.raw.headers);
|
const headers = fromNodeHeaders(request.raw.headers);
|
||||||
|
|
||||||
const result = await this.auth.api.getSession({ headers });
|
const result = await this.auth.api.getSession({ headers });
|
||||||
|
|
||||||
if (!result) {
|
if (!result) {
|
||||||
@@ -97,6 +38,8 @@ export class AdminGuard implements CanActivate {
|
|||||||
|
|
||||||
const user = result.user as UserWithRole;
|
const user = result.user as UserWithRole;
|
||||||
|
|
||||||
|
// Ensure the role field is populated. better-auth should include additionalFields
|
||||||
|
// in the session, but as a fallback, fetch the role from the database if needed.
|
||||||
let userRole = user.role;
|
let userRole = user.role;
|
||||||
if (!userRole) {
|
if (!userRole) {
|
||||||
const [dbUser] = await this.db
|
const [dbUser] = await this.db
|
||||||
@@ -105,6 +48,7 @@ export class AdminGuard implements CanActivate {
|
|||||||
.where(eq(usersTable.id, user.id))
|
.where(eq(usersTable.id, user.id))
|
||||||
.limit(1);
|
.limit(1);
|
||||||
userRole = dbUser?.role ?? 'member';
|
userRole = dbUser?.role ?? 'member';
|
||||||
|
// Update the session user object with the fetched role
|
||||||
(user as UserWithRole).role = userRole;
|
(user as UserWithRole).role = userRole;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -112,9 +56,8 @@ export class AdminGuard implements CanActivate {
|
|||||||
throw new ForbiddenException('Admin access required');
|
throw new ForbiddenException('Admin access required');
|
||||||
}
|
}
|
||||||
|
|
||||||
const req = request as AuthenticatedRequest;
|
(request as FastifyRequest & { user: unknown; session: unknown }).user = result.user;
|
||||||
req.user = result.user;
|
(request as FastifyRequest & { user: unknown; session: unknown }).session = result.session;
|
||||||
req.session = result.session;
|
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,18 +2,10 @@ import { Module } from '@nestjs/common';
|
|||||||
import { AdminController } from './admin.controller.js';
|
import { AdminController } from './admin.controller.js';
|
||||||
import { AdminHealthController } from './admin-health.controller.js';
|
import { AdminHealthController } from './admin-health.controller.js';
|
||||||
import { AdminJobsController } from './admin-jobs.controller.js';
|
import { AdminJobsController } from './admin-jobs.controller.js';
|
||||||
import { AdminTokensController } from './admin-tokens.controller.js';
|
|
||||||
import { BootstrapController } from './bootstrap.controller.js';
|
|
||||||
import { AdminGuard } from './admin.guard.js';
|
import { AdminGuard } from './admin.guard.js';
|
||||||
|
|
||||||
@Module({
|
@Module({
|
||||||
controllers: [
|
controllers: [AdminController, AdminHealthController, AdminJobsController],
|
||||||
AdminController,
|
|
||||||
AdminHealthController,
|
|
||||||
AdminJobsController,
|
|
||||||
AdminTokensController,
|
|
||||||
BootstrapController,
|
|
||||||
],
|
|
||||||
providers: [AdminGuard],
|
providers: [AdminGuard],
|
||||||
})
|
})
|
||||||
export class AdminModule {}
|
export class AdminModule {}
|
||||||
|
|||||||
@@ -1,101 +0,0 @@
|
|||||||
import {
|
|
||||||
Body,
|
|
||||||
Controller,
|
|
||||||
ForbiddenException,
|
|
||||||
Get,
|
|
||||||
Inject,
|
|
||||||
InternalServerErrorException,
|
|
||||||
Post,
|
|
||||||
} from '@nestjs/common';
|
|
||||||
import { randomBytes, createHash } from 'node:crypto';
|
|
||||||
import { count, eq, type Db, users as usersTable, adminTokens } from '@mosaic/db';
|
|
||||||
import type { Auth } from '@mosaic/auth';
|
|
||||||
import { v4 as uuid } from 'uuid';
|
|
||||||
import { AUTH } from '../auth/auth.tokens.js';
|
|
||||||
import { DB } from '../database/database.module.js';
|
|
||||||
import type { BootstrapSetupDto, BootstrapStatusDto, BootstrapResultDto } from './bootstrap.dto.js';
|
|
||||||
|
|
||||||
@Controller('api/bootstrap')
|
|
||||||
export class BootstrapController {
|
|
||||||
constructor(
|
|
||||||
@Inject(AUTH) private readonly auth: Auth,
|
|
||||||
@Inject(DB) private readonly db: Db,
|
|
||||||
) {}
|
|
||||||
|
|
||||||
@Get('status')
|
|
||||||
async status(): Promise<BootstrapStatusDto> {
|
|
||||||
const [result] = await this.db.select({ total: count() }).from(usersTable);
|
|
||||||
return { needsSetup: (result?.total ?? 0) === 0 };
|
|
||||||
}
|
|
||||||
|
|
||||||
@Post('setup')
|
|
||||||
async setup(@Body() dto: BootstrapSetupDto): Promise<BootstrapResultDto> {
|
|
||||||
// Only allow setup when zero users exist
|
|
||||||
const [result] = await this.db.select({ total: count() }).from(usersTable);
|
|
||||||
if ((result?.total ?? 0) > 0) {
|
|
||||||
throw new ForbiddenException('Setup already completed — users exist');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create admin user via BetterAuth API
|
|
||||||
const authApi = this.auth.api as unknown as {
|
|
||||||
createUser: (opts: {
|
|
||||||
body: { name: string; email: string; password: string; role?: string };
|
|
||||||
}) => Promise<{
|
|
||||||
user: { id: string; name: string; email: string };
|
|
||||||
}>;
|
|
||||||
};
|
|
||||||
|
|
||||||
const created = await authApi.createUser({
|
|
||||||
body: {
|
|
||||||
name: dto.name,
|
|
||||||
email: dto.email,
|
|
||||||
password: dto.password,
|
|
||||||
role: 'admin',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
// Verify user was created
|
|
||||||
const [user] = await this.db
|
|
||||||
.select()
|
|
||||||
.from(usersTable)
|
|
||||||
.where(eq(usersTable.id, created.user.id))
|
|
||||||
.limit(1);
|
|
||||||
|
|
||||||
if (!user) throw new InternalServerErrorException('User created but not found');
|
|
||||||
|
|
||||||
// Ensure role is admin (createUser may not set it via BetterAuth)
|
|
||||||
if (user.role !== 'admin') {
|
|
||||||
await this.db.update(usersTable).set({ role: 'admin' }).where(eq(usersTable.id, user.id));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate admin API token
|
|
||||||
const plaintext = randomBytes(32).toString('hex');
|
|
||||||
const tokenHash = createHash('sha256').update(plaintext).digest('hex');
|
|
||||||
const tokenId = uuid();
|
|
||||||
|
|
||||||
const [token] = await this.db
|
|
||||||
.insert(adminTokens)
|
|
||||||
.values({
|
|
||||||
id: tokenId,
|
|
||||||
userId: user.id,
|
|
||||||
tokenHash,
|
|
||||||
label: 'Initial setup token',
|
|
||||||
scope: 'admin',
|
|
||||||
})
|
|
||||||
.returning();
|
|
||||||
|
|
||||||
return {
|
|
||||||
user: {
|
|
||||||
id: user.id,
|
|
||||||
name: user.name,
|
|
||||||
email: user.email,
|
|
||||||
role: 'admin',
|
|
||||||
},
|
|
||||||
token: {
|
|
||||||
id: token!.id,
|
|
||||||
plaintext,
|
|
||||||
label: token!.label,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
import { IsString, IsEmail, MinLength } from 'class-validator';
|
|
||||||
|
|
||||||
export class BootstrapSetupDto {
|
|
||||||
@IsString()
|
|
||||||
name!: string;
|
|
||||||
|
|
||||||
@IsEmail()
|
|
||||||
email!: string;
|
|
||||||
|
|
||||||
@IsString()
|
|
||||||
@MinLength(8)
|
|
||||||
password!: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface BootstrapStatusDto {
|
|
||||||
needsSetup: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface BootstrapResultDto {
|
|
||||||
user: {
|
|
||||||
id: string;
|
|
||||||
name: string;
|
|
||||||
email: string;
|
|
||||||
role: string;
|
|
||||||
};
|
|
||||||
token: {
|
|
||||||
id: string;
|
|
||||||
plaintext: string;
|
|
||||||
label: string;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -62,7 +62,7 @@ function restoreEnv(saved: Map<EnvKey, string | undefined>): void {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function makeRegistry(): ModelRegistry {
|
function makeRegistry(): ModelRegistry {
|
||||||
return ModelRegistry.inMemory(AuthStorage.inMemory());
|
return new ModelRegistry(AuthStorage.inMemory());
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|||||||
@@ -67,7 +67,7 @@ export class ProviderService implements OnModuleInit, OnModuleDestroy {
|
|||||||
|
|
||||||
async onModuleInit(): Promise<void> {
|
async onModuleInit(): Promise<void> {
|
||||||
const authStorage = AuthStorage.inMemory();
|
const authStorage = AuthStorage.inMemory();
|
||||||
this.registry = ModelRegistry.inMemory(authStorage);
|
this.registry = new ModelRegistry(authStorage);
|
||||||
|
|
||||||
// Build the default set of adapters that rely on the registry
|
// Build the default set of adapters that rely on the registry
|
||||||
this.adapters = [
|
this.adapters = [
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
import { Module } from '@nestjs/common';
|
import { Module } from '@nestjs/common';
|
||||||
import { APP_GUARD } from '@nestjs/core';
|
import { APP_GUARD } from '@nestjs/core';
|
||||||
import { HealthController } from './health/health.controller.js';
|
import { HealthController } from './health/health.controller.js';
|
||||||
import { ConfigModule } from './config/config.module.js';
|
|
||||||
import { DatabaseModule } from './database/database.module.js';
|
import { DatabaseModule } from './database/database.module.js';
|
||||||
import { AuthModule } from './auth/auth.module.js';
|
import { AuthModule } from './auth/auth.module.js';
|
||||||
import { BrainModule } from './brain/brain.module.js';
|
import { BrainModule } from './brain/brain.module.js';
|
||||||
@@ -29,7 +28,6 @@ import { ThrottlerGuard, ThrottlerModule } from '@nestjs/throttler';
|
|||||||
@Module({
|
@Module({
|
||||||
imports: [
|
imports: [
|
||||||
ThrottlerModule.forRoot([{ name: 'default', ttl: 60_000, limit: 60 }]),
|
ThrottlerModule.forRoot([{ name: 'default', ttl: 60_000, limit: 60 }]),
|
||||||
ConfigModule,
|
|
||||||
DatabaseModule,
|
DatabaseModule,
|
||||||
AuthModule,
|
AuthModule,
|
||||||
BrainModule,
|
BrainModule,
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ import { SsoController } from './sso.controller.js';
|
|||||||
useFactory: (db: Db): Auth =>
|
useFactory: (db: Db): Auth =>
|
||||||
createAuth({
|
createAuth({
|
||||||
db,
|
db,
|
||||||
baseURL: process.env['BETTER_AUTH_URL'] ?? 'http://localhost:14242',
|
baseURL: process.env['BETTER_AUTH_URL'] ?? 'http://localhost:4000',
|
||||||
secret: process.env['BETTER_AUTH_SECRET'],
|
secret: process.env['BETTER_AUTH_SECRET'],
|
||||||
}),
|
}),
|
||||||
inject: [DB],
|
inject: [DB],
|
||||||
|
|||||||
@@ -1,16 +0,0 @@
|
|||||||
import { Global, Module } from '@nestjs/common';
|
|
||||||
import { loadConfig, type MosaicConfig } from '@mosaic/config';
|
|
||||||
|
|
||||||
export const MOSAIC_CONFIG = 'MOSAIC_CONFIG';
|
|
||||||
|
|
||||||
@Global()
|
|
||||||
@Module({
|
|
||||||
providers: [
|
|
||||||
{
|
|
||||||
provide: MOSAIC_CONFIG,
|
|
||||||
useFactory: (): MosaicConfig => loadConfig(),
|
|
||||||
},
|
|
||||||
],
|
|
||||||
exports: [MOSAIC_CONFIG],
|
|
||||||
})
|
|
||||||
export class ConfigModule {}
|
|
||||||
@@ -1,51 +1,28 @@
|
|||||||
import { mkdirSync } from 'node:fs';
|
|
||||||
import { homedir } from 'node:os';
|
|
||||||
import { join } from 'node:path';
|
|
||||||
import { Global, Inject, Module, type OnApplicationShutdown } from '@nestjs/common';
|
import { Global, Inject, Module, type OnApplicationShutdown } from '@nestjs/common';
|
||||||
import { createDb, createPgliteDb, type Db, type DbHandle } from '@mosaic/db';
|
import { createDb, type Db, type DbHandle } from '@mosaic/db';
|
||||||
import { createStorageAdapter, type StorageAdapter } from '@mosaic/storage';
|
|
||||||
import type { MosaicConfig } from '@mosaic/config';
|
|
||||||
import { MOSAIC_CONFIG } from '../config/config.module.js';
|
|
||||||
|
|
||||||
export const DB_HANDLE = 'DB_HANDLE';
|
export const DB_HANDLE = 'DB_HANDLE';
|
||||||
export const DB = 'DB';
|
export const DB = 'DB';
|
||||||
export const STORAGE_ADAPTER = 'STORAGE_ADAPTER';
|
|
||||||
|
|
||||||
@Global()
|
@Global()
|
||||||
@Module({
|
@Module({
|
||||||
providers: [
|
providers: [
|
||||||
{
|
{
|
||||||
provide: DB_HANDLE,
|
provide: DB_HANDLE,
|
||||||
useFactory: (config: MosaicConfig): DbHandle => {
|
useFactory: (): DbHandle => createDb(),
|
||||||
if (config.tier === 'local') {
|
|
||||||
const dataDir = join(homedir(), '.config', 'mosaic', 'gateway', 'pglite');
|
|
||||||
mkdirSync(dataDir, { recursive: true });
|
|
||||||
return createPgliteDb(dataDir);
|
|
||||||
}
|
|
||||||
return createDb(config.storage.type === 'postgres' ? config.storage.url : undefined);
|
|
||||||
},
|
|
||||||
inject: [MOSAIC_CONFIG],
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
provide: DB,
|
provide: DB,
|
||||||
useFactory: (handle: DbHandle): Db => handle.db,
|
useFactory: (handle: DbHandle): Db => handle.db,
|
||||||
inject: [DB_HANDLE],
|
inject: [DB_HANDLE],
|
||||||
},
|
},
|
||||||
{
|
|
||||||
provide: STORAGE_ADAPTER,
|
|
||||||
useFactory: (config: MosaicConfig): StorageAdapter => createStorageAdapter(config.storage),
|
|
||||||
inject: [MOSAIC_CONFIG],
|
|
||||||
},
|
|
||||||
],
|
],
|
||||||
exports: [DB, STORAGE_ADAPTER],
|
exports: [DB],
|
||||||
})
|
})
|
||||||
export class DatabaseModule implements OnApplicationShutdown {
|
export class DatabaseModule implements OnApplicationShutdown {
|
||||||
constructor(
|
constructor(@Inject(DB_HANDLE) private readonly handle: DbHandle) {}
|
||||||
@Inject(DB_HANDLE) private readonly handle: DbHandle,
|
|
||||||
@Inject(STORAGE_ADAPTER) private readonly storageAdapter: StorageAdapter,
|
|
||||||
) {}
|
|
||||||
|
|
||||||
async onApplicationShutdown(): Promise<void> {
|
async onApplicationShutdown(): Promise<void> {
|
||||||
await Promise.all([this.handle.close(), this.storageAdapter.close()]);
|
await this.handle.close();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,13 +1,5 @@
|
|||||||
#!/usr/bin/env node
|
|
||||||
import { config } from 'dotenv';
|
import { config } from 'dotenv';
|
||||||
import { existsSync } from 'node:fs';
|
import { resolve } from 'node:path';
|
||||||
import { resolve, join } from 'node:path';
|
|
||||||
import { homedir } from 'node:os';
|
|
||||||
|
|
||||||
// Load .env from daemon config dir (global install / daemon mode).
|
|
||||||
// Loaded first so monorepo .env can override for local dev.
|
|
||||||
const daemonEnv = join(homedir(), '.config', 'mosaic', 'gateway', '.env');
|
|
||||||
if (existsSync(daemonEnv)) config({ path: daemonEnv });
|
|
||||||
|
|
||||||
// Load .env from monorepo root (cwd is apps/gateway when run via pnpm filter)
|
// Load .env from monorepo root (cwd is apps/gateway when run via pnpm filter)
|
||||||
config({ path: resolve(process.cwd(), '../../.env') });
|
config({ path: resolve(process.cwd(), '../../.env') });
|
||||||
@@ -59,7 +51,7 @@ async function bootstrap(): Promise<void> {
|
|||||||
mountAuthHandler(app);
|
mountAuthHandler(app);
|
||||||
mountMcpHandler(app, app.get(McpService));
|
mountMcpHandler(app, app.get(McpService));
|
||||||
|
|
||||||
const port = Number(process.env['GATEWAY_PORT'] ?? 14242);
|
const port = Number(process.env['GATEWAY_PORT'] ?? 4000);
|
||||||
await app.listen(port, '0.0.0.0');
|
await app.listen(port, '0.0.0.0');
|
||||||
logger.log(`Gateway listening on port ${port}`);
|
logger.log(`Gateway listening on port ${port}`);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,29 +1,11 @@
|
|||||||
import { Global, Module } from '@nestjs/common';
|
import { Global, Module } from '@nestjs/common';
|
||||||
import {
|
import { createMemory, type Memory } from '@mosaic/memory';
|
||||||
createMemory,
|
|
||||||
type Memory,
|
|
||||||
createMemoryAdapter,
|
|
||||||
type MemoryAdapter,
|
|
||||||
type MemoryConfig,
|
|
||||||
} from '@mosaic/memory';
|
|
||||||
import type { Db } from '@mosaic/db';
|
import type { Db } from '@mosaic/db';
|
||||||
import type { StorageAdapter } from '@mosaic/storage';
|
import { DB } from '../database/database.module.js';
|
||||||
import type { MosaicConfig } from '@mosaic/config';
|
|
||||||
import { MOSAIC_CONFIG } from '../config/config.module.js';
|
|
||||||
import { DB, STORAGE_ADAPTER } from '../database/database.module.js';
|
|
||||||
import { MEMORY } from './memory.tokens.js';
|
import { MEMORY } from './memory.tokens.js';
|
||||||
import { MemoryController } from './memory.controller.js';
|
import { MemoryController } from './memory.controller.js';
|
||||||
import { EmbeddingService } from './embedding.service.js';
|
import { EmbeddingService } from './embedding.service.js';
|
||||||
|
|
||||||
export const MEMORY_ADAPTER = 'MEMORY_ADAPTER';
|
|
||||||
|
|
||||||
function buildMemoryConfig(config: MosaicConfig, storageAdapter: StorageAdapter): MemoryConfig {
|
|
||||||
if (config.memory.type === 'keyword') {
|
|
||||||
return { type: 'keyword', storage: storageAdapter };
|
|
||||||
}
|
|
||||||
return { type: config.memory.type };
|
|
||||||
}
|
|
||||||
|
|
||||||
@Global()
|
@Global()
|
||||||
@Module({
|
@Module({
|
||||||
providers: [
|
providers: [
|
||||||
@@ -32,15 +14,9 @@ function buildMemoryConfig(config: MosaicConfig, storageAdapter: StorageAdapter)
|
|||||||
useFactory: (db: Db): Memory => createMemory(db),
|
useFactory: (db: Db): Memory => createMemory(db),
|
||||||
inject: [DB],
|
inject: [DB],
|
||||||
},
|
},
|
||||||
{
|
|
||||||
provide: MEMORY_ADAPTER,
|
|
||||||
useFactory: (config: MosaicConfig, storageAdapter: StorageAdapter): MemoryAdapter =>
|
|
||||||
createMemoryAdapter(buildMemoryConfig(config, storageAdapter)),
|
|
||||||
inject: [MOSAIC_CONFIG, STORAGE_ADAPTER],
|
|
||||||
},
|
|
||||||
EmbeddingService,
|
EmbeddingService,
|
||||||
],
|
],
|
||||||
controllers: [MemoryController],
|
controllers: [MemoryController],
|
||||||
exports: [MEMORY, MEMORY_ADAPTER, EmbeddingService],
|
exports: [MEMORY, EmbeddingService],
|
||||||
})
|
})
|
||||||
export class MemoryModule {}
|
export class MemoryModule {}
|
||||||
|
|||||||
@@ -48,7 +48,7 @@ class TelegramChannelPluginAdapter implements IChannelPlugin {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const DEFAULT_GATEWAY_URL = 'http://localhost:14242';
|
const DEFAULT_GATEWAY_URL = 'http://localhost:4000';
|
||||||
|
|
||||||
function createPluginRegistry(): IChannelPlugin[] {
|
function createPluginRegistry(): IChannelPlugin[] {
|
||||||
const plugins: IChannelPlugin[] = [];
|
const plugins: IChannelPlugin[] = [];
|
||||||
|
|||||||
@@ -1,21 +1,9 @@
|
|||||||
import { Global, Module } from '@nestjs/common';
|
import { Global, Module } from '@nestjs/common';
|
||||||
import { createQueueAdapter, type QueueAdapter } from '@mosaic/queue';
|
|
||||||
import type { MosaicConfig } from '@mosaic/config';
|
|
||||||
import { MOSAIC_CONFIG } from '../config/config.module.js';
|
|
||||||
import { QueueService } from './queue.service.js';
|
import { QueueService } from './queue.service.js';
|
||||||
|
|
||||||
export const QUEUE_ADAPTER = 'QUEUE_ADAPTER';
|
|
||||||
|
|
||||||
@Global()
|
@Global()
|
||||||
@Module({
|
@Module({
|
||||||
providers: [
|
providers: [QueueService],
|
||||||
QueueService,
|
exports: [QueueService],
|
||||||
{
|
|
||||||
provide: QUEUE_ADAPTER,
|
|
||||||
useFactory: (config: MosaicConfig): QueueAdapter => createQueueAdapter(config.queue),
|
|
||||||
inject: [MOSAIC_CONFIG],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
exports: [QueueService, QUEUE_ADAPTER],
|
|
||||||
})
|
})
|
||||||
export class QueueModule {}
|
export class QueueModule {}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/web",
|
"name": "@mosaic/web",
|
||||||
"version": "0.0.2",
|
"version": "0.0.0",
|
||||||
"private": true,
|
"private": true,
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "next build",
|
"build": "next build",
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import { defineConfig, devices } from '@playwright/test';
|
|||||||
*
|
*
|
||||||
* Assumes:
|
* Assumes:
|
||||||
* - Next.js web app running on http://localhost:3000
|
* - Next.js web app running on http://localhost:3000
|
||||||
* - NestJS gateway running on http://localhost:14242
|
* - NestJS gateway running on http://localhost:4000
|
||||||
*
|
*
|
||||||
* Run with: pnpm --filter @mosaic/web test:e2e
|
* Run with: pnpm --filter @mosaic/web test:e2e
|
||||||
*/
|
*/
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
const GATEWAY_URL = process.env['NEXT_PUBLIC_GATEWAY_URL'] ?? 'http://localhost:14242';
|
const GATEWAY_URL = process.env['NEXT_PUBLIC_GATEWAY_URL'] ?? 'http://localhost:4000';
|
||||||
|
|
||||||
export interface ApiRequestInit extends Omit<RequestInit, 'body'> {
|
export interface ApiRequestInit extends Omit<RequestInit, 'body'> {
|
||||||
body?: unknown;
|
body?: unknown;
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ import { createAuthClient } from 'better-auth/react';
|
|||||||
import { adminClient, genericOAuthClient } from 'better-auth/client/plugins';
|
import { adminClient, genericOAuthClient } from 'better-auth/client/plugins';
|
||||||
|
|
||||||
export const authClient = createAuthClient({
|
export const authClient = createAuthClient({
|
||||||
baseURL: process.env['NEXT_PUBLIC_GATEWAY_URL'] ?? 'http://localhost:14242',
|
baseURL: process.env['NEXT_PUBLIC_GATEWAY_URL'] ?? 'http://localhost:4000',
|
||||||
plugins: [adminClient(), genericOAuthClient()],
|
plugins: [adminClient(), genericOAuthClient()],
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import { io, type Socket } from 'socket.io-client';
|
import { io, type Socket } from 'socket.io-client';
|
||||||
|
|
||||||
const GATEWAY_URL = process.env['NEXT_PUBLIC_GATEWAY_URL'] ?? 'http://localhost:14242';
|
const GATEWAY_URL = process.env['NEXT_PUBLIC_GATEWAY_URL'] ?? 'http://localhost:4000';
|
||||||
|
|
||||||
let socket: Socket | null = null;
|
let socket: Socket | null = null;
|
||||||
|
|
||||||
|
|||||||
@@ -93,7 +93,7 @@ packages/cli/src/tui/
|
|||||||
cd /home/jwoltje/src/mosaic-mono-v1-worktrees/tui-improvements
|
cd /home/jwoltje/src/mosaic-mono-v1-worktrees/tui-improvements
|
||||||
pnpm --filter @mosaic/cli exec tsx src/cli.ts tui
|
pnpm --filter @mosaic/cli exec tsx src/cli.ts tui
|
||||||
# or after build:
|
# or after build:
|
||||||
node packages/cli/dist/cli.js tui --gateway http://localhost:14242
|
node packages/cli/dist/cli.js tui --gateway http://localhost:4000
|
||||||
```
|
```
|
||||||
|
|
||||||
### Quality Gates
|
### Quality Gates
|
||||||
|
|||||||
@@ -1,30 +1,73 @@
|
|||||||
# Tasks — Storage Abstraction Retrofit
|
# Tasks — Harness Foundation
|
||||||
|
|
||||||
> Single-writer: orchestrator only. Workers read but never modify.
|
> Single-writer: orchestrator only. Workers read but never modify.
|
||||||
>
|
>
|
||||||
> **Mission:** Decouple gateway from hardcoded Postgres/Valkey backends. Introduce interface-driven middleware so the gateway is backend-agnostic. Default to local tier (SQLite + JSON) for zero-dependency installs.
|
|
||||||
>
|
|
||||||
> **`agent` column values:** `codex` | `sonnet` | `haiku` | `glm-5` | `opus` | `—` (auto/default)
|
> **`agent` column values:** `codex` | `sonnet` | `haiku` | `glm-5` | `opus` | `—` (auto/default)
|
||||||
|
|
||||||
| id | status | agent | description | tokens |
|
| id | status | agent | milestone | description | pr | notes |
|
||||||
| --------- | ----------- | ------ | ---------------------------------------------------------------- | ------ |
|
| ------ | ------ | ------ | ------------------ | ------------------------------------------------------------------ | ---- | ----------- |
|
||||||
| SA-P1-001 | done | sonnet | Define QueueAdapter interface in packages/queue/src/types.ts | 3K |
|
| M1-001 | done | sonnet | M1: Persistence | Wire ChatGateway → ConversationsRepo for user messages | #292 | #224 closed |
|
||||||
| SA-P1-002 | done | sonnet | Define StorageAdapter interface in packages/storage/src/types.ts | 3K |
|
| M1-002 | done | sonnet | M1: Persistence | Wire agent event relay → ConversationsRepo for assistant responses | #292 | #225 closed |
|
||||||
| SA-P1-003 | done | sonnet | Define MemoryAdapter interface in packages/memory/src/types.ts | 3K |
|
| M1-003 | done | sonnet | M1: Persistence | Store message metadata: model, provider, tokens, tool calls | #292 | #226 closed |
|
||||||
| SA-P1-004 | done | sonnet | Create adapter factory pattern + config types | 3K |
|
| M1-004 | done | sonnet | M1: Persistence | Load message history into Pi session on resume | #301 | #227 closed |
|
||||||
| SA-P2-001 | done | sonnet | Refactor @mosaic/queue: wrap ioredis as BullMQ adapter | 3K |
|
| M1-005 | done | sonnet | M1: Persistence | Context window management: summarize when >80% | #301 | #228 closed |
|
||||||
| SA-P2-002 | done | sonnet | Create @mosaic/storage: wrap Drizzle as Postgres adapter | 6K |
|
| M1-006 | done | sonnet | M1: Persistence | Conversation search endpoint | #299 | #229 closed |
|
||||||
| SA-P2-003 | done | sonnet | Refactor @mosaic/memory: extract pgvector adapter | 4K |
|
| M1-007 | done | sonnet | M1: Persistence | TUI /history command | #297 | #230 closed |
|
||||||
| SA-P2-004 | done | sonnet | Update gateway modules to use factories + DI tokens | 5K |
|
| M1-008 | done | sonnet | M1: Persistence | Verify persistence — 20 tests | #304 | #231 closed |
|
||||||
| SA-P2-005 | done | opus | Verify Phase 2: all tests pass, typecheck clean | — |
|
| M2-001 | done | sonnet | M2: Security | InsightsRepo userId on searchByEmbedding | #290 | #232 closed |
|
||||||
| SA-P3-001 | done | sonnet | Implement local queue adapter: JSON file persistence | 5K |
|
| M2-002 | done | sonnet | M2: Security | InsightsRepo userId on findByUser/decay | #290 | #233 closed |
|
||||||
| SA-P3-002 | done | sonnet | Implement SQLite storage adapter with better-sqlite3 | 8K |
|
| M2-003 | done | sonnet | M2: Security | PreferencesRepo userId verified | #294 | #234 closed |
|
||||||
| SA-P3-003 | done | sonnet | Implement keyword memory adapter — no vector dependency | 4K |
|
| M2-004 | done | sonnet | M2: Security | Memory tools userId injection fixed | #294 | #235 closed |
|
||||||
| SA-P3-004 | done | opus | Verify Phase 3: 42 new tests, 347 total passing | — |
|
| M2-005 | done | sonnet | M2: Security | ConversationsRepo ownership checks | #293 | #236 closed |
|
||||||
| SA-P4-001 | done | sonnet | MosaicConfig schema + loader with tier auto-detection | 6K |
|
| M2-006 | done | sonnet | M2: Security | AgentsRepo findAccessible scoped | #293 | #237 closed |
|
||||||
| SA-P4-002 | done | sonnet | CLI: mosaic gateway init — interactive wizard | 4K |
|
| M2-007 | done | sonnet | M2: Security | Cross-user isolation — 28 tests | #305 | #238 closed |
|
||||||
| SA-P4-003 | done | sonnet | CLI: mosaic gateway start/stop/status lifecycle | 5K |
|
| M2-008 | done | sonnet | M2: Security | Valkey SCAN + /gc admin-only | #298 | #239 closed |
|
||||||
| SA-P4-004 | done | opus | Verify Phase 4: 381 tests passing, 40/40 tasks clean | — |
|
| M3-001 | done | sonnet | M3: Providers | IProviderAdapter + OllamaAdapter | #306 | #240 closed |
|
||||||
| SA-P5-001 | not-started | codex | Migration tooling: mosaic storage export/import | — |
|
| M3-002 | done | sonnet | M3: Providers | AnthropicAdapter | #309 | #241 closed |
|
||||||
| SA-P5-002 | not-started | codex | Docker Compose profiles: local vs team | — |
|
| M3-003 | done | sonnet | M3: Providers | OpenAIAdapter | #310 | #242 closed |
|
||||||
| SA-P5-003 | not-started | codex | Final verification + docs: README, architecture diagram | — |
|
| M3-004 | done | sonnet | M3: Providers | OpenRouterAdapter | #311 | #243 closed |
|
||||||
|
| M3-005 | done | sonnet | M3: Providers | ZaiAdapter (GLM-5) | #314 | #244 closed |
|
||||||
|
| M3-006 | done | sonnet | M3: Providers | Ollama embedding support | #311 | #245 closed |
|
||||||
|
| M3-007 | done | sonnet | M3: Providers | Provider health checks | #308 | #246 closed |
|
||||||
|
| M3-008 | done | sonnet | M3: Providers | Model capability matrix | #303 | #247 closed |
|
||||||
|
| M3-009 | done | sonnet | M3: Providers | EmbeddingService → Ollama default | #308 | #248 closed |
|
||||||
|
| M3-010 | done | sonnet | M3: Providers | OAuth token storage (AES-256-GCM) | #317 | #249 closed |
|
||||||
|
| M3-011 | done | sonnet | M3: Providers | Provider credentials CRUD | #317 | #250 closed |
|
||||||
|
| M3-012 | done | sonnet | M3: Providers | Verify providers — 40 tests | #319 | #251 closed |
|
||||||
|
| M4-001 | done | sonnet | M4: Routing | routing_rules DB schema | #315 | #252 closed |
|
||||||
|
| M4-002 | done | sonnet | M4: Routing | Condition types | #315 | #253 closed |
|
||||||
|
| M4-003 | done | sonnet | M4: Routing | Action types | #315 | #254 closed |
|
||||||
|
| M4-004 | done | sonnet | M4: Routing | Default routing rules (11 seeds) | #316 | #255 closed |
|
||||||
|
| M4-005 | done | sonnet | M4: Routing | Task classifier (60+ tests) | #316 | #256 closed |
|
||||||
|
| M4-006 | done | sonnet | M4: Routing | Routing decision pipeline | #318 | #257 closed |
|
||||||
|
| M4-007 | done | sonnet | M4: Routing | /model override | #323 | #258 closed |
|
||||||
|
| M4-008 | done | sonnet | M4: Routing | Routing transparency in session:info | #323 | #259 closed |
|
||||||
|
| M4-009 | done | sonnet | M4: Routing | Routing rules CRUD API | #320 | #260 closed |
|
||||||
|
| M4-010 | done | sonnet | M4: Routing | Per-user routing overrides | #320 | #261 closed |
|
||||||
|
| M4-011 | done | sonnet | M4: Routing | Agent specialization capabilities | #320 | #262 closed |
|
||||||
|
| M4-012 | done | sonnet | M4: Routing | Routing wired into ChatGateway | #323 | #263 closed |
|
||||||
|
| M4-013 | done | sonnet | M4: Routing | Verify routing — 9 E2E tests | #323 | #264 closed |
|
||||||
|
| M5-001 | done | sonnet | M5: Sessions | Agent config loaded on session create | #323 | #265 closed |
|
||||||
|
| M5-002 | done | sonnet | M5: Sessions | /model command end-to-end | #323 | #266 closed |
|
||||||
|
| M5-003 | done | sonnet | M5: Sessions | /agent command mid-session | #323 | #267 closed |
|
||||||
|
| M5-004 | done | sonnet | M5: Sessions | Session ↔ conversation binding | #321 | #268 closed |
|
||||||
|
| M5-005 | done | sonnet | M5: Sessions | Session info broadcast | #321 | #269 closed |
|
||||||
|
| M5-006 | done | sonnet | M5: Sessions | /agent new from TUI | #321 | #270 closed |
|
||||||
|
| M5-007 | done | sonnet | M5: Sessions | Session metrics | #321 | #271 closed |
|
||||||
|
| M5-008 | done | sonnet | M5: Sessions | Verify sessions — 28 tests | #324 | #272 closed |
|
||||||
|
| M6-001 | done | sonnet | M6: Jobs | BullMQ + Valkey config | #324 | #273 closed |
|
||||||
|
| M6-002 | done | sonnet | M6: Jobs | Queue service with typed jobs | #324 | #274 closed |
|
||||||
|
| M6-003 | done | sonnet | M6: Jobs | Summarization → BullMQ | #324 | #275 closed |
|
||||||
|
| M6-004 | done | sonnet | M6: Jobs | GC → BullMQ | #324 | #276 closed |
|
||||||
|
| M6-005 | done | sonnet | M6: Jobs | Tier management → BullMQ | #324 | #277 closed |
|
||||||
|
| M6-006 | done | sonnet | M6: Jobs | Admin jobs API | #325 | #278 closed |
|
||||||
|
| M6-007 | done | sonnet | M6: Jobs | Job event logging | #325 | #279 closed |
|
||||||
|
| M6-008 | done | sonnet | M6: Jobs | Verify jobs | #324 | #280 closed |
|
||||||
|
| M7-001 | done | sonnet | M7: Channel Design | IChannelAdapter interface | #325 | #281 closed |
|
||||||
|
| M7-002 | done | sonnet | M7: Channel Design | Channel message protocol | #325 | #282 closed |
|
||||||
|
| M7-003 | done | sonnet | M7: Channel Design | Matrix integration design | #326 | #283 closed |
|
||||||
|
| M7-004 | done | sonnet | M7: Channel Design | Conversation multiplexing | #326 | #284 closed |
|
||||||
|
| M7-005 | done | sonnet | M7: Channel Design | Remote auth bridging | #326 | #285 closed |
|
||||||
|
| M7-006 | done | sonnet | M7: Channel Design | Agent-to-agent via Matrix | #326 | #286 closed |
|
||||||
|
| M7-007 | done | sonnet | M7: Channel Design | Multi-user isolation in Matrix | #326 | #287 closed |
|
||||||
|
| M7-008 | done | sonnet | M7: Channel Design | channel-protocol.md published | #326 | #288 closed |
|
||||||
|
|||||||
@@ -1,555 +0,0 @@
|
|||||||
# Storage & Queue Abstraction — Middleware Architecture
|
|
||||||
|
|
||||||
Design
|
|
||||||
Status: Design (retrofit required)
|
|
||||||
date: 2026-04-02
|
|
||||||
context: Agents coupled directly to infrastructure backends, bypassing intended middleware layer
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## The Problem
|
|
||||||
|
|
||||||
Current packages are **direct adapters**, not **middleware**:
|
|
||||||
| Package | Current State | Intended Design |
|
|
||||||
|---------|---------------|-----------------|
|
|
||||||
| `@mosaic/queue` | `ioredis` hardcoded | Interface → BullMQ OR local-files |
|
|
||||||
| `@mosaic/db` | Drizzle + Postgres hardcoded | Interface → Postgres OR SQLite OR JSON/MD |
|
|
||||||
| `@mosaic/memory` | pgvector required | Interface → pgvector OR sqlite-vec OR keyword-search |
|
|
||||||
|
|
||||||
## The gateway and TUI import these packages directly, which means they they're coupled to specific infrastructure. Users cannot run Mosaic Stack without Postgres + Valkey.
|
|
||||||
|
|
||||||
## The Intended Architecture
|
|
||||||
|
|
||||||
```
|
|
||||||
┌─────────────────────────────────────────────────────────────────┐
|
|
||||||
│ Gateway / TUI / CLI │
|
|
||||||
│ (agnostic of storage backend, talks to middleware) │
|
|
||||||
└───────────────────────────┬─────────────────────────────────────┘
|
|
||||||
│
|
|
||||||
┌───────────────────┼───────────────────┐
|
|
||||||
│ │ │
|
|
||||||
▼─────────────────┴─────────────────┴─────────────────┘
|
|
||||||
| | | |
|
|
||||||
▼─────────────────┴───────────────────┴─────────────────┘
|
|
||||||
| | | |
|
|
||||||
Queue Storage Memory
|
|
||||||
| | | |
|
|
||||||
┌─────────┬─────────┬─────────┬─────────────────────────────────┐
|
|
||||||
| BullMQ | | Local | | Postgres | SQLite | JSON/MD | pgvector | sqlite-vec | keyword |
|
|
||||||
|(Valkey)| |(files) | | | | | |
|
|
||||||
└─────────┴─────────┴─────────┴─────────────────────────────────┘
|
|
||||||
```
|
|
||||||
|
|
||||||
The gateway imports the interface, not the backend. At startup it reads config and instantiates the correct adapter.
|
|
||||||
|
|
||||||
## The Drift
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// What should have happened:
|
|
||||||
gateway/queue.service.ts → @mosaic/queue (interface) → queue.adapter.ts
|
|
||||||
|
|
||||||
// What actually happened:
|
|
||||||
gateway/queue.service.ts → @mosaic/queue → ioredis (hardcoded)
|
|
||||||
```
|
|
||||||
|
|
||||||
## The Current State Analysis
|
|
||||||
|
|
||||||
### `@mosaic/queue` (packages/queue/src/queue.ts)
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import Redis from 'ioredis'; // ← Direct import of backend
|
|
||||||
|
|
||||||
export function createQueue(config?: QueueConfig): QueueHandle {
|
|
||||||
const url = config?.url ?? process.env['VALKEY_URL'] ?? DEFAULT_VALKEY_URL;
|
|
||||||
const redis = new Redis(url, { maxRetriesPerRequest: 3 });
|
|
||||||
// ...queue ops directly on redis...
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
**Problem:** `ioredis` is imported in the package, not the adapter interface. Consumers cannot swap backends.
|
|
||||||
|
|
||||||
### `@mosaic/db` (packages/db/src/client.ts)
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { drizzle, type PostgresJsDatabase } from 'drizzle-orm/postgres-js';
|
|
||||||
import postgres from 'postgres';
|
|
||||||
|
|
||||||
export function createDb(url?: string): DbHandle {
|
|
||||||
const connectionString = url ?? process.env['DATABASE_URL'] ?? DEFAULT_DATABASE_URL;
|
|
||||||
const sql = postgres(connectionString, { max: 20, idle_timeout: 30, connect_timeout: 5 });
|
|
||||||
const db = drizzle(sql, { schema });
|
|
||||||
// ...
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
**Problem:** Drizzle + Postgres is hardcoded. No SQLite, JSON, or file-based options.
|
|
||||||
|
|
||||||
### `@mosaic/memory` (packages/memory/src/memory.ts)
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import type { Db } from '@mosaic/db'; // ← Depends on Drizzle/PG
|
|
||||||
|
|
||||||
export function createMemory(db: Db): Memory {
|
|
||||||
return {
|
|
||||||
preferences: createPreferencesRepo(db),
|
|
||||||
insights: createInsightsRepo(db),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
**Problem:** Memory package is tightly coupled to `@mosaic/db` (which is Postgres-only). No alternative storage backends.
|
|
||||||
|
|
||||||
## The Target Interfaces
|
|
||||||
|
|
||||||
### Queue Interface
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// packages/queue/src/types.ts
|
|
||||||
export interface QueueAdapter {
|
|
||||||
readonly name: string;
|
|
||||||
|
|
||||||
enqueue(queueName: string, payload: TaskPayload): Promise<void>;
|
|
||||||
dequeue(queueName: string): Promise<TaskPayload | null>;
|
|
||||||
length(queueName: string): Promise<number>;
|
|
||||||
publish(channel: string, message: string): Promise<void>;
|
|
||||||
subscribe(channel: string, handler: (message: string) => void): () => void;
|
|
||||||
close(): Promise<void>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface TaskPayload {
|
|
||||||
id: string;
|
|
||||||
type: string;
|
|
||||||
data: Record<string, unknown>;
|
|
||||||
createdAt: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface QueueConfig {
|
|
||||||
type: 'bullmq' | 'local';
|
|
||||||
url?: string; // For bullmq: Valkey/Redis URL
|
|
||||||
dataDir?: string; // For local: directory for JSON persistence
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Storage Interface
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// packages/storage/src/types.ts
|
|
||||||
export interface StorageAdapter {
|
|
||||||
readonly name: string;
|
|
||||||
|
|
||||||
// Entity CRUD
|
|
||||||
create<T>(collection: string, data: O): Promise<T>;
|
|
||||||
read<T>(collection: string, id: string): Promise<T | null>;
|
|
||||||
update<T>(collection: string, id: string, data: Partial<O>): Promise<T | null>;
|
|
||||||
delete(collection: string, id: string): Promise<boolean>;
|
|
||||||
|
|
||||||
// Queries
|
|
||||||
find<T>(collection: string, filter: Record<string, unknown>): Promise<T[]>;
|
|
||||||
findOne<T>(collection: string, filter: Record<string, unknown): Promise<T | null>;
|
|
||||||
|
|
||||||
// Bulk operations
|
|
||||||
createMany<T>(collection: string, items: O[]): Promise<T[]>;
|
|
||||||
updateMany<T>(collection: string, ids: string[], data: Partial<O>): Promise<number>;
|
|
||||||
deleteMany(collection: string, ids: string[]): Promise<number>;
|
|
||||||
|
|
||||||
// Raw queries (for complex queries)
|
|
||||||
query<T>(collection: string, query: string, params?: unknown[]): Promise<T[]>;
|
|
||||||
|
|
||||||
// Transaction support
|
|
||||||
transaction<T>(fn: (tx: StorageTransaction) => Promise<T>): Promise<T>;
|
|
||||||
|
|
||||||
close(): Promise<void>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface StorageTransaction {
|
|
||||||
commit(): Promise<void>;
|
|
||||||
rollback(): Promise<void>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface StorageConfig {
|
|
||||||
type: 'postgres' | 'sqlite' | 'files';
|
|
||||||
url?: string; // For postgres
|
|
||||||
path?: string; // For sqlite/files
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Memory Interface (Vector + Preferences)
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// packages/memory/src/types.ts
|
|
||||||
export interface MemoryAdapter {
|
|
||||||
readonly name: string;
|
|
||||||
|
|
||||||
// Preferences (key-value storage)
|
|
||||||
getPreference(userId: string, key: string): Promise<unknown | null>;
|
|
||||||
setPreference(userId: string, key: string, value: unknown): Promise<void>;
|
|
||||||
deletePreference(userId: string, key: string): Promise<boolean>;
|
|
||||||
listPreferences(
|
|
||||||
userId: string,
|
|
||||||
category?: string,
|
|
||||||
): Promise<Array<{ key: string; value: unknown }>>;
|
|
||||||
|
|
||||||
// Insights (with optional vector search)
|
|
||||||
storeInsight(insight: NewInsight): Promise<Insight>;
|
|
||||||
getInsight(id: string): Promise<Insight | null>;
|
|
||||||
searchInsights(query: string, limit?: number, filter?: InsightFilter): Promise<SearchResult[]>;
|
|
||||||
deleteInsight(id: string): Promise<boolean>;
|
|
||||||
|
|
||||||
// Embedding provider (optional, null = no vector search)
|
|
||||||
readonly embedder?: EmbeddingProvider | null;
|
|
||||||
|
|
||||||
close(): Promise<void>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface NewInsight {
|
|
||||||
id: string;
|
|
||||||
userId: string;
|
|
||||||
content: string;
|
|
||||||
embedding?: number[]; // If embedder is available
|
|
||||||
source: 'agent' | 'user' | 'summarization' | 'system';
|
|
||||||
category: 'decision' | 'learning' | 'preference' | 'fact' | 'pattern' | 'general';
|
|
||||||
relevanceScore: number;
|
|
||||||
metadata?: Record<string, unknown>;
|
|
||||||
createdAt: Date;
|
|
||||||
decayedAt?: Date;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface InsightFilter {
|
|
||||||
userId?: string;
|
|
||||||
category?: string;
|
|
||||||
source?: string;
|
|
||||||
minRelevance?: number;
|
|
||||||
fromDate?: Date;
|
|
||||||
toDate?: Date;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface SearchResult {
|
|
||||||
documentId: string;
|
|
||||||
content: string;
|
|
||||||
distance: number;
|
|
||||||
metadata?: Record<string, unknown>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface MemoryConfig {
|
|
||||||
type: 'pgvector' | 'sqlite-vec' | 'keyword';
|
|
||||||
storage: StorageAdapter;
|
|
||||||
embedder?: EmbeddingProvider;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface EmbeddingProvider {
|
|
||||||
embed(text: string): Promise<number[]>;
|
|
||||||
embedBatch(texts: string[]): Promise<number[][]>;
|
|
||||||
readonly dimensions: number;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Three Tiers
|
|
||||||
|
|
||||||
### Tier 1: Local (Zero Dependencies)
|
|
||||||
|
|
||||||
**Target:** Single user, single machine, no external services
|
|
||||||
|
|
||||||
| Component | Backend | Storage |
|
|
||||||
| --------- | --------------------------------------------- | ------------ |
|
|
||||||
| Queue | In-process + JSON files in `~/.mosaic/queue/` |
|
|
||||||
| Storage | SQLite (better-sqlite3) `~/.mosaic/data.db` |
|
|
||||||
| Memory | Keyword search | SQLite table |
|
|
||||||
| Vector | None | N/A |
|
|
||||||
|
|
||||||
**Dependencies:**
|
|
||||||
|
|
||||||
- `better-sqlite3` (bundled)
|
|
||||||
- No Postgres, No Valkey, No pgvector
|
|
||||||
|
|
||||||
**Upgrade path:**
|
|
||||||
|
|
||||||
1. Run `mosaic gateway configure` → select "local" tier
|
|
||||||
2. Gateway starts with SQLite database
|
|
||||||
3. Optional: run `mosaic gateway upgrade --tier team` to migrate to Postgres
|
|
||||||
|
|
||||||
### Tier 2: Team (Postgres + Valkey)
|
|
||||||
|
|
||||||
**Target:** Multiple users, shared server, CI/CD environments
|
|
||||||
|
|
||||||
| Component | Backend | Storage |
|
|
||||||
| --------- | -------------- | ------------------------------ |
|
|
||||||
| Queue | BullMQ | Valkey |
|
|
||||||
| Storage | Postgres | Shared PG instance |
|
|
||||||
| Memory | pgvector | Postgres with vector extension |
|
|
||||||
| Vector | LLM embeddings | Configured provider |
|
|
||||||
|
|
||||||
**Dependencies:**
|
|
||||||
|
|
||||||
- PostgreSQL 17+ with pgvector extension
|
|
||||||
- Valkey (Redis-compatible)
|
|
||||||
- LLM provider for embeddings
|
|
||||||
|
|
||||||
**Migration from Local → Team:**
|
|
||||||
|
|
||||||
1. `mosaic gateway backup` → creates dump of SQLite database
|
|
||||||
2. `mosaic gateway upgrade --tier team` → restores to Postgres
|
|
||||||
3. Queue replays from BullMQ (may need manual reconciliation for in-flight jobs)
|
|
||||||
4. Memory embeddings regenerated if vector search was new
|
|
||||||
|
|
||||||
### Tier 3: Enterprise (Clustered)
|
|
||||||
|
|
||||||
**Target:** Large teams, multi-region, high availability
|
|
||||||
|
|
||||||
| Component | Backend | Storage |
|
|
||||||
| --------- | --------------------------- | ----------------------------- |
|
|
||||||
| Queue | BullMQ cluster | Multiple Valkey nodes |
|
|
||||||
| Storage | Postgres cluster | Primary + replicas |
|
|
||||||
| Memory | Dedicated vector DB | Qdrant, Pinecone, or pgvector |
|
|
||||||
| Vector | Dedicated embedding service | Separate microservice |
|
|
||||||
|
|
||||||
## MarkdownDB Integration
|
|
||||||
|
|
||||||
For file-based storage, we use [MarkdownDB](https://markdowndb.com) to parse MD files into queryable data.
|
|
||||||
|
|
||||||
**What it provides:**
|
|
||||||
|
|
||||||
- Parses frontmatter (YAML/JSON/TOML)
|
|
||||||
- Extracts links, tags, metadata
|
|
||||||
- Builds index in JSON or SQLite
|
|
||||||
- Queryable via SQL-like interface
|
|
||||||
|
|
||||||
**Usage in Mosaic:**
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// Local tier with MD files for documents
|
|
||||||
const storage = createStorageAdapter({
|
|
||||||
type: 'files',
|
|
||||||
path: path.join(mosaicHome, 'docs'),
|
|
||||||
markdowndb: {
|
|
||||||
parseFrontmatter: true,
|
|
||||||
extractLinks: true,
|
|
||||||
indexFile: 'index.json',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
## Dream Mode — Memory Consolidation
|
|
||||||
|
|
||||||
Automated equivalent to Claude Code's "Dream: Memory Consolidation" cycle
|
|
||||||
|
|
||||||
**Trigger:** Every 24 hours (if 5+ sessions active)
|
|
||||||
|
|
||||||
**Phases:**
|
|
||||||
|
|
||||||
1. **Orient** — What happened, what's the current state
|
|
||||||
- Scan recent session logs
|
|
||||||
- Identify active tasks, missions, conversations
|
|
||||||
- Calculate time window (last 24h)
|
|
||||||
|
|
||||||
2. **Gather** — Pull in relevant context
|
|
||||||
- Load conversations, decisions, agent logs
|
|
||||||
- Extract key interactions and outcomes
|
|
||||||
- Identify patterns and learnings
|
|
||||||
|
|
||||||
3. **Consolidate** — Summarize and compress
|
|
||||||
- Generate summary of the last 24h
|
|
||||||
- Extract key decisions and their rationale
|
|
||||||
- Identify recurring patterns
|
|
||||||
- Compress verbose logs into concise insights
|
|
||||||
|
|
||||||
4. **Prune** — Archive and cleanup
|
|
||||||
- Archive raw session files to dated folders
|
|
||||||
- Delete redundant/temporary data
|
|
||||||
- Update MEMORY.md with consolidated content
|
|
||||||
- Update insight relevance scores
|
|
||||||
|
|
||||||
**Implementation:**
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// In @mosaic/dream (new package)
|
|
||||||
export async function runDreamCycle(config: DreamConfig): Promise<DreamResult> {
|
|
||||||
const memory = await loadMemoryAdapter(config.storage);
|
|
||||||
|
|
||||||
// Orient
|
|
||||||
const sessions = await memory.getRecentSessions(24 * 60 * 60 * 1000);
|
|
||||||
if (sessions.length < 5) return { skipped: true, reason: 'insufficient_sessions' };
|
|
||||||
|
|
||||||
// Gather
|
|
||||||
const context = await gatherContext(memory, sessions);
|
|
||||||
|
|
||||||
// Consolidate
|
|
||||||
const consolidated = await consolidateWithLLM(context, config.llm);
|
|
||||||
|
|
||||||
// Prune
|
|
||||||
await pruneArchivedData(memory, config.retention);
|
|
||||||
|
|
||||||
// Store consolidated insights
|
|
||||||
await memory.storeInsights(consolidated.insights);
|
|
||||||
|
|
||||||
return {
|
|
||||||
sessionsProcessed: sessions.length,
|
|
||||||
insightsCreated: consolidated.insights.length,
|
|
||||||
bytesPruned: consolidated.bytesRemoved,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Retrofit Plan
|
|
||||||
|
|
||||||
### Phase 1: Interface Extraction (2-3 days)
|
|
||||||
|
|
||||||
**Goal:** Define interfaces without changing existing behavior
|
|
||||||
|
|
||||||
1. Create `packages/queue/src/types.ts` with `QueueAdapter` interface
|
|
||||||
2. Create `packages/storage/src/types.ts` with `StorageAdapter` interface
|
|
||||||
3. Create `packages/memory/src/types.ts` with `MemoryAdapter` interface (refactor existing)
|
|
||||||
4. Add adapter registry pattern to each package
|
|
||||||
5. No breaking changes — existing code continues to work
|
|
||||||
|
|
||||||
### Phase 2: Refactor Existing to Adapters (3-5 days)
|
|
||||||
|
|
||||||
**Goal:** Move existing implementations behind adapters
|
|
||||||
|
|
||||||
#### 2.1 Queue Refactor
|
|
||||||
|
|
||||||
1. Rename `packages/queue/src/queue.ts` → `packages/queue/src/adapters/bullmq.ts`
|
|
||||||
2. Create `packages/queue/src/index.ts` to export factory function
|
|
||||||
3. Factory function reads config, instantiates correct adapter
|
|
||||||
4. Update gateway imports to use factory
|
|
||||||
|
|
||||||
#### 2.2 Storage Refactor
|
|
||||||
|
|
||||||
1. Create `packages/storage/` (new package)
|
|
||||||
2. Move Drizzle logic to `packages/storage/src/adapters/postgres.ts`
|
|
||||||
3. Create SQLite adapter in `packages/storage/src/adapters/sqlite.ts`
|
|
||||||
4. Update gateway to use storage factory
|
|
||||||
5. Deprecate direct `@mosaic/db` imports
|
|
||||||
|
|
||||||
#### 2.3 Memory Refactor
|
|
||||||
|
|
||||||
1. Extract existing logic to `packages/memory/src/adapters/pgvector.ts`
|
|
||||||
2. Create keyword adapter in `packages/memory/src/adapters/keyword.ts`
|
|
||||||
3. Update vector-store.ts to be adapter-agnostic
|
|
||||||
|
|
||||||
### Phase 3: Local Tier Implementation (2-3 days)
|
|
||||||
|
|
||||||
**Goal:** Zero-dependency baseline
|
|
||||||
|
|
||||||
1. Implement `packages/queue/src/adapters/local.ts` (in-process + JSON persistence)
|
|
||||||
2. Implement `packages/storage/src/adapters/files.ts` (JSON + MD via MarkdownDB)
|
|
||||||
3. Implement `packages/memory/src/adapters/keyword.ts` (TF-IDF search)
|
|
||||||
4. Add `packages/dream/` for consolidation cycle
|
|
||||||
5. Wire up local tier in gateway startup
|
|
||||||
|
|
||||||
### Phase 4: Configuration System (1-2 days)
|
|
||||||
|
|
||||||
**Goal:** Runtime backend selection
|
|
||||||
|
|
||||||
1. Create `packages/config/src/storage.ts` for storage configuration
|
|
||||||
2. Add `mosaic.config.ts` schema with storage tier settings
|
|
||||||
3. Update gateway to read config on startup
|
|
||||||
4. Add `mosaic gateway configure` CLI command
|
|
||||||
5. Add tier migration commands (`mosaic gateway upgrade`)
|
|
||||||
|
|
||||||
### Phase 5: Testing & Documentation (2-3 days)
|
|
||||||
|
|
||||||
1. Unit tests for each adapter
|
|
||||||
2. Integration tests for factory pattern
|
|
||||||
3. Migration tests (local → team)
|
|
||||||
4. Update README and architecture docs
|
|
||||||
5. Add configuration guide
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## File Changes Summary
|
|
||||||
|
|
||||||
### New Files
|
|
||||||
|
|
||||||
```
|
|
||||||
packages/
|
|
||||||
├── config/
|
|
||||||
│ └── src/
|
|
||||||
│ ├── storage.ts # Storage config schema
|
|
||||||
│ └── index.ts
|
|
||||||
├── dream/ # NEW: Dream mode consolidation
|
|
||||||
│ ├── src/
|
|
||||||
│ │ ├── index.ts
|
|
||||||
│ │ ├── orient.ts
|
|
||||||
│ │ ├── gather.ts
|
|
||||||
│ │ ├── consolidate.ts
|
|
||||||
│ │ └── prune.ts
|
|
||||||
│ └── package.json
|
|
||||||
├── queue/
|
|
||||||
│ └── src/
|
|
||||||
│ ├── types.ts # NEW: QueueAdapter interface
|
|
||||||
│ ├── index.ts # NEW: Factory function
|
|
||||||
│ └── adapters/
|
|
||||||
│ ├── bullmq.ts # MOVED from queue.ts
|
|
||||||
│ └── local.ts # NEW: In-process adapter
|
|
||||||
├── storage/ # NEW: Storage abstraction
|
|
||||||
│ ├── src/
|
|
||||||
│ │ ├── types.ts # StorageAdapter interface
|
|
||||||
│ │ ├── index.ts # Factory function
|
|
||||||
│ │ └── adapters/
|
|
||||||
│ │ ├── postgres.ts # MOVED from @mosaic/db
|
|
||||||
│ │ ├── sqlite.ts # NEW: SQLite adapter
|
|
||||||
│ │ └── files.ts # NEW: JSON/MD adapter
|
|
||||||
│ └── package.json
|
|
||||||
└── memory/
|
|
||||||
└── src/
|
|
||||||
├── types.ts # UPDATED: MemoryAdapter interface
|
|
||||||
├── index.ts # UPDATED: Factory function
|
|
||||||
└── adapters/
|
|
||||||
├── pgvector.ts # EXTRACTED from existing code
|
|
||||||
├── sqlite-vec.ts # NEW: SQLite with vectors
|
|
||||||
└── keyword.ts # NEW: TF-IDF search
|
|
||||||
```
|
|
||||||
|
|
||||||
### Modified Files
|
|
||||||
|
|
||||||
```
|
|
||||||
packages/
|
|
||||||
├── db/ # DEPRECATED: Logic moved to storage adapters
|
|
||||||
├── queue/
|
|
||||||
│ └── src/
|
|
||||||
│ └── queue.ts # → adapters/bullmq.ts
|
|
||||||
├── memory/
|
|
||||||
│ ├── src/
|
|
||||||
│ │ ├── memory.ts # → use factory
|
|
||||||
│ │ ├── insights.ts # → use factory
|
|
||||||
│ │ └── preferences.ts # → use factory
|
|
||||||
│ └── package.json # Remove pgvector from dependencies
|
|
||||||
└── gateway/
|
|
||||||
└── src/
|
|
||||||
├── database/
|
|
||||||
│ └── database.module.ts # Update to use storage factory
|
|
||||||
├── memory/
|
|
||||||
│ └── memory.module.ts # Update to use memory factory
|
|
||||||
└── queue/
|
|
||||||
└── queue.module.ts # Update to use queue factory
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Breaking Changes
|
|
||||||
|
|
||||||
1. **`@mosaic/db`** → **`@mosaic/storage`** (with migration guide)
|
|
||||||
2. Direct `ioredis` imports → Use `@mosaic/queue` factory
|
|
||||||
3. Direct `pgvector` queries → Use `@mosaic/memory` factory
|
|
||||||
4. Gateway startup now requires storage config (defaults to local)
|
|
||||||
|
|
||||||
## Non-Breaking Migration Path
|
|
||||||
|
|
||||||
1. Existing deployments with Postgres/Valkey continue to work (default config)
|
|
||||||
2. New deployments can choose local tier
|
|
||||||
3. Migration commands available when ready to upgrade
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Success Criteria
|
|
||||||
|
|
||||||
- [ ] Local tier runs with zero external dependencies
|
|
||||||
- [ ] All three tiers (local, team, enterprise) work correctly
|
|
||||||
- [ ] Factory pattern correctly selects backend at runtime
|
|
||||||
- [ ] Migration from local → team preserves all data
|
|
||||||
- [ ] Dream mode consolidates 24h of sessions
|
|
||||||
- [ ] Documentation covers all three tiers and migration paths
|
|
||||||
- [ ] All existing tests pass
|
|
||||||
- [ ] New adapters have >80% coverage
|
|
||||||
@@ -229,11 +229,11 @@ external clients. Authentication requires a valid BetterAuth session (cookie or
|
|||||||
|
|
||||||
### Gateway
|
### Gateway
|
||||||
|
|
||||||
| Variable | Default | Description |
|
| Variable | Default | Description |
|
||||||
| --------------------- | ------------------------ | ---------------------------------------------- |
|
| --------------------- | ----------------------- | ---------------------------------------------- |
|
||||||
| `GATEWAY_PORT` | `14242` | Port the gateway listens on |
|
| `GATEWAY_PORT` | `4000` | Port the gateway listens on |
|
||||||
| `GATEWAY_CORS_ORIGIN` | `http://localhost:3000` | Allowed CORS origin for browser clients |
|
| `GATEWAY_CORS_ORIGIN` | `http://localhost:3000` | Allowed CORS origin for browser clients |
|
||||||
| `BETTER_AUTH_URL` | `http://localhost:14242` | Public URL of the gateway (used by BetterAuth) |
|
| `BETTER_AUTH_URL` | `http://localhost:4000` | Public URL of the gateway (used by BetterAuth) |
|
||||||
|
|
||||||
### SSO (Optional)
|
### SSO (Optional)
|
||||||
|
|
||||||
@@ -292,13 +292,13 @@ Each OIDC provider requires its client ID, client secret, and issuer URL togethe
|
|||||||
|
|
||||||
### Plugins
|
### Plugins
|
||||||
|
|
||||||
| Variable | Description |
|
| Variable | Description |
|
||||||
| ---------------------- | -------------------------------------------------------------------------- |
|
| ---------------------- | ------------------------------------------------------------------------- |
|
||||||
| `DISCORD_BOT_TOKEN` | Discord bot token (enables Discord plugin) |
|
| `DISCORD_BOT_TOKEN` | Discord bot token (enables Discord plugin) |
|
||||||
| `DISCORD_GUILD_ID` | Discord guild/server ID |
|
| `DISCORD_GUILD_ID` | Discord guild/server ID |
|
||||||
| `DISCORD_GATEWAY_URL` | Gateway URL for Discord plugin to call (default: `http://localhost:14242`) |
|
| `DISCORD_GATEWAY_URL` | Gateway URL for Discord plugin to call (default: `http://localhost:4000`) |
|
||||||
| `TELEGRAM_BOT_TOKEN` | Telegram bot token (enables Telegram plugin) |
|
| `TELEGRAM_BOT_TOKEN` | Telegram bot token (enables Telegram plugin) |
|
||||||
| `TELEGRAM_GATEWAY_URL` | Gateway URL for Telegram plugin to call |
|
| `TELEGRAM_GATEWAY_URL` | Gateway URL for Telegram plugin to call |
|
||||||
|
|
||||||
### Observability
|
### Observability
|
||||||
|
|
||||||
@@ -309,9 +309,9 @@ Each OIDC provider requires its client ID, client secret, and issuer URL togethe
|
|||||||
|
|
||||||
### Web App
|
### Web App
|
||||||
|
|
||||||
| Variable | Default | Description |
|
| Variable | Default | Description |
|
||||||
| ------------------------- | ------------------------ | -------------------------------------- |
|
| ------------------------- | ----------------------- | -------------------------------------- |
|
||||||
| `NEXT_PUBLIC_GATEWAY_URL` | `http://localhost:14242` | Gateway URL used by the Next.js client |
|
| `NEXT_PUBLIC_GATEWAY_URL` | `http://localhost:4000` | Gateway URL used by the Next.js client |
|
||||||
|
|
||||||
### Coordination
|
### Coordination
|
||||||
|
|
||||||
|
|||||||
@@ -194,7 +194,7 @@ server {
|
|||||||
|
|
||||||
# WebSocket support (for chat.gateway.ts / Socket.IO)
|
# WebSocket support (for chat.gateway.ts / Socket.IO)
|
||||||
location /socket.io/ {
|
location /socket.io/ {
|
||||||
proxy_pass http://127.0.0.1:14242;
|
proxy_pass http://127.0.0.1:4000;
|
||||||
proxy_http_version 1.1;
|
proxy_http_version 1.1;
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
proxy_set_header Connection "upgrade";
|
proxy_set_header Connection "upgrade";
|
||||||
@@ -204,7 +204,7 @@ server {
|
|||||||
|
|
||||||
# REST + auth
|
# REST + auth
|
||||||
location / {
|
location / {
|
||||||
proxy_pass http://127.0.0.1:14242;
|
proxy_pass http://127.0.0.1:4000;
|
||||||
proxy_set_header Host $host;
|
proxy_set_header Host $host;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
@@ -234,11 +234,11 @@ server {
|
|||||||
# /etc/caddy/Caddyfile
|
# /etc/caddy/Caddyfile
|
||||||
|
|
||||||
your-domain.example.com {
|
your-domain.example.com {
|
||||||
reverse_proxy /socket.io/* localhost:14242 {
|
reverse_proxy /socket.io/* localhost:4000 {
|
||||||
header_up Upgrade {http.upgrade}
|
header_up Upgrade {http.upgrade}
|
||||||
header_up Connection {http.connection}
|
header_up Connection {http.connection}
|
||||||
}
|
}
|
||||||
reverse_proxy localhost:14242
|
reverse_proxy localhost:4000
|
||||||
}
|
}
|
||||||
|
|
||||||
app.your-domain.example.com {
|
app.your-domain.example.com {
|
||||||
@@ -328,7 +328,7 @@ MaxRetentionSec=30day
|
|||||||
- Set `BETTER_AUTH_SECRET` to a cryptographically random value (`openssl rand -base64 32`).
|
- Set `BETTER_AUTH_SECRET` to a cryptographically random value (`openssl rand -base64 32`).
|
||||||
- Restrict `GATEWAY_CORS_ORIGIN` to your exact frontend origin — do not use `*`.
|
- Restrict `GATEWAY_CORS_ORIGIN` to your exact frontend origin — do not use `*`.
|
||||||
- Run services as a dedicated non-root system user (e.g., `mosaic`).
|
- Run services as a dedicated non-root system user (e.g., `mosaic`).
|
||||||
- Firewall: only expose ports 80/443 externally; keep 14242 and 3000 bound to `127.0.0.1`.
|
- Firewall: only expose ports 80/443 externally; keep 4000 and 3000 bound to `127.0.0.1`.
|
||||||
- Set `AGENT_FILE_SANDBOX_DIR` to a directory outside the application root to prevent agent tools from accessing source code.
|
- Set `AGENT_FILE_SANDBOX_DIR` to a directory outside the application root to prevent agent tools from accessing source code.
|
||||||
- If using `AGENT_USER_TOOLS`, enumerate only the tools non-admin users need.
|
- If using `AGENT_USER_TOOLS`, enumerate only the tools non-admin users need.
|
||||||
|
|
||||||
|
|||||||
@@ -112,11 +112,11 @@ DATABASE_URL=postgresql://mosaic:mosaic@localhost:5433/mosaic
|
|||||||
BETTER_AUTH_SECRET=change-me-to-a-random-secret
|
BETTER_AUTH_SECRET=change-me-to-a-random-secret
|
||||||
|
|
||||||
# Gateway
|
# Gateway
|
||||||
GATEWAY_PORT=14242
|
GATEWAY_PORT=4000
|
||||||
GATEWAY_CORS_ORIGIN=http://localhost:3000
|
GATEWAY_CORS_ORIGIN=http://localhost:3000
|
||||||
|
|
||||||
# Web
|
# Web
|
||||||
NEXT_PUBLIC_GATEWAY_URL=http://localhost:14242
|
NEXT_PUBLIC_GATEWAY_URL=http://localhost:4000
|
||||||
|
|
||||||
# Optional: Ollama
|
# Optional: Ollama
|
||||||
OLLAMA_BASE_URL=http://localhost:11434
|
OLLAMA_BASE_URL=http://localhost:11434
|
||||||
@@ -141,7 +141,7 @@ migrations in production).
|
|||||||
pnpm --filter @mosaic/gateway exec tsx src/main.ts
|
pnpm --filter @mosaic/gateway exec tsx src/main.ts
|
||||||
```
|
```
|
||||||
|
|
||||||
The gateway starts on port `14242` by default.
|
The gateway starts on port `4000` by default.
|
||||||
|
|
||||||
### 6. Start the Web App
|
### 6. Start the Web App
|
||||||
|
|
||||||
@@ -395,7 +395,7 @@ directory are defined there.
|
|||||||
|
|
||||||
## API Endpoint Reference
|
## API Endpoint Reference
|
||||||
|
|
||||||
All endpoints are served by the gateway at `http://localhost:14242` by default.
|
All endpoints are served by the gateway at `http://localhost:4000` by default.
|
||||||
|
|
||||||
### Authentication
|
### Authentication
|
||||||
|
|
||||||
|
|||||||
@@ -16,7 +16,7 @@
|
|||||||
### Prerequisites
|
### Prerequisites
|
||||||
|
|
||||||
Mosaic Stack requires a running gateway. Your administrator provides the URL
|
Mosaic Stack requires a running gateway. Your administrator provides the URL
|
||||||
(default: `http://localhost:14242`) and creates your account.
|
(default: `http://localhost:4000`) and creates your account.
|
||||||
|
|
||||||
### Logging In (Web)
|
### Logging In (Web)
|
||||||
|
|
||||||
@@ -177,7 +177,7 @@ mosaic --help
|
|||||||
### Signing In
|
### Signing In
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
mosaic login --gateway http://localhost:14242 --email you@example.com
|
mosaic login --gateway http://localhost:4000 --email you@example.com
|
||||||
```
|
```
|
||||||
|
|
||||||
You are prompted for a password if `--password` is not supplied. The session
|
You are prompted for a password if `--password` is not supplied. The session
|
||||||
@@ -191,12 +191,12 @@ mosaic tui
|
|||||||
|
|
||||||
Options:
|
Options:
|
||||||
|
|
||||||
| Flag | Default | Description |
|
| Flag | Default | Description |
|
||||||
| ----------------------- | ------------------------ | ---------------------------------- |
|
| ----------------------- | ----------------------- | ---------------------------------- |
|
||||||
| `--gateway <url>` | `http://localhost:14242` | Gateway URL |
|
| `--gateway <url>` | `http://localhost:4000` | Gateway URL |
|
||||||
| `--conversation <id>` | — | Resume a specific conversation |
|
| `--conversation <id>` | — | Resume a specific conversation |
|
||||||
| `--model <modelId>` | server default | Model to use (e.g. `llama3.2`) |
|
| `--model <modelId>` | server default | Model to use (e.g. `llama3.2`) |
|
||||||
| `--provider <provider>` | server default | Provider (e.g. `ollama`, `openai`) |
|
| `--provider <provider>` | server default | Provider (e.g. `ollama`, `openai`) |
|
||||||
|
|
||||||
If no valid session exists you are prompted to sign in before the TUI launches.
|
If no valid session exists you are prompted to sign in before the TUI launches.
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +0,0 @@
|
|||||||
{
|
|
||||||
"tier": "local",
|
|
||||||
"storage": { "type": "pglite", "dataDir": ".mosaic/storage-pglite" },
|
|
||||||
"queue": { "type": "local", "dataDir": ".mosaic/queue" },
|
|
||||||
"memory": { "type": "keyword" }
|
|
||||||
}
|
|
||||||
@@ -1,11 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/agent",
|
"name": "@mosaic/agent",
|
||||||
"version": "0.0.2",
|
"version": "0.0.1-alpha.2",
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
|
||||||
"directory": "packages/agent"
|
|
||||||
},
|
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
|
|||||||
@@ -1,11 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/auth",
|
"name": "@mosaic/auth",
|
||||||
"version": "0.0.2",
|
"version": "0.0.1-alpha.2",
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
|
||||||
"directory": "packages/auth"
|
|
||||||
},
|
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
|
|||||||
@@ -35,7 +35,7 @@ export function createAuth(config: AuthConfig) {
|
|||||||
provider: 'pg',
|
provider: 'pg',
|
||||||
usePlural: true,
|
usePlural: true,
|
||||||
}),
|
}),
|
||||||
baseURL: baseURL ?? process.env['BETTER_AUTH_URL'] ?? 'http://localhost:14242',
|
baseURL: baseURL ?? process.env['BETTER_AUTH_URL'] ?? 'http://localhost:4000',
|
||||||
secret: secret ?? process.env['BETTER_AUTH_SECRET'],
|
secret: secret ?? process.env['BETTER_AUTH_SECRET'],
|
||||||
basePath: '/api/auth',
|
basePath: '/api/auth',
|
||||||
trustedOrigins,
|
trustedOrigins,
|
||||||
|
|||||||
@@ -1,11 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/brain",
|
"name": "@mosaic/brain",
|
||||||
"version": "0.0.2",
|
"version": "0.0.1-alpha.2",
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
|
||||||
"directory": "packages/brain"
|
|
||||||
},
|
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
|
|||||||
@@ -1,11 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/cli",
|
"name": "@mosaic/cli",
|
||||||
"version": "0.0.15",
|
"version": "0.0.1-alpha.2",
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
|
||||||
"directory": "packages/cli"
|
|
||||||
},
|
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
@@ -27,7 +22,6 @@
|
|||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@clack/prompts": "^0.9.0",
|
"@clack/prompts": "^0.9.0",
|
||||||
"@mosaic/config": "workspace:^",
|
|
||||||
"@mosaic/mosaic": "workspace:^",
|
"@mosaic/mosaic": "workspace:^",
|
||||||
"@mosaic/prdy": "workspace:^",
|
"@mosaic/prdy": "workspace:^",
|
||||||
"@mosaic/quality-rails": "workspace:^",
|
"@mosaic/quality-rails": "workspace:^",
|
||||||
|
|||||||
@@ -2,12 +2,11 @@
|
|||||||
|
|
||||||
import { createRequire } from 'module';
|
import { createRequire } from 'module';
|
||||||
import { Command } from 'commander';
|
import { Command } from 'commander';
|
||||||
import { registerQualityRails } from '@mosaic/quality-rails';
|
import { createQualityRailsCli } from '@mosaic/quality-rails';
|
||||||
import { registerAgentCommand } from './commands/agent.js';
|
import { registerAgentCommand } from './commands/agent.js';
|
||||||
import { registerMissionCommand } from './commands/mission.js';
|
import { registerMissionCommand } from './commands/mission.js';
|
||||||
// prdy is registered via launch.ts
|
import { registerPrdyCommand } from './commands/prdy.js';
|
||||||
import { registerLaunchCommands } from './commands/launch.js';
|
import { registerLaunchCommands } from './commands/launch.js';
|
||||||
import { registerGatewayCommand } from './commands/gateway.js';
|
|
||||||
|
|
||||||
const _require = createRequire(import.meta.url);
|
const _require = createRequire(import.meta.url);
|
||||||
const CLI_VERSION: string = (_require('../package.json') as { version: string }).version;
|
const CLI_VERSION: string = (_require('../package.json') as { version: string }).version;
|
||||||
@@ -33,7 +32,7 @@ registerLaunchCommands(program);
|
|||||||
program
|
program
|
||||||
.command('login')
|
.command('login')
|
||||||
.description('Sign in to a Mosaic gateway')
|
.description('Sign in to a Mosaic gateway')
|
||||||
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:14242')
|
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:4000')
|
||||||
.option('-e, --email <email>', 'Email address')
|
.option('-e, --email <email>', 'Email address')
|
||||||
.option('-p, --password <password>', 'Password')
|
.option('-p, --password <password>', 'Password')
|
||||||
.action(async (opts: { gateway: string; email?: string; password?: string }) => {
|
.action(async (opts: { gateway: string; email?: string; password?: string }) => {
|
||||||
@@ -67,7 +66,7 @@ program
|
|||||||
program
|
program
|
||||||
.command('tui')
|
.command('tui')
|
||||||
.description('Launch interactive TUI connected to the gateway')
|
.description('Launch interactive TUI connected to the gateway')
|
||||||
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:14242')
|
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:4000')
|
||||||
.option('-c, --conversation <id>', 'Resume a conversation by ID')
|
.option('-c, --conversation <id>', 'Resume a conversation by ID')
|
||||||
.option('-m, --model <modelId>', 'Model ID to use (e.g. gpt-4o, llama3.2)')
|
.option('-m, --model <modelId>', 'Model ID to use (e.g. gpt-4o, llama3.2)')
|
||||||
.option('-p, --provider <provider>', 'Provider to use (e.g. openai, ollama)')
|
.option('-p, --provider <provider>', 'Provider to use (e.g. openai, ollama)')
|
||||||
@@ -208,7 +207,7 @@ const sessionsCmd = program.command('sessions').description('Manage active agent
|
|||||||
sessionsCmd
|
sessionsCmd
|
||||||
.command('list')
|
.command('list')
|
||||||
.description('List active agent sessions')
|
.description('List active agent sessions')
|
||||||
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:14242')
|
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:4000')
|
||||||
.action(async (opts: { gateway: string }) => {
|
.action(async (opts: { gateway: string }) => {
|
||||||
const { withAuth } = await import('./commands/with-auth.js');
|
const { withAuth } = await import('./commands/with-auth.js');
|
||||||
const auth = await withAuth(opts.gateway);
|
const auth = await withAuth(opts.gateway);
|
||||||
@@ -243,7 +242,7 @@ sessionsCmd
|
|||||||
sessionsCmd
|
sessionsCmd
|
||||||
.command('resume <id>')
|
.command('resume <id>')
|
||||||
.description('Resume an existing agent session in the TUI')
|
.description('Resume an existing agent session in the TUI')
|
||||||
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:14242')
|
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:4000')
|
||||||
.action(async (id: string, opts: { gateway: string }) => {
|
.action(async (id: string, opts: { gateway: string }) => {
|
||||||
const { loadSession, validateSession } = await import('./auth.js');
|
const { loadSession, validateSession } = await import('./auth.js');
|
||||||
|
|
||||||
@@ -276,7 +275,7 @@ sessionsCmd
|
|||||||
sessionsCmd
|
sessionsCmd
|
||||||
.command('destroy <id>')
|
.command('destroy <id>')
|
||||||
.description('Terminate an active agent session')
|
.description('Terminate an active agent session')
|
||||||
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:14242')
|
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:4000')
|
||||||
.action(async (id: string, opts: { gateway: string }) => {
|
.action(async (id: string, opts: { gateway: string }) => {
|
||||||
const { withAuth } = await import('./commands/with-auth.js');
|
const { withAuth } = await import('./commands/with-auth.js');
|
||||||
const auth = await withAuth(opts.gateway);
|
const auth = await withAuth(opts.gateway);
|
||||||
@@ -291,10 +290,6 @@ sessionsCmd
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
// ─── gateway ──────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
registerGatewayCommand(program);
|
|
||||||
|
|
||||||
// ─── agent ─────────────────────────────────────────────────────────────
|
// ─── agent ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
registerAgentCommand(program);
|
registerAgentCommand(program);
|
||||||
@@ -303,9 +298,17 @@ registerAgentCommand(program);
|
|||||||
|
|
||||||
registerMissionCommand(program);
|
registerMissionCommand(program);
|
||||||
|
|
||||||
|
// ─── prdy ──────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
registerPrdyCommand(program);
|
||||||
|
|
||||||
// ─── quality-rails ──────────────────────────────────────────────────────
|
// ─── quality-rails ──────────────────────────────────────────────────────
|
||||||
|
|
||||||
registerQualityRails(program);
|
const qrWrapper = createQualityRailsCli();
|
||||||
|
const qrCmd = qrWrapper.commands.find((c) => c.name() === 'quality-rails');
|
||||||
|
if (qrCmd !== undefined) {
|
||||||
|
program.addCommand(qrCmd as unknown as Command);
|
||||||
|
}
|
||||||
|
|
||||||
// ─── update ─────────────────────────────────────────────────────────────
|
// ─── update ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
|||||||
@@ -34,7 +34,7 @@ export function registerAgentCommand(program: Command) {
|
|||||||
const cmd = program
|
const cmd = program
|
||||||
.command('agent')
|
.command('agent')
|
||||||
.description('Manage agent configurations')
|
.description('Manage agent configurations')
|
||||||
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:14242')
|
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:4000')
|
||||||
.option('--list', 'List all agents')
|
.option('--list', 'List all agents')
|
||||||
.option('--new', 'Create a new agent')
|
.option('--new', 'Create a new agent')
|
||||||
.option('--show <idOrName>', 'Show agent details')
|
.option('--show <idOrName>', 'Show agent details')
|
||||||
|
|||||||
@@ -1,152 +0,0 @@
|
|||||||
import type { Command } from 'commander';
|
|
||||||
import {
|
|
||||||
getDaemonPid,
|
|
||||||
readMeta,
|
|
||||||
startDaemon,
|
|
||||||
stopDaemon,
|
|
||||||
waitForHealth,
|
|
||||||
} from './gateway/daemon.js';
|
|
||||||
|
|
||||||
interface GatewayParentOpts {
|
|
||||||
host: string;
|
|
||||||
port: string;
|
|
||||||
token?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
function resolveOpts(raw: GatewayParentOpts): { host: string; port: number; token?: string } {
|
|
||||||
const meta = readMeta();
|
|
||||||
return {
|
|
||||||
host: raw.host ?? meta?.host ?? 'localhost',
|
|
||||||
port: parseInt(raw.port, 10) || meta?.port || 14242,
|
|
||||||
token: raw.token ?? meta?.adminToken,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export function registerGatewayCommand(program: Command): void {
|
|
||||||
const gw = program
|
|
||||||
.command('gateway')
|
|
||||||
.description('Manage the Mosaic gateway daemon')
|
|
||||||
.helpOption('--help', 'Display help')
|
|
||||||
.option('-h, --host <host>', 'Gateway host', 'localhost')
|
|
||||||
.option('-p, --port <port>', 'Gateway port', '14242')
|
|
||||||
.option('-t, --token <token>', 'Admin API token')
|
|
||||||
.action(() => {
|
|
||||||
gw.outputHelp();
|
|
||||||
});
|
|
||||||
|
|
||||||
// ─── install ────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
gw.command('install')
|
|
||||||
.description('Install and configure the gateway daemon')
|
|
||||||
.option('--skip-install', 'Skip npm package installation (use local build)')
|
|
||||||
.action(async (cmdOpts: { skipInstall?: boolean }) => {
|
|
||||||
const opts = resolveOpts(gw.opts() as GatewayParentOpts);
|
|
||||||
const { runInstall } = await import('./gateway/install.js');
|
|
||||||
await runInstall({ ...opts, skipInstall: cmdOpts.skipInstall });
|
|
||||||
});
|
|
||||||
|
|
||||||
// ─── start ──────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
gw.command('start')
|
|
||||||
.description('Start the gateway daemon')
|
|
||||||
.action(async () => {
|
|
||||||
const opts = resolveOpts(gw.opts() as GatewayParentOpts);
|
|
||||||
try {
|
|
||||||
const pid = startDaemon();
|
|
||||||
console.log(`Gateway started (PID ${pid.toString()})`);
|
|
||||||
console.log('Waiting for health...');
|
|
||||||
const healthy = await waitForHealth(opts.host, opts.port);
|
|
||||||
if (healthy) {
|
|
||||||
console.log(`Gateway ready at http://${opts.host}:${opts.port.toString()}`);
|
|
||||||
} else {
|
|
||||||
console.warn('Gateway started but health check timed out. Check logs.');
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.error(err instanceof Error ? err.message : String(err));
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// ─── stop ───────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
gw.command('stop')
|
|
||||||
.description('Stop the gateway daemon')
|
|
||||||
.action(async () => {
|
|
||||||
try {
|
|
||||||
await stopDaemon();
|
|
||||||
console.log('Gateway stopped.');
|
|
||||||
} catch (err) {
|
|
||||||
console.error(err instanceof Error ? err.message : String(err));
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// ─── restart ────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
gw.command('restart')
|
|
||||||
.description('Restart the gateway daemon')
|
|
||||||
.action(async () => {
|
|
||||||
const opts = resolveOpts(gw.opts() as GatewayParentOpts);
|
|
||||||
const pid = getDaemonPid();
|
|
||||||
if (pid !== null) {
|
|
||||||
console.log('Stopping gateway...');
|
|
||||||
await stopDaemon();
|
|
||||||
}
|
|
||||||
console.log('Starting gateway...');
|
|
||||||
try {
|
|
||||||
const newPid = startDaemon();
|
|
||||||
console.log(`Gateway started (PID ${newPid.toString()})`);
|
|
||||||
const healthy = await waitForHealth(opts.host, opts.port);
|
|
||||||
if (healthy) {
|
|
||||||
console.log(`Gateway ready at http://${opts.host}:${opts.port.toString()}`);
|
|
||||||
} else {
|
|
||||||
console.warn('Gateway started but health check timed out. Check logs.');
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.error(err instanceof Error ? err.message : String(err));
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// ─── status ─────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
// Show daemon + health status. The status module is lazily imported so
// it is only loaded when this subcommand runs.
gw.command('status')
  .description('Show gateway daemon status and health')
  .action(async () => {
    const opts = resolveOpts(gw.opts() as GatewayParentOpts);
    const { runStatus } = await import('./gateway/status.js');
    await runStatus(opts);
  });
|
|
||||||
|
|
||||||
// ─── config ─────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
// View or mutate the gateway .env configuration. Exactly one of
// --set / --unset / --edit runs per invocation (see runConfig); with no
// flags the current configuration is printed.
gw.command('config')
  .description('View or modify gateway configuration')
  .option('--set <KEY=VALUE>', 'Set a configuration value')
  .option('--unset <KEY>', 'Remove a configuration key')
  .option('--edit', 'Open config in $EDITOR')
  .action(async (cmdOpts: { set?: string; unset?: string; edit?: boolean }) => {
    const { runConfig } = await import('./gateway/config.js');
    await runConfig(cmdOpts);
  });
|
|
||||||
|
|
||||||
// ─── logs ───────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
gw.command('logs')
|
|
||||||
.description('View gateway daemon logs')
|
|
||||||
.option('-f, --follow', 'Follow log output')
|
|
||||||
.option('-n, --lines <count>', 'Number of lines to show', '50')
|
|
||||||
.action(async (cmdOpts: { follow?: boolean; lines?: string }) => {
|
|
||||||
const { runLogs } = await import('./gateway/logs.js');
|
|
||||||
runLogs({ follow: cmdOpts.follow, lines: parseInt(cmdOpts.lines ?? '50', 10) });
|
|
||||||
});
|
|
||||||
|
|
||||||
// ─── uninstall ──────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
// Interactive uninstall (confirmations, optional data wipe) lives in the
// lazily imported uninstall module.
gw.command('uninstall')
  .description('Uninstall the gateway daemon and optionally remove data')
  .action(async () => {
    const { runUninstall } = await import('./gateway/uninstall.js');
    await runUninstall();
  });
|
|
||||||
}
|
|
||||||
@@ -1,143 +0,0 @@
|
|||||||
import { existsSync, readFileSync, writeFileSync } from 'node:fs';
|
|
||||||
import { execSync } from 'node:child_process';
|
|
||||||
import { ENV_FILE, getDaemonPid, readMeta, META_FILE, ensureDirs } from './daemon.js';
|
|
||||||
|
|
||||||
// Keys that should be masked in output
|
|
||||||
const SECRET_KEYS = new Set([
|
|
||||||
'BETTER_AUTH_SECRET',
|
|
||||||
'ANTHROPIC_API_KEY',
|
|
||||||
'OPENAI_API_KEY',
|
|
||||||
'ZAI_API_KEY',
|
|
||||||
'OPENROUTER_API_KEY',
|
|
||||||
'DISCORD_BOT_TOKEN',
|
|
||||||
'TELEGRAM_BOT_TOKEN',
|
|
||||||
]);
|
|
||||||
|
|
||||||
function maskValue(key: string, value: string): string {
|
|
||||||
if (SECRET_KEYS.has(key) && value.length > 8) {
|
|
||||||
return value.slice(0, 4) + '…' + value.slice(-4);
|
|
||||||
}
|
|
||||||
return value;
|
|
||||||
}
|
|
||||||
|
|
||||||
function parseEnvFile(): Map<string, string> {
|
|
||||||
const map = new Map<string, string>();
|
|
||||||
if (!existsSync(ENV_FILE)) return map;
|
|
||||||
|
|
||||||
const lines = readFileSync(ENV_FILE, 'utf-8').split('\n');
|
|
||||||
for (const line of lines) {
|
|
||||||
const trimmed = line.trim();
|
|
||||||
if (!trimmed || trimmed.startsWith('#')) continue;
|
|
||||||
const eqIdx = trimmed.indexOf('=');
|
|
||||||
if (eqIdx === -1) continue;
|
|
||||||
map.set(trimmed.slice(0, eqIdx), trimmed.slice(eqIdx + 1));
|
|
||||||
}
|
|
||||||
return map;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Serialize the entries back to the gateway .env file as KEY=VALUE lines.
// ensureDirs() guarantees the config directory exists; mode 0o600 keeps
// secrets readable by the owner only.
function writeEnvFile(entries: Map<string, string>): void {
  ensureDirs();
  const lines: string[] = [];
  for (const [key, value] of entries) {
    lines.push(`${key}=${value}`);
  }
  writeFileSync(ENV_FILE, lines.join('\n') + '\n', { mode: 0o600 });
}
|
|
||||||
|
|
||||||
// Options for `mosaic gateway config` (mirrors the commander flags).
interface ConfigOpts {
  set?: string; // --set KEY=VALUE
  unset?: string; // --unset KEY
  edit?: boolean; // --edit (open in $EDITOR)
}
|
|
||||||
|
|
||||||
/**
 * Entry point for `mosaic gateway config`.
 *
 * Exactly one action runs per invocation, in priority order:
 *   --set KEY=VALUE  → write/overwrite one env entry
 *   --unset KEY      → remove one env entry
 *   --edit           → open the .env file in $EDITOR / $VISUAL / vi
 *   (none)           → print the current configuration
 *
 * Exits the process with code 1 on user errors (malformed --set,
 * unknown --unset key, missing config file for --edit).
 */
export async function runConfig(opts: ConfigOpts): Promise<void> {
  // Set a value
  if (opts.set) {
    const eqIdx = opts.set.indexOf('=');
    if (eqIdx === -1) {
      console.error('Usage: mosaic gateway config --set KEY=VALUE');
      process.exit(1);
    }
    const key = opts.set.slice(0, eqIdx);
    const value = opts.set.slice(eqIdx + 1);
    const entries = parseEnvFile();
    entries.set(key, value);
    writeEnvFile(entries);
    // Echo with redaction so secrets never hit the terminal in full.
    console.log(`Set ${key}=${maskValue(key, value)}`);
    promptRestart();
    return;
  }

  // Unset a value
  if (opts.unset) {
    const entries = parseEnvFile();
    if (!entries.has(opts.unset)) {
      console.error(`Key not found: ${opts.unset}`);
      process.exit(1);
    }
    entries.delete(opts.unset);
    writeEnvFile(entries);
    console.log(`Removed ${opts.unset}`);
    promptRestart();
    return;
  }

  // Open in editor
  if (opts.edit) {
    if (!existsSync(ENV_FILE)) {
      console.error(`No config file found at ${ENV_FILE}`);
      console.error('Run `mosaic gateway install` first.');
      process.exit(1);
    }
    const editor = process.env['EDITOR'] ?? process.env['VISUAL'] ?? 'vi';
    try {
      // stdio: 'inherit' hands the terminal to the editor until it exits.
      execSync(`${editor} "${ENV_FILE}"`, { stdio: 'inherit' });
      promptRestart();
    } catch {
      console.error('Editor exited with error.');
    }
    return;
  }

  // Default: show current config
  showConfig();
}
|
|
||||||
|
|
||||||
function showConfig(): void {
|
|
||||||
if (!existsSync(ENV_FILE)) {
|
|
||||||
console.log('No gateway configuration found.');
|
|
||||||
console.log('Run `mosaic gateway install` to set up.');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const entries = parseEnvFile();
|
|
||||||
const meta = readMeta();
|
|
||||||
|
|
||||||
console.log('Mosaic Gateway Configuration');
|
|
||||||
console.log('────────────────────────────');
|
|
||||||
console.log(` Config file: ${ENV_FILE}`);
|
|
||||||
console.log(` Meta file: ${META_FILE}`);
|
|
||||||
console.log();
|
|
||||||
|
|
||||||
if (entries.size === 0) {
|
|
||||||
console.log(' (empty)');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const maxKeyLen = Math.max(...[...entries.keys()].map((k) => k.length));
|
|
||||||
for (const [key, value] of entries) {
|
|
||||||
const padding = ' '.repeat(maxKeyLen - key.length);
|
|
||||||
console.log(` ${key}${padding} ${maskValue(key, value)}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (meta?.adminToken) {
|
|
||||||
console.log();
|
|
||||||
console.log(` Admin token: ${maskValue('token', meta.adminToken)}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function promptRestart(): void {
|
|
||||||
if (getDaemonPid() !== null) {
|
|
||||||
console.log('\nGateway is running — restart to apply changes: mosaic gateway restart');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,245 +0,0 @@
|
|||||||
import { spawn, execSync } from 'node:child_process';
import {
  closeSync,
  constants,
  existsSync,
  mkdirSync,
  openSync,
  readFileSync,
  unlinkSync,
  writeFileSync,
} from 'node:fs';
import { join, resolve } from 'node:path';
import { homedir } from 'node:os';
import { createRequire } from 'node:module';
|
|
||||||
// ─── Paths ──────────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
// Root directory for all gateway state; overridable via MOSAIC_GATEWAY_HOME.
export const GATEWAY_HOME = resolve(
  process.env['MOSAIC_GATEWAY_HOME'] ?? join(homedir(), '.config', 'mosaic', 'gateway'),
);
export const PID_FILE = join(GATEWAY_HOME, 'daemon.pid'); // daemon PID
export const LOG_DIR = join(GATEWAY_HOME, 'logs');
export const LOG_FILE = join(LOG_DIR, 'gateway.log'); // daemon stdout/stderr
export const ENV_FILE = join(GATEWAY_HOME, '.env'); // runtime env vars
export const META_FILE = join(GATEWAY_HOME, 'meta.json'); // install metadata
|
|
||||||
|
|
||||||
// ─── Meta ───────────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
// Persisted install metadata (meta.json). Written at install time and
// updated when the admin bootstrap token is created.
export interface GatewayMeta {
  version: string; // installed @mosaic/gateway version ('unknown' if undetectable)
  installedAt: string; // ISO-8601 install timestamp
  entryPoint: string; // absolute path to the gateway's dist/main.js
  adminToken?: string; // plaintext admin API token (set after bootstrap)
  host: string; // host used for auth URLs and health checks
  port: number; // TCP port the gateway listens on
}
|
|
||||||
|
|
||||||
export function readMeta(): GatewayMeta | null {
|
|
||||||
if (!existsSync(META_FILE)) return null;
|
|
||||||
try {
|
|
||||||
return JSON.parse(readFileSync(META_FILE, 'utf-8')) as GatewayMeta;
|
|
||||||
} catch {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Persist install metadata; mode 0o600 because meta.json may hold the
// admin API token.
export function writeMeta(meta: GatewayMeta): void {
  ensureDirs();
  writeFileSync(META_FILE, JSON.stringify(meta, null, 2), { mode: 0o600 });
}
|
|
||||||
|
|
||||||
// ─── Directories ────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
// Create the gateway home and log directories (idempotent; owner-only
// permissions since they hold secrets and logs).
export function ensureDirs(): void {
  mkdirSync(GATEWAY_HOME, { recursive: true, mode: 0o700 });
  mkdirSync(LOG_DIR, { recursive: true, mode: 0o700 });
}
|
|
||||||
|
|
||||||
// ─── PID management ─────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
export function readPid(): number | null {
|
|
||||||
if (!existsSync(PID_FILE)) return null;
|
|
||||||
try {
|
|
||||||
const pid = parseInt(readFileSync(PID_FILE, 'utf-8').trim(), 10);
|
|
||||||
return Number.isNaN(pid) ? null : pid;
|
|
||||||
} catch {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function isRunning(pid: number): boolean {
|
|
||||||
try {
|
|
||||||
process.kill(pid, 0);
|
|
||||||
return true;
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// PID from the pidfile, but only when that process is actually alive —
// a stale pidfile therefore reads as "not running".
export function getDaemonPid(): number | null {
  const pid = readPid();
  if (pid === null) return null;
  return isRunning(pid) ? pid : null;
}
|
|
||||||
|
|
||||||
// ─── Entry point resolution ─────────────────────────────────────────────────
|
|
||||||
|
|
||||||
/**
 * Locate the gateway's main.js.
 * Order: the entryPoint recorded in meta.json (if it still exists), then
 * the globally installed @mosaic/gateway package resolved relative to its
 * package.json. Throws when neither is found.
 */
export function resolveGatewayEntry(): string {
  // Check meta.json for custom entry point
  const meta = readMeta();
  if (meta?.entryPoint && existsSync(meta.entryPoint)) {
    return meta.entryPoint;
  }

  // Try to resolve from globally installed @mosaic/gateway
  try {
    const req = createRequire(import.meta.url);
    const pkgPath = req.resolve('@mosaic/gateway/package.json');
    const mainEntry = join(resolve(pkgPath, '..'), 'dist', 'main.js');
    if (existsSync(mainEntry)) return mainEntry;
  } catch {
    // Not installed globally
  }

  throw new Error('Cannot find gateway entry point. Run `mosaic gateway install` first.');
}
|
|
||||||
|
|
||||||
// ─── Start / Stop / Health ──────────────────────────────────────────────────
|
|
||||||
|
|
||||||
export function startDaemon(): number {
|
|
||||||
const running = getDaemonPid();
|
|
||||||
if (running !== null) {
|
|
||||||
throw new Error(`Gateway is already running (PID ${running.toString()})`);
|
|
||||||
}
|
|
||||||
|
|
||||||
ensureDirs();
|
|
||||||
const entryPoint = resolveGatewayEntry();
|
|
||||||
|
|
||||||
// Load env vars from gateway .env
|
|
||||||
const env: Record<string, string> = { ...process.env } as Record<string, string>;
|
|
||||||
if (existsSync(ENV_FILE)) {
|
|
||||||
for (const line of readFileSync(ENV_FILE, 'utf-8').split('\n')) {
|
|
||||||
const trimmed = line.trim();
|
|
||||||
if (!trimmed || trimmed.startsWith('#')) continue;
|
|
||||||
const eqIdx = trimmed.indexOf('=');
|
|
||||||
if (eqIdx > 0) env[trimmed.slice(0, eqIdx)] = trimmed.slice(eqIdx + 1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const logFd = openSync(LOG_FILE, constants.O_WRONLY | constants.O_CREAT | constants.O_APPEND);
|
|
||||||
|
|
||||||
const child = spawn('node', [entryPoint], {
|
|
||||||
detached: true,
|
|
||||||
stdio: ['ignore', logFd, logFd],
|
|
||||||
env,
|
|
||||||
cwd: GATEWAY_HOME,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!child.pid) {
|
|
||||||
throw new Error('Failed to spawn gateway process');
|
|
||||||
}
|
|
||||||
|
|
||||||
writeFileSync(PID_FILE, child.pid.toString(), { mode: 0o600 });
|
|
||||||
child.unref();
|
|
||||||
|
|
||||||
return child.pid;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Gracefully stop the daemon: send SIGTERM, poll up to timeoutMs for the
 * process to exit, then SIGKILL as a last resort. The PID file is removed
 * on either path.
 * @throws when no daemon is running
 */
export async function stopDaemon(timeoutMs = 10_000): Promise<void> {
  const pid = getDaemonPid();
  if (pid === null) {
    throw new Error('Gateway is not running');
  }

  process.kill(pid, 'SIGTERM');

  // Poll for exit every 250ms until the timeout elapses.
  const start = Date.now();
  while (Date.now() - start < timeoutMs) {
    if (!isRunning(pid)) {
      cleanPidFile();
      return;
    }
    await sleep(250);
  }

  // Force kill
  try {
    process.kill(pid, 'SIGKILL');
  } catch {
    // Already dead
  }
  cleanPidFile();
}
|
|
||||||
|
|
||||||
function cleanPidFile(): void {
|
|
||||||
try {
|
|
||||||
unlinkSync(PID_FILE);
|
|
||||||
} catch {
|
|
||||||
// Ignore
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Poll GET /health with exponential backoff (500ms growing ×1.5, capped
 * at 3s) until it answers with a 2xx or timeoutMs elapses.
 * @returns true when healthy, false on timeout
 */
export async function waitForHealth(
  host: string,
  port: number,
  timeoutMs = 30_000,
): Promise<boolean> {
  const start = Date.now();
  let delay = 500;

  while (Date.now() - start < timeoutMs) {
    try {
      const res = await fetch(`http://${host}:${port.toString()}/health`);
      if (res.ok) return true;
    } catch {
      // Not ready yet
    }
    await sleep(delay);
    delay = Math.min(delay * 1.5, 3000);
  }
  return false;
}
|
|
||||||
|
|
||||||
function sleep(ms: number): Promise<void> {
|
|
||||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─── npm install helper ─────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
// npm registry that hosts the @mosaic scope.
const GITEA_REGISTRY = 'https://git.mosaicstack.dev/api/packages/mosaic/npm/';

// Globally install the latest gateway package from the Gitea registry.
// Inherits stdio so npm progress is visible; throws on npm failure or
// after the 2-minute timeout.
export function installGatewayPackage(): void {
  console.log('Installing @mosaic/gateway from Gitea registry...');
  execSync(`npm install -g @mosaic/gateway@latest --@mosaic:registry=${GITEA_REGISTRY}`, {
    stdio: 'inherit',
    timeout: 120_000,
  });
}
|
|
||||||
|
|
||||||
// Best-effort global npm uninstall; failures only warn so the rest of the
// uninstall flow can still proceed.
export function uninstallGatewayPackage(): void {
  try {
    execSync('npm uninstall -g @mosaic/gateway', {
      stdio: 'inherit',
      timeout: 60_000,
    });
  } catch {
    console.warn('Warning: npm uninstall may not have completed cleanly.');
  }
}
|
|
||||||
|
|
||||||
export function getInstalledGatewayVersion(): string | null {
|
|
||||||
try {
|
|
||||||
const output = execSync('npm ls -g @mosaic/gateway --json --depth=0', {
|
|
||||||
encoding: 'utf-8',
|
|
||||||
timeout: 15_000,
|
|
||||||
stdio: ['pipe', 'pipe', 'pipe'],
|
|
||||||
});
|
|
||||||
const data = JSON.parse(output) as {
|
|
||||||
dependencies?: { '@mosaic/gateway'?: { version?: string } };
|
|
||||||
};
|
|
||||||
return data.dependencies?.['@mosaic/gateway']?.version ?? null;
|
|
||||||
} catch {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,259 +0,0 @@
|
|||||||
import { randomBytes } from 'node:crypto';
|
|
||||||
import { writeFileSync } from 'node:fs';
|
|
||||||
import { join } from 'node:path';
|
|
||||||
import { createInterface } from 'node:readline';
|
|
||||||
import type { GatewayMeta } from './daemon.js';
|
|
||||||
import {
|
|
||||||
ENV_FILE,
|
|
||||||
GATEWAY_HOME,
|
|
||||||
ensureDirs,
|
|
||||||
installGatewayPackage,
|
|
||||||
readMeta,
|
|
||||||
resolveGatewayEntry,
|
|
||||||
startDaemon,
|
|
||||||
waitForHealth,
|
|
||||||
writeMeta,
|
|
||||||
getInstalledGatewayVersion,
|
|
||||||
} from './daemon.js';
|
|
||||||
|
|
||||||
// Options for `mosaic gateway install`.
interface InstallOpts {
  host: string; // host used for auth URLs and health checks
  port: number; // desired port; 14242 is treated as "CLI default" and prompts
  skipInstall?: boolean; // skip the global npm install step
}
|
|
||||||
|
|
||||||
function prompt(rl: ReturnType<typeof createInterface>, question: string): Promise<string> {
|
|
||||||
return new Promise((resolve) => rl.question(question, resolve));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Wrapper that owns the readline interface so doInstall can prompt the
// user; always closes it, even when installation throws.
export async function runInstall(opts: InstallOpts): Promise<void> {
  const rl = createInterface({ input: process.stdin, output: process.stdout });
  try {
    await doInstall(rl, opts);
  } finally {
    rl.close();
  }
}
|
|
||||||
|
|
||||||
/**
 * Interactive installation flow:
 *   1. install the npm package (unless skipInstall)
 *   2. prompt for tier / port / service URLs / API key / CORS origin
 *   3. write .env and mosaic.config.json
 *   4. write meta.json (version + resolved entry point)
 *   5. start the daemon, 6. wait for /health
 *   7. bootstrap the first admin user
 * Any step that fails prints an error and returns early.
 */
async function doInstall(rl: ReturnType<typeof createInterface>, opts: InstallOpts): Promise<void> {
  // Check existing installation
  const existing = readMeta();
  if (existing) {
    const answer = await prompt(
      rl,
      `Gateway already installed (v${existing.version}). Reinstall? [y/N] `,
    );
    if (answer.toLowerCase() !== 'y') {
      console.log('Aborted.');
      return;
    }
  }

  // Step 1: Install npm package
  if (!opts.skipInstall) {
    installGatewayPackage();
  }

  ensureDirs();

  // Step 2: Collect configuration
  console.log('\n─── Gateway Configuration ───\n');

  // Tier selection
  console.log('Storage tier:');
  console.log(' 1. Local (embedded database, no dependencies)');
  console.log(' 2. Team (PostgreSQL + Valkey required)');
  const tierAnswer = (await prompt(rl, 'Select [1]: ')).trim() || '1';
  const tier = tierAnswer === '2' ? 'team' : 'local';

  // Only prompt for a port when the caller left the CLI default (14242);
  // an explicit --port wins without a prompt.
  const port =
    opts.port !== 14242
      ? opts.port
      : parseInt(
          (await prompt(rl, `Gateway port [${opts.port.toString()}]: `)) || opts.port.toString(),
          10,
        );

  let databaseUrl: string | undefined;
  let valkeyUrl: string | undefined;

  // Team tier needs external service URLs; local tier is self-contained.
  if (tier === 'team') {
    databaseUrl =
      (await prompt(rl, 'DATABASE_URL [postgresql://mosaic:mosaic@localhost:5433/mosaic]: ')) ||
      'postgresql://mosaic:mosaic@localhost:5433/mosaic';

    valkeyUrl =
      (await prompt(rl, 'VALKEY_URL [redis://localhost:6380]: ')) || 'redis://localhost:6380';
  }

  const anthropicKey = await prompt(rl, 'ANTHROPIC_API_KEY (optional, press Enter to skip): ');

  const corsOrigin =
    (await prompt(rl, 'CORS origin [http://localhost:3000]: ')) || 'http://localhost:3000';

  // Generate auth secret
  const authSecret = randomBytes(32).toString('hex');

  // Step 3: Write .env
  const envLines = [
    `GATEWAY_PORT=${port.toString()}`,
    `BETTER_AUTH_SECRET=${authSecret}`,
    `BETTER_AUTH_URL=http://${opts.host}:${port.toString()}`,
    `GATEWAY_CORS_ORIGIN=${corsOrigin}`,
    `OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318`,
    `OTEL_SERVICE_NAME=mosaic-gateway`,
  ];

  if (tier === 'team' && databaseUrl && valkeyUrl) {
    envLines.push(`DATABASE_URL=${databaseUrl}`);
    envLines.push(`VALKEY_URL=${valkeyUrl}`);
  }

  if (anthropicKey) {
    envLines.push(`ANTHROPIC_API_KEY=${anthropicKey}`);
  }

  writeFileSync(ENV_FILE, envLines.join('\n') + '\n', { mode: 0o600 });
  console.log(`\nConfig written to ${ENV_FILE}`);

  // Step 3b: Write mosaic.config.json (storage/queue/memory backends per tier)
  const mosaicConfig =
    tier === 'local'
      ? {
          tier: 'local',
          storage: { type: 'pglite', dataDir: join(GATEWAY_HOME, 'storage-pglite') },
          queue: { type: 'local', dataDir: join(GATEWAY_HOME, 'queue') },
          memory: { type: 'keyword' },
        }
      : {
          tier: 'team',
          storage: { type: 'postgres', url: databaseUrl },
          queue: { type: 'bullmq', url: valkeyUrl },
          memory: { type: 'pgvector' },
        };

  const configFile = join(GATEWAY_HOME, 'mosaic.config.json');
  writeFileSync(configFile, JSON.stringify(mosaicConfig, null, 2) + '\n', { mode: 0o600 });
  console.log(`Config written to ${configFile}`);

  // Step 4: Write meta.json
  let entryPoint: string;
  try {
    entryPoint = resolveGatewayEntry();
  } catch {
    console.error('Error: Gateway package not found after install.');
    console.error('Check that @mosaic/gateway installed correctly.');
    return;
  }

  const version = getInstalledGatewayVersion() ?? 'unknown';

  const meta = {
    version,
    installedAt: new Date().toISOString(),
    entryPoint,
    host: opts.host,
    port,
  };
  writeMeta(meta);

  // Step 5: Start the daemon
  console.log('\nStarting gateway daemon...');
  try {
    const pid = startDaemon();
    console.log(`Gateway started (PID ${pid.toString()})`);
  } catch (err) {
    console.error(`Failed to start: ${err instanceof Error ? err.message : String(err)}`);
    return;
  }

  // Step 6: Wait for health
  console.log('Waiting for gateway to become healthy...');
  const healthy = await waitForHealth(opts.host, port, 30_000);
  if (!healthy) {
    console.error('Gateway did not become healthy within 30 seconds.');
    console.error(`Check logs: mosaic gateway logs`);
    return;
  }
  console.log('Gateway is healthy.\n');

  // Step 7: Bootstrap — first user setup
  await bootstrapFirstUser(rl, opts.host, port, meta);

  console.log('\n─── Installation Complete ───');
  console.log(` Endpoint: http://${opts.host}:${port.toString()}`);
  console.log(` Config: ${GATEWAY_HOME}`);
  console.log(` Logs: mosaic gateway logs`);
  console.log(` Status: mosaic gateway status`);
}
|
|
||||||
|
|
||||||
/**
 * First-run admin bootstrap: if the gateway reports needsSetup, prompt
 * for name/email/password, POST them to /api/bootstrap/setup, and persist
 * the returned admin API token into meta.json. All failures are reported
 * and swallowed — installation should not abort because bootstrap failed.
 */
async function bootstrapFirstUser(
  rl: ReturnType<typeof createInterface>,
  host: string,
  port: number,
  meta: Omit<GatewayMeta, 'adminToken'> & { adminToken?: string },
): Promise<void> {
  const baseUrl = `http://${host}:${port.toString()}`;

  // Only proceed when the gateway says no admin user exists yet.
  try {
    const statusRes = await fetch(`${baseUrl}/api/bootstrap/status`);
    if (!statusRes.ok) return;

    const status = (await statusRes.json()) as { needsSetup: boolean };
    if (!status.needsSetup) {
      console.log('Admin user already exists — skipping setup.');
      return;
    }
  } catch {
    console.warn('Could not check bootstrap status — skipping first user setup.');
    return;
  }

  console.log('─── Admin User Setup ───\n');

  const name = (await prompt(rl, 'Admin name: ')).trim();
  if (!name) {
    console.error('Name is required.');
    return;
  }

  const email = (await prompt(rl, 'Admin email: ')).trim();
  if (!email) {
    console.error('Email is required.');
    return;
  }

  const password = (await prompt(rl, 'Admin password (min 8 chars): ')).trim();
  if (password.length < 8) {
    console.error('Password must be at least 8 characters.');
    return;
  }

  try {
    const res = await fetch(`${baseUrl}/api/bootstrap/setup`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ name, email, password }),
    });

    if (!res.ok) {
      const body = await res.text().catch(() => '');
      console.error(`Bootstrap failed (${res.status.toString()}): ${body}`);
      return;
    }

    const result = (await res.json()) as {
      user: { id: string; email: string };
      token: { plaintext: string };
    };

    // Save admin token to meta so later `status`/admin calls can use it.
    meta.adminToken = result.token.plaintext;
    writeMeta(meta as GatewayMeta);

    console.log(`\nAdmin user created: ${result.user.email}`);
    console.log('Admin API token saved to gateway config.');
  } catch (err) {
    console.error(`Bootstrap error: ${err instanceof Error ? err.message : String(err)}`);
  }
}
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
import { existsSync, readFileSync } from 'node:fs';
|
|
||||||
import { spawn } from 'node:child_process';
|
|
||||||
import { LOG_FILE } from './daemon.js';
|
|
||||||
|
|
||||||
// Options for `mosaic gateway logs`.
interface LogsOpts {
  follow?: boolean; // keep streaming like `tail -f`
  lines?: number; // trailing lines to show (default 50)
}
|
|
||||||
|
|
||||||
/**
 * Show the gateway log. With --follow, delegate to `tail -n <lines> -f`
 * (inheriting stdio so output streams to the terminal); if spawning tail
 * fails, fall back to a one-shot dump of the last lines. Without
 * --follow, just print the last N lines.
 */
export function runLogs(opts: LogsOpts): void {
  if (!existsSync(LOG_FILE)) {
    console.log('No log file found. Is the gateway installed?');
    return;
  }

  if (opts.follow) {
    const lines = opts.lines ?? 50;
    const tail = spawn('tail', ['-n', lines.toString(), '-f', LOG_FILE], {
      stdio: 'inherit',
    });
    tail.on('error', () => {
      // Fallback for systems without tail
      console.log(readLastLines(opts.lines ?? 50));
      console.log('\n(--follow requires `tail` command)');
    });
    return;
  }

  // Just print last N lines
  console.log(readLastLines(opts.lines ?? 50));
}
|
|
||||||
|
|
||||||
function readLastLines(n: number): string {
|
|
||||||
const content = readFileSync(LOG_FILE, 'utf-8');
|
|
||||||
const lines = content.split('\n');
|
|
||||||
return lines.slice(-n).join('\n');
|
|
||||||
}
|
|
||||||
@@ -1,115 +0,0 @@
|
|||||||
import { getDaemonPid, readMeta, LOG_FILE, GATEWAY_HOME } from './daemon.js';
|
|
||||||
|
|
||||||
// Resolved CLI options shared by gateway subcommands.
interface GatewayOpts {
  host: string;
  port: number;
  token?: string; // admin API token override (falls back to meta.json)
}

// One row in the "Services:" listing printed by runStatus.
interface ServiceStatus {
  name: string;
  status: string;
  latency?: string; // human-readable, e.g. "12ms"
}

// Response shape of GET /api/admin/health.
interface AdminHealth {
  status: string;
  services: {
    database: { status: string; latencyMs: number };
    cache: { status: string; latencyMs: number };
  };
  agentPool?: { active: number };
  providers?: Array<{ name: string; available: boolean; models: number }>;
}
|
|
||||||
|
|
||||||
/**
 * Print daemon status, install metadata and endpoint, then — when the
 * daemon is running and reachable — the public /health result and the
 * token-gated /api/admin/health details (services, providers, sessions).
 */
export async function runStatus(opts: GatewayOpts): Promise<void> {
  const meta = readMeta();
  const pid = getDaemonPid();

  console.log('Mosaic Gateway Status');
  console.log('─────────────────────');

  // Daemon status
  if (pid !== null) {
    console.log(` Status: running (PID ${pid.toString()})`);
  } else {
    console.log(' Status: stopped');
  }

  // Version
  console.log(` Version: ${meta?.version ?? 'unknown'}`);

  // Endpoint
  const host = opts.host;
  const port = opts.port;
  console.log(` Endpoint: http://${host}:${port.toString()}`);
  console.log(` Config: ${GATEWAY_HOME}`);
  console.log(` Logs: ${LOG_FILE}`);

  // No point probing HTTP when the daemon is not running.
  if (pid === null) return;

  // Health check
  try {
    const healthRes = await fetch(`http://${host}:${port.toString()}/health`);
    if (!healthRes.ok) {
      console.log('\n Health: unreachable');
      return;
    }
  } catch {
    console.log('\n Health: unreachable');
    return;
  }

  // Admin health (requires token)
  const token = opts.token ?? meta?.adminToken;
  if (!token) {
    console.log(
      '\n (No admin token — run `mosaic gateway config` to set one for detailed status)',
    );
    return;
  }

  try {
    const res = await fetch(`http://${host}:${port.toString()}/api/admin/health`, {
      headers: { Authorization: `Bearer ${token}` },
    });

    if (!res.ok) {
      console.log('\n Admin health: unauthorized or unavailable');
      return;
    }

    const health = (await res.json()) as AdminHealth;

    console.log('\n Services:');
    const services: ServiceStatus[] = [
      {
        name: 'Database',
        status: health.services.database.status,
        latency: `${health.services.database.latencyMs.toString()}ms`,
      },
      {
        name: 'Cache',
        status: health.services.cache.status,
        latency: `${health.services.cache.latencyMs.toString()}ms`,
      },
    ];

    // Pad names to a 10-char column so statuses align.
    for (const svc of services) {
      const latStr = svc.latency ? ` (${svc.latency})` : '';
      console.log(` ${svc.name}:${' '.repeat(10 - svc.name.length)}${svc.status}${latStr}`);
    }

    if (health.providers && health.providers.length > 0) {
      const available = health.providers.filter((p) => p.available);
      const names = available.map((p) => p.name).join(', ');
      console.log(`\n Providers: ${available.length.toString()} active (${names})`);
    }

    if (health.agentPool) {
      console.log(` Sessions: ${health.agentPool.active.toString()} active`);
    }
  } catch {
    console.log('\n Admin health: connection error');
  }
}
|
|
||||||
@@ -1,62 +0,0 @@
|
|||||||
import { existsSync, rmSync } from 'node:fs';
|
|
||||||
import { createInterface } from 'node:readline';
|
|
||||||
import {
|
|
||||||
GATEWAY_HOME,
|
|
||||||
getDaemonPid,
|
|
||||||
readMeta,
|
|
||||||
stopDaemon,
|
|
||||||
uninstallGatewayPackage,
|
|
||||||
} from './daemon.js';
|
|
||||||
|
|
||||||
// Wrapper that owns the readline interface for the uninstall prompts;
// always closes it, even when doUninstall throws.
export async function runUninstall(): Promise<void> {
  const rl = createInterface({ input: process.stdin, output: process.stdout });
  try {
    await doUninstall(rl);
  } finally {
    rl.close();
  }
}
|
|
||||||
|
|
||||||
function prompt(rl: ReturnType<typeof createInterface>, question: string): Promise<string> {
|
|
||||||
return new Promise((resolve) => rl.question(question, resolve));
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Interactive uninstall: confirm, stop the daemon if running, optionally
 * wipe GATEWAY_HOME, then remove the global npm package.
 */
async function doUninstall(rl: ReturnType<typeof createInterface>): Promise<void> {
  const meta = readMeta();
  if (!meta) {
    console.log('Gateway is not installed.');
    return;
  }

  const answer = await prompt(rl, 'Uninstall Mosaic Gateway? [y/N] ');
  if (answer.toLowerCase() !== 'y') {
    console.log('Aborted.');
    return;
  }

  // Stop if running
  if (getDaemonPid() !== null) {
    console.log('Stopping gateway daemon...');
    try {
      await stopDaemon();
      console.log('Stopped.');
    } catch (err) {
      // A stop failure should not block the rest of the uninstall.
      console.warn(`Warning: ${err instanceof Error ? err.message : String(err)}`);
    }
  }

  // Remove config/data (separate opt-in — destructive)
  const removeData = await prompt(rl, `Remove all gateway data at ${GATEWAY_HOME}? [y/N] `);
  if (removeData.toLowerCase() === 'y') {
    if (existsSync(GATEWAY_HOME)) {
      rmSync(GATEWAY_HOME, { recursive: true, force: true });
      console.log('Gateway data removed.');
    }
  }

  // Uninstall npm package
  console.log('Uninstalling npm package...');
  uninstallGatewayPackage();

  console.log('\nGateway uninstalled.');
}
|
|
||||||
@@ -7,7 +7,6 @@
|
|||||||
|
|
||||||
import { execFileSync, execSync, spawnSync } from 'node:child_process';
|
import { execFileSync, execSync, spawnSync } from 'node:child_process';
|
||||||
import { existsSync, mkdirSync, readFileSync, writeFileSync, readdirSync, rmSync } from 'node:fs';
|
import { existsSync, mkdirSync, readFileSync, writeFileSync, readdirSync, rmSync } from 'node:fs';
|
||||||
import { createRequire } from 'node:module';
|
|
||||||
import { homedir } from 'node:os';
|
import { homedir } from 'node:os';
|
||||||
import { join, dirname } from 'node:path';
|
import { join, dirname } from 'node:path';
|
||||||
import type { Command } from 'commander';
|
import type { Command } from 'commander';
|
||||||
@@ -55,31 +54,19 @@ function checkRuntime(cmd: string): void {
|
|||||||
function checkSoul(): void {
|
function checkSoul(): void {
|
||||||
const soulPath = join(MOSAIC_HOME, 'SOUL.md');
|
const soulPath = join(MOSAIC_HOME, 'SOUL.md');
|
||||||
if (!existsSync(soulPath)) {
|
if (!existsSync(soulPath)) {
|
||||||
console.log('[mosaic] SOUL.md not found. Running setup wizard...');
|
console.log('[mosaic] SOUL.md not found. Running mosaic init...');
|
||||||
|
const initBin = join(MOSAIC_HOME, 'bin', 'mosaic-init');
|
||||||
// Prefer the TypeScript wizard (idempotent, detects existing files)
|
|
||||||
try {
|
|
||||||
const result = spawnSync(process.execPath, [process.argv[1]!, 'wizard'], {
|
|
||||||
stdio: 'inherit',
|
|
||||||
});
|
|
||||||
if (result.status === 0 && existsSync(soulPath)) return;
|
|
||||||
} catch {
|
|
||||||
// Fall through to legacy init
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fallback: legacy bash mosaic-init
|
|
||||||
const initBin = fwScript('mosaic-init');
|
|
||||||
if (existsSync(initBin)) {
|
if (existsSync(initBin)) {
|
||||||
spawnSync(initBin, [], { stdio: 'inherit' });
|
spawnSync(initBin, [], { stdio: 'inherit' });
|
||||||
} else {
|
} else {
|
||||||
console.error('[mosaic] Setup failed. Run: mosaic wizard');
|
console.error('[mosaic] mosaic-init not found. Run: mosaic wizard');
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function checkSequentialThinking(runtime: string): void {
|
function checkSequentialThinking(runtime: string): void {
|
||||||
const checker = fwScript('mosaic-ensure-sequential-thinking');
|
const checker = join(MOSAIC_HOME, 'bin', 'mosaic-ensure-sequential-thinking');
|
||||||
if (!existsSync(checker)) return; // Skip if checker doesn't exist
|
if (!existsSync(checker)) return; // Skip if checker doesn't exist
|
||||||
const result = spawnSync(checker, ['--check', '--runtime', runtime], { stdio: 'ignore' });
|
const result = spawnSync(checker, ['--check', '--runtime', runtime], { stdio: 'ignore' });
|
||||||
if (result.status !== 0) {
|
if (result.status !== 0) {
|
||||||
@@ -474,208 +461,23 @@ function execRuntime(cmd: string, args: string[]): void {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ─── Framework script/tool delegation ───────────────────────────────────────
|
// ─── Framework script delegation (for tools that remain in bash) ─────────────
|
||||||
|
|
||||||
function delegateToScript(scriptPath: string, args: string[], env?: Record<string, string>): never {
|
function delegateToFrameworkScript(script: string, args: string[]): never {
|
||||||
|
const scriptPath = join(MOSAIC_HOME, 'bin', script);
|
||||||
if (!existsSync(scriptPath)) {
|
if (!existsSync(scriptPath)) {
|
||||||
console.error(`[mosaic] Script not found: ${scriptPath}`);
|
console.error(`[mosaic] Script not found: ${scriptPath}`);
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
execFileSync('bash', [scriptPath, ...args], {
|
execFileSync(scriptPath, args, { stdio: 'inherit' });
|
||||||
stdio: 'inherit',
|
|
||||||
env: { ...process.env, ...env },
|
|
||||||
});
|
|
||||||
process.exit(0);
|
process.exit(0);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
process.exit((err as { status?: number }).status ?? 1);
|
process.exit((err as { status?: number }).status ?? 1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
// ─── Commander registration ──────────────────────────────────────────────────
|
||||||
* Resolve a path under the framework tools directory. Prefers the version
|
|
||||||
* bundled in the @mosaic/mosaic npm package (always matches the installed
|
|
||||||
* CLI version) over the deployed copy in ~/.config/mosaic/ (may be stale).
|
|
||||||
*/
|
|
||||||
function resolveTool(...segments: string[]): string {
|
|
||||||
try {
|
|
||||||
const req = createRequire(import.meta.url);
|
|
||||||
const mosaicPkg = dirname(req.resolve('@mosaic/mosaic/package.json'));
|
|
||||||
const bundled = join(mosaicPkg, 'framework', 'tools', ...segments);
|
|
||||||
if (existsSync(bundled)) return bundled;
|
|
||||||
} catch {
|
|
||||||
// Fall through to deployed copy
|
|
||||||
}
|
|
||||||
return join(MOSAIC_HOME, 'tools', ...segments);
|
|
||||||
}
|
|
||||||
|
|
||||||
function fwScript(name: string): string {
|
|
||||||
return resolveTool('_scripts', name);
|
|
||||||
}
|
|
||||||
|
|
||||||
function toolScript(toolDir: string, name: string): string {
|
|
||||||
return resolveTool(toolDir, name);
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─── Coord (mission orchestrator) ───────────────────────────────────────────
|
|
||||||
|
|
||||||
const COORD_SUBCMDS: Record<string, string> = {
|
|
||||||
status: 'session-status.sh',
|
|
||||||
session: 'session-status.sh',
|
|
||||||
init: 'mission-init.sh',
|
|
||||||
mission: 'mission-status.sh',
|
|
||||||
progress: 'mission-status.sh',
|
|
||||||
continue: 'continue-prompt.sh',
|
|
||||||
next: 'continue-prompt.sh',
|
|
||||||
run: 'session-run.sh',
|
|
||||||
start: 'session-run.sh',
|
|
||||||
smoke: 'smoke-test.sh',
|
|
||||||
test: 'smoke-test.sh',
|
|
||||||
resume: 'session-resume.sh',
|
|
||||||
recover: 'session-resume.sh',
|
|
||||||
};
|
|
||||||
|
|
||||||
function runCoord(args: string[]): never {
|
|
||||||
checkMosaicHome();
|
|
||||||
let runtime = 'claude';
|
|
||||||
let yoloFlag = '';
|
|
||||||
const coordArgs: string[] = [];
|
|
||||||
|
|
||||||
for (const arg of args) {
|
|
||||||
if (arg === '--claude' || arg === '--codex' || arg === '--pi') {
|
|
||||||
runtime = arg.slice(2);
|
|
||||||
} else if (arg === '--yolo') {
|
|
||||||
yoloFlag = '--yolo';
|
|
||||||
} else {
|
|
||||||
coordArgs.push(arg);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const subcmd = coordArgs[0] ?? 'help';
|
|
||||||
const subArgs = coordArgs.slice(1);
|
|
||||||
const script = COORD_SUBCMDS[subcmd];
|
|
||||||
|
|
||||||
if (!script) {
|
|
||||||
console.log(`mosaic coord — mission coordinator tools
|
|
||||||
|
|
||||||
Commands:
|
|
||||||
init --name <name> [opts] Initialize a new mission
|
|
||||||
mission [--project <path>] Show mission progress dashboard
|
|
||||||
status [--project <path>] Check agent session health
|
|
||||||
continue [--project <path>] Generate continuation prompt
|
|
||||||
run [--project <path>] Launch runtime with mission context
|
|
||||||
smoke Run orchestration smoke checks
|
|
||||||
resume [--project <path>] Crash recovery
|
|
||||||
|
|
||||||
Runtime: --claude (default) | --codex | --pi | --yolo`);
|
|
||||||
process.exit(subcmd === 'help' ? 0 : 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (yoloFlag) subArgs.unshift(yoloFlag);
|
|
||||||
delegateToScript(toolScript('orchestrator', script), subArgs, {
|
|
||||||
MOSAIC_COORD_RUNTIME: runtime,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─── Prdy (PRD tools via framework scripts) ─────────────────────────────────
|
|
||||||
|
|
||||||
const PRDY_SUBCMDS: Record<string, string> = {
|
|
||||||
init: 'prdy-init.sh',
|
|
||||||
update: 'prdy-update.sh',
|
|
||||||
validate: 'prdy-validate.sh',
|
|
||||||
check: 'prdy-validate.sh',
|
|
||||||
status: 'prdy-status.sh',
|
|
||||||
};
|
|
||||||
|
|
||||||
function runPrdyLocal(args: string[]): never {
|
|
||||||
checkMosaicHome();
|
|
||||||
let runtime = 'claude';
|
|
||||||
const prdyArgs: string[] = [];
|
|
||||||
|
|
||||||
for (const arg of args) {
|
|
||||||
if (arg === '--claude' || arg === '--codex' || arg === '--pi') {
|
|
||||||
runtime = arg.slice(2);
|
|
||||||
} else {
|
|
||||||
prdyArgs.push(arg);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const subcmd = prdyArgs[0] ?? 'help';
|
|
||||||
const subArgs = prdyArgs.slice(1);
|
|
||||||
const script = PRDY_SUBCMDS[subcmd];
|
|
||||||
|
|
||||||
if (!script) {
|
|
||||||
console.log(`mosaic prdy — PRD creation and validation
|
|
||||||
|
|
||||||
Commands:
|
|
||||||
init [--project <path>] [--name <feature>] Create docs/PRD.md
|
|
||||||
update [--project <path>] Update existing PRD
|
|
||||||
validate [--project <path>] Check PRD completeness
|
|
||||||
status [--project <path>] Quick PRD health check
|
|
||||||
|
|
||||||
Runtime: --claude (default) | --codex | --pi`);
|
|
||||||
process.exit(subcmd === 'help' ? 0 : 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
delegateToScript(toolScript('prdy', script), subArgs, {
|
|
||||||
MOSAIC_PRDY_RUNTIME: runtime,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─── Seq (sequential-thinking MCP) ──────────────────────────────────────────
|
|
||||||
|
|
||||||
function runSeq(args: string[]): never {
|
|
||||||
checkMosaicHome();
|
|
||||||
const action = args[0] ?? 'check';
|
|
||||||
const rest = args.slice(1);
|
|
||||||
const checker = fwScript('mosaic-ensure-sequential-thinking');
|
|
||||||
|
|
||||||
switch (action) {
|
|
||||||
case 'check':
|
|
||||||
delegateToScript(checker, ['--check', ...rest]);
|
|
||||||
break; // unreachable
|
|
||||||
case 'fix':
|
|
||||||
case 'apply':
|
|
||||||
delegateToScript(checker, rest);
|
|
||||||
break;
|
|
||||||
case 'start': {
|
|
||||||
console.log('[mosaic] Starting sequential-thinking MCP server...');
|
|
||||||
try {
|
|
||||||
execFileSync('npx', ['-y', '@modelcontextprotocol/server-sequential-thinking', ...rest], {
|
|
||||||
stdio: 'inherit',
|
|
||||||
});
|
|
||||||
process.exit(0);
|
|
||||||
} catch (err) {
|
|
||||||
process.exit((err as { status?: number }).status ?? 1);
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
console.error(`[mosaic] Unknown seq subcommand '${action}'. Use: check|fix|start`);
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─── Upgrade ────────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
function runUpgrade(args: string[]): never {
|
|
||||||
checkMosaicHome();
|
|
||||||
const subcmd = args[0];
|
|
||||||
|
|
||||||
if (!subcmd || subcmd === 'release') {
|
|
||||||
delegateToScript(fwScript('mosaic-release-upgrade'), args.slice(subcmd === 'release' ? 1 : 0));
|
|
||||||
} else if (subcmd === 'check') {
|
|
||||||
delegateToScript(fwScript('mosaic-release-upgrade'), ['--dry-run', ...args.slice(1)]);
|
|
||||||
} else if (subcmd === 'project') {
|
|
||||||
delegateToScript(fwScript('mosaic-upgrade'), args.slice(1));
|
|
||||||
} else if (subcmd.startsWith('-')) {
|
|
||||||
delegateToScript(fwScript('mosaic-release-upgrade'), args);
|
|
||||||
} else {
|
|
||||||
delegateToScript(fwScript('mosaic-upgrade'), args);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ─── Commander registration ─────────────────────────────────────────────────
|
|
||||||
|
|
||||||
export function registerLaunchCommands(program: Command): void {
|
export function registerLaunchCommands(program: Command): void {
|
||||||
// Runtime launchers
|
// Runtime launchers
|
||||||
@@ -707,58 +509,15 @@ export function registerLaunchCommands(program: Command): void {
|
|||||||
launchRuntime(runtime as RuntimeName, cmd.args, true);
|
launchRuntime(runtime as RuntimeName, cmd.args, true);
|
||||||
});
|
});
|
||||||
|
|
||||||
// Coord (mission orchestrator)
|
// Framework management commands (delegate to bash scripts)
|
||||||
program
|
const frameworkCommands: Record<string, { desc: string; script: string }> = {
|
||||||
.command('coord')
|
|
||||||
.description('Mission coordinator tools (init, status, run, continue, resume)')
|
|
||||||
.allowUnknownOption(true)
|
|
||||||
.allowExcessArguments(true)
|
|
||||||
.action((_opts: unknown, cmd: Command) => {
|
|
||||||
runCoord(cmd.args);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Prdy (PRD tools via local framework scripts)
|
|
||||||
program
|
|
||||||
.command('prdy')
|
|
||||||
.description('PRD creation and validation (init, update, validate, status)')
|
|
||||||
.allowUnknownOption(true)
|
|
||||||
.allowExcessArguments(true)
|
|
||||||
.action((_opts: unknown, cmd: Command) => {
|
|
||||||
runPrdyLocal(cmd.args);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Seq (sequential-thinking MCP management)
|
|
||||||
program
|
|
||||||
.command('seq')
|
|
||||||
.description('sequential-thinking MCP management (check/fix/start)')
|
|
||||||
.allowUnknownOption(true)
|
|
||||||
.allowExcessArguments(true)
|
|
||||||
.action((_opts: unknown, cmd: Command) => {
|
|
||||||
runSeq(cmd.args);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Upgrade (release + project)
|
|
||||||
program
|
|
||||||
.command('upgrade')
|
|
||||||
.description('Upgrade Mosaic release or project files')
|
|
||||||
.allowUnknownOption(true)
|
|
||||||
.allowExcessArguments(true)
|
|
||||||
.action((_opts: unknown, cmd: Command) => {
|
|
||||||
runUpgrade(cmd.args);
|
|
||||||
});
|
|
||||||
|
|
||||||
// Direct framework script delegates
|
|
||||||
const directCommands: Record<string, { desc: string; script: string }> = {
|
|
||||||
init: { desc: 'Generate SOUL.md (agent identity contract)', script: 'mosaic-init' },
|
init: { desc: 'Generate SOUL.md (agent identity contract)', script: 'mosaic-init' },
|
||||||
doctor: { desc: 'Health audit — detect drift and missing files', script: 'mosaic-doctor' },
|
doctor: { desc: 'Health audit — detect drift and missing files', script: 'mosaic-doctor' },
|
||||||
sync: { desc: 'Sync skills from canonical source', script: 'mosaic-sync-skills' },
|
sync: { desc: 'Sync skills from canonical source', script: 'mosaic-sync-skills' },
|
||||||
bootstrap: {
|
bootstrap: { desc: 'Bootstrap a repo with Mosaic standards', script: 'mosaic-bootstrap-repo' },
|
||||||
desc: 'Bootstrap a repo with Mosaic standards',
|
|
||||||
script: 'mosaic-bootstrap-repo',
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
|
|
||||||
for (const [name, { desc, script }] of Object.entries(directCommands)) {
|
for (const [name, { desc, script }] of Object.entries(frameworkCommands)) {
|
||||||
program
|
program
|
||||||
.command(name)
|
.command(name)
|
||||||
.description(desc)
|
.description(desc)
|
||||||
@@ -766,7 +525,7 @@ export function registerLaunchCommands(program: Command): void {
|
|||||||
.allowExcessArguments(true)
|
.allowExcessArguments(true)
|
||||||
.action((_opts: unknown, cmd: Command) => {
|
.action((_opts: unknown, cmd: Command) => {
|
||||||
checkMosaicHome();
|
checkMosaicHome();
|
||||||
delegateToScript(fwScript(script), cmd.args);
|
delegateToFrameworkScript(script, cmd.args);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -40,7 +40,7 @@ export function registerMissionCommand(program: Command) {
|
|||||||
const cmd = program
|
const cmd = program
|
||||||
.command('mission')
|
.command('mission')
|
||||||
.description('Manage missions')
|
.description('Manage missions')
|
||||||
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:14242')
|
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:4000')
|
||||||
.option('--list', 'List all missions')
|
.option('--list', 'List all missions')
|
||||||
.option('--init', 'Create a new mission')
|
.option('--init', 'Create a new mission')
|
||||||
.option('--plan <idOrName>', 'Run PRD wizard for a mission')
|
.option('--plan <idOrName>', 'Run PRD wizard for a mission')
|
||||||
@@ -86,7 +86,7 @@ export function registerMissionCommand(program: Command) {
|
|||||||
cmd
|
cmd
|
||||||
.command('task')
|
.command('task')
|
||||||
.description('Manage mission tasks')
|
.description('Manage mission tasks')
|
||||||
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:14242')
|
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:4000')
|
||||||
.option('--list', 'List tasks for a mission')
|
.option('--list', 'List tasks for a mission')
|
||||||
.option('--new', 'Create a task')
|
.option('--new', 'Create a task')
|
||||||
.option('--update <taskId>', 'Update a task')
|
.option('--update <taskId>', 'Update a task')
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ export function registerPrdyCommand(program: Command) {
|
|||||||
const cmd = program
|
const cmd = program
|
||||||
.command('prdy')
|
.command('prdy')
|
||||||
.description('PRD wizard — create and manage Product Requirement Documents')
|
.description('PRD wizard — create and manage Product Requirement Documents')
|
||||||
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:14242')
|
.option('-g, --gateway <url>', 'Gateway URL', 'http://localhost:4000')
|
||||||
.option('--init [name]', 'Create a new PRD')
|
.option('--init [name]', 'Create a new PRD')
|
||||||
.option('--update [name]', 'Update an existing PRD')
|
.option('--update [name]', 'Update an existing PRD')
|
||||||
.option('--project <idOrName>', 'Scope to project')
|
.option('--project <idOrName>', 'Scope to project')
|
||||||
|
|||||||
@@ -1,41 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@mosaic/config",
|
|
||||||
"version": "0.0.1",
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
|
||||||
"directory": "packages/config"
|
|
||||||
},
|
|
||||||
"type": "module",
|
|
||||||
"main": "dist/index.js",
|
|
||||||
"types": "dist/index.d.ts",
|
|
||||||
"exports": {
|
|
||||||
".": {
|
|
||||||
"types": "./dist/index.d.ts",
|
|
||||||
"default": "./dist/index.js"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"build": "tsc",
|
|
||||||
"lint": "eslint src",
|
|
||||||
"typecheck": "tsc --noEmit",
|
|
||||||
"test": "vitest run --passWithNoTests"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@mosaic/memory": "workspace:^",
|
|
||||||
"@mosaic/queue": "workspace:^",
|
|
||||||
"@mosaic/storage": "workspace:^"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"eslint": "^9.0.0",
|
|
||||||
"typescript": "^5.8.0",
|
|
||||||
"vitest": "^2.0.0"
|
|
||||||
},
|
|
||||||
"publishConfig": {
|
|
||||||
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
|
||||||
"access": "public"
|
|
||||||
},
|
|
||||||
"files": [
|
|
||||||
"dist"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
export type { MosaicConfig, StorageTier, MemoryConfigRef } from './mosaic-config.js';
|
|
||||||
export {
|
|
||||||
DEFAULT_LOCAL_CONFIG,
|
|
||||||
DEFAULT_TEAM_CONFIG,
|
|
||||||
loadConfig,
|
|
||||||
validateConfig,
|
|
||||||
} from './mosaic-config.js';
|
|
||||||
@@ -1,140 +0,0 @@
|
|||||||
import { readFileSync, existsSync } from 'node:fs';
|
|
||||||
import { resolve } from 'node:path';
|
|
||||||
import type { StorageConfig } from '@mosaic/storage';
|
|
||||||
import type { QueueAdapterConfig as QueueConfig } from '@mosaic/queue';
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* Types */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
export type StorageTier = 'local' | 'team';
|
|
||||||
|
|
||||||
export interface MemoryConfigRef {
|
|
||||||
type: 'pgvector' | 'sqlite-vec' | 'keyword';
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface MosaicConfig {
|
|
||||||
tier: StorageTier;
|
|
||||||
storage: StorageConfig;
|
|
||||||
queue: QueueConfig;
|
|
||||||
memory: MemoryConfigRef;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* Defaults */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
export const DEFAULT_LOCAL_CONFIG: MosaicConfig = {
|
|
||||||
tier: 'local',
|
|
||||||
storage: { type: 'pglite', dataDir: '.mosaic/storage-pglite' },
|
|
||||||
queue: { type: 'local', dataDir: '.mosaic/queue' },
|
|
||||||
memory: { type: 'keyword' },
|
|
||||||
};
|
|
||||||
|
|
||||||
export const DEFAULT_TEAM_CONFIG: MosaicConfig = {
|
|
||||||
tier: 'team',
|
|
||||||
storage: { type: 'postgres', url: 'postgresql://mosaic:mosaic@localhost:5432/mosaic' },
|
|
||||||
queue: { type: 'bullmq' },
|
|
||||||
memory: { type: 'pgvector' },
|
|
||||||
};
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* Validation */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
const VALID_TIERS = new Set<string>(['local', 'team']);
|
|
||||||
const VALID_STORAGE_TYPES = new Set<string>(['postgres', 'pglite', 'files']);
|
|
||||||
const VALID_QUEUE_TYPES = new Set<string>(['bullmq', 'local']);
|
|
||||||
const VALID_MEMORY_TYPES = new Set<string>(['pgvector', 'sqlite-vec', 'keyword']);
|
|
||||||
|
|
||||||
export function validateConfig(raw: unknown): MosaicConfig {
|
|
||||||
if (typeof raw !== 'object' || raw === null) {
|
|
||||||
throw new Error('MosaicConfig must be a non-null object');
|
|
||||||
}
|
|
||||||
|
|
||||||
const obj = raw as Record<string, unknown>;
|
|
||||||
|
|
||||||
// tier
|
|
||||||
const tier = obj['tier'];
|
|
||||||
if (typeof tier !== 'string' || !VALID_TIERS.has(tier)) {
|
|
||||||
throw new Error(`Invalid tier "${String(tier)}" — expected "local" or "team"`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// storage
|
|
||||||
const storage = obj['storage'];
|
|
||||||
if (typeof storage !== 'object' || storage === null) {
|
|
||||||
throw new Error('config.storage must be a non-null object');
|
|
||||||
}
|
|
||||||
const storageType = (storage as Record<string, unknown>)['type'];
|
|
||||||
if (typeof storageType !== 'string' || !VALID_STORAGE_TYPES.has(storageType)) {
|
|
||||||
throw new Error(`Invalid storage.type "${String(storageType)}"`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// queue
|
|
||||||
const queue = obj['queue'];
|
|
||||||
if (typeof queue !== 'object' || queue === null) {
|
|
||||||
throw new Error('config.queue must be a non-null object');
|
|
||||||
}
|
|
||||||
const queueType = (queue as Record<string, unknown>)['type'];
|
|
||||||
if (typeof queueType !== 'string' || !VALID_QUEUE_TYPES.has(queueType)) {
|
|
||||||
throw new Error(`Invalid queue.type "${String(queueType)}"`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// memory
|
|
||||||
const memory = obj['memory'];
|
|
||||||
if (typeof memory !== 'object' || memory === null) {
|
|
||||||
throw new Error('config.memory must be a non-null object');
|
|
||||||
}
|
|
||||||
const memoryType = (memory as Record<string, unknown>)['type'];
|
|
||||||
if (typeof memoryType !== 'string' || !VALID_MEMORY_TYPES.has(memoryType)) {
|
|
||||||
throw new Error(`Invalid memory.type "${String(memoryType)}"`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
tier: tier as StorageTier,
|
|
||||||
storage: storage as StorageConfig,
|
|
||||||
queue: queue as QueueConfig,
|
|
||||||
memory: memory as MemoryConfigRef,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* Loader */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
function detectFromEnv(): MosaicConfig {
|
|
||||||
if (process.env['DATABASE_URL']) {
|
|
||||||
return {
|
|
||||||
...DEFAULT_TEAM_CONFIG,
|
|
||||||
storage: {
|
|
||||||
type: 'postgres',
|
|
||||||
url: process.env['DATABASE_URL'],
|
|
||||||
},
|
|
||||||
queue: {
|
|
||||||
type: 'bullmq',
|
|
||||||
url: process.env['VALKEY_URL'],
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
return DEFAULT_LOCAL_CONFIG;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function loadConfig(configPath?: string): MosaicConfig {
|
|
||||||
// 1. Explicit path or default location
|
|
||||||
const paths = configPath
|
|
||||||
? [resolve(configPath)]
|
|
||||||
: [
|
|
||||||
resolve(process.cwd(), 'mosaic.config.json'),
|
|
||||||
resolve(process.cwd(), '../../mosaic.config.json'), // monorepo root when cwd is apps/gateway
|
|
||||||
];
|
|
||||||
|
|
||||||
for (const p of paths) {
|
|
||||||
if (existsSync(p)) {
|
|
||||||
const raw: unknown = JSON.parse(readFileSync(p, 'utf-8'));
|
|
||||||
return validateConfig(raw);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. Fall back to env-var detection
|
|
||||||
return detectFromEnv();
|
|
||||||
}
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
{
|
|
||||||
"extends": "../../tsconfig.base.json",
|
|
||||||
"compilerOptions": {
|
|
||||||
"outDir": "dist",
|
|
||||||
"rootDir": "src"
|
|
||||||
},
|
|
||||||
"include": ["src/**/*"],
|
|
||||||
"exclude": ["node_modules", "dist"]
|
|
||||||
}
|
|
||||||
@@ -1,11 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/coord",
|
"name": "@mosaic/coord",
|
||||||
"version": "0.0.2",
|
"version": "0.0.1-alpha.2",
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
|
||||||
"directory": "packages/coord"
|
|
||||||
},
|
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
|
|||||||
@@ -1,11 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/db",
|
"name": "@mosaic/db",
|
||||||
"version": "0.0.2",
|
"version": "0.0.1-alpha.2",
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
|
||||||
"directory": "packages/db"
|
|
||||||
},
|
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
@@ -33,7 +28,6 @@
|
|||||||
"vitest": "^2.0.0"
|
"vitest": "^2.0.0"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@electric-sql/pglite": "^0.2.17",
|
|
||||||
"drizzle-orm": "^0.45.1",
|
"drizzle-orm": "^0.45.1",
|
||||||
"postgres": "^3.4.8"
|
"postgres": "^3.4.8"
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -1,15 +0,0 @@
|
|||||||
import { PGlite } from '@electric-sql/pglite';
|
|
||||||
import { drizzle } from 'drizzle-orm/pglite';
|
|
||||||
import * as schema from './schema.js';
|
|
||||||
import type { DbHandle } from './client.js';
|
|
||||||
|
|
||||||
export function createPgliteDb(dataDir: string): DbHandle {
|
|
||||||
const client = new PGlite(dataDir);
|
|
||||||
const db = drizzle(client, { schema });
|
|
||||||
return {
|
|
||||||
db: db as unknown as DbHandle['db'],
|
|
||||||
close: async () => {
|
|
||||||
await client.close();
|
|
||||||
},
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,5 +1,4 @@
|
|||||||
export { createDb, type Db, type DbHandle } from './client.js';
|
export { createDb, type Db, type DbHandle } from './client.js';
|
||||||
export { createPgliteDb } from './client-pglite.js';
|
|
||||||
export { runMigrations } from './migrate.js';
|
export { runMigrations } from './migrate.js';
|
||||||
export * from './schema.js';
|
export * from './schema.js';
|
||||||
export {
|
export {
|
||||||
@@ -17,5 +16,4 @@ export {
|
|||||||
gte,
|
gte,
|
||||||
lte,
|
lte,
|
||||||
ilike,
|
ilike,
|
||||||
count,
|
|
||||||
} from 'drizzle-orm';
|
} from 'drizzle-orm';
|
||||||
|
|||||||
@@ -91,28 +91,6 @@ export const verifications = pgTable('verifications', {
|
|||||||
updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
|
updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
|
||||||
});
|
});
|
||||||
|
|
||||||
// ─── Admin API Tokens ───────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
export const adminTokens = pgTable(
|
|
||||||
'admin_tokens',
|
|
||||||
{
|
|
||||||
id: text('id').primaryKey(),
|
|
||||||
userId: text('user_id')
|
|
||||||
.notNull()
|
|
||||||
.references(() => users.id, { onDelete: 'cascade' }),
|
|
||||||
tokenHash: text('token_hash').notNull(),
|
|
||||||
label: text('label').notNull(),
|
|
||||||
scope: text('scope').notNull().default('admin'),
|
|
||||||
expiresAt: timestamp('expires_at', { withTimezone: true }),
|
|
||||||
lastUsedAt: timestamp('last_used_at', { withTimezone: true }),
|
|
||||||
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
|
|
||||||
},
|
|
||||||
(t) => [
|
|
||||||
index('admin_tokens_user_id_idx').on(t.userId),
|
|
||||||
uniqueIndex('admin_tokens_hash_idx').on(t.tokenHash),
|
|
||||||
],
|
|
||||||
);
|
|
||||||
|
|
||||||
// ─── Teams ───────────────────────────────────────────────────────────────────
|
// ─── Teams ───────────────────────────────────────────────────────────────────
|
||||||
// Declared before projects because projects references teams.
|
// Declared before projects because projects references teams.
|
||||||
|
|
||||||
|
|||||||
@@ -1,11 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/design-tokens",
|
"name": "@mosaic/design-tokens",
|
||||||
"version": "0.0.2",
|
"version": "0.0.1-alpha.2",
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
|
||||||
"directory": "packages/design-tokens"
|
|
||||||
},
|
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
|
|||||||
@@ -1,11 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/forge",
|
"name": "@mosaic/forge",
|
||||||
"version": "0.0.2",
|
"version": "0.0.1-alpha.2",
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
|
||||||
"directory": "packages/forge"
|
|
||||||
},
|
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
|
|||||||
@@ -1,11 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/log",
|
"name": "@mosaic/log",
|
||||||
"version": "0.0.2",
|
"version": "0.0.1-alpha.2",
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
|
||||||
"directory": "packages/log"
|
|
||||||
},
|
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
|
|||||||
@@ -1,11 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/macp",
|
"name": "@mosaic/macp",
|
||||||
"version": "0.0.2",
|
"version": "0.0.1-alpha.2",
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
|
||||||
"directory": "packages/macp"
|
|
||||||
},
|
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
|
|||||||
@@ -1,11 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/memory",
|
"name": "@mosaic/memory",
|
||||||
"version": "0.0.2",
|
"version": "0.0.1-alpha.2",
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
|
||||||
"directory": "packages/memory"
|
|
||||||
},
|
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
@@ -23,7 +18,6 @@
|
|||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@mosaic/db": "workspace:*",
|
"@mosaic/db": "workspace:*",
|
||||||
"@mosaic/storage": "workspace:*",
|
|
||||||
"@mosaic/types": "workspace:*",
|
"@mosaic/types": "workspace:*",
|
||||||
"drizzle-orm": "^0.45.1"
|
"drizzle-orm": "^0.45.1"
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -1,298 +0,0 @@
|
|||||||
import { describe, it, expect, beforeEach } from 'vitest';
|
|
||||||
import type { StorageAdapter } from '@mosaic/storage';
|
|
||||||
import { KeywordAdapter } from './keyword.js';
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* In-memory mock StorageAdapter */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
function createMockStorage(): StorageAdapter {
|
|
||||||
const collections = new Map<string, Map<string, Record<string, unknown>>>();
|
|
||||||
let idCounter = 0;
|
|
||||||
|
|
||||||
function getCollection(name: string): Map<string, Record<string, unknown>> {
|
|
||||||
if (!collections.has(name)) collections.set(name, new Map());
|
|
||||||
return collections.get(name)!;
|
|
||||||
}
|
|
||||||
|
|
||||||
const adapter: StorageAdapter = {
|
|
||||||
name: 'mock',
|
|
||||||
|
|
||||||
async create<T extends Record<string, unknown>>(
|
|
||||||
collection: string,
|
|
||||||
data: T,
|
|
||||||
): Promise<T & { id: string }> {
|
|
||||||
const id = String(++idCounter);
|
|
||||||
const record = { ...data, id };
|
|
||||||
getCollection(collection).set(id, record);
|
|
||||||
return record as T & { id: string };
|
|
||||||
},
|
|
||||||
|
|
||||||
async read<T extends Record<string, unknown>>(
|
|
||||||
collection: string,
|
|
||||||
id: string,
|
|
||||||
): Promise<T | null> {
|
|
||||||
const record = getCollection(collection).get(id);
|
|
||||||
return (record as T) ?? null;
|
|
||||||
},
|
|
||||||
|
|
||||||
async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
|
|
||||||
const col = getCollection(collection);
|
|
||||||
const existing = col.get(id);
|
|
||||||
if (!existing) return false;
|
|
||||||
col.set(id, { ...existing, ...data });
|
|
||||||
return true;
|
|
||||||
},
|
|
||||||
|
|
||||||
async delete(collection: string, id: string): Promise<boolean> {
|
|
||||||
return getCollection(collection).delete(id);
|
|
||||||
},
|
|
||||||
|
|
||||||
async find<T extends Record<string, unknown>>(
|
|
||||||
collection: string,
|
|
||||||
filter?: Record<string, unknown>,
|
|
||||||
): Promise<T[]> {
|
|
||||||
const col = getCollection(collection);
|
|
||||||
const results: T[] = [];
|
|
||||||
for (const record of col.values()) {
|
|
||||||
if (filter && !matchesFilter(record, filter)) continue;
|
|
||||||
results.push(record as T);
|
|
||||||
}
|
|
||||||
return results;
|
|
||||||
},
|
|
||||||
|
|
||||||
async findOne<T extends Record<string, unknown>>(
|
|
||||||
collection: string,
|
|
||||||
filter: Record<string, unknown>,
|
|
||||||
): Promise<T | null> {
|
|
||||||
const col = getCollection(collection);
|
|
||||||
for (const record of col.values()) {
|
|
||||||
if (matchesFilter(record, filter)) return record as T;
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
},
|
|
||||||
|
|
||||||
async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
|
|
||||||
const rows = await adapter.find(collection, filter);
|
|
||||||
return rows.length;
|
|
||||||
},
|
|
||||||
|
|
||||||
async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
|
|
||||||
return fn(adapter);
|
|
||||||
},
|
|
||||||
|
|
||||||
async migrate(): Promise<void> {},
|
|
||||||
async close(): Promise<void> {},
|
|
||||||
};
|
|
||||||
|
|
||||||
return adapter;
|
|
||||||
}
|
|
||||||
|
|
||||||
function matchesFilter(record: Record<string, unknown>, filter: Record<string, unknown>): boolean {
|
|
||||||
for (const [key, value] of Object.entries(filter)) {
|
|
||||||
if (record[key] !== value) return false;
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* Tests */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
describe('KeywordAdapter', () => {
|
|
||||||
let adapter: KeywordAdapter;
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
adapter = new KeywordAdapter({ type: 'keyword', storage: createMockStorage() });
|
|
||||||
});
|
|
||||||
|
|
||||||
/* ---- Preferences ---- */
|
|
||||||
|
|
||||||
describe('preferences', () => {
|
|
||||||
it('should set and get a preference', async () => {
|
|
||||||
await adapter.setPreference('u1', 'theme', 'dark');
|
|
||||||
const value = await adapter.getPreference('u1', 'theme');
|
|
||||||
expect(value).toBe('dark');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return null for missing preference', async () => {
|
|
||||||
const value = await adapter.getPreference('u1', 'nonexistent');
|
|
||||||
expect(value).toBeNull();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should upsert an existing preference', async () => {
|
|
||||||
await adapter.setPreference('u1', 'theme', 'dark');
|
|
||||||
await adapter.setPreference('u1', 'theme', 'light');
|
|
||||||
const value = await adapter.getPreference('u1', 'theme');
|
|
||||||
expect(value).toBe('light');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should delete a preference', async () => {
|
|
||||||
await adapter.setPreference('u1', 'theme', 'dark');
|
|
||||||
const deleted = await adapter.deletePreference('u1', 'theme');
|
|
||||||
expect(deleted).toBe(true);
|
|
||||||
const value = await adapter.getPreference('u1', 'theme');
|
|
||||||
expect(value).toBeNull();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return false when deleting nonexistent preference', async () => {
|
|
||||||
const deleted = await adapter.deletePreference('u1', 'nope');
|
|
||||||
expect(deleted).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should list preferences by userId', async () => {
|
|
||||||
await adapter.setPreference('u1', 'theme', 'dark', 'appearance');
|
|
||||||
await adapter.setPreference('u1', 'lang', 'en', 'locale');
|
|
||||||
await adapter.setPreference('u2', 'theme', 'light', 'appearance');
|
|
||||||
|
|
||||||
const prefs = await adapter.listPreferences('u1');
|
|
||||||
expect(prefs).toHaveLength(2);
|
|
||||||
expect(prefs.map((p) => p.key).sort()).toEqual(['lang', 'theme']);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should filter preferences by category', async () => {
|
|
||||||
await adapter.setPreference('u1', 'theme', 'dark', 'appearance');
|
|
||||||
await adapter.setPreference('u1', 'lang', 'en', 'locale');
|
|
||||||
|
|
||||||
const prefs = await adapter.listPreferences('u1', 'appearance');
|
|
||||||
expect(prefs).toHaveLength(1);
|
|
||||||
expect(prefs[0]!.key).toBe('theme');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
/* ---- Insights ---- */
|
|
||||||
|
|
||||||
describe('insights', () => {
|
|
||||||
it('should store and retrieve an insight', async () => {
|
|
||||||
const insight = await adapter.storeInsight({
|
|
||||||
userId: 'u1',
|
|
||||||
content: 'TypeScript is great for type safety',
|
|
||||||
source: 'chat',
|
|
||||||
category: 'technical',
|
|
||||||
relevanceScore: 0.9,
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(insight.id).toBeDefined();
|
|
||||||
expect(insight.content).toBe('TypeScript is great for type safety');
|
|
||||||
|
|
||||||
const fetched = await adapter.getInsight(insight.id);
|
|
||||||
expect(fetched).not.toBeNull();
|
|
||||||
expect(fetched!.content).toBe('TypeScript is great for type safety');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return null for missing insight', async () => {
|
|
||||||
const result = await adapter.getInsight('nonexistent');
|
|
||||||
expect(result).toBeNull();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should delete an insight', async () => {
|
|
||||||
const insight = await adapter.storeInsight({
|
|
||||||
userId: 'u1',
|
|
||||||
content: 'test',
|
|
||||||
source: 'chat',
|
|
||||||
category: 'general',
|
|
||||||
relevanceScore: 0.5,
|
|
||||||
});
|
|
||||||
|
|
||||||
const deleted = await adapter.deleteInsight(insight.id);
|
|
||||||
expect(deleted).toBe(true);
|
|
||||||
|
|
||||||
const fetched = await adapter.getInsight(insight.id);
|
|
||||||
expect(fetched).toBeNull();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
/* ---- Keyword Search ---- */
|
|
||||||
|
|
||||||
describe('searchInsights', () => {
|
|
||||||
beforeEach(async () => {
|
|
||||||
await adapter.storeInsight({
|
|
||||||
userId: 'u1',
|
|
||||||
content: 'TypeScript provides excellent type safety for JavaScript projects',
|
|
||||||
source: 'chat',
|
|
||||||
category: 'technical',
|
|
||||||
relevanceScore: 0.9,
|
|
||||||
});
|
|
||||||
await adapter.storeInsight({
|
|
||||||
userId: 'u1',
|
|
||||||
content: 'React hooks simplify state management in components',
|
|
||||||
source: 'chat',
|
|
||||||
category: 'technical',
|
|
||||||
relevanceScore: 0.8,
|
|
||||||
});
|
|
||||||
await adapter.storeInsight({
|
|
||||||
userId: 'u1',
|
|
||||||
content: 'TypeScript and React work great together for type safe components',
|
|
||||||
source: 'chat',
|
|
||||||
category: 'technical',
|
|
||||||
relevanceScore: 0.85,
|
|
||||||
});
|
|
||||||
await adapter.storeInsight({
|
|
||||||
userId: 'u2',
|
|
||||||
content: 'TypeScript is popular',
|
|
||||||
source: 'chat',
|
|
||||||
category: 'general',
|
|
||||||
relevanceScore: 0.5,
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should find insights by exact keyword', async () => {
|
|
||||||
const results = await adapter.searchInsights('u1', 'hooks');
|
|
||||||
expect(results).toHaveLength(1);
|
|
||||||
expect(results[0]!.content).toContain('hooks');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should be case-insensitive', async () => {
|
|
||||||
const results = await adapter.searchInsights('u1', 'TYPESCRIPT');
|
|
||||||
expect(results.length).toBeGreaterThanOrEqual(1);
|
|
||||||
for (const r of results) {
|
|
||||||
expect(r.content.toLowerCase()).toContain('typescript');
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should rank multi-word matches higher', async () => {
|
|
||||||
const results = await adapter.searchInsights('u1', 'TypeScript React');
|
|
||||||
// The insight mentioning both "TypeScript" and "React" should rank first (score=2)
|
|
||||||
expect(results[0]!.score).toBe(2);
|
|
||||||
expect(results[0]!.content).toContain('TypeScript');
|
|
||||||
expect(results[0]!.content).toContain('React');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return empty for no matches', async () => {
|
|
||||||
const results = await adapter.searchInsights('u1', 'python django');
|
|
||||||
expect(results).toHaveLength(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should filter by userId', async () => {
|
|
||||||
const results = await adapter.searchInsights('u2', 'TypeScript');
|
|
||||||
expect(results).toHaveLength(1);
|
|
||||||
expect(results[0]!.content).toBe('TypeScript is popular');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should respect limit option', async () => {
|
|
||||||
const results = await adapter.searchInsights('u1', 'TypeScript', { limit: 1 });
|
|
||||||
expect(results).toHaveLength(1);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return empty for empty query', async () => {
|
|
||||||
const results = await adapter.searchInsights('u1', ' ');
|
|
||||||
expect(results).toHaveLength(0);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
/* ---- Lifecycle ---- */
|
|
||||||
|
|
||||||
describe('lifecycle', () => {
|
|
||||||
it('should have name "keyword"', () => {
|
|
||||||
expect(adapter.name).toBe('keyword');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should have null embedder', () => {
|
|
||||||
expect(adapter.embedder).toBeNull();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should close without error', async () => {
|
|
||||||
await expect(adapter.close()).resolves.toBeUndefined();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,195 +0,0 @@
|
|||||||
import type { StorageAdapter } from '@mosaic/storage';
|
|
||||||
import type {
|
|
||||||
MemoryAdapter,
|
|
||||||
MemoryConfig,
|
|
||||||
NewInsight,
|
|
||||||
Insight,
|
|
||||||
InsightSearchResult,
|
|
||||||
} from '../types.js';
|
|
||||||
import type { EmbeddingProvider } from '../vector-store.js';
|
|
||||||
|
|
||||||
type KeywordConfig = Extract<MemoryConfig, { type: 'keyword' }>;
|
|
||||||
|
|
||||||
const PREFERENCES = 'preferences';
|
|
||||||
const INSIGHTS = 'insights';
|
|
||||||
|
|
||||||
type PreferenceRecord = Record<string, unknown> & {
|
|
||||||
id: string;
|
|
||||||
userId: string;
|
|
||||||
key: string;
|
|
||||||
value: unknown;
|
|
||||||
category: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
type InsightRecord = Record<string, unknown> & {
|
|
||||||
id: string;
|
|
||||||
userId: string;
|
|
||||||
content: string;
|
|
||||||
source: string;
|
|
||||||
category: string;
|
|
||||||
relevanceScore: number;
|
|
||||||
metadata: Record<string, unknown>;
|
|
||||||
createdAt: string;
|
|
||||||
updatedAt?: string;
|
|
||||||
decayedAt?: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export class KeywordAdapter implements MemoryAdapter {
|
|
||||||
readonly name = 'keyword';
|
|
||||||
readonly embedder: EmbeddingProvider | null = null;
|
|
||||||
|
|
||||||
private storage: StorageAdapter;
|
|
||||||
|
|
||||||
constructor(config: KeywordConfig) {
|
|
||||||
this.storage = config.storage;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* Preferences */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
async getPreference(userId: string, key: string): Promise<unknown | null> {
|
|
||||||
const row = await this.storage.findOne<PreferenceRecord>(PREFERENCES, { userId, key });
|
|
||||||
return row?.value ?? null;
|
|
||||||
}
|
|
||||||
|
|
||||||
async setPreference(
|
|
||||||
userId: string,
|
|
||||||
key: string,
|
|
||||||
value: unknown,
|
|
||||||
category?: string,
|
|
||||||
): Promise<void> {
|
|
||||||
const existing = await this.storage.findOne<PreferenceRecord>(PREFERENCES, { userId, key });
|
|
||||||
if (existing) {
|
|
||||||
await this.storage.update(PREFERENCES, existing.id, {
|
|
||||||
value,
|
|
||||||
...(category !== undefined ? { category } : {}),
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
await this.storage.create(PREFERENCES, {
|
|
||||||
userId,
|
|
||||||
key,
|
|
||||||
value,
|
|
||||||
category: category ?? 'general',
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async deletePreference(userId: string, key: string): Promise<boolean> {
|
|
||||||
const existing = await this.storage.findOne<PreferenceRecord>(PREFERENCES, { userId, key });
|
|
||||||
if (!existing) return false;
|
|
||||||
return this.storage.delete(PREFERENCES, existing.id);
|
|
||||||
}
|
|
||||||
|
|
||||||
async listPreferences(
|
|
||||||
userId: string,
|
|
||||||
category?: string,
|
|
||||||
): Promise<Array<{ key: string; value: unknown; category: string }>> {
|
|
||||||
const filter: Record<string, unknown> = { userId };
|
|
||||||
if (category !== undefined) filter.category = category;
|
|
||||||
|
|
||||||
const rows = await this.storage.find<PreferenceRecord>(PREFERENCES, filter);
|
|
||||||
return rows.map((r) => ({ key: r.key, value: r.value, category: r.category }));
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* Insights */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
async storeInsight(insight: NewInsight): Promise<Insight> {
|
|
||||||
const now = new Date();
|
|
||||||
const row = await this.storage.create<Record<string, unknown>>(INSIGHTS, {
|
|
||||||
userId: insight.userId,
|
|
||||||
content: insight.content,
|
|
||||||
source: insight.source,
|
|
||||||
category: insight.category,
|
|
||||||
relevanceScore: insight.relevanceScore,
|
|
||||||
metadata: insight.metadata ?? {},
|
|
||||||
createdAt: now.toISOString(),
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
id: row.id,
|
|
||||||
userId: insight.userId,
|
|
||||||
content: insight.content,
|
|
||||||
source: insight.source,
|
|
||||||
category: insight.category,
|
|
||||||
relevanceScore: insight.relevanceScore,
|
|
||||||
metadata: insight.metadata,
|
|
||||||
createdAt: now,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async getInsight(id: string): Promise<Insight | null> {
|
|
||||||
const row = await this.storage.read<InsightRecord>(INSIGHTS, id);
|
|
||||||
if (!row) return null;
|
|
||||||
return toInsight(row);
|
|
||||||
}
|
|
||||||
|
|
||||||
async searchInsights(
|
|
||||||
userId: string,
|
|
||||||
query: string,
|
|
||||||
opts?: { limit?: number; embedding?: number[] },
|
|
||||||
): Promise<InsightSearchResult[]> {
|
|
||||||
const limit = opts?.limit ?? 10;
|
|
||||||
const words = query
|
|
||||||
.toLowerCase()
|
|
||||||
.split(/\s+/)
|
|
||||||
.filter((w) => w.length > 0);
|
|
||||||
|
|
||||||
if (words.length === 0) return [];
|
|
||||||
|
|
||||||
const rows = await this.storage.find<InsightRecord>(INSIGHTS, { userId });
|
|
||||||
|
|
||||||
const scored: InsightSearchResult[] = [];
|
|
||||||
for (const row of rows) {
|
|
||||||
const content = row.content.toLowerCase();
|
|
||||||
let score = 0;
|
|
||||||
for (const word of words) {
|
|
||||||
if (content.includes(word)) score++;
|
|
||||||
}
|
|
||||||
if (score > 0) {
|
|
||||||
scored.push({
|
|
||||||
id: row.id,
|
|
||||||
content: row.content,
|
|
||||||
score,
|
|
||||||
metadata: row.metadata ?? undefined,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
scored.sort((a, b) => b.score - a.score);
|
|
||||||
return scored.slice(0, limit);
|
|
||||||
}
|
|
||||||
|
|
||||||
async deleteInsight(id: string): Promise<boolean> {
|
|
||||||
return this.storage.delete(INSIGHTS, id);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* Lifecycle */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
async close(): Promise<void> {
|
|
||||||
// no-op — storage adapter manages its own lifecycle
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* Helpers */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
function toInsight(row: InsightRecord): Insight {
|
|
||||||
return {
|
|
||||||
id: row.id,
|
|
||||||
userId: row.userId,
|
|
||||||
content: row.content,
|
|
||||||
source: row.source,
|
|
||||||
category: row.category,
|
|
||||||
relevanceScore: row.relevanceScore,
|
|
||||||
metadata: row.metadata ?? undefined,
|
|
||||||
createdAt: new Date(row.createdAt),
|
|
||||||
updatedAt: row.updatedAt ? new Date(row.updatedAt) : undefined,
|
|
||||||
decayedAt: row.decayedAt ? new Date(row.decayedAt) : undefined,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,177 +0,0 @@
|
|||||||
import { createDb, type DbHandle } from '@mosaic/db';
|
|
||||||
import type {
|
|
||||||
MemoryAdapter,
|
|
||||||
MemoryConfig,
|
|
||||||
NewInsight as AdapterNewInsight,
|
|
||||||
Insight as AdapterInsight,
|
|
||||||
InsightSearchResult,
|
|
||||||
} from '../types.js';
|
|
||||||
import type { EmbeddingProvider } from '../vector-store.js';
|
|
||||||
import {
|
|
||||||
createPreferencesRepo,
|
|
||||||
type PreferencesRepo,
|
|
||||||
type Preference,
|
|
||||||
type NewPreference,
|
|
||||||
} from '../preferences.js';
|
|
||||||
import {
|
|
||||||
createInsightsRepo,
|
|
||||||
type InsightsRepo,
|
|
||||||
type NewInsight as DbNewInsight,
|
|
||||||
} from '../insights.js';
|
|
||||||
|
|
||||||
type PgVectorConfig = Extract<MemoryConfig, { type: 'pgvector' }>;
|
|
||||||
|
|
||||||
export class PgVectorAdapter implements MemoryAdapter {
|
|
||||||
readonly name = 'pgvector';
|
|
||||||
readonly embedder: EmbeddingProvider | null;
|
|
||||||
|
|
||||||
private handle: DbHandle;
|
|
||||||
private preferences: PreferencesRepo;
|
|
||||||
private insights: InsightsRepo;
|
|
||||||
|
|
||||||
constructor(config: PgVectorConfig) {
|
|
||||||
this.handle = createDb();
|
|
||||||
this.preferences = createPreferencesRepo(this.handle.db);
|
|
||||||
this.insights = createInsightsRepo(this.handle.db);
|
|
||||||
this.embedder = config.embedder ?? null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* Preferences */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
async getPreference(userId: string, key: string): Promise<unknown | null> {
|
|
||||||
const row = await this.preferences.findByUserAndKey(userId, key);
|
|
||||||
return row?.value ?? null;
|
|
||||||
}
|
|
||||||
|
|
||||||
async setPreference(
|
|
||||||
userId: string,
|
|
||||||
key: string,
|
|
||||||
value: unknown,
|
|
||||||
category?: string,
|
|
||||||
): Promise<void> {
|
|
||||||
await this.preferences.upsert({
|
|
||||||
userId,
|
|
||||||
key,
|
|
||||||
value,
|
|
||||||
...(category ? { category: category as NewPreference['category'] } : {}),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async deletePreference(userId: string, key: string): Promise<boolean> {
|
|
||||||
return this.preferences.remove(userId, key);
|
|
||||||
}
|
|
||||||
|
|
||||||
async listPreferences(
|
|
||||||
userId: string,
|
|
||||||
category?: string,
|
|
||||||
): Promise<Array<{ key: string; value: unknown; category: string }>> {
|
|
||||||
const rows = category
|
|
||||||
? await this.preferences.findByUserAndCategory(userId, category as Preference['category'])
|
|
||||||
: await this.preferences.findByUser(userId);
|
|
||||||
|
|
||||||
return rows.map((r) => ({ key: r.key, value: r.value, category: r.category }));
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* Insights */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
async storeInsight(insight: AdapterNewInsight): Promise<AdapterInsight> {
|
|
||||||
const row = await this.insights.create({
|
|
||||||
userId: insight.userId,
|
|
||||||
content: insight.content,
|
|
||||||
source: insight.source as DbNewInsight['source'],
|
|
||||||
category: insight.category as DbNewInsight['category'],
|
|
||||||
relevanceScore: insight.relevanceScore,
|
|
||||||
metadata: insight.metadata ?? {},
|
|
||||||
embedding: insight.embedding ?? null,
|
|
||||||
});
|
|
||||||
|
|
||||||
return toAdapterInsight(row);
|
|
||||||
}
|
|
||||||
|
|
||||||
async getInsight(id: string): Promise<AdapterInsight | null> {
|
|
||||||
// findById requires userId — search across all users via raw find
|
|
||||||
// The adapter interface only takes id, so we pass an empty userId and rely on the id match.
|
|
||||||
// Since the repo requires userId, we use a two-step approach.
|
|
||||||
const row = await this.insights.findById(id, '');
|
|
||||||
if (!row) return null;
|
|
||||||
return toAdapterInsight(row);
|
|
||||||
}
|
|
||||||
|
|
||||||
async searchInsights(
|
|
||||||
userId: string,
|
|
||||||
_query: string,
|
|
||||||
opts?: { limit?: number; embedding?: number[] },
|
|
||||||
): Promise<InsightSearchResult[]> {
|
|
||||||
if (opts?.embedding) {
|
|
||||||
const results = await this.insights.searchByEmbedding(
|
|
||||||
userId,
|
|
||||||
opts.embedding,
|
|
||||||
opts.limit ?? 10,
|
|
||||||
);
|
|
||||||
return results.map((r) => ({
|
|
||||||
id: r.insight.id,
|
|
||||||
content: r.insight.content,
|
|
||||||
score: 1 - r.distance,
|
|
||||||
metadata: (r.insight.metadata as Record<string, unknown>) ?? undefined,
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fallback: return recent insights for the user
|
|
||||||
const rows = await this.insights.findByUser(userId, opts?.limit ?? 10);
|
|
||||||
return rows.map((r) => ({
|
|
||||||
id: r.id,
|
|
||||||
content: r.content,
|
|
||||||
score: Number(r.relevanceScore),
|
|
||||||
metadata: (r.metadata as Record<string, unknown>) ?? undefined,
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
async deleteInsight(id: string): Promise<boolean> {
|
|
||||||
// The repo requires userId — pass empty string since adapter interface only has id
|
|
||||||
return this.insights.remove(id, '');
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* Lifecycle */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
async close(): Promise<void> {
|
|
||||||
await this.handle.close();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* Helpers */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
function toAdapterInsight(row: {
|
|
||||||
id: string;
|
|
||||||
userId: string;
|
|
||||||
content: string;
|
|
||||||
source: string;
|
|
||||||
category: string;
|
|
||||||
relevanceScore: number;
|
|
||||||
metadata: unknown;
|
|
||||||
embedding: unknown;
|
|
||||||
createdAt: Date;
|
|
||||||
updatedAt: Date | null;
|
|
||||||
decayedAt: Date | null;
|
|
||||||
}): AdapterInsight {
|
|
||||||
return {
|
|
||||||
id: row.id,
|
|
||||||
userId: row.userId,
|
|
||||||
content: row.content,
|
|
||||||
source: row.source,
|
|
||||||
category: row.category,
|
|
||||||
relevanceScore: row.relevanceScore,
|
|
||||||
metadata: (row.metadata as Record<string, unknown>) ?? undefined,
|
|
||||||
embedding: (row.embedding as number[]) ?? undefined,
|
|
||||||
createdAt: row.createdAt,
|
|
||||||
updatedAt: row.updatedAt ?? undefined,
|
|
||||||
decayedAt: row.decayedAt ?? undefined,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
import type { MemoryAdapter, MemoryConfig } from './types.js';
|
|
||||||
|
|
||||||
type MemoryType = MemoryConfig['type'];
|
|
||||||
|
|
||||||
const registry = new Map<MemoryType, (config: MemoryConfig) => MemoryAdapter>();
|
|
||||||
|
|
||||||
export function registerMemoryAdapter(
|
|
||||||
type: MemoryType,
|
|
||||||
factory: (config: MemoryConfig) => MemoryAdapter,
|
|
||||||
): void {
|
|
||||||
registry.set(type, factory);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createMemoryAdapter(config: MemoryConfig): MemoryAdapter {
|
|
||||||
const factory = registry.get(config.type);
|
|
||||||
if (!factory) throw new Error(`No adapter registered for type: ${config.type}`);
|
|
||||||
return factory(config);
|
|
||||||
}
|
|
||||||
@@ -13,27 +13,3 @@ export {
|
|||||||
type SearchResult,
|
type SearchResult,
|
||||||
} from './insights.js';
|
} from './insights.js';
|
||||||
export type { VectorStore, VectorSearchResult, EmbeddingProvider } from './vector-store.js';
|
export type { VectorStore, VectorSearchResult, EmbeddingProvider } from './vector-store.js';
|
||||||
export type {
|
|
||||||
MemoryAdapter,
|
|
||||||
MemoryConfig,
|
|
||||||
NewInsight as AdapterNewInsight,
|
|
||||||
Insight as AdapterInsight,
|
|
||||||
InsightSearchResult,
|
|
||||||
} from './types.js';
|
|
||||||
export { createMemoryAdapter, registerMemoryAdapter } from './factory.js';
|
|
||||||
export { PgVectorAdapter } from './adapters/pgvector.js';
|
|
||||||
export { KeywordAdapter } from './adapters/keyword.js';
|
|
||||||
|
|
||||||
// Auto-register adapters at module load time
|
|
||||||
import { registerMemoryAdapter } from './factory.js';
|
|
||||||
import { PgVectorAdapter } from './adapters/pgvector.js';
|
|
||||||
import { KeywordAdapter } from './adapters/keyword.js';
|
|
||||||
import type { MemoryConfig } from './types.js';
|
|
||||||
|
|
||||||
registerMemoryAdapter('pgvector', (config: MemoryConfig) => {
|
|
||||||
return new PgVectorAdapter(config as Extract<MemoryConfig, { type: 'pgvector' }>);
|
|
||||||
});
|
|
||||||
|
|
||||||
registerMemoryAdapter('keyword', (config: MemoryConfig) => {
|
|
||||||
return new KeywordAdapter(config as Extract<MemoryConfig, { type: 'keyword' }>);
|
|
||||||
});
|
|
||||||
|
|||||||
@@ -1,73 +0,0 @@
|
|||||||
export type { EmbeddingProvider, VectorSearchResult } from './vector-store.js';
|
|
||||||
import type { EmbeddingProvider } from './vector-store.js';
|
|
||||||
import type { StorageAdapter } from '@mosaic/storage';
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* Insight types (adapter-level, decoupled from Drizzle schema) */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
export interface NewInsight {
|
|
||||||
userId: string;
|
|
||||||
content: string;
|
|
||||||
source: string;
|
|
||||||
category: string;
|
|
||||||
relevanceScore: number;
|
|
||||||
metadata?: Record<string, unknown>;
|
|
||||||
embedding?: number[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface Insight extends NewInsight {
|
|
||||||
id: string;
|
|
||||||
createdAt: Date;
|
|
||||||
updatedAt?: Date;
|
|
||||||
decayedAt?: Date;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface InsightSearchResult {
|
|
||||||
id: string;
|
|
||||||
content: string;
|
|
||||||
score: number;
|
|
||||||
metadata?: Record<string, unknown>;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* MemoryAdapter interface */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
export interface MemoryAdapter {
|
|
||||||
readonly name: string;
|
|
||||||
|
|
||||||
// Preferences
|
|
||||||
getPreference(userId: string, key: string): Promise<unknown | null>;
|
|
||||||
setPreference(userId: string, key: string, value: unknown, category?: string): Promise<void>;
|
|
||||||
deletePreference(userId: string, key: string): Promise<boolean>;
|
|
||||||
listPreferences(
|
|
||||||
userId: string,
|
|
||||||
category?: string,
|
|
||||||
): Promise<Array<{ key: string; value: unknown; category: string }>>;
|
|
||||||
|
|
||||||
// Insights
|
|
||||||
storeInsight(insight: NewInsight): Promise<Insight>;
|
|
||||||
getInsight(id: string): Promise<Insight | null>;
|
|
||||||
searchInsights(
|
|
||||||
userId: string,
|
|
||||||
query: string,
|
|
||||||
opts?: { limit?: number; embedding?: number[] },
|
|
||||||
): Promise<InsightSearchResult[]>;
|
|
||||||
deleteInsight(id: string): Promise<boolean>;
|
|
||||||
|
|
||||||
// Embedding
|
|
||||||
readonly embedder: EmbeddingProvider | null;
|
|
||||||
|
|
||||||
// Lifecycle
|
|
||||||
close(): Promise<void>;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
/* MemoryConfig */
|
|
||||||
/* ------------------------------------------------------------------ */
|
|
||||||
|
|
||||||
export type MemoryConfig =
|
|
||||||
| { type: 'pgvector'; embedder?: EmbeddingProvider }
|
|
||||||
| { type: 'sqlite-vec'; embedder?: EmbeddingProvider }
|
|
||||||
| { type: 'keyword'; storage: StorageAdapter };
|
|
||||||
@@ -1,88 +0,0 @@
|
|||||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
|
||||||
import {
|
|
||||||
mkdtempSync,
|
|
||||||
mkdirSync,
|
|
||||||
writeFileSync,
|
|
||||||
readFileSync,
|
|
||||||
existsSync,
|
|
||||||
chmodSync,
|
|
||||||
rmSync,
|
|
||||||
} from 'node:fs';
|
|
||||||
import { join } from 'node:path';
|
|
||||||
import { tmpdir } from 'node:os';
|
|
||||||
import { syncDirectory } from '../../src/platform/file-ops.js';
|
|
||||||
|
|
||||||
describe('syncDirectory', () => {
|
|
||||||
let tmpDir: string;
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
tmpDir = mkdtempSync(join(tmpdir(), 'mosaic-file-ops-'));
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('is a no-op when source and target are the same path', () => {
|
|
||||||
const dir = join(tmpDir, 'same');
|
|
||||||
mkdirSync(dir, { recursive: true });
|
|
||||||
writeFileSync(join(dir, 'file.txt'), 'hello');
|
|
||||||
// Should not throw even with read-only files
|
|
||||||
const gitDir = join(dir, '.git', 'objects', 'pack');
|
|
||||||
mkdirSync(gitDir, { recursive: true });
|
|
||||||
const packFile = join(gitDir, 'pack-abc.idx');
|
|
||||||
writeFileSync(packFile, 'data');
|
|
||||||
chmodSync(packFile, 0o444);
|
|
||||||
|
|
||||||
expect(() => syncDirectory(dir, dir)).not.toThrow();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('skips nested .git directories when excludeGit is true', () => {
|
|
||||||
const src = join(tmpDir, 'src');
|
|
||||||
const dest = join(tmpDir, 'dest');
|
|
||||||
|
|
||||||
// Create source with a nested .git
|
|
||||||
mkdirSync(join(src, 'sources', 'skills', '.git', 'objects'), { recursive: true });
|
|
||||||
writeFileSync(join(src, 'sources', 'skills', '.git', 'objects', 'pack.idx'), 'git-data');
|
|
||||||
writeFileSync(join(src, 'sources', 'skills', 'SKILL.md'), 'skill content');
|
|
||||||
writeFileSync(join(src, 'README.md'), 'readme');
|
|
||||||
|
|
||||||
syncDirectory(src, dest, { excludeGit: true });
|
|
||||||
|
|
||||||
// .git contents should NOT be copied
|
|
||||||
expect(existsSync(join(dest, 'sources', 'skills', '.git'))).toBe(false);
|
|
||||||
// Normal files should be copied
|
|
||||||
expect(readFileSync(join(dest, 'sources', 'skills', 'SKILL.md'), 'utf-8')).toBe(
|
|
||||||
'skill content',
|
|
||||||
);
|
|
||||||
expect(readFileSync(join(dest, 'README.md'), 'utf-8')).toBe('readme');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('copies nested .git directories when excludeGit is false', () => {
|
|
||||||
const src = join(tmpDir, 'src');
|
|
||||||
const dest = join(tmpDir, 'dest');
|
|
||||||
|
|
||||||
mkdirSync(join(src, 'sub', '.git'), { recursive: true });
|
|
||||||
writeFileSync(join(src, 'sub', '.git', 'HEAD'), 'ref: refs/heads/main');
|
|
||||||
|
|
||||||
syncDirectory(src, dest, { excludeGit: false });
|
|
||||||
|
|
||||||
expect(readFileSync(join(dest, 'sub', '.git', 'HEAD'), 'utf-8')).toBe('ref: refs/heads/main');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('respects preserve option', () => {
|
|
||||||
const src = join(tmpDir, 'src');
|
|
||||||
const dest = join(tmpDir, 'dest');
|
|
||||||
|
|
||||||
mkdirSync(src, { recursive: true });
|
|
||||||
mkdirSync(dest, { recursive: true });
|
|
||||||
writeFileSync(join(src, 'SOUL.md'), 'new soul');
|
|
||||||
writeFileSync(join(dest, 'SOUL.md'), 'old soul');
|
|
||||||
writeFileSync(join(src, 'README.md'), 'new readme');
|
|
||||||
|
|
||||||
syncDirectory(src, dest, { preserve: ['SOUL.md'] });
|
|
||||||
|
|
||||||
expect(readFileSync(join(dest, 'SOUL.md'), 'utf-8')).toBe('old soul');
|
|
||||||
expect(readFileSync(join(dest, 'README.md'), 'utf-8')).toBe('new readme');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -65,36 +65,4 @@ describe('detectInstallStage', () => {
|
|||||||
expect(state.installAction).toBe('keep');
|
expect(state.installAction).toBe('keep');
|
||||||
expect(state.soul.agentName).toBe('TestAgent');
|
expect(state.soul.agentName).toBe('TestAgent');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('pre-populates state when reconfiguring', async () => {
|
|
||||||
mkdirSync(join(tmpDir, 'bin'), { recursive: true });
|
|
||||||
writeFileSync(join(tmpDir, 'SOUL.md'), 'You are **Jarvis** in this session.');
|
|
||||||
writeFileSync(join(tmpDir, 'USER.md'), '**Name:** TestUser');
|
|
||||||
|
|
||||||
const p = new HeadlessPrompter({
|
|
||||||
'What would you like to do?': 'reconfigure',
|
|
||||||
});
|
|
||||||
const state = createState(tmpDir);
|
|
||||||
await detectInstallStage(p, state, mockConfig);
|
|
||||||
|
|
||||||
expect(state.installAction).toBe('reconfigure');
|
|
||||||
// Existing values loaded as defaults for reconfiguration
|
|
||||||
expect(state.soul.agentName).toBe('TestAgent');
|
|
||||||
expect(state.user.userName).toBe('TestUser');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('does not pre-populate state on fresh reset', async () => {
|
|
||||||
mkdirSync(join(tmpDir, 'bin'), { recursive: true });
|
|
||||||
writeFileSync(join(tmpDir, 'SOUL.md'), 'You are **Jarvis** in this session.');
|
|
||||||
|
|
||||||
const p = new HeadlessPrompter({
|
|
||||||
'What would you like to do?': 'reset',
|
|
||||||
});
|
|
||||||
const state = createState(tmpDir);
|
|
||||||
await detectInstallStage(p, state, mockConfig);
|
|
||||||
|
|
||||||
expect(state.installAction).toBe('reset');
|
|
||||||
// Reset should NOT load existing values
|
|
||||||
expect(state.soul.agentName).toBeUndefined();
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -113,8 +113,8 @@ echo "[mosaic] Optional: run orchestrator rail via ~/.config/mosaic/bin/mosaic-o
|
|||||||
echo "[mosaic] Optional: run detached orchestrator via bash $TARGET_DIR/scripts/agent/orchestrator-daemon.sh start"
|
echo "[mosaic] Optional: run detached orchestrator via bash $TARGET_DIR/scripts/agent/orchestrator-daemon.sh start"
|
||||||
|
|
||||||
if [[ -n "$QUALITY_TEMPLATE" ]]; then
|
if [[ -n "$QUALITY_TEMPLATE" ]]; then
|
||||||
if [[ -x "$MOSAIC_HOME/tools/_scripts/mosaic-quality-apply" ]]; then
|
if [[ -x "$MOSAIC_HOME/bin/mosaic-quality-apply" ]]; then
|
||||||
"$MOSAIC_HOME/tools/_scripts/mosaic-quality-apply" --template "$QUALITY_TEMPLATE" --target "$TARGET_DIR"
|
"$MOSAIC_HOME/bin/mosaic-quality-apply" --template "$QUALITY_TEMPLATE" --target "$TARGET_DIR"
|
||||||
if [[ -f "$TARGET_DIR/.mosaic/quality-rails.yml" ]]; then
|
if [[ -f "$TARGET_DIR/.mosaic/quality-rails.yml" ]]; then
|
||||||
sed -i "s/^enabled:.*/enabled: true/" "$TARGET_DIR/.mosaic/quality-rails.yml"
|
sed -i "s/^enabled:.*/enabled: true/" "$TARGET_DIR/.mosaic/quality-rails.yml"
|
||||||
sed -i "s/^template:.*/template: \"$QUALITY_TEMPLATE\"/" "$TARGET_DIR/.mosaic/quality-rails.yml"
|
sed -i "s/^template:.*/template: \"$QUALITY_TEMPLATE\"/" "$TARGET_DIR/.mosaic/quality-rails.yml"
|
||||||
@@ -165,18 +165,18 @@ expect_dir "$MOSAIC_HOME/profiles"
|
|||||||
expect_dir "$MOSAIC_HOME/templates/agent"
|
expect_dir "$MOSAIC_HOME/templates/agent"
|
||||||
expect_dir "$MOSAIC_HOME/skills"
|
expect_dir "$MOSAIC_HOME/skills"
|
||||||
expect_dir "$MOSAIC_HOME/skills-local"
|
expect_dir "$MOSAIC_HOME/skills-local"
|
||||||
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets"
|
expect_file "$MOSAIC_HOME/bin/mosaic-link-runtime-assets"
|
||||||
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-ensure-sequential-thinking"
|
expect_file "$MOSAIC_HOME/bin/mosaic-ensure-sequential-thinking"
|
||||||
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-sync-skills"
|
expect_file "$MOSAIC_HOME/bin/mosaic-sync-skills"
|
||||||
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-projects"
|
expect_file "$MOSAIC_HOME/bin/mosaic-projects"
|
||||||
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-quality-apply"
|
expect_file "$MOSAIC_HOME/bin/mosaic-quality-apply"
|
||||||
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-quality-verify"
|
expect_file "$MOSAIC_HOME/bin/mosaic-quality-verify"
|
||||||
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-run"
|
expect_file "$MOSAIC_HOME/bin/mosaic-orchestrator-run"
|
||||||
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-sync-tasks"
|
expect_file "$MOSAIC_HOME/bin/mosaic-orchestrator-sync-tasks"
|
||||||
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-drain"
|
expect_file "$MOSAIC_HOME/bin/mosaic-orchestrator-drain"
|
||||||
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-matrix-publish"
|
expect_file "$MOSAIC_HOME/bin/mosaic-orchestrator-matrix-publish"
|
||||||
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-matrix-consume"
|
expect_file "$MOSAIC_HOME/bin/mosaic-orchestrator-matrix-consume"
|
||||||
expect_file "$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-matrix-cycle"
|
expect_file "$MOSAIC_HOME/bin/mosaic-orchestrator-matrix-cycle"
|
||||||
expect_file "$MOSAIC_HOME/tools/git/ci-queue-wait.sh"
|
expect_file "$MOSAIC_HOME/tools/git/ci-queue-wait.sh"
|
||||||
expect_file "$MOSAIC_HOME/tools/git/pr-ci-wait.sh"
|
expect_file "$MOSAIC_HOME/tools/git/pr-ci-wait.sh"
|
||||||
expect_file "$MOSAIC_HOME/tools/orchestrator-matrix/transport/matrix_transport.py"
|
expect_file "$MOSAIC_HOME/tools/orchestrator-matrix/transport/matrix_transport.py"
|
||||||
@@ -215,8 +215,8 @@ check_runtime_contract_file "$HOME/.config/opencode/AGENTS.md" "$MOSAIC_HOME/run
|
|||||||
check_runtime_contract_file "$HOME/.codex/instructions.md" "$MOSAIC_HOME/runtime/codex/instructions.md" "codex"
|
check_runtime_contract_file "$HOME/.codex/instructions.md" "$MOSAIC_HOME/runtime/codex/instructions.md" "codex"
|
||||||
|
|
||||||
# Sequential-thinking MCP hard requirement.
|
# Sequential-thinking MCP hard requirement.
|
||||||
if [[ -x "$MOSAIC_HOME/tools/_scripts/mosaic-ensure-sequential-thinking" ]]; then
|
if [[ -x "$MOSAIC_HOME/bin/mosaic-ensure-sequential-thinking" ]]; then
|
||||||
if "$MOSAIC_HOME/tools/_scripts/mosaic-ensure-sequential-thinking" --check >/dev/null 2>&1; then
|
if "$MOSAIC_HOME/bin/mosaic-ensure-sequential-thinking" --check >/dev/null 2>&1; then
|
||||||
pass "sequential-thinking MCP configured and available"
|
pass "sequential-thinking MCP configured and available"
|
||||||
else
|
else
|
||||||
warn "sequential-thinking MCP missing or misconfigured"
|
warn "sequential-thinking MCP missing or misconfigured"
|
||||||
@@ -422,8 +422,8 @@ with open('$pi_settings_file', 'w') as f:
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
# 4. Run link-runtime-assets if available
|
# 4. Run link-runtime-assets if available
|
||||||
if [[ -x "$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets" ]]; then
|
if [[ -x "$MOSAIC_HOME/bin/mosaic-link-runtime-assets" ]]; then
|
||||||
"$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets" >/dev/null 2>&1 && fix "Re-ran mosaic-link-runtime-assets"
|
"$MOSAIC_HOME/bin/mosaic-link-runtime-assets" >/dev/null 2>&1 && fix "Re-ran mosaic-link-runtime-assets"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo "[mosaic-doctor] fixes=$fix_count"
|
echo "[mosaic-doctor] fixes=$fix_count"
|
||||||
@@ -60,14 +60,12 @@ Options:
|
|||||||
--timezone <tz> Your timezone (e.g., "America/Chicago")
|
--timezone <tz> Your timezone (e.g., "America/Chicago")
|
||||||
--non-interactive Fail if any required value is missing (no prompts)
|
--non-interactive Fail if any required value is missing (no prompts)
|
||||||
--soul-only Only generate SOUL.md
|
--soul-only Only generate SOUL.md
|
||||||
--force Overwrite existing files without prompting
|
|
||||||
-h, --help Show help
|
-h, --help Show help
|
||||||
USAGE
|
USAGE
|
||||||
}
|
}
|
||||||
|
|
||||||
NON_INTERACTIVE=0
|
NON_INTERACTIVE=0
|
||||||
SOUL_ONLY=0
|
SOUL_ONLY=0
|
||||||
FORCE=0
|
|
||||||
|
|
||||||
while [[ $# -gt 0 ]]; do
|
while [[ $# -gt 0 ]]; do
|
||||||
case "$1" in
|
case "$1" in
|
||||||
@@ -81,7 +79,6 @@ while [[ $# -gt 0 ]]; do
|
|||||||
--timezone) TIMEZONE="$2"; shift 2 ;;
|
--timezone) TIMEZONE="$2"; shift 2 ;;
|
||||||
--non-interactive) NON_INTERACTIVE=1; shift ;;
|
--non-interactive) NON_INTERACTIVE=1; shift ;;
|
||||||
--soul-only) SOUL_ONLY=1; shift ;;
|
--soul-only) SOUL_ONLY=1; shift ;;
|
||||||
--force) FORCE=1; shift ;;
|
|
||||||
-h|--help) usage; exit 0 ;;
|
-h|--help) usage; exit 0 ;;
|
||||||
*) echo "Unknown argument: $1" >&2; usage >&2; exit 1 ;;
|
*) echo "Unknown argument: $1" >&2; usage >&2; exit 1 ;;
|
||||||
esac
|
esac
|
||||||
@@ -142,134 +139,6 @@ prompt_multiline() {
|
|||||||
eval "$var_name=\"$value\""
|
eval "$var_name=\"$value\""
|
||||||
}
|
}
|
||||||
|
|
||||||
# ── Existing file detection ────────────────────────────────────
|
|
||||||
|
|
||||||
detect_existing_config() {
|
|
||||||
local found=0
|
|
||||||
local existing_files=()
|
|
||||||
|
|
||||||
[[ -f "$SOUL_OUTPUT" ]] && { found=1; existing_files+=("SOUL.md"); }
|
|
||||||
[[ -f "$USER_OUTPUT" ]] && { found=1; existing_files+=("USER.md"); }
|
|
||||||
[[ -f "$TOOLS_OUTPUT" ]] && { found=1; existing_files+=("TOOLS.md"); }
|
|
||||||
|
|
||||||
if [[ $found -eq 0 || $FORCE -eq 1 ]]; then
|
|
||||||
return 0 # No existing files or --force: proceed with fresh install
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "[mosaic-init] Existing configuration detected:"
|
|
||||||
for f in "${existing_files[@]}"; do
|
|
||||||
echo " ✓ $f"
|
|
||||||
done
|
|
||||||
|
|
||||||
# Show current agent name if SOUL.md exists
|
|
||||||
if [[ -f "$SOUL_OUTPUT" ]]; then
|
|
||||||
local current_name
|
|
||||||
current_name=$(grep -oP 'You are \*\*\K[^*]+' "$SOUL_OUTPUT" 2>/dev/null || true)
|
|
||||||
if [[ -n "$current_name" ]]; then
|
|
||||||
echo " Agent: $current_name"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
if [[ $NON_INTERACTIVE -eq 1 ]]; then
|
|
||||||
echo "[mosaic-init] Existing config found. Use --force to overwrite in non-interactive mode."
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "What would you like to do?"
|
|
||||||
echo " 1) keep — Keep existing files, skip init (default)"
|
|
||||||
echo " 2) import — Import values from existing files as defaults, then regenerate"
|
|
||||||
echo " 3) overwrite — Start fresh, overwrite all files"
|
|
||||||
printf "Choose [1/2/3]: "
|
|
||||||
read -r choice
|
|
||||||
|
|
||||||
case "${choice:-1}" in
|
|
||||||
1|keep)
|
|
||||||
echo "[mosaic-init] Keeping existing configuration."
|
|
||||||
# Still push to runtime adapters in case framework was updated
|
|
||||||
if [[ -x "$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets" ]]; then
|
|
||||||
echo "[mosaic-init] Updating runtime adapters..."
|
|
||||||
"$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets"
|
|
||||||
fi
|
|
||||||
echo "[mosaic-init] Done. Launch with: mosaic claude"
|
|
||||||
exit 0
|
|
||||||
;;
|
|
||||||
2|import)
|
|
||||||
echo "[mosaic-init] Importing values from existing files as defaults..."
|
|
||||||
import_existing_values
|
|
||||||
;;
|
|
||||||
3|overwrite)
|
|
||||||
echo "[mosaic-init] Starting fresh install..."
|
|
||||||
# Back up existing files
|
|
||||||
local ts
|
|
||||||
ts=$(date +%Y%m%d%H%M%S)
|
|
||||||
for f in "${existing_files[@]}"; do
|
|
||||||
local src="$MOSAIC_HOME/$f"
|
|
||||||
if [[ -f "$src" ]]; then
|
|
||||||
cp "$src" "${src}.bak.${ts}"
|
|
||||||
echo " Backed up $f → ${f}.bak.${ts}"
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
echo "[mosaic-init] Invalid choice. Keeping existing configuration."
|
|
||||||
exit 0
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
}
|
|
||||||
|
|
||||||
import_existing_values() {
|
|
||||||
# Import SOUL.md values
|
|
||||||
if [[ -f "$SOUL_OUTPUT" ]]; then
|
|
||||||
local content
|
|
||||||
content=$(cat "$SOUL_OUTPUT")
|
|
||||||
|
|
||||||
if [[ -z "$AGENT_NAME" ]]; then
|
|
||||||
AGENT_NAME=$(echo "$content" | grep -oP 'You are \*\*\K[^*]+' 2>/dev/null || true)
|
|
||||||
fi
|
|
||||||
if [[ -z "$ROLE_DESCRIPTION" ]]; then
|
|
||||||
ROLE_DESCRIPTION=$(echo "$content" | grep -oP 'Role identity: \K.+' 2>/dev/null || true)
|
|
||||||
fi
|
|
||||||
if [[ -z "$STYLE" ]]; then
|
|
||||||
if echo "$content" | grep -q 'Be direct, concise'; then
|
|
||||||
STYLE="direct"
|
|
||||||
elif echo "$content" | grep -q 'Be warm and conversational'; then
|
|
||||||
STYLE="friendly"
|
|
||||||
elif echo "$content" | grep -q 'Use professional, structured'; then
|
|
||||||
STYLE="formal"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Import USER.md values
|
|
||||||
if [[ -f "$USER_OUTPUT" ]]; then
|
|
||||||
local content
|
|
||||||
content=$(cat "$USER_OUTPUT")
|
|
||||||
|
|
||||||
if [[ -z "$USER_NAME" ]]; then
|
|
||||||
USER_NAME=$(echo "$content" | grep -oP '\*\*Name:\*\* \K.+' 2>/dev/null || true)
|
|
||||||
fi
|
|
||||||
if [[ -z "$PRONOUNS" ]]; then
|
|
||||||
PRONOUNS=$(echo "$content" | grep -oP '\*\*Pronouns:\*\* \K.+' 2>/dev/null || true)
|
|
||||||
fi
|
|
||||||
if [[ -z "$TIMEZONE" ]]; then
|
|
||||||
TIMEZONE=$(echo "$content" | grep -oP '\*\*Timezone:\*\* \K.+' 2>/dev/null || true)
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Import TOOLS.md values
|
|
||||||
if [[ -f "$TOOLS_OUTPUT" ]]; then
|
|
||||||
local content
|
|
||||||
content=$(cat "$TOOLS_OUTPUT")
|
|
||||||
|
|
||||||
if [[ -z "$CREDENTIALS_LOCATION" ]]; then
|
|
||||||
CREDENTIALS_LOCATION=$(echo "$content" | grep -oP '\*\*Location:\*\* \K.+' 2>/dev/null || true)
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
detect_existing_config
|
|
||||||
|
|
||||||
# ── SOUL.md Generation ────────────────────────────────────────
|
# ── SOUL.md Generation ────────────────────────────────────────
|
||||||
echo "[mosaic-init] Generating SOUL.md — agent identity contract"
|
echo "[mosaic-init] Generating SOUL.md — agent identity contract"
|
||||||
echo ""
|
echo ""
|
||||||
@@ -392,9 +261,9 @@ echo "[mosaic-init] Style: $STYLE"
|
|||||||
|
|
||||||
if [[ $SOUL_ONLY -eq 1 ]]; then
|
if [[ $SOUL_ONLY -eq 1 ]]; then
|
||||||
# Push to runtime adapters and exit
|
# Push to runtime adapters and exit
|
||||||
if [[ -x "$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets" ]]; then
|
if [[ -x "$MOSAIC_HOME/bin/mosaic-link-runtime-assets" ]]; then
|
||||||
echo "[mosaic-init] Updating runtime adapters..."
|
echo "[mosaic-init] Updating runtime adapters..."
|
||||||
"$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets"
|
"$MOSAIC_HOME/bin/mosaic-link-runtime-assets"
|
||||||
fi
|
fi
|
||||||
echo "[mosaic-init] Done. Launch with: mosaic claude"
|
echo "[mosaic-init] Done. Launch with: mosaic claude"
|
||||||
exit 0
|
exit 0
|
||||||
@@ -544,10 +413,10 @@ fi
|
|||||||
# ── Finalize ──────────────────────────────────────────────────
|
# ── Finalize ──────────────────────────────────────────────────
|
||||||
|
|
||||||
# Push to runtime adapters
|
# Push to runtime adapters
|
||||||
if [[ -x "$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets" ]]; then
|
if [[ -x "$MOSAIC_HOME/bin/mosaic-link-runtime-assets" ]]; then
|
||||||
echo ""
|
echo ""
|
||||||
echo "[mosaic-init] Updating runtime adapters..."
|
echo "[mosaic-init] Updating runtime adapters..."
|
||||||
"$MOSAIC_HOME/tools/_scripts/mosaic-link-runtime-assets"
|
"$MOSAIC_HOME/bin/mosaic-link-runtime-assets"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo ""
|
echo ""
|
||||||
845
packages/mosaic/framework/bin/mosaic-launch
Executable file
845
packages/mosaic/framework/bin/mosaic-launch
Executable file
@@ -0,0 +1,845 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# mosaic-launch — Framework agent launcher (called by the mosaic npm CLI)
|
||||||
|
#
|
||||||
|
# AGENTS.md is the global policy source for all agent sessions.
|
||||||
|
# The launcher injects a composed runtime contract (AGENTS + runtime reference).
|
||||||
|
#
|
||||||
|
# Usage (via mosaic CLI):
|
||||||
|
# mosaic claude [args...] Launch Claude Code with runtime contract injected
|
||||||
|
# mosaic codex [args...] Launch Codex with runtime contract injected
|
||||||
|
# mosaic opencode [args...] Launch OpenCode with runtime contract injected
|
||||||
|
# mosaic pi [args...] Launch Pi with runtime contract injected
|
||||||
|
# mosaic yolo <runtime> [args...] Launch runtime in dangerous-permissions mode
|
||||||
|
#
|
||||||
|
# Direct usage:
|
||||||
|
# mosaic-launch claude [args...]
|
||||||
|
# mosaic-launch yolo claude [args...]
|
||||||
|
|
||||||
|
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
|
||||||
|
VERSION="0.1.0"
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
cat <<USAGE
|
||||||
|
mosaic $VERSION — Unified agent launcher
|
||||||
|
|
||||||
|
Usage: mosaic <command> [args...]
|
||||||
|
|
||||||
|
Agent Launchers:
|
||||||
|
pi [args...] Launch Pi with runtime contract injected (recommended)
|
||||||
|
claude [args...] Launch Claude Code with runtime contract injected
|
||||||
|
opencode [args...] Launch OpenCode with runtime contract injected
|
||||||
|
codex [args...] Launch Codex with runtime contract injected
|
||||||
|
yolo <runtime> [args...] Dangerous mode for claude|codex|opencode|pi
|
||||||
|
--yolo <runtime> [args...] Alias for yolo
|
||||||
|
|
||||||
|
Management:
|
||||||
|
init [args...] Generate SOUL.md (agent identity contract)
|
||||||
|
doctor [args...] Audit runtime state and detect drift
|
||||||
|
sync [args...] Sync skills from canonical source
|
||||||
|
seq [subcommand] sequential-thinking MCP management:
|
||||||
|
check [--runtime <r>] [--strict]
|
||||||
|
fix [--runtime <r>]
|
||||||
|
start
|
||||||
|
bootstrap <path> Bootstrap a repo with Mosaic standards
|
||||||
|
upgrade [mode] [args] Upgrade release (default) or project files
|
||||||
|
upgrade check Check release upgrade status (no changes)
|
||||||
|
release-upgrade [...] Upgrade installed Mosaic release
|
||||||
|
project-upgrade [...] Clean up stale SOUL.md/CLAUDE.md in a project
|
||||||
|
|
||||||
|
PRD:
|
||||||
|
prdy <subcommand> PRD creation and validation
|
||||||
|
init Create docs/PRD.md via guided runtime session
|
||||||
|
update Update existing PRD via guided runtime session
|
||||||
|
validate Check PRD completeness (bash-only)
|
||||||
|
status Quick PRD health check (one-liner)
|
||||||
|
|
||||||
|
Coordinator (r0):
|
||||||
|
coord <subcommand> Manual coordinator tools
|
||||||
|
init Initialize a new mission
|
||||||
|
mission Show mission progress dashboard
|
||||||
|
status Check agent session health
|
||||||
|
continue Generate continuation prompt
|
||||||
|
run Generate context and launch selected runtime
|
||||||
|
resume Crash recovery
|
||||||
|
|
||||||
|
Options:
|
||||||
|
-h, --help Show this help
|
||||||
|
-v, --version Show version
|
||||||
|
|
||||||
|
All arguments after the command are forwarded to the target CLI.
|
||||||
|
USAGE
|
||||||
|
}
|
||||||
|
|
||||||
|
# Pre-flight checks
|
||||||
|
check_mosaic_home() {
|
||||||
|
if [[ ! -d "$MOSAIC_HOME" ]]; then
|
||||||
|
echo "[mosaic] ERROR: ~/.config/mosaic not found." >&2
|
||||||
|
echo "[mosaic] Install with: bash <(curl -fsSL https://git.mosaicstack.dev/mosaic/mosaic-stack/raw/branch/main/tools/install.sh)" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
check_agents_md() {
|
||||||
|
if [[ ! -f "$MOSAIC_HOME/AGENTS.md" ]]; then
|
||||||
|
echo "[mosaic] ERROR: ~/.config/mosaic/AGENTS.md not found." >&2
|
||||||
|
echo "[mosaic] Re-run the installer: bash <(curl -fsSL https://git.mosaicstack.dev/mosaic/mosaic-stack/raw/branch/main/tools/install.sh)" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
check_soul() {
|
||||||
|
if [[ ! -f "$MOSAIC_HOME/SOUL.md" ]]; then
|
||||||
|
echo "[mosaic] SOUL.md not found. Running mosaic init..."
|
||||||
|
"$MOSAIC_HOME/bin/mosaic-init"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
check_runtime() {
|
||||||
|
local cmd="$1"
|
||||||
|
if ! command -v "$cmd" >/dev/null 2>&1; then
|
||||||
|
echo "[mosaic] ERROR: '$cmd' not found in PATH." >&2
|
||||||
|
echo "[mosaic] Install $cmd before launching." >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
check_sequential_thinking() {
|
||||||
|
local runtime="${1:-all}"
|
||||||
|
local checker="$MOSAIC_HOME/bin/mosaic-ensure-sequential-thinking"
|
||||||
|
if [[ ! -x "$checker" ]]; then
|
||||||
|
echo "[mosaic] ERROR: sequential-thinking checker missing: $checker" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
if ! "$checker" --check --runtime "$runtime" >/dev/null 2>&1; then
|
||||||
|
echo "[mosaic] ERROR: sequential-thinking MCP is required but not configured." >&2
|
||||||
|
echo "[mosaic] Fix config: $checker --runtime $runtime" >&2
|
||||||
|
echo "[mosaic] Or run: mosaic seq fix --runtime $runtime" >&2
|
||||||
|
echo "[mosaic] Manual server start: mosaic seq start" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
runtime_contract_path() {
|
||||||
|
local runtime="$1"
|
||||||
|
case "$runtime" in
|
||||||
|
claude) echo "$MOSAIC_HOME/runtime/claude/RUNTIME.md" ;;
|
||||||
|
codex) echo "$MOSAIC_HOME/runtime/codex/RUNTIME.md" ;;
|
||||||
|
opencode) echo "$MOSAIC_HOME/runtime/opencode/RUNTIME.md" ;;
|
||||||
|
pi) echo "$MOSAIC_HOME/runtime/pi/RUNTIME.md" ;;
|
||||||
|
*)
|
||||||
|
echo "[mosaic] ERROR: unsupported runtime '$runtime' for runtime contract." >&2
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
}
|
||||||
|
|
||||||
|
build_runtime_prompt() {
|
||||||
|
local runtime="$1"
|
||||||
|
local runtime_file
|
||||||
|
runtime_file="$(runtime_contract_path "$runtime")"
|
||||||
|
if [[ ! -f "$runtime_file" ]]; then
|
||||||
|
echo "[mosaic] ERROR: runtime contract not found: $runtime_file" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Inject active mission context FIRST so the agent sees it immediately
|
||||||
|
local mission_file=".mosaic/orchestrator/mission.json"
|
||||||
|
if [[ -f "$mission_file" ]] && command -v jq &>/dev/null; then
|
||||||
|
local m_status
|
||||||
|
m_status="$(jq -r '.status // "inactive"' "$mission_file" 2>/dev/null)"
|
||||||
|
if [[ "$m_status" == "active" || "$m_status" == "paused" ]]; then
|
||||||
|
local m_name m_id m_count m_completed
|
||||||
|
m_name="$(jq -r '.name // "unnamed"' "$mission_file")"
|
||||||
|
m_id="$(jq -r '.mission_id // ""' "$mission_file")"
|
||||||
|
m_count="$(jq '.milestones | length' "$mission_file")"
|
||||||
|
m_completed="$(jq '[.milestones[] | select(.status == "completed")] | length' "$mission_file")"
|
||||||
|
|
||||||
|
cat <<MISSION_EOF
|
||||||
|
# ACTIVE MISSION — HARD GATE (Read Before Anything Else)
|
||||||
|
|
||||||
|
An active orchestration mission exists in this project. This is a BLOCKING requirement.
|
||||||
|
|
||||||
|
**Mission:** $m_name
|
||||||
|
**ID:** $m_id
|
||||||
|
**Status:** $m_status
|
||||||
|
**Milestones:** $m_completed / $m_count completed
|
||||||
|
|
||||||
|
## MANDATORY — Before ANY Response to the User
|
||||||
|
|
||||||
|
You MUST complete these steps before responding to any user message, including simple greetings:
|
||||||
|
|
||||||
|
1. Read \`~/.config/mosaic/guides/ORCHESTRATOR-PROTOCOL.md\` (mission lifecycle protocol)
|
||||||
|
2. Read \`docs/MISSION-MANIFEST.md\` for full mission scope, milestones, and success criteria
|
||||||
|
3. Read the latest scratchpad in \`docs/scratchpads/\` for session history, decisions, and corrections
|
||||||
|
4. Read \`docs/TASKS.md\` for current task state (what is done, what is next)
|
||||||
|
5. After reading all four, acknowledge the mission state to the user before proceeding
|
||||||
|
|
||||||
|
If the user gives a task, execute it within the mission context. If no task is given, present mission status and ask how to proceed.
|
||||||
|
|
||||||
|
MISSION_EOF
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Inject PRD status so the agent knows requirements state
|
||||||
|
local prd_file="docs/PRD.md"
|
||||||
|
if [[ -f "$prd_file" ]]; then
|
||||||
|
local prd_sections=0
|
||||||
|
local prd_assumptions=0
|
||||||
|
for entry in "Problem Statement|^#{2,3} .*(problem statement|objective)" \
|
||||||
|
"Scope / Non-Goals|^#{2,3} .*(scope|non.goal|out of scope|in.scope)" \
|
||||||
|
"User Stories / Requirements|^#{2,3} .*(user stor|stakeholder|user.*requirement)" \
|
||||||
|
"Functional Requirements|^#{2,3} .*functional requirement" \
|
||||||
|
"Non-Functional Requirements|^#{2,3} .*non.functional" \
|
||||||
|
"Acceptance Criteria|^#{2,3} .*acceptance criteria" \
|
||||||
|
"Technical Considerations|^#{2,3} .*(technical consideration|constraint|dependenc)" \
|
||||||
|
"Risks / Open Questions|^#{2,3} .*(risk|open question)" \
|
||||||
|
"Success Metrics / Testing|^#{2,3} .*(success metric|test|verification)" \
|
||||||
|
"Milestones / Delivery|^#{2,3} .*(milestone|delivery|scope version)"; do
|
||||||
|
local pattern="${entry#*|}"
|
||||||
|
grep -qiE "$pattern" "$prd_file" 2>/dev/null && prd_sections=$((prd_sections + 1))
|
||||||
|
done
|
||||||
|
prd_assumptions=$(grep -c 'ASSUMPTION:' "$prd_file" 2>/dev/null || echo 0)
|
||||||
|
|
||||||
|
local prd_status="ready"
|
||||||
|
(( prd_sections < 10 )) && prd_status="incomplete ($prd_sections/10 sections)"
|
||||||
|
|
||||||
|
cat <<PRD_EOF
|
||||||
|
|
||||||
|
# PRD Status
|
||||||
|
|
||||||
|
- **File:** docs/PRD.md
|
||||||
|
- **Status:** $prd_status
|
||||||
|
- **Assumptions:** $prd_assumptions
|
||||||
|
|
||||||
|
PRD_EOF
|
||||||
|
fi
|
||||||
|
|
||||||
|
cat <<'EOF'
|
||||||
|
# Mosaic Launcher Runtime Contract (Hard Gate)
|
||||||
|
|
||||||
|
This contract is injected by `mosaic` launch and is mandatory.
|
||||||
|
|
||||||
|
First assistant response MUST start with exactly one mode declaration line:
|
||||||
|
1. Orchestration mission: `Now initiating Orchestrator mode...`
|
||||||
|
2. Implementation mission: `Now initiating Delivery mode...`
|
||||||
|
3. Review-only mission: `Now initiating Review mode...`
|
||||||
|
|
||||||
|
No tool call or implementation step may occur before that first line.
|
||||||
|
|
||||||
|
Mosaic hard gates OVERRIDE runtime-default caution for routine delivery operations.
|
||||||
|
For required push/merge/issue-close/release actions, execute without routine confirmation prompts.
|
||||||
|
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cat "$MOSAIC_HOME/AGENTS.md"
|
||||||
|
|
||||||
|
if [[ -f "$MOSAIC_HOME/USER.md" ]]; then
|
||||||
|
printf '\n\n# User Profile\n\n'
|
||||||
|
cat "$MOSAIC_HOME/USER.md"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ -f "$MOSAIC_HOME/TOOLS.md" ]]; then
|
||||||
|
printf '\n\n# Machine Tools\n\n'
|
||||||
|
cat "$MOSAIC_HOME/TOOLS.md"
|
||||||
|
fi
|
||||||
|
|
||||||
|
printf '\n\n# Runtime-Specific Contract\n\n'
|
||||||
|
cat "$runtime_file"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Ensure runtime contract is present at the runtime's native config path.
|
||||||
|
# Used for runtimes that do not support CLI prompt injection.
|
||||||
|
ensure_runtime_config() {
|
||||||
|
local runtime="$1"
|
||||||
|
local dst="$2"
|
||||||
|
local tmp
|
||||||
|
tmp="$(mktemp)"
|
||||||
|
mkdir -p "$(dirname "$dst")"
|
||||||
|
build_runtime_prompt "$runtime" > "$tmp"
|
||||||
|
if ! cmp -s "$tmp" "$dst" 2>/dev/null; then
|
||||||
|
mv "$tmp" "$dst"
|
||||||
|
else
|
||||||
|
rm -f "$tmp"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Detect an active mission and derive an initial prompt if one exists.
# Sets MOSAIC_MISSION_PROMPT as a side effect ("" when no active mission,
# no mission file, or jq is unavailable).
_detect_mission_prompt() {
  MOSAIC_MISSION_PROMPT=""
  local mission_file=".mosaic/orchestrator/mission.json"

  # Require both the mission state file and jq to read it.
  [[ -f "$mission_file" ]] || return 0
  command -v jq &>/dev/null || return 0

  local m_status
  m_status="$(jq -r '.status // "inactive"' "$mission_file" 2>/dev/null)"
  [[ "$m_status" == "active" || "$m_status" == "paused" ]] || return 0

  local m_name
  # Fix: suppress jq stderr here too (the status read above already does),
  # and fall back to "unnamed" if the read fails or yields an empty string,
  # so the prompt never renders as "Active mission detected: .".
  m_name="$(jq -r '.name // "unnamed"' "$mission_file" 2>/dev/null)" || m_name="unnamed"
  [[ -n "$m_name" ]] || m_name="unnamed"
  MOSAIC_MISSION_PROMPT="Active mission detected: ${m_name}. Read the mission state files and report status."
}
# Write a session lock if an active mission exists in the current directory.
# Called before exec so $$ captures the PID that will become the agent process.
# Arguments:
#   $1 - runtime name recorded in the lock
_write_launcher_session_lock() {
  local rt="$1"
  local mission_file=".mosaic/orchestrator/mission.json"
  local lock_file=".mosaic/orchestrator/session.lock"

  # A lock is only meaningful for an active/paused mission, and we need jq
  # both to read the mission state and to write the lock JSON.
  if [[ ! -f "$mission_file" ]]; then
    return 0
  fi
  if ! command -v jq &>/dev/null; then
    return 0
  fi

  local m_state
  m_state="$(jq -r '.status // "inactive"' "$mission_file" 2>/dev/null)"
  if [[ "$m_state" != "active" && "$m_state" != "paused" ]]; then
    return 0
  fi

  local sid
  sid="${rt}-$(date +%Y%m%d-%H%M%S)-$$"

  jq -n \
    --arg sid "$sid" \
    --arg rt "$rt" \
    --arg pid "$$" \
    --arg ts "$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
    --arg pp "$(pwd)" \
    --arg mid "" \
    '{
      session_id: $sid,
      runtime: $rt,
      pid: ($pid | tonumber),
      started_at: $ts,
      project_path: $pp,
      milestone_id: $mid
    }' > "$lock_file"
}
# Remove the launcher session lock (covers normal exit + signals).
# Registered via trap after _write_launcher_session_lock succeeds.
_cleanup_session_lock() {
  local lock_path=".mosaic/orchestrator/session.lock"
  rm -f "$lock_path" 2>/dev/null
}
# ── Launcher functions ───────────────────────────────────────────────────────

# Launch Claude Code with the Mosaic runtime contract injected through
# --append-system-prompt. Replaces the current process via exec.
launch_claude() {
  # Preflight gates.
  check_mosaic_home
  check_agents_md
  check_soul
  check_runtime "claude"
  check_sequential_thinking "claude"

  _check_resumable_session

  # Claude supports --append-system-prompt for direct injection.
  local sys_prompt
  sys_prompt="$(build_runtime_prompt "claude")"

  _detect_mission_prompt
  _write_launcher_session_lock "claude"
  # NOTE(review): after a successful exec this shell (and its traps) is
  # replaced; stale locks are reclaimed later by _check_resumable_session's
  # PID check — confirm that lifetime is intended.
  trap _cleanup_session_lock EXIT INT TERM

  # With an active mission and no explicit user prompt, seed the session
  # with the generated mission prompt instead.
  if [[ -n "$MOSAIC_MISSION_PROMPT" && $# -eq 0 ]]; then
    echo "[mosaic] Launching Claude Code (active mission detected)..."
    exec claude --append-system-prompt "$sys_prompt" "$MOSAIC_MISSION_PROMPT"
  fi

  echo "[mosaic] Launching Claude Code..."
  exec claude --append-system-prompt "$sys_prompt" "$@"
}
# Launch OpenCode. The runtime contract is materialized at OpenCode's native
# config path because there is no CLI prompt-injection flag for it.
launch_opencode() {
  # Preflight gates.
  check_mosaic_home
  check_agents_md
  check_soul
  check_runtime "opencode"
  check_sequential_thinking "opencode"

  _check_resumable_session

  # OpenCode reads from ~/.config/opencode/AGENTS.md
  ensure_runtime_config "opencode" "$HOME/.config/opencode/AGENTS.md"

  _write_launcher_session_lock "opencode"
  trap _cleanup_session_lock EXIT INT TERM

  echo "[mosaic] Launching OpenCode..."
  exec opencode "$@"
}
# Launch Codex. The runtime contract is materialized at Codex's native
# instructions path; an active mission (with no user prompt) seeds the session.
launch_codex() {
  # Preflight gates.
  check_mosaic_home
  check_agents_md
  check_soul
  check_runtime "codex"
  check_sequential_thinking "codex"

  _check_resumable_session

  # Codex reads from ~/.codex/instructions.md
  ensure_runtime_config "codex" "$HOME/.codex/instructions.md"

  _detect_mission_prompt
  _write_launcher_session_lock "codex"
  trap _cleanup_session_lock EXIT INT TERM

  if [[ -n "$MOSAIC_MISSION_PROMPT" && $# -eq 0 ]]; then
    echo "[mosaic] Launching Codex (active mission detected)..."
    exec codex "$MOSAIC_MISSION_PROMPT"
  fi

  echo "[mosaic] Launching Codex..."
  exec codex "$@"
}
# Launch the Pi runtime with the Mosaic contract, discovered skills, and the
# optional Mosaic extension. Replaces the current process via exec.
launch_pi() {
  check_mosaic_home
  check_agents_md
  check_soul
  check_runtime "pi"
  # Pi has native thinking levels — no sequential-thinking gate required

  _check_resumable_session

  local runtime_prompt
  runtime_prompt="$(build_runtime_prompt "pi")"

  # Build skill args from Mosaic skills directories (canonical + local).
  # Fix: declare the loop variables local — previously they leaked into the
  # global scope of the calling shell.
  local skills_root skill_dir
  local -a skill_args=()
  for skills_root in "$MOSAIC_HOME/skills" "$MOSAIC_HOME/skills-local"; do
    [[ -d "$skills_root" ]] || continue
    for skill_dir in "$skills_root"/*/; do
      # Only directories that carry a SKILL.md manifest count as skills.
      [[ -f "${skill_dir}SKILL.md" ]] && skill_args+=(--skill "$skill_dir")
    done
  done

  # Load Mosaic extension if present
  local -a ext_args=()
  local mosaic_ext="$MOSAIC_HOME/runtime/pi/mosaic-extension.ts"
  [[ -f "$mosaic_ext" ]] && ext_args=(--extension "$mosaic_ext")

  _detect_mission_prompt
  _write_launcher_session_lock "pi"
  trap _cleanup_session_lock EXIT INT TERM

  if [[ -n "$MOSAIC_MISSION_PROMPT" && $# -eq 0 ]]; then
    echo "[mosaic] Launching Pi (active mission detected)..."
    exec pi --append-system-prompt "$runtime_prompt" \
      "${skill_args[@]}" "${ext_args[@]}" "$MOSAIC_MISSION_PROMPT"
  else
    echo "[mosaic] Launching Pi..."
    exec pi --append-system-prompt "$runtime_prompt" \
      "${skill_args[@]}" "${ext_args[@]}" "$@"
  fi
}
# Launch a runtime with permission prompts disabled ("YOLO" mode).
# Usage: mosaic yolo <claude|codex|opencode|pi> [args...]
launch_yolo() {
  if (( $# == 0 )); then
    echo "[mosaic] ERROR: yolo requires a runtime (claude|codex|opencode|pi)." >&2
    echo "[mosaic] Example: mosaic yolo claude" >&2
    exit 1
  fi

  local target_rt="$1"
  shift

  case "$target_rt" in
    claude)
      check_mosaic_home
      check_agents_md
      check_soul
      check_runtime "claude"
      check_sequential_thinking "claude"

      # Claude uses an explicit dangerous permissions flag.
      local sys_prompt
      sys_prompt="$(build_runtime_prompt "claude")"

      _detect_mission_prompt
      _write_launcher_session_lock "claude"
      trap _cleanup_session_lock EXIT INT TERM

      if [[ -n "$MOSAIC_MISSION_PROMPT" && $# -eq 0 ]]; then
        echo "[mosaic] Launching Claude Code in YOLO mode (active mission detected)..."
        exec claude --dangerously-skip-permissions --append-system-prompt "$sys_prompt" "$MOSAIC_MISSION_PROMPT"
      fi
      echo "[mosaic] Launching Claude Code in YOLO mode (dangerous permissions enabled)..."
      exec claude --dangerously-skip-permissions --append-system-prompt "$sys_prompt" "$@"
      ;;
    codex)
      check_mosaic_home
      check_agents_md
      check_soul
      check_runtime "codex"
      check_sequential_thinking "codex"

      # Codex reads instructions.md from ~/.codex and supports a direct dangerous flag.
      ensure_runtime_config "codex" "$HOME/.codex/instructions.md"

      _detect_mission_prompt
      _write_launcher_session_lock "codex"
      trap _cleanup_session_lock EXIT INT TERM

      if [[ -n "$MOSAIC_MISSION_PROMPT" && $# -eq 0 ]]; then
        echo "[mosaic] Launching Codex in YOLO mode (active mission detected)..."
        exec codex --dangerously-bypass-approvals-and-sandbox "$MOSAIC_MISSION_PROMPT"
      fi
      echo "[mosaic] Launching Codex in YOLO mode (dangerous permissions enabled)..."
      exec codex --dangerously-bypass-approvals-and-sandbox "$@"
      ;;
    opencode)
      check_mosaic_home
      check_agents_md
      check_soul
      check_runtime "opencode"
      check_sequential_thinking "opencode"

      # OpenCode defaults to allow-all permissions unless user config restricts them.
      ensure_runtime_config "opencode" "$HOME/.config/opencode/AGENTS.md"

      _write_launcher_session_lock "opencode"
      trap _cleanup_session_lock EXIT INT TERM

      echo "[mosaic] Launching OpenCode in YOLO mode..."
      exec opencode "$@"
      ;;
    pi)
      # Pi has no permission restrictions — yolo is identical to normal launch
      launch_pi "$@"
      ;;
    *)
      echo "[mosaic] ERROR: Unsupported yolo runtime '$target_rt'. Use claude|codex|opencode|pi." >&2
      exit 1
      ;;
  esac
}
# ── Delegation to existing scripts ───────────────────────────────────────────

# Initialize Mosaic: prefer the bundled Node wizard when Node.js and the
# bundle are both available, otherwise fall back to the legacy bash wizard.
run_init() {
  local wizard_bin="$MOSAIC_HOME/dist/mosaic-wizard.mjs"
  if command -v node >/dev/null 2>&1 && [[ -f "$wizard_bin" ]]; then
    exec node "$wizard_bin" "$@"
  fi

  # Fallback to legacy bash wizard
  check_mosaic_home
  exec "$MOSAIC_HOME/bin/mosaic-init" "$@"
}
# Run the Mosaic environment diagnostics script.
run_doctor() {
  check_mosaic_home
  exec "$MOSAIC_HOME/bin/mosaic-doctor" "$@"
}
# Synchronize Mosaic skills via the dedicated sync script.
run_sync() {
  check_mosaic_home
  exec "$MOSAIC_HOME/bin/mosaic-sync-skills" "$@"
}
# Manage the sequential-thinking MCP integration.
# Subcommands: check (default), fix/apply, start.
run_seq() {
  check_mosaic_home
  local seq_tool="$MOSAIC_HOME/bin/mosaic-ensure-sequential-thinking"
  local sub="${1:-check}"

  case "$sub" in
    check)
      shift || true  # no-op when invoked with zero args (default subcommand)
      exec "$seq_tool" --check "$@"
      ;;
    fix|apply)
      shift || true
      exec "$seq_tool" "$@"
      ;;
    start)
      shift || true
      check_runtime "npx"
      echo "[mosaic] Starting sequential-thinking MCP server..."
      exec npx -y @modelcontextprotocol/server-sequential-thinking "$@"
      ;;
    *)
      echo "[mosaic] ERROR: Unknown seq subcommand '$sub'." >&2
      echo "[mosaic] Use: mosaic seq check|fix|start" >&2
      exit 1
      ;;
  esac
}
# Manual coordinator tools ("mosaic coord ..."): parse runtime/yolo selector
# flags, then dispatch the subcommand to the orchestrator tool scripts with
# MOSAIC_COORD_RUNTIME exported for the child.
run_coord() {
  check_mosaic_home
  local runtime="claude"
  local runtime_flag=""
  local yolo_flag=""
  local -a forwarded=()

  # Separate selector flags from the subcommand and its own arguments.
  while (( $# > 0 )); do
    case "$1" in
      --claude|--codex|--pi)
        local selected_runtime="${1#--}"
        if [[ -n "$runtime_flag" ]] && [[ "$runtime" != "$selected_runtime" ]]; then
          echo "[mosaic] ERROR: --claude, --codex, and --pi are mutually exclusive for 'mosaic coord'." >&2
          exit 1
        fi
        runtime="$selected_runtime"
        runtime_flag="$1"
        shift
        ;;
      --yolo)
        yolo_flag="--yolo"
        shift
        ;;
      *)
        forwarded+=("$1")
        shift
        ;;
    esac
  done

  # First positional is the subcommand; the remainder become "$@".
  local subcmd="${forwarded[0]:-help}"
  if (( ${#forwarded[@]} > 1 )); then
    set -- "${forwarded[@]:1}"
  else
    set --
  fi

  local tool_dir="$MOSAIC_HOME/tools/orchestrator"

  case "$subcmd" in
    status|session)
      MOSAIC_COORD_RUNTIME="$runtime" exec bash "$tool_dir/session-status.sh" "$@"
      ;;
    init)
      MOSAIC_COORD_RUNTIME="$runtime" exec bash "$tool_dir/mission-init.sh" "$@"
      ;;
    mission|progress)
      MOSAIC_COORD_RUNTIME="$runtime" exec bash "$tool_dir/mission-status.sh" "$@"
      ;;
    continue|next)
      MOSAIC_COORD_RUNTIME="$runtime" exec bash "$tool_dir/continue-prompt.sh" "$@"
      ;;
    run|start)
      MOSAIC_COORD_RUNTIME="$runtime" exec bash "$tool_dir/session-run.sh" ${yolo_flag:+"$yolo_flag"} "$@"
      ;;
    smoke|test)
      MOSAIC_COORD_RUNTIME="$runtime" exec bash "$tool_dir/smoke-test.sh" "$@"
      ;;
    resume|recover)
      MOSAIC_COORD_RUNTIME="$runtime" exec bash "$tool_dir/session-resume.sh" "$@"
      ;;
    help|*)
      cat <<COORD_USAGE
mosaic coord — r0 manual coordinator tools

Commands:
  init --name <name> [opts]    Initialize a new mission
  mission [--project <path>]   Show mission progress dashboard
  status [--project <path>]    Check agent session health
  continue [--project <path>]  Generate continuation prompt for next session
  run [--project <path>]       Generate context and launch selected runtime
  smoke                        Run orchestration behavior smoke checks
  resume [--project <path>]    Crash recovery (detect dirty state, generate fix)

Runtime:
  --claude   Use Claude runtime hints/prompts (default)
  --codex    Use Codex runtime hints/prompts
  --pi       Use Pi runtime hints/prompts
  --yolo     Launch runtime in dangerous/skip-permissions mode (run only)

Examples:
  mosaic coord init --name "Security Fix" --milestones "Critical,High,Medium"
  mosaic coord mission
  mosaic coord --codex mission
  mosaic coord --pi run
  mosaic coord continue --copy
  mosaic coord run
  mosaic coord run --codex
  mosaic coord --yolo run
  mosaic coord smoke
  mosaic coord continue --codex --copy

COORD_USAGE
      ;;
  esac
}
# Resume advisory — prints a warning if an active mission or a stale session
# lock is detected in the current directory. Never fails the caller.
_check_resumable_session() {
  local mission_file=".mosaic/orchestrator/mission.json"
  local lock_file=".mosaic/orchestrator/session.lock"

  command -v jq &>/dev/null || return 0

  if [[ -f "$lock_file" ]]; then
    local lock_pid
    lock_pid="$(jq -r '.pid // 0' "$lock_file" 2>/dev/null)"
    # A lock whose recorded PID is gone belongs to a dead session — reclaim it.
    if [[ -n "$lock_pid" ]] && [[ "$lock_pid" != "0" ]] && ! kill -0 "$lock_pid" 2>/dev/null; then
      rm -f "$lock_file"
      echo "[mosaic] Cleaned up stale session lock (PID $lock_pid no longer running)."
      echo ""
    fi
    return 0
  fi

  # No lock present: advise about an active mission, if any.
  if [[ -f "$mission_file" ]]; then
    local m_state
    m_state="$(jq -r '.status // "inactive"' "$mission_file" 2>/dev/null)"
    if [[ "$m_state" == "active" ]]; then
      echo "[mosaic] Active mission detected. Generate continuation prompt with:"
      echo "[mosaic] mosaic coord continue"
      echo ""
    fi
  fi
}
# PRD creation and validation tools ("mosaic prdy ..."): parse a runtime
# selector flag, then dispatch the subcommand to the prdy tool scripts with
# MOSAIC_PRDY_RUNTIME exported for the child.
run_prdy() {
  check_mosaic_home
  local runtime="claude"
  local runtime_flag=""
  local -a forwarded=()

  # Separate the runtime selector flag from the subcommand and its arguments.
  while (( $# > 0 )); do
    case "$1" in
      --claude|--codex|--pi)
        local selected_runtime="${1#--}"
        if [[ -n "$runtime_flag" ]] && [[ "$runtime" != "$selected_runtime" ]]; then
          echo "[mosaic] ERROR: --claude, --codex, and --pi are mutually exclusive for 'mosaic prdy'." >&2
          exit 1
        fi
        runtime="$selected_runtime"
        runtime_flag="$1"
        shift
        ;;
      *)
        forwarded+=("$1")
        shift
        ;;
    esac
  done

  # First positional is the subcommand; the remainder become "$@".
  local subcmd="${forwarded[0]:-help}"
  if (( ${#forwarded[@]} > 1 )); then
    set -- "${forwarded[@]:1}"
  else
    set --
  fi

  local tool_dir="$MOSAIC_HOME/tools/prdy"

  case "$subcmd" in
    init)
      MOSAIC_PRDY_RUNTIME="$runtime" exec bash "$tool_dir/prdy-init.sh" "$@"
      ;;
    update)
      MOSAIC_PRDY_RUNTIME="$runtime" exec bash "$tool_dir/prdy-update.sh" "$@"
      ;;
    validate|check)
      MOSAIC_PRDY_RUNTIME="$runtime" exec bash "$tool_dir/prdy-validate.sh" "$@"
      ;;
    status)
      exec bash "$tool_dir/prdy-status.sh" "$@"
      ;;
    help|*)
      cat <<PRDY_USAGE
mosaic prdy — PRD creation and validation tools

Commands:
  init [--project <path>] [--name <feature>]       Create docs/PRD.md via guided runtime session
  update [--project <path>]                        Update existing docs/PRD.md via guided runtime session
  validate [--project <path>]                      Check PRD completeness against Mosaic guide (bash-only)
  status [--project <path>] [--format short|json]  Quick PRD health check (one-liner)

Runtime:
  --claude   Use Claude runtime (default)
  --codex    Use Codex runtime
  --pi       Use Pi runtime

Examples:
  mosaic prdy init --name "User Authentication"
  mosaic prdy update
  mosaic prdy --pi init --name "User Authentication"
  mosaic prdy --codex init --name "User Authentication"
  mosaic prdy validate

Output location: docs/PRD.md (per Mosaic PRD guide)

PRDY_USAGE
      ;;
  esac
}
# Bootstrap a repository via the dedicated script.
run_bootstrap() {
  check_mosaic_home
  exec "$MOSAIC_HOME/bin/mosaic-bootstrap-repo" "$@"
}
# Upgrade the installed Mosaic release via the dedicated script.
run_release_upgrade() {
  check_mosaic_home
  exec "$MOSAIC_HOME/bin/mosaic-release-upgrade" "$@"
}
# Upgrade a bootstrapped project's Mosaic assets via the dedicated script.
run_project_upgrade() {
  check_mosaic_home
  exec "$MOSAIC_HOME/bin/mosaic-upgrade" "$@"
}
# Unified "mosaic upgrade" entry point. Routes to the release upgrader or the
# project upgrader depending on the first argument:
#   (none)        release upgrade
#   release       release upgrade
#   check         release upgrade, dry-run
#   project       project upgrade
#   --all|--root  project upgrade (historical project-upgrade flags)
#   -<anything>   release upgrade (all other flags)
#   <word>        project upgrade
run_upgrade() {
  check_mosaic_home

  # Default: upgrade installed release (run_release_upgrade execs).
  if (( $# == 0 )); then
    run_release_upgrade
  fi

  case "$1" in
    release)
      shift
      run_release_upgrade "$@"
      ;;
    check)
      shift
      run_release_upgrade --dry-run "$@"
      ;;
    project)
      shift
      run_project_upgrade "$@"
      ;;
    # Backward compatibility for historical project-upgrade usage.
    --all|--root)
      run_project_upgrade "$@"
      ;;
    # Any other flag (including --dry-run/--ref/--keep/--overwrite/-y/--yes)
    # belongs to the release upgrader.
    -*)
      run_release_upgrade "$@"
      ;;
    *)
      run_project_upgrade "$@"
      ;;
  esac
}
# ── Main router ──────────────────────────────────────────────────────────────
# No arguments: print usage and exit successfully.
if (( $# == 0 )); then
  usage
  exit 0
fi

command="$1"
shift

case "$command" in
  # Runtime launchers
  pi)                    launch_pi "$@" ;;
  claude)                launch_claude "$@" ;;
  opencode)              launch_opencode "$@" ;;
  codex)                 launch_codex "$@" ;;
  yolo|--yolo)           launch_yolo "$@" ;;

  # Delegated tooling
  init)                  run_init "$@" ;;
  doctor)                run_doctor "$@" ;;
  sync)                  run_sync "$@" ;;
  seq)                   run_seq "$@" ;;
  bootstrap)             run_bootstrap "$@" ;;
  prdy)                  run_prdy "$@" ;;
  coord)                 run_coord "$@" ;;
  upgrade)               run_upgrade "$@" ;;
  release-upgrade)       run_release_upgrade "$@" ;;
  project-upgrade)       run_project_upgrade "$@" ;;

  # Meta
  help|-h|--help)        usage ;;
  version|-v|--version)  echo "mosaic $VERSION" ;;

  *)
    echo "[mosaic] Unknown command: $command" >&2
    echo "[mosaic] Run 'mosaic --help' for usage." >&2
    exit 1
    ;;
esac
@@ -128,8 +128,8 @@ fi
|
|||||||
# Pi extension is loaded via --extension flag in the mosaic launcher.
|
# Pi extension is loaded via --extension flag in the mosaic launcher.
|
||||||
# Do NOT copy into ~/.pi/agent/extensions/ — that causes duplicate loading.
|
# Do NOT copy into ~/.pi/agent/extensions/ — that causes duplicate loading.
|
||||||
|
|
||||||
if [[ -x "$MOSAIC_HOME/tools/_scripts/mosaic-ensure-sequential-thinking" ]]; then
|
if [[ -x "$MOSAIC_HOME/bin/mosaic-ensure-sequential-thinking" ]]; then
|
||||||
"$MOSAIC_HOME/tools/_scripts/mosaic-ensure-sequential-thinking"
|
"$MOSAIC_HOME/bin/mosaic-ensure-sequential-thinking"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo "[mosaic-link] Runtime assets synced (non-symlink mode)"
|
echo "[mosaic-link] Runtime assets synced (non-symlink mode)"
|
||||||
@@ -2,8 +2,8 @@
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
|
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
|
||||||
sync_cmd="$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-sync-tasks"
|
sync_cmd="$MOSAIC_HOME/bin/mosaic-orchestrator-sync-tasks"
|
||||||
run_cmd="$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-run"
|
run_cmd="$MOSAIC_HOME/bin/mosaic-orchestrator-run"
|
||||||
|
|
||||||
do_sync=1
|
do_sync=1
|
||||||
poll_sec=15
|
poll_sec=15
|
||||||
@@ -3,9 +3,9 @@ set -euo pipefail
|
|||||||
|
|
||||||
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
|
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
|
||||||
|
|
||||||
consume="$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-matrix-consume"
|
consume="$MOSAIC_HOME/bin/mosaic-orchestrator-matrix-consume"
|
||||||
run="$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-run"
|
run="$MOSAIC_HOME/bin/mosaic-orchestrator-run"
|
||||||
publish="$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-matrix-publish"
|
publish="$MOSAIC_HOME/bin/mosaic-orchestrator-matrix-publish"
|
||||||
|
|
||||||
for cmd in "$consume" "$run" "$publish"; do
|
for cmd in "$consume" "$run" "$publish"; do
|
||||||
if [[ ! -x "$cmd" ]]; then
|
if [[ ! -x "$cmd" ]]; then
|
||||||
@@ -151,7 +151,7 @@ case "$cmd" in
|
|||||||
[[ -n "$quality_template" ]] && args+=(--quality-template "$quality_template")
|
[[ -n "$quality_template" ]] && args+=(--quality-template "$quality_template")
|
||||||
args+=("$repo")
|
args+=("$repo")
|
||||||
echo "[mosaic-projects] bootstrap: $repo"
|
echo "[mosaic-projects] bootstrap: $repo"
|
||||||
"$MOSAIC_HOME/tools/_scripts/mosaic-bootstrap-repo" "${args[@]}"
|
"$MOSAIC_HOME/bin/mosaic-bootstrap-repo" "${args[@]}"
|
||||||
add_repo "$repo" || true
|
add_repo "$repo" || true
|
||||||
done
|
done
|
||||||
;;
|
;;
|
||||||
@@ -193,7 +193,7 @@ case "$cmd" in
|
|||||||
drain)
|
drain)
|
||||||
args=(--poll-sec "$poll_sec")
|
args=(--poll-sec "$poll_sec")
|
||||||
[[ $no_sync -eq 1 ]] && args+=(--no-sync)
|
[[ $no_sync -eq 1 ]] && args+=(--no-sync)
|
||||||
"$MOSAIC_HOME/tools/_scripts/mosaic-orchestrator-drain" "${args[@]}"
|
"$MOSAIC_HOME/bin/mosaic-orchestrator-drain" "${args[@]}"
|
||||||
;;
|
;;
|
||||||
status)
|
status)
|
||||||
echo "[mosaic-projects] no daemon script in repo; run from bootstrapped repo or re-bootstrap"
|
echo "[mosaic-projects] no daemon script in repo; run from bootstrapped repo or re-bootstrap"
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user