Compare commits
1 Commits
feat/gatew
...
fix/idempo
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a08da9164a |
@@ -15,7 +15,6 @@ steps:
|
||||
image: *node_image
|
||||
commands:
|
||||
- corepack enable
|
||||
- apk add --no-cache python3 make g++
|
||||
- pnpm install --frozen-lockfile
|
||||
|
||||
typecheck:
|
||||
|
||||
@@ -35,31 +35,17 @@ steps:
|
||||
- |
|
||||
echo "//git.mosaicstack.dev/api/packages/mosaic/npm/:_authToken=$NPM_TOKEN" > ~/.npmrc
|
||||
echo "@mosaic:registry=https://git.mosaicstack.dev/api/packages/mosaic/npm/" >> ~/.npmrc
|
||||
# Publish non-private packages to Gitea (--no-git-checks skips dirty/branch checks in CI)
|
||||
# --filter excludes web (private)
|
||||
# Publish all non-private packages (--no-git-checks skips dirty/branch checks in CI)
|
||||
# --filter excludes private apps (gateway, web) and the root
|
||||
- >
|
||||
pnpm --filter "@mosaic/*"
|
||||
--filter "!@mosaic/gateway"
|
||||
--filter "!@mosaic/web"
|
||||
publish --no-git-checks --access public
|
||||
|| echo "[publish] Some packages may already exist at this version — continuing"
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
# TODO: Uncomment when ready to publish to npmjs.org
|
||||
# publish-npmjs:
|
||||
# image: *node_image
|
||||
# environment:
|
||||
# NPM_TOKEN:
|
||||
# from_secret: npmjs_token
|
||||
# commands:
|
||||
# - *enable_pnpm
|
||||
# - apk add --no-cache jq bash
|
||||
# - bash scripts/publish-npmjs.sh
|
||||
# depends_on:
|
||||
# - build
|
||||
# when:
|
||||
# - event: [tag]
|
||||
|
||||
build-gateway:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
|
||||
@@ -1,23 +1,9 @@
|
||||
{
|
||||
"name": "@mosaic/gateway",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "apps/gateway"
|
||||
},
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"main": "dist/main.js",
|
||||
"bin": {
|
||||
"mosaic-gateway": "dist/main.js"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"publishConfig": {
|
||||
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||
"access": "public"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"dev": "tsx watch src/main.ts",
|
||||
@@ -33,14 +19,12 @@
|
||||
"@modelcontextprotocol/sdk": "^1.27.1",
|
||||
"@mosaic/auth": "workspace:^",
|
||||
"@mosaic/brain": "workspace:^",
|
||||
"@mosaic/config": "workspace:^",
|
||||
"@mosaic/coord": "workspace:^",
|
||||
"@mosaic/db": "workspace:^",
|
||||
"@mosaic/discord-plugin": "workspace:^",
|
||||
"@mosaic/log": "workspace:^",
|
||||
"@mosaic/memory": "workspace:^",
|
||||
"@mosaic/queue": "workspace:^",
|
||||
"@mosaic/storage": "workspace:^",
|
||||
"@mosaic/telegram-plugin": "workspace:^",
|
||||
"@mosaic/types": "workspace:^",
|
||||
"@nestjs/common": "^11.0.0",
|
||||
|
||||
@@ -1,90 +0,0 @@
|
||||
import {
|
||||
Body,
|
||||
Controller,
|
||||
Delete,
|
||||
Get,
|
||||
HttpCode,
|
||||
HttpStatus,
|
||||
Inject,
|
||||
Param,
|
||||
Post,
|
||||
UseGuards,
|
||||
} from '@nestjs/common';
|
||||
import { randomBytes, createHash } from 'node:crypto';
|
||||
import { eq, type Db, adminTokens } from '@mosaic/db';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
import { DB } from '../database/database.module.js';
|
||||
import { AdminGuard } from './admin.guard.js';
|
||||
import { CurrentUser } from '../auth/current-user.decorator.js';
|
||||
import type {
|
||||
CreateTokenDto,
|
||||
TokenCreatedDto,
|
||||
TokenDto,
|
||||
TokenListDto,
|
||||
} from './admin-tokens.dto.js';
|
||||
|
||||
function hashToken(plaintext: string): string {
|
||||
return createHash('sha256').update(plaintext).digest('hex');
|
||||
}
|
||||
|
||||
function toTokenDto(row: typeof adminTokens.$inferSelect): TokenDto {
|
||||
return {
|
||||
id: row.id,
|
||||
label: row.label,
|
||||
scope: row.scope,
|
||||
expiresAt: row.expiresAt?.toISOString() ?? null,
|
||||
lastUsedAt: row.lastUsedAt?.toISOString() ?? null,
|
||||
createdAt: row.createdAt.toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
@Controller('api/admin/tokens')
|
||||
@UseGuards(AdminGuard)
|
||||
export class AdminTokensController {
|
||||
constructor(@Inject(DB) private readonly db: Db) {}
|
||||
|
||||
@Post()
|
||||
async create(
|
||||
@Body() dto: CreateTokenDto,
|
||||
@CurrentUser() user: { id: string },
|
||||
): Promise<TokenCreatedDto> {
|
||||
const plaintext = randomBytes(32).toString('hex');
|
||||
const tokenHash = hashToken(plaintext);
|
||||
const id = uuid();
|
||||
|
||||
const expiresAt = dto.expiresInDays
|
||||
? new Date(Date.now() + dto.expiresInDays * 24 * 60 * 60 * 1000)
|
||||
: null;
|
||||
|
||||
const [row] = await this.db
|
||||
.insert(adminTokens)
|
||||
.values({
|
||||
id,
|
||||
userId: user.id,
|
||||
tokenHash,
|
||||
label: dto.label ?? 'CLI token',
|
||||
scope: dto.scope ?? 'admin',
|
||||
expiresAt,
|
||||
})
|
||||
.returning();
|
||||
|
||||
return { ...toTokenDto(row!), plaintext };
|
||||
}
|
||||
|
||||
@Get()
|
||||
async list(@CurrentUser() user: { id: string }): Promise<TokenListDto> {
|
||||
const rows = await this.db
|
||||
.select()
|
||||
.from(adminTokens)
|
||||
.where(eq(adminTokens.userId, user.id))
|
||||
.orderBy(adminTokens.createdAt);
|
||||
|
||||
return { tokens: rows.map(toTokenDto), total: rows.length };
|
||||
}
|
||||
|
||||
@Delete(':id')
|
||||
@HttpCode(HttpStatus.NO_CONTENT)
|
||||
async revoke(@Param('id') id: string, @CurrentUser() _user: { id: string }): Promise<void> {
|
||||
await this.db.delete(adminTokens).where(eq(adminTokens.id, id));
|
||||
}
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
import { IsString, IsOptional, IsInt, Min } from 'class-validator';
|
||||
|
||||
export class CreateTokenDto {
|
||||
@IsString()
|
||||
label!: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
scope?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsInt()
|
||||
@Min(1)
|
||||
expiresInDays?: number;
|
||||
}
|
||||
|
||||
export interface TokenDto {
|
||||
id: string;
|
||||
label: string;
|
||||
scope: string;
|
||||
expiresAt: string | null;
|
||||
lastUsedAt: string | null;
|
||||
createdAt: string;
|
||||
}
|
||||
|
||||
export interface TokenCreatedDto extends TokenDto {
|
||||
plaintext: string;
|
||||
}
|
||||
|
||||
export interface TokenListDto {
|
||||
tokens: TokenDto[];
|
||||
total: number;
|
||||
}
|
||||
@@ -6,11 +6,10 @@ import {
|
||||
Injectable,
|
||||
UnauthorizedException,
|
||||
} from '@nestjs/common';
|
||||
import { createHash } from 'node:crypto';
|
||||
import { fromNodeHeaders } from 'better-auth/node';
|
||||
import type { Auth } from '@mosaic/auth';
|
||||
import type { Db } from '@mosaic/db';
|
||||
import { eq, adminTokens, users as usersTable } from '@mosaic/db';
|
||||
import { eq, users as usersTable } from '@mosaic/db';
|
||||
import type { FastifyRequest } from 'fastify';
|
||||
import { AUTH } from '../auth/auth.tokens.js';
|
||||
import { DB } from '../database/database.module.js';
|
||||
@@ -20,8 +19,6 @@ interface UserWithRole {
|
||||
role?: string;
|
||||
}
|
||||
|
||||
type AuthenticatedRequest = FastifyRequest & { user: unknown; session: unknown };
|
||||
|
||||
@Injectable()
|
||||
export class AdminGuard implements CanActivate {
|
||||
constructor(
|
||||
@@ -31,64 +28,8 @@ export class AdminGuard implements CanActivate {
|
||||
|
||||
async canActivate(context: ExecutionContext): Promise<boolean> {
|
||||
const request = context.switchToHttp().getRequest<FastifyRequest>();
|
||||
|
||||
// Try bearer token auth first
|
||||
const authHeader = request.raw.headers['authorization'];
|
||||
if (authHeader?.startsWith('Bearer ')) {
|
||||
return this.validateBearerToken(request, authHeader.slice(7));
|
||||
}
|
||||
|
||||
// Fall back to BetterAuth session
|
||||
return this.validateSession(request);
|
||||
}
|
||||
|
||||
private async validateBearerToken(request: FastifyRequest, plaintext: string): Promise<boolean> {
|
||||
const tokenHash = createHash('sha256').update(plaintext).digest('hex');
|
||||
|
||||
const [row] = await this.db
|
||||
.select({
|
||||
tokenId: adminTokens.id,
|
||||
userId: adminTokens.userId,
|
||||
scope: adminTokens.scope,
|
||||
expiresAt: adminTokens.expiresAt,
|
||||
userName: usersTable.name,
|
||||
userEmail: usersTable.email,
|
||||
userRole: usersTable.role,
|
||||
})
|
||||
.from(adminTokens)
|
||||
.innerJoin(usersTable, eq(adminTokens.userId, usersTable.id))
|
||||
.where(eq(adminTokens.tokenHash, tokenHash))
|
||||
.limit(1);
|
||||
|
||||
if (!row) {
|
||||
throw new UnauthorizedException('Invalid API token');
|
||||
}
|
||||
|
||||
if (row.expiresAt && row.expiresAt < new Date()) {
|
||||
throw new UnauthorizedException('API token expired');
|
||||
}
|
||||
|
||||
if (row.userRole !== 'admin') {
|
||||
throw new ForbiddenException('Admin access required');
|
||||
}
|
||||
|
||||
// Update last-used timestamp (fire-and-forget)
|
||||
this.db
|
||||
.update(adminTokens)
|
||||
.set({ lastUsedAt: new Date() })
|
||||
.where(eq(adminTokens.id, row.tokenId))
|
||||
.then(() => {})
|
||||
.catch(() => {});
|
||||
|
||||
const req = request as AuthenticatedRequest;
|
||||
req.user = { id: row.userId, name: row.userName, email: row.userEmail, role: row.userRole };
|
||||
req.session = { id: `token:${row.tokenId}`, userId: row.userId };
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private async validateSession(request: FastifyRequest): Promise<boolean> {
|
||||
const headers = fromNodeHeaders(request.raw.headers);
|
||||
|
||||
const result = await this.auth.api.getSession({ headers });
|
||||
|
||||
if (!result) {
|
||||
@@ -97,6 +38,8 @@ export class AdminGuard implements CanActivate {
|
||||
|
||||
const user = result.user as UserWithRole;
|
||||
|
||||
// Ensure the role field is populated. better-auth should include additionalFields
|
||||
// in the session, but as a fallback, fetch the role from the database if needed.
|
||||
let userRole = user.role;
|
||||
if (!userRole) {
|
||||
const [dbUser] = await this.db
|
||||
@@ -105,6 +48,7 @@ export class AdminGuard implements CanActivate {
|
||||
.where(eq(usersTable.id, user.id))
|
||||
.limit(1);
|
||||
userRole = dbUser?.role ?? 'member';
|
||||
// Update the session user object with the fetched role
|
||||
(user as UserWithRole).role = userRole;
|
||||
}
|
||||
|
||||
@@ -112,9 +56,8 @@ export class AdminGuard implements CanActivate {
|
||||
throw new ForbiddenException('Admin access required');
|
||||
}
|
||||
|
||||
const req = request as AuthenticatedRequest;
|
||||
req.user = result.user;
|
||||
req.session = result.session;
|
||||
(request as FastifyRequest & { user: unknown; session: unknown }).user = result.user;
|
||||
(request as FastifyRequest & { user: unknown; session: unknown }).session = result.session;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -2,18 +2,10 @@ import { Module } from '@nestjs/common';
|
||||
import { AdminController } from './admin.controller.js';
|
||||
import { AdminHealthController } from './admin-health.controller.js';
|
||||
import { AdminJobsController } from './admin-jobs.controller.js';
|
||||
import { AdminTokensController } from './admin-tokens.controller.js';
|
||||
import { BootstrapController } from './bootstrap.controller.js';
|
||||
import { AdminGuard } from './admin.guard.js';
|
||||
|
||||
@Module({
|
||||
controllers: [
|
||||
AdminController,
|
||||
AdminHealthController,
|
||||
AdminJobsController,
|
||||
AdminTokensController,
|
||||
BootstrapController,
|
||||
],
|
||||
controllers: [AdminController, AdminHealthController, AdminJobsController],
|
||||
providers: [AdminGuard],
|
||||
})
|
||||
export class AdminModule {}
|
||||
|
||||
@@ -1,101 +0,0 @@
|
||||
import {
|
||||
Body,
|
||||
Controller,
|
||||
ForbiddenException,
|
||||
Get,
|
||||
Inject,
|
||||
InternalServerErrorException,
|
||||
Post,
|
||||
} from '@nestjs/common';
|
||||
import { randomBytes, createHash } from 'node:crypto';
|
||||
import { count, eq, type Db, users as usersTable, adminTokens } from '@mosaic/db';
|
||||
import type { Auth } from '@mosaic/auth';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
import { AUTH } from '../auth/auth.tokens.js';
|
||||
import { DB } from '../database/database.module.js';
|
||||
import type { BootstrapSetupDto, BootstrapStatusDto, BootstrapResultDto } from './bootstrap.dto.js';
|
||||
|
||||
@Controller('api/bootstrap')
|
||||
export class BootstrapController {
|
||||
constructor(
|
||||
@Inject(AUTH) private readonly auth: Auth,
|
||||
@Inject(DB) private readonly db: Db,
|
||||
) {}
|
||||
|
||||
@Get('status')
|
||||
async status(): Promise<BootstrapStatusDto> {
|
||||
const [result] = await this.db.select({ total: count() }).from(usersTable);
|
||||
return { needsSetup: (result?.total ?? 0) === 0 };
|
||||
}
|
||||
|
||||
@Post('setup')
|
||||
async setup(@Body() dto: BootstrapSetupDto): Promise<BootstrapResultDto> {
|
||||
// Only allow setup when zero users exist
|
||||
const [result] = await this.db.select({ total: count() }).from(usersTable);
|
||||
if ((result?.total ?? 0) > 0) {
|
||||
throw new ForbiddenException('Setup already completed — users exist');
|
||||
}
|
||||
|
||||
// Create admin user via BetterAuth API
|
||||
const authApi = this.auth.api as unknown as {
|
||||
createUser: (opts: {
|
||||
body: { name: string; email: string; password: string; role?: string };
|
||||
}) => Promise<{
|
||||
user: { id: string; name: string; email: string };
|
||||
}>;
|
||||
};
|
||||
|
||||
const created = await authApi.createUser({
|
||||
body: {
|
||||
name: dto.name,
|
||||
email: dto.email,
|
||||
password: dto.password,
|
||||
role: 'admin',
|
||||
},
|
||||
});
|
||||
|
||||
// Verify user was created
|
||||
const [user] = await this.db
|
||||
.select()
|
||||
.from(usersTable)
|
||||
.where(eq(usersTable.id, created.user.id))
|
||||
.limit(1);
|
||||
|
||||
if (!user) throw new InternalServerErrorException('User created but not found');
|
||||
|
||||
// Ensure role is admin (createUser may not set it via BetterAuth)
|
||||
if (user.role !== 'admin') {
|
||||
await this.db.update(usersTable).set({ role: 'admin' }).where(eq(usersTable.id, user.id));
|
||||
}
|
||||
|
||||
// Generate admin API token
|
||||
const plaintext = randomBytes(32).toString('hex');
|
||||
const tokenHash = createHash('sha256').update(plaintext).digest('hex');
|
||||
const tokenId = uuid();
|
||||
|
||||
const [token] = await this.db
|
||||
.insert(adminTokens)
|
||||
.values({
|
||||
id: tokenId,
|
||||
userId: user.id,
|
||||
tokenHash,
|
||||
label: 'Initial setup token',
|
||||
scope: 'admin',
|
||||
})
|
||||
.returning();
|
||||
|
||||
return {
|
||||
user: {
|
||||
id: user.id,
|
||||
name: user.name,
|
||||
email: user.email,
|
||||
role: 'admin',
|
||||
},
|
||||
token: {
|
||||
id: token!.id,
|
||||
plaintext,
|
||||
label: token!.label,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
import { IsString, IsEmail, MinLength } from 'class-validator';
|
||||
|
||||
export class BootstrapSetupDto {
|
||||
@IsString()
|
||||
name!: string;
|
||||
|
||||
@IsEmail()
|
||||
email!: string;
|
||||
|
||||
@IsString()
|
||||
@MinLength(8)
|
||||
password!: string;
|
||||
}
|
||||
|
||||
export interface BootstrapStatusDto {
|
||||
needsSetup: boolean;
|
||||
}
|
||||
|
||||
export interface BootstrapResultDto {
|
||||
user: {
|
||||
id: string;
|
||||
name: string;
|
||||
email: string;
|
||||
role: string;
|
||||
};
|
||||
token: {
|
||||
id: string;
|
||||
plaintext: string;
|
||||
label: string;
|
||||
};
|
||||
}
|
||||
@@ -1,7 +1,6 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { APP_GUARD } from '@nestjs/core';
|
||||
import { HealthController } from './health/health.controller.js';
|
||||
import { ConfigModule } from './config/config.module.js';
|
||||
import { DatabaseModule } from './database/database.module.js';
|
||||
import { AuthModule } from './auth/auth.module.js';
|
||||
import { BrainModule } from './brain/brain.module.js';
|
||||
@@ -29,7 +28,6 @@ import { ThrottlerGuard, ThrottlerModule } from '@nestjs/throttler';
|
||||
@Module({
|
||||
imports: [
|
||||
ThrottlerModule.forRoot([{ name: 'default', ttl: 60_000, limit: 60 }]),
|
||||
ConfigModule,
|
||||
DatabaseModule,
|
||||
AuthModule,
|
||||
BrainModule,
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
import { Global, Module } from '@nestjs/common';
|
||||
import { loadConfig, type MosaicConfig } from '@mosaic/config';
|
||||
|
||||
export const MOSAIC_CONFIG = 'MOSAIC_CONFIG';
|
||||
|
||||
@Global()
|
||||
@Module({
|
||||
providers: [
|
||||
{
|
||||
provide: MOSAIC_CONFIG,
|
||||
useFactory: (): MosaicConfig => loadConfig(),
|
||||
},
|
||||
],
|
||||
exports: [MOSAIC_CONFIG],
|
||||
})
|
||||
export class ConfigModule {}
|
||||
@@ -1,51 +1,28 @@
|
||||
import { mkdirSync } from 'node:fs';
|
||||
import { homedir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { Global, Inject, Module, type OnApplicationShutdown } from '@nestjs/common';
|
||||
import { createDb, createPgliteDb, type Db, type DbHandle } from '@mosaic/db';
|
||||
import { createStorageAdapter, type StorageAdapter } from '@mosaic/storage';
|
||||
import type { MosaicConfig } from '@mosaic/config';
|
||||
import { MOSAIC_CONFIG } from '../config/config.module.js';
|
||||
import { createDb, type Db, type DbHandle } from '@mosaic/db';
|
||||
|
||||
export const DB_HANDLE = 'DB_HANDLE';
|
||||
export const DB = 'DB';
|
||||
export const STORAGE_ADAPTER = 'STORAGE_ADAPTER';
|
||||
|
||||
@Global()
|
||||
@Module({
|
||||
providers: [
|
||||
{
|
||||
provide: DB_HANDLE,
|
||||
useFactory: (config: MosaicConfig): DbHandle => {
|
||||
if (config.tier === 'local') {
|
||||
const dataDir = join(homedir(), '.config', 'mosaic', 'gateway', 'pglite');
|
||||
mkdirSync(dataDir, { recursive: true });
|
||||
return createPgliteDb(dataDir);
|
||||
}
|
||||
return createDb(config.storage.type === 'postgres' ? config.storage.url : undefined);
|
||||
},
|
||||
inject: [MOSAIC_CONFIG],
|
||||
useFactory: (): DbHandle => createDb(),
|
||||
},
|
||||
{
|
||||
provide: DB,
|
||||
useFactory: (handle: DbHandle): Db => handle.db,
|
||||
inject: [DB_HANDLE],
|
||||
},
|
||||
{
|
||||
provide: STORAGE_ADAPTER,
|
||||
useFactory: (config: MosaicConfig): StorageAdapter => createStorageAdapter(config.storage),
|
||||
inject: [MOSAIC_CONFIG],
|
||||
},
|
||||
],
|
||||
exports: [DB, STORAGE_ADAPTER],
|
||||
exports: [DB],
|
||||
})
|
||||
export class DatabaseModule implements OnApplicationShutdown {
|
||||
constructor(
|
||||
@Inject(DB_HANDLE) private readonly handle: DbHandle,
|
||||
@Inject(STORAGE_ADAPTER) private readonly storageAdapter: StorageAdapter,
|
||||
) {}
|
||||
constructor(@Inject(DB_HANDLE) private readonly handle: DbHandle) {}
|
||||
|
||||
async onApplicationShutdown(): Promise<void> {
|
||||
await Promise.all([this.handle.close(), this.storageAdapter.close()]);
|
||||
await this.handle.close();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,5 @@
|
||||
#!/usr/bin/env node
|
||||
import { config } from 'dotenv';
|
||||
import { existsSync } from 'node:fs';
|
||||
import { resolve, join } from 'node:path';
|
||||
import { homedir } from 'node:os';
|
||||
|
||||
// Load .env from daemon config dir (global install / daemon mode).
|
||||
// Loaded first so monorepo .env can override for local dev.
|
||||
const daemonEnv = join(homedir(), '.config', 'mosaic', 'gateway', '.env');
|
||||
if (existsSync(daemonEnv)) config({ path: daemonEnv });
|
||||
import { resolve } from 'node:path';
|
||||
|
||||
// Load .env from monorepo root (cwd is apps/gateway when run via pnpm filter)
|
||||
config({ path: resolve(process.cwd(), '../../.env') });
|
||||
|
||||
@@ -1,29 +1,11 @@
|
||||
import { Global, Module } from '@nestjs/common';
|
||||
import {
|
||||
createMemory,
|
||||
type Memory,
|
||||
createMemoryAdapter,
|
||||
type MemoryAdapter,
|
||||
type MemoryConfig,
|
||||
} from '@mosaic/memory';
|
||||
import { createMemory, type Memory } from '@mosaic/memory';
|
||||
import type { Db } from '@mosaic/db';
|
||||
import type { StorageAdapter } from '@mosaic/storage';
|
||||
import type { MosaicConfig } from '@mosaic/config';
|
||||
import { MOSAIC_CONFIG } from '../config/config.module.js';
|
||||
import { DB, STORAGE_ADAPTER } from '../database/database.module.js';
|
||||
import { DB } from '../database/database.module.js';
|
||||
import { MEMORY } from './memory.tokens.js';
|
||||
import { MemoryController } from './memory.controller.js';
|
||||
import { EmbeddingService } from './embedding.service.js';
|
||||
|
||||
export const MEMORY_ADAPTER = 'MEMORY_ADAPTER';
|
||||
|
||||
function buildMemoryConfig(config: MosaicConfig, storageAdapter: StorageAdapter): MemoryConfig {
|
||||
if (config.memory.type === 'keyword') {
|
||||
return { type: 'keyword', storage: storageAdapter };
|
||||
}
|
||||
return { type: config.memory.type };
|
||||
}
|
||||
|
||||
@Global()
|
||||
@Module({
|
||||
providers: [
|
||||
@@ -32,15 +14,9 @@ function buildMemoryConfig(config: MosaicConfig, storageAdapter: StorageAdapter)
|
||||
useFactory: (db: Db): Memory => createMemory(db),
|
||||
inject: [DB],
|
||||
},
|
||||
{
|
||||
provide: MEMORY_ADAPTER,
|
||||
useFactory: (config: MosaicConfig, storageAdapter: StorageAdapter): MemoryAdapter =>
|
||||
createMemoryAdapter(buildMemoryConfig(config, storageAdapter)),
|
||||
inject: [MOSAIC_CONFIG, STORAGE_ADAPTER],
|
||||
},
|
||||
EmbeddingService,
|
||||
],
|
||||
controllers: [MemoryController],
|
||||
exports: [MEMORY, MEMORY_ADAPTER, EmbeddingService],
|
||||
exports: [MEMORY, EmbeddingService],
|
||||
})
|
||||
export class MemoryModule {}
|
||||
|
||||
@@ -1,21 +1,9 @@
|
||||
import { Global, Module } from '@nestjs/common';
|
||||
import { createQueueAdapter, type QueueAdapter } from '@mosaic/queue';
|
||||
import type { MosaicConfig } from '@mosaic/config';
|
||||
import { MOSAIC_CONFIG } from '../config/config.module.js';
|
||||
import { QueueService } from './queue.service.js';
|
||||
|
||||
export const QUEUE_ADAPTER = 'QUEUE_ADAPTER';
|
||||
|
||||
@Global()
|
||||
@Module({
|
||||
providers: [
|
||||
QueueService,
|
||||
{
|
||||
provide: QUEUE_ADAPTER,
|
||||
useFactory: (config: MosaicConfig): QueueAdapter => createQueueAdapter(config.queue),
|
||||
inject: [MOSAIC_CONFIG],
|
||||
},
|
||||
],
|
||||
exports: [QueueService, QUEUE_ADAPTER],
|
||||
providers: [QueueService],
|
||||
exports: [QueueService],
|
||||
})
|
||||
export class QueueModule {}
|
||||
|
||||
@@ -1,30 +1,73 @@
|
||||
# Tasks — Storage Abstraction Retrofit
|
||||
# Tasks — Harness Foundation
|
||||
|
||||
> Single-writer: orchestrator only. Workers read but never modify.
|
||||
>
|
||||
> **Mission:** Decouple gateway from hardcoded Postgres/Valkey backends. Introduce interface-driven middleware so the gateway is backend-agnostic. Default to local tier (SQLite + JSON) for zero-dependency installs.
|
||||
>
|
||||
> **`agent` column values:** `codex` | `sonnet` | `haiku` | `glm-5` | `opus` | `—` (auto/default)
|
||||
|
||||
| id | status | agent | description | tokens |
|
||||
| --------- | ----------- | ------ | ---------------------------------------------------------------- | ------ |
|
||||
| SA-P1-001 | done | sonnet | Define QueueAdapter interface in packages/queue/src/types.ts | 3K |
|
||||
| SA-P1-002 | done | sonnet | Define StorageAdapter interface in packages/storage/src/types.ts | 3K |
|
||||
| SA-P1-003 | done | sonnet | Define MemoryAdapter interface in packages/memory/src/types.ts | 3K |
|
||||
| SA-P1-004 | done | sonnet | Create adapter factory pattern + config types | 3K |
|
||||
| SA-P2-001 | done | sonnet | Refactor @mosaic/queue: wrap ioredis as BullMQ adapter | 3K |
|
||||
| SA-P2-002 | done | sonnet | Create @mosaic/storage: wrap Drizzle as Postgres adapter | 6K |
|
||||
| SA-P2-003 | done | sonnet | Refactor @mosaic/memory: extract pgvector adapter | 4K |
|
||||
| SA-P2-004 | done | sonnet | Update gateway modules to use factories + DI tokens | 5K |
|
||||
| SA-P2-005 | done | opus | Verify Phase 2: all tests pass, typecheck clean | — |
|
||||
| SA-P3-001 | done | sonnet | Implement local queue adapter: JSON file persistence | 5K |
|
||||
| SA-P3-002 | done | sonnet | Implement SQLite storage adapter with better-sqlite3 | 8K |
|
||||
| SA-P3-003 | done | sonnet | Implement keyword memory adapter — no vector dependency | 4K |
|
||||
| SA-P3-004 | done | opus | Verify Phase 3: 42 new tests, 347 total passing | — |
|
||||
| SA-P4-001 | done | sonnet | MosaicConfig schema + loader with tier auto-detection | 6K |
|
||||
| SA-P4-002 | done | sonnet | CLI: mosaic gateway init — interactive wizard | 4K |
|
||||
| SA-P4-003 | done | sonnet | CLI: mosaic gateway start/stop/status lifecycle | 5K |
|
||||
| SA-P4-004 | done | opus | Verify Phase 4: 381 tests passing, 40/40 tasks clean | — |
|
||||
| SA-P5-001 | not-started | codex | Migration tooling: mosaic storage export/import | — |
|
||||
| SA-P5-002 | not-started | codex | Docker Compose profiles: local vs team | — |
|
||||
| SA-P5-003 | not-started | codex | Final verification + docs: README, architecture diagram | — |
|
||||
| id | status | agent | milestone | description | pr | notes |
|
||||
| ------ | ------ | ------ | ------------------ | ------------------------------------------------------------------ | ---- | ----------- |
|
||||
| M1-001 | done | sonnet | M1: Persistence | Wire ChatGateway → ConversationsRepo for user messages | #292 | #224 closed |
|
||||
| M1-002 | done | sonnet | M1: Persistence | Wire agent event relay → ConversationsRepo for assistant responses | #292 | #225 closed |
|
||||
| M1-003 | done | sonnet | M1: Persistence | Store message metadata: model, provider, tokens, tool calls | #292 | #226 closed |
|
||||
| M1-004 | done | sonnet | M1: Persistence | Load message history into Pi session on resume | #301 | #227 closed |
|
||||
| M1-005 | done | sonnet | M1: Persistence | Context window management: summarize when >80% | #301 | #228 closed |
|
||||
| M1-006 | done | sonnet | M1: Persistence | Conversation search endpoint | #299 | #229 closed |
|
||||
| M1-007 | done | sonnet | M1: Persistence | TUI /history command | #297 | #230 closed |
|
||||
| M1-008 | done | sonnet | M1: Persistence | Verify persistence — 20 tests | #304 | #231 closed |
|
||||
| M2-001 | done | sonnet | M2: Security | InsightsRepo userId on searchByEmbedding | #290 | #232 closed |
|
||||
| M2-002 | done | sonnet | M2: Security | InsightsRepo userId on findByUser/decay | #290 | #233 closed |
|
||||
| M2-003 | done | sonnet | M2: Security | PreferencesRepo userId verified | #294 | #234 closed |
|
||||
| M2-004 | done | sonnet | M2: Security | Memory tools userId injection fixed | #294 | #235 closed |
|
||||
| M2-005 | done | sonnet | M2: Security | ConversationsRepo ownership checks | #293 | #236 closed |
|
||||
| M2-006 | done | sonnet | M2: Security | AgentsRepo findAccessible scoped | #293 | #237 closed |
|
||||
| M2-007 | done | sonnet | M2: Security | Cross-user isolation — 28 tests | #305 | #238 closed |
|
||||
| M2-008 | done | sonnet | M2: Security | Valkey SCAN + /gc admin-only | #298 | #239 closed |
|
||||
| M3-001 | done | sonnet | M3: Providers | IProviderAdapter + OllamaAdapter | #306 | #240 closed |
|
||||
| M3-002 | done | sonnet | M3: Providers | AnthropicAdapter | #309 | #241 closed |
|
||||
| M3-003 | done | sonnet | M3: Providers | OpenAIAdapter | #310 | #242 closed |
|
||||
| M3-004 | done | sonnet | M3: Providers | OpenRouterAdapter | #311 | #243 closed |
|
||||
| M3-005 | done | sonnet | M3: Providers | ZaiAdapter (GLM-5) | #314 | #244 closed |
|
||||
| M3-006 | done | sonnet | M3: Providers | Ollama embedding support | #311 | #245 closed |
|
||||
| M3-007 | done | sonnet | M3: Providers | Provider health checks | #308 | #246 closed |
|
||||
| M3-008 | done | sonnet | M3: Providers | Model capability matrix | #303 | #247 closed |
|
||||
| M3-009 | done | sonnet | M3: Providers | EmbeddingService → Ollama default | #308 | #248 closed |
|
||||
| M3-010 | done | sonnet | M3: Providers | OAuth token storage (AES-256-GCM) | #317 | #249 closed |
|
||||
| M3-011 | done | sonnet | M3: Providers | Provider credentials CRUD | #317 | #250 closed |
|
||||
| M3-012 | done | sonnet | M3: Providers | Verify providers — 40 tests | #319 | #251 closed |
|
||||
| M4-001 | done | sonnet | M4: Routing | routing_rules DB schema | #315 | #252 closed |
|
||||
| M4-002 | done | sonnet | M4: Routing | Condition types | #315 | #253 closed |
|
||||
| M4-003 | done | sonnet | M4: Routing | Action types | #315 | #254 closed |
|
||||
| M4-004 | done | sonnet | M4: Routing | Default routing rules (11 seeds) | #316 | #255 closed |
|
||||
| M4-005 | done | sonnet | M4: Routing | Task classifier (60+ tests) | #316 | #256 closed |
|
||||
| M4-006 | done | sonnet | M4: Routing | Routing decision pipeline | #318 | #257 closed |
|
||||
| M4-007 | done | sonnet | M4: Routing | /model override | #323 | #258 closed |
|
||||
| M4-008 | done | sonnet | M4: Routing | Routing transparency in session:info | #323 | #259 closed |
|
||||
| M4-009 | done | sonnet | M4: Routing | Routing rules CRUD API | #320 | #260 closed |
|
||||
| M4-010 | done | sonnet | M4: Routing | Per-user routing overrides | #320 | #261 closed |
|
||||
| M4-011 | done | sonnet | M4: Routing | Agent specialization capabilities | #320 | #262 closed |
|
||||
| M4-012 | done | sonnet | M4: Routing | Routing wired into ChatGateway | #323 | #263 closed |
|
||||
| M4-013 | done | sonnet | M4: Routing | Verify routing — 9 E2E tests | #323 | #264 closed |
|
||||
| M5-001 | done | sonnet | M5: Sessions | Agent config loaded on session create | #323 | #265 closed |
|
||||
| M5-002 | done | sonnet | M5: Sessions | /model command end-to-end | #323 | #266 closed |
|
||||
| M5-003 | done | sonnet | M5: Sessions | /agent command mid-session | #323 | #267 closed |
|
||||
| M5-004 | done | sonnet | M5: Sessions | Session ↔ conversation binding | #321 | #268 closed |
|
||||
| M5-005 | done | sonnet | M5: Sessions | Session info broadcast | #321 | #269 closed |
|
||||
| M5-006 | done | sonnet | M5: Sessions | /agent new from TUI | #321 | #270 closed |
|
||||
| M5-007 | done | sonnet | M5: Sessions | Session metrics | #321 | #271 closed |
|
||||
| M5-008 | done | sonnet | M5: Sessions | Verify sessions — 28 tests | #324 | #272 closed |
|
||||
| M6-001 | done | sonnet | M6: Jobs | BullMQ + Valkey config | #324 | #273 closed |
|
||||
| M6-002 | done | sonnet | M6: Jobs | Queue service with typed jobs | #324 | #274 closed |
|
||||
| M6-003 | done | sonnet | M6: Jobs | Summarization → BullMQ | #324 | #275 closed |
|
||||
| M6-004 | done | sonnet | M6: Jobs | GC → BullMQ | #324 | #276 closed |
|
||||
| M6-005 | done | sonnet | M6: Jobs | Tier management → BullMQ | #324 | #277 closed |
|
||||
| M6-006 | done | sonnet | M6: Jobs | Admin jobs API | #325 | #278 closed |
|
||||
| M6-007 | done | sonnet | M6: Jobs | Job event logging | #325 | #279 closed |
|
||||
| M6-008 | done | sonnet | M6: Jobs | Verify jobs | #324 | #280 closed |
|
||||
| M7-001 | done | sonnet | M7: Channel Design | IChannelAdapter interface | #325 | #281 closed |
|
||||
| M7-002 | done | sonnet | M7: Channel Design | Channel message protocol | #325 | #282 closed |
|
||||
| M7-003 | done | sonnet | M7: Channel Design | Matrix integration design | #326 | #283 closed |
|
||||
| M7-004 | done | sonnet | M7: Channel Design | Conversation multiplexing | #326 | #284 closed |
|
||||
| M7-005 | done | sonnet | M7: Channel Design | Remote auth bridging | #326 | #285 closed |
|
||||
| M7-006 | done | sonnet | M7: Channel Design | Agent-to-agent via Matrix | #326 | #286 closed |
|
||||
| M7-007 | done | sonnet | M7: Channel Design | Multi-user isolation in Matrix | #326 | #287 closed |
|
||||
| M7-008 | done | sonnet | M7: Channel Design | channel-protocol.md published | #326 | #288 closed |
|
||||
|
||||
@@ -1,555 +0,0 @@
|
||||
# Storage & Queue Abstraction — Middleware Architecture
|
||||
|
||||
Design
|
||||
Status: Design (retrofit required)
|
||||
date: 2026-04-02
|
||||
context: Agents coupled directly to infrastructure backends, bypassing intended middleware layer
|
||||
|
||||
---
|
||||
|
||||
## The Problem
|
||||
|
||||
Current packages are **direct adapters**, not **middleware**:
|
||||
| Package | Current State | Intended Design |
|
||||
|---------|---------------|-----------------|
|
||||
| `@mosaic/queue` | `ioredis` hardcoded | Interface → BullMQ OR local-files |
|
||||
| `@mosaic/db` | Drizzle + Postgres hardcoded | Interface → Postgres OR SQLite OR JSON/MD |
|
||||
| `@mosaic/memory` | pgvector required | Interface → pgvector OR sqlite-vec OR keyword-search |
|
||||
|
||||
The gateway and TUI import these packages directly, which means they're coupled to specific infrastructure. Users cannot run Mosaic Stack without Postgres + Valkey.
|
||||
|
||||
## The Intended Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────────┐
|
||||
│ Gateway / TUI / CLI │
|
||||
│ (agnostic of storage backend, talks to middleware) │
|
||||
└───────────────────────────┬─────────────────────────────────────┘
|
||||
│
|
||||
          ┌───────────────────┼───────────────────┐
          │                   │                   │
          ▼                   ▼                   ▼
        Queue              Storage             Memory
          │                   │                   │
┌─────────┴─────────┬─────────┴──────────┬───────┴──────────────────────┐
│ BullMQ   │ Local  │ Postgres │ SQLite  │ pgvector │ sqlite-vec        │
│ (Valkey) │ (files)│          │ JSON/MD │          │ keyword           │
└──────────┴────────┴──────────┴─────────┴──────────┴───────────────────┘
|
||||
```
|
||||
|
||||
The gateway imports the interface, not the backend. At startup it reads config and instantiates the correct adapter.
|
||||
|
||||
## The Drift
|
||||
|
||||
```typescript
|
||||
// What should have happened:
|
||||
gateway/queue.service.ts → @mosaic/queue (interface) → queue.adapter.ts
|
||||
|
||||
// What actually happened:
|
||||
gateway/queue.service.ts → @mosaic/queue → ioredis (hardcoded)
|
||||
```
|
||||
|
||||
## The Current State Analysis
|
||||
|
||||
### `@mosaic/queue` (packages/queue/src/queue.ts)
|
||||
|
||||
```typescript
|
||||
import Redis from 'ioredis'; // ← Direct import of backend
|
||||
|
||||
export function createQueue(config?: QueueConfig): QueueHandle {
|
||||
const url = config?.url ?? process.env['VALKEY_URL'] ?? DEFAULT_VALKEY_URL;
|
||||
const redis = new Redis(url, { maxRetriesPerRequest: 3 });
|
||||
// ...queue ops directly on redis...
|
||||
}
|
||||
```
|
||||
|
||||
**Problem:** `ioredis` is imported in the package, not the adapter interface. Consumers cannot swap backends.
|
||||
|
||||
### `@mosaic/db` (packages/db/src/client.ts)
|
||||
|
||||
```typescript
|
||||
import { drizzle, type PostgresJsDatabase } from 'drizzle-orm/postgres-js';
|
||||
import postgres from 'postgres';
|
||||
|
||||
export function createDb(url?: string): DbHandle {
|
||||
const connectionString = url ?? process.env['DATABASE_URL'] ?? DEFAULT_DATABASE_URL;
|
||||
const sql = postgres(connectionString, { max: 20, idle_timeout: 30, connect_timeout: 5 });
|
||||
const db = drizzle(sql, { schema });
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
**Problem:** Drizzle + Postgres is hardcoded. No SQLite, JSON, or file-based options.
|
||||
|
||||
### `@mosaic/memory` (packages/memory/src/memory.ts)
|
||||
|
||||
```typescript
|
||||
import type { Db } from '@mosaic/db'; // ← Depends on Drizzle/PG
|
||||
|
||||
export function createMemory(db: Db): Memory {
|
||||
return {
|
||||
preferences: createPreferencesRepo(db),
|
||||
insights: createInsightsRepo(db),
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
**Problem:** Memory package is tightly coupled to `@mosaic/db` (which is Postgres-only). No alternative storage backends.
|
||||
|
||||
## The Target Interfaces
|
||||
|
||||
### Queue Interface
|
||||
|
||||
```typescript
|
||||
// packages/queue/src/types.ts
|
||||
export interface QueueAdapter {
|
||||
readonly name: string;
|
||||
|
||||
enqueue(queueName: string, payload: TaskPayload): Promise<void>;
|
||||
dequeue(queueName: string): Promise<TaskPayload | null>;
|
||||
length(queueName: string): Promise<number>;
|
||||
publish(channel: string, message: string): Promise<void>;
|
||||
subscribe(channel: string, handler: (message: string) => void): () => void;
|
||||
close(): Promise<void>;
|
||||
}
|
||||
|
||||
export interface TaskPayload {
|
||||
id: string;
|
||||
type: string;
|
||||
data: Record<string, unknown>;
|
||||
createdAt: string;
|
||||
}
|
||||
|
||||
export interface QueueConfig {
|
||||
type: 'bullmq' | 'local';
|
||||
url?: string; // For bullmq: Valkey/Redis URL
|
||||
dataDir?: string; // For local: directory for JSON persistence
|
||||
}
|
||||
```
|
||||
|
||||
### Storage Interface
|
||||
|
||||
```typescript
|
||||
// packages/storage/src/types.ts
|
||||
export interface StorageAdapter {
|
||||
readonly name: string;
|
||||
|
||||
  // Entity CRUD
  create<T>(collection: string, data: Partial<T>): Promise<T>;
  read<T>(collection: string, id: string): Promise<T | null>;
  update<T>(collection: string, id: string, data: Partial<T>): Promise<T | null>;
  delete(collection: string, id: string): Promise<boolean>;

  // Queries
  find<T>(collection: string, filter: Record<string, unknown>): Promise<T[]>;
  findOne<T>(collection: string, filter: Record<string, unknown>): Promise<T | null>;

  // Bulk operations
  createMany<T>(collection: string, items: Partial<T>[]): Promise<T[]>;
  updateMany<T>(collection: string, ids: string[], data: Partial<T>): Promise<number>;
  deleteMany(collection: string, ids: string[]): Promise<number>;
|
||||
|
||||
// Raw queries (for complex queries)
|
||||
query<T>(collection: string, query: string, params?: unknown[]): Promise<T[]>;
|
||||
|
||||
// Transaction support
|
||||
transaction<T>(fn: (tx: StorageTransaction) => Promise<T>): Promise<T>;
|
||||
|
||||
close(): Promise<void>;
|
||||
}
|
||||
|
||||
export interface StorageTransaction {
|
||||
commit(): Promise<void>;
|
||||
rollback(): Promise<void>;
|
||||
}
|
||||
|
||||
export interface StorageConfig {
|
||||
type: 'postgres' | 'sqlite' | 'files';
|
||||
url?: string; // For postgres
|
||||
path?: string; // For sqlite/files
|
||||
}
|
||||
```
|
||||
|
||||
### Memory Interface (Vector + Preferences)
|
||||
|
||||
```typescript
|
||||
// packages/memory/src/types.ts
|
||||
export interface MemoryAdapter {
|
||||
readonly name: string;
|
||||
|
||||
// Preferences (key-value storage)
|
||||
getPreference(userId: string, key: string): Promise<unknown | null>;
|
||||
setPreference(userId: string, key: string, value: unknown): Promise<void>;
|
||||
deletePreference(userId: string, key: string): Promise<boolean>;
|
||||
listPreferences(
|
||||
userId: string,
|
||||
category?: string,
|
||||
): Promise<Array<{ key: string; value: unknown }>>;
|
||||
|
||||
// Insights (with optional vector search)
|
||||
storeInsight(insight: NewInsight): Promise<Insight>;
|
||||
getInsight(id: string): Promise<Insight | null>;
|
||||
searchInsights(query: string, limit?: number, filter?: InsightFilter): Promise<SearchResult[]>;
|
||||
deleteInsight(id: string): Promise<boolean>;
|
||||
|
||||
// Embedding provider (optional, null = no vector search)
|
||||
readonly embedder?: EmbeddingProvider | null;
|
||||
|
||||
close(): Promise<void>;
|
||||
}
|
||||
|
||||
export interface NewInsight {
|
||||
id: string;
|
||||
userId: string;
|
||||
content: string;
|
||||
embedding?: number[]; // If embedder is available
|
||||
source: 'agent' | 'user' | 'summarization' | 'system';
|
||||
category: 'decision' | 'learning' | 'preference' | 'fact' | 'pattern' | 'general';
|
||||
relevanceScore: number;
|
||||
metadata?: Record<string, unknown>;
|
||||
createdAt: Date;
|
||||
decayedAt?: Date;
|
||||
}
|
||||
|
||||
export interface InsightFilter {
|
||||
userId?: string;
|
||||
category?: string;
|
||||
source?: string;
|
||||
minRelevance?: number;
|
||||
fromDate?: Date;
|
||||
toDate?: Date;
|
||||
}
|
||||
|
||||
export interface SearchResult {
|
||||
documentId: string;
|
||||
content: string;
|
||||
distance: number;
|
||||
metadata?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
export interface MemoryConfig {
|
||||
type: 'pgvector' | 'sqlite-vec' | 'keyword';
|
||||
storage: StorageAdapter;
|
||||
embedder?: EmbeddingProvider;
|
||||
}
|
||||
|
||||
export interface EmbeddingProvider {
|
||||
embed(text: string): Promise<number[]>;
|
||||
embedBatch(texts: string[]): Promise<number[][]>;
|
||||
readonly dimensions: number;
|
||||
}
|
||||
```
|
||||
|
||||
## Three Tiers
|
||||
|
||||
### Tier 1: Local (Zero Dependencies)
|
||||
|
||||
**Target:** Single user, single machine, no external services
|
||||
|
||||
| Component | Backend | Storage |
|
||||
| --------- | --------------------------------------------- | ------------ |
|
||||
| Queue | In-process + JSON files | `~/.mosaic/queue/` |
| Storage | SQLite (better-sqlite3) | `~/.mosaic/data.db` |
|
||||
| Memory | Keyword search | SQLite table |
|
||||
| Vector | None | N/A |
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
- `better-sqlite3` (bundled)
|
||||
- No Postgres, No Valkey, No pgvector
|
||||
|
||||
**Upgrade path:**
|
||||
|
||||
1. Run `mosaic gateway configure` → select "local" tier
|
||||
2. Gateway starts with SQLite database
|
||||
3. Optional: run `mosaic gateway upgrade --tier team` to migrate to Postgres
|
||||
|
||||
### Tier 2: Team (Postgres + Valkey)
|
||||
|
||||
**Target:** Multiple users, shared server, CI/CD environments
|
||||
|
||||
| Component | Backend | Storage |
|
||||
| --------- | -------------- | ------------------------------ |
|
||||
| Queue | BullMQ | Valkey |
|
||||
| Storage | Postgres | Shared PG instance |
|
||||
| Memory | pgvector | Postgres with vector extension |
|
||||
| Vector | LLM embeddings | Configured provider |
|
||||
|
||||
**Dependencies:**
|
||||
|
||||
- PostgreSQL 17+ with pgvector extension
|
||||
- Valkey (Redis-compatible)
|
||||
- LLM provider for embeddings
|
||||
|
||||
**Migration from Local → Team:**
|
||||
|
||||
1. `mosaic gateway backup` → creates dump of SQLite database
|
||||
2. `mosaic gateway upgrade --tier team` → restores to Postgres
|
||||
3. Queue replays from BullMQ (may need manual reconciliation for in-flight jobs)
|
||||
4. Memory embeddings regenerated if vector search was new
|
||||
|
||||
### Tier 3: Enterprise (Clustered)
|
||||
|
||||
**Target:** Large teams, multi-region, high availability
|
||||
|
||||
| Component | Backend | Storage |
|
||||
| --------- | --------------------------- | ----------------------------- |
|
||||
| Queue | BullMQ cluster | Multiple Valkey nodes |
|
||||
| Storage | Postgres cluster | Primary + replicas |
|
||||
| Memory | Dedicated vector DB | Qdrant, Pinecone, or pgvector |
|
||||
| Vector | Dedicated embedding service | Separate microservice |
|
||||
|
||||
## MarkdownDB Integration
|
||||
|
||||
For file-based storage, we use [MarkdownDB](https://markdowndb.com) to parse MD files into queryable data.
|
||||
|
||||
**What it provides:**
|
||||
|
||||
- Parses frontmatter (YAML/JSON/TOML)
|
||||
- Extracts links, tags, metadata
|
||||
- Builds index in JSON or SQLite
|
||||
- Queryable via SQL-like interface
|
||||
|
||||
**Usage in Mosaic:**
|
||||
|
||||
```typescript
|
||||
// Local tier with MD files for documents
|
||||
const storage = createStorageAdapter({
|
||||
type: 'files',
|
||||
path: path.join(mosaicHome, 'docs'),
|
||||
markdowndb: {
|
||||
parseFrontmatter: true,
|
||||
extractLinks: true,
|
||||
indexFile: 'index.json',
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Dream Mode — Memory Consolidation
|
||||
|
||||
Automated equivalent to Claude Code's "Dream: Memory Consolidation" cycle
|
||||
|
||||
**Trigger:** Every 24 hours (if 5+ sessions active)
|
||||
|
||||
**Phases:**
|
||||
|
||||
1. **Orient** — What happened, what's the current state
|
||||
- Scan recent session logs
|
||||
- Identify active tasks, missions, conversations
|
||||
- Calculate time window (last 24h)
|
||||
|
||||
2. **Gather** — Pull in relevant context
|
||||
- Load conversations, decisions, agent logs
|
||||
- Extract key interactions and outcomes
|
||||
- Identify patterns and learnings
|
||||
|
||||
3. **Consolidate** — Summarize and compress
|
||||
- Generate summary of the last 24h
|
||||
- Extract key decisions and their rationale
|
||||
- Identify recurring patterns
|
||||
- Compress verbose logs into concise insights
|
||||
|
||||
4. **Prune** — Archive and cleanup
|
||||
- Archive raw session files to dated folders
|
||||
- Delete redundant/temporary data
|
||||
- Update MEMORY.md with consolidated content
|
||||
- Update insight relevance scores
|
||||
|
||||
**Implementation:**
|
||||
|
||||
```typescript
|
||||
// In @mosaic/dream (new package)
|
||||
export async function runDreamCycle(config: DreamConfig): Promise<DreamResult> {
|
||||
const memory = await loadMemoryAdapter(config.storage);
|
||||
|
||||
// Orient
|
||||
const sessions = await memory.getRecentSessions(24 * 60 * 60 * 1000);
|
||||
if (sessions.length < 5) return { skipped: true, reason: 'insufficient_sessions' };
|
||||
|
||||
// Gather
|
||||
const context = await gatherContext(memory, sessions);
|
||||
|
||||
// Consolidate
|
||||
const consolidated = await consolidateWithLLM(context, config.llm);
|
||||
|
||||
// Prune
|
||||
await pruneArchivedData(memory, config.retention);
|
||||
|
||||
// Store consolidated insights
|
||||
await memory.storeInsights(consolidated.insights);
|
||||
|
||||
return {
|
||||
sessionsProcessed: sessions.length,
|
||||
insightsCreated: consolidated.insights.length,
|
||||
bytesPruned: consolidated.bytesRemoved,
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Retrofit Plan
|
||||
|
||||
### Phase 1: Interface Extraction (2-3 days)
|
||||
|
||||
**Goal:** Define interfaces without changing existing behavior
|
||||
|
||||
1. Create `packages/queue/src/types.ts` with `QueueAdapter` interface
|
||||
2. Create `packages/storage/src/types.ts` with `StorageAdapter` interface
|
||||
3. Create `packages/memory/src/types.ts` with `MemoryAdapter` interface (refactor existing)
|
||||
4. Add adapter registry pattern to each package
|
||||
5. No breaking changes — existing code continues to work
|
||||
|
||||
### Phase 2: Refactor Existing to Adapters (3-5 days)
|
||||
|
||||
**Goal:** Move existing implementations behind adapters
|
||||
|
||||
#### 2.1 Queue Refactor
|
||||
|
||||
1. Rename `packages/queue/src/queue.ts` → `packages/queue/src/adapters/bullmq.ts`
|
||||
2. Create `packages/queue/src/index.ts` to export factory function
|
||||
3. Factory function reads config, instantiates correct adapter
|
||||
4. Update gateway imports to use factory
|
||||
|
||||
#### 2.2 Storage Refactor
|
||||
|
||||
1. Create `packages/storage/` (new package)
|
||||
2. Move Drizzle logic to `packages/storage/src/adapters/postgres.ts`
|
||||
3. Create SQLite adapter in `packages/storage/src/adapters/sqlite.ts`
|
||||
4. Update gateway to use storage factory
|
||||
5. Deprecate direct `@mosaic/db` imports
|
||||
|
||||
#### 2.3 Memory Refactor
|
||||
|
||||
1. Extract existing logic to `packages/memory/src/adapters/pgvector.ts`
|
||||
2. Create keyword adapter in `packages/memory/src/adapters/keyword.ts`
|
||||
3. Update vector-store.ts to be adapter-agnostic
|
||||
|
||||
### Phase 3: Local Tier Implementation (2-3 days)
|
||||
|
||||
**Goal:** Zero-dependency baseline
|
||||
|
||||
1. Implement `packages/queue/src/adapters/local.ts` (in-process + JSON persistence)
|
||||
2. Implement `packages/storage/src/adapters/files.ts` (JSON + MD via MarkdownDB)
|
||||
3. Implement `packages/memory/src/adapters/keyword.ts` (TF-IDF search)
|
||||
4. Add `packages/dream/` for consolidation cycle
|
||||
5. Wire up local tier in gateway startup
|
||||
|
||||
### Phase 4: Configuration System (1-2 days)
|
||||
|
||||
**Goal:** Runtime backend selection
|
||||
|
||||
1. Create `packages/config/src/storage.ts` for storage configuration
|
||||
2. Add `mosaic.config.ts` schema with storage tier settings
|
||||
3. Update gateway to read config on startup
|
||||
4. Add `mosaic gateway configure` CLI command
|
||||
5. Add tier migration commands (`mosaic gateway upgrade`)
|
||||
|
||||
### Phase 5: Testing & Documentation (2-3 days)
|
||||
|
||||
1. Unit tests for each adapter
|
||||
2. Integration tests for factory pattern
|
||||
3. Migration tests (local → team)
|
||||
4. Update README and architecture docs
|
||||
5. Add configuration guide
|
||||
|
||||
---
|
||||
|
||||
## File Changes Summary
|
||||
|
||||
### New Files
|
||||
|
||||
```
|
||||
packages/
|
||||
├── config/
|
||||
│ └── src/
|
||||
│ ├── storage.ts # Storage config schema
|
||||
│ └── index.ts
|
||||
├── dream/ # NEW: Dream mode consolidation
|
||||
│ ├── src/
|
||||
│ │ ├── index.ts
|
||||
│ │ ├── orient.ts
|
||||
│ │ ├── gather.ts
|
||||
│ │ ├── consolidate.ts
|
||||
│ │ └── prune.ts
|
||||
│ └── package.json
|
||||
├── queue/
|
||||
│ └── src/
|
||||
│ ├── types.ts # NEW: QueueAdapter interface
|
||||
│ ├── index.ts # NEW: Factory function
|
||||
│ └── adapters/
|
||||
│ ├── bullmq.ts # MOVED from queue.ts
|
||||
│ └── local.ts # NEW: In-process adapter
|
||||
├── storage/ # NEW: Storage abstraction
|
||||
│ ├── src/
|
||||
│ │ ├── types.ts # StorageAdapter interface
|
||||
│ │ ├── index.ts # Factory function
|
||||
│ │ └── adapters/
|
||||
│ │ ├── postgres.ts # MOVED from @mosaic/db
|
||||
│ │ ├── sqlite.ts # NEW: SQLite adapter
|
||||
│ │ └── files.ts # NEW: JSON/MD adapter
|
||||
│ └── package.json
|
||||
└── memory/
|
||||
└── src/
|
||||
├── types.ts # UPDATED: MemoryAdapter interface
|
||||
├── index.ts # UPDATED: Factory function
|
||||
└── adapters/
|
||||
├── pgvector.ts # EXTRACTED from existing code
|
||||
├── sqlite-vec.ts # NEW: SQLite with vectors
|
||||
└── keyword.ts # NEW: TF-IDF search
|
||||
```
|
||||
|
||||
### Modified Files
|
||||
|
||||
```
|
||||
packages/
|
||||
├── db/ # DEPRECATED: Logic moved to storage adapters
|
||||
├── queue/
|
||||
│ └── src/
|
||||
│ └── queue.ts # → adapters/bullmq.ts
|
||||
├── memory/
|
||||
│ ├── src/
|
||||
│ │ ├── memory.ts # → use factory
|
||||
│ │ ├── insights.ts # → use factory
|
||||
│ │ └── preferences.ts # → use factory
|
||||
│ └── package.json # Remove pgvector from dependencies
|
||||
└── gateway/
|
||||
└── src/
|
||||
├── database/
|
||||
│ └── database.module.ts # Update to use storage factory
|
||||
├── memory/
|
||||
│ └── memory.module.ts # Update to use memory factory
|
||||
└── queue/
|
||||
└── queue.module.ts # Update to use queue factory
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Breaking Changes
|
||||
|
||||
1. **`@mosaic/db`** → **`@mosaic/storage`** (with migration guide)
|
||||
2. Direct `ioredis` imports → Use `@mosaic/queue` factory
|
||||
3. Direct `pgvector` queries → Use `@mosaic/memory` factory
|
||||
4. Gateway startup now requires storage config (defaults to local)
|
||||
|
||||
## Non-Breaking Migration Path
|
||||
|
||||
1. Existing deployments with Postgres/Valkey continue to work (default config)
|
||||
2. New deployments can choose local tier
|
||||
3. Migration commands available when ready to upgrade
|
||||
|
||||
---
|
||||
|
||||
## Success Criteria
|
||||
|
||||
- [ ] Local tier runs with zero external dependencies
|
||||
- [ ] All three tiers (local, team, enterprise) work correctly
|
||||
- [ ] Factory pattern correctly selects backend at runtime
|
||||
- [ ] Migration from local → team preserves all data
|
||||
- [ ] Dream mode consolidates 24h of sessions
|
||||
- [ ] Documentation covers all three tiers and migration paths
|
||||
- [ ] All existing tests pass
|
||||
- [ ] New adapters have >80% coverage
|
||||
@@ -1,6 +0,0 @@
|
||||
{
|
||||
"tier": "local",
|
||||
"storage": { "type": "sqlite", "path": ".mosaic/data.db" },
|
||||
"queue": { "type": "local", "dataDir": ".mosaic/queue" },
|
||||
"memory": { "type": "keyword" }
|
||||
}
|
||||
@@ -23,10 +23,5 @@
|
||||
"turbo": "^2.0.0",
|
||||
"typescript": "^5.8.0",
|
||||
"vitest": "^2.0.0"
|
||||
},
|
||||
"pnpm": {
|
||||
"onlyBuiltDependencies": [
|
||||
"better-sqlite3"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/agent",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/agent"
|
||||
},
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"exports": {
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/auth",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/auth"
|
||||
},
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/brain",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/brain"
|
||||
},
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"exports": {
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/cli",
|
||||
"version": "0.0.10",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/cli"
|
||||
},
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
@@ -27,7 +22,6 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@clack/prompts": "^0.9.0",
|
||||
"@mosaic/config": "workspace:^",
|
||||
"@mosaic/mosaic": "workspace:^",
|
||||
"@mosaic/prdy": "workspace:^",
|
||||
"@mosaic/quality-rails": "workspace:^",
|
||||
|
||||
@@ -2,12 +2,11 @@
|
||||
|
||||
import { createRequire } from 'module';
|
||||
import { Command } from 'commander';
|
||||
import { registerQualityRails } from '@mosaic/quality-rails';
|
||||
import { createQualityRailsCli } from '@mosaic/quality-rails';
|
||||
import { registerAgentCommand } from './commands/agent.js';
|
||||
import { registerMissionCommand } from './commands/mission.js';
|
||||
// prdy is registered via launch.ts
|
||||
import { registerLaunchCommands } from './commands/launch.js';
|
||||
import { registerGatewayCommand } from './commands/gateway.js';
|
||||
|
||||
const _require = createRequire(import.meta.url);
|
||||
const CLI_VERSION: string = (_require('../package.json') as { version: string }).version;
|
||||
@@ -291,10 +290,6 @@ sessionsCmd
|
||||
}
|
||||
});
|
||||
|
||||
// ─── gateway ──────────────────────────────────────────────────────────
|
||||
|
||||
registerGatewayCommand(program);
|
||||
|
||||
// ─── agent ─────────────────────────────────────────────────────────────
|
||||
|
||||
registerAgentCommand(program);
|
||||
@@ -305,7 +300,11 @@ registerMissionCommand(program);
|
||||
|
||||
// ─── quality-rails ──────────────────────────────────────────────────────
|
||||
|
||||
registerQualityRails(program);
|
||||
const qrWrapper = createQualityRailsCli();
|
||||
const qrCmd = qrWrapper.commands.find((c) => c.name() === 'quality-rails');
|
||||
if (qrCmd !== undefined) {
|
||||
program.addCommand(qrCmd as unknown as Command);
|
||||
}
|
||||
|
||||
// ─── update ─────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
@@ -1,152 +0,0 @@
|
||||
import type { Command } from 'commander';
|
||||
import {
|
||||
getDaemonPid,
|
||||
readMeta,
|
||||
startDaemon,
|
||||
stopDaemon,
|
||||
waitForHealth,
|
||||
} from './gateway/daemon.js';
|
||||
|
||||
/** Raw option values from the parent `gateway` command (commander output). */
interface GatewayParentOpts {
  host: string;
  port: string; // commander delivers option values as strings; parsed in resolveOpts()
  token?: string; // admin API token; may fall back to the daemon meta file
}
|
||||
|
||||
function resolveOpts(raw: GatewayParentOpts): { host: string; port: number; token?: string } {
|
||||
const meta = readMeta();
|
||||
return {
|
||||
host: raw.host ?? meta?.host ?? 'localhost',
|
||||
port: parseInt(raw.port, 10) || meta?.port || 4000,
|
||||
token: raw.token ?? meta?.adminToken,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Register the `gateway` command group on the CLI program.
 *
 * Adds a parent `gateway` command (which prints help when run bare) plus the
 * subcommands: install, start, stop, restart, status, config, logs, and
 * uninstall. The shared `--host/--port/--token` options live on the parent
 * and are merged with the daemon meta file via resolveOpts().
 */
export function registerGatewayCommand(program: Command): void {
  const gw = program
    .command('gateway')
    .description('Manage the Mosaic gateway daemon')
    .helpOption('--help', 'Display help')
    .option('-h, --host <host>', 'Gateway host', 'localhost')
    .option('-p, --port <port>', 'Gateway port', '4000')
    .option('-t, --token <token>', 'Admin API token')
    .action(() => {
      // Bare `mosaic gateway` prints the subcommand help.
      gw.outputHelp();
    });

  // ─── install ────────────────────────────────────────────────────────────

  gw.command('install')
    .description('Install and configure the gateway daemon')
    .option('--skip-install', 'Skip npm package installation (use local build)')
    .action(async (cmdOpts: { skipInstall?: boolean }) => {
      const opts = resolveOpts(gw.opts() as GatewayParentOpts);
      // Subcommand implementations are imported lazily so only the invoked
      // subcommand's module is loaded.
      const { runInstall } = await import('./gateway/install.js');
      await runInstall({ ...opts, skipInstall: cmdOpts.skipInstall });
    });

  // ─── start ──────────────────────────────────────────────────────────────

  gw.command('start')
    .description('Start the gateway daemon')
    .action(async () => {
      const opts = resolveOpts(gw.opts() as GatewayParentOpts);
      try {
        const pid = startDaemon();
        console.log(`Gateway started (PID ${pid.toString()})`);
        console.log('Waiting for health...');
        // A timed-out health check is non-fatal: the daemon may still be booting.
        const healthy = await waitForHealth(opts.host, opts.port);
        if (healthy) {
          console.log(`Gateway ready at http://${opts.host}:${opts.port.toString()}`);
        } else {
          console.warn('Gateway started but health check timed out. Check logs.');
        }
      } catch (err) {
        console.error(err instanceof Error ? err.message : String(err));
        process.exit(1);
      }
    });

  // ─── stop ───────────────────────────────────────────────────────────────

  gw.command('stop')
    .description('Stop the gateway daemon')
    .action(async () => {
      try {
        await stopDaemon();
        console.log('Gateway stopped.');
      } catch (err) {
        console.error(err instanceof Error ? err.message : String(err));
        process.exit(1);
      }
    });

  // ─── restart ────────────────────────────────────────────────────────────

  gw.command('restart')
    .description('Restart the gateway daemon')
    .action(async () => {
      const opts = resolveOpts(gw.opts() as GatewayParentOpts);
      // Only attempt a stop when a daemon PID is actually recorded.
      const pid = getDaemonPid();
      if (pid !== null) {
        console.log('Stopping gateway...');
        await stopDaemon();
      }
      console.log('Starting gateway...');
      try {
        const newPid = startDaemon();
        console.log(`Gateway started (PID ${newPid.toString()})`);
        const healthy = await waitForHealth(opts.host, opts.port);
        if (healthy) {
          console.log(`Gateway ready at http://${opts.host}:${opts.port.toString()}`);
        } else {
          console.warn('Gateway started but health check timed out. Check logs.');
        }
      } catch (err) {
        console.error(err instanceof Error ? err.message : String(err));
        process.exit(1);
      }
    });

  // ─── status ─────────────────────────────────────────────────────────────

  gw.command('status')
    .description('Show gateway daemon status and health')
    .action(async () => {
      const opts = resolveOpts(gw.opts() as GatewayParentOpts);
      const { runStatus } = await import('./gateway/status.js');
      await runStatus(opts);
    });

  // ─── config ─────────────────────────────────────────────────────────────

  gw.command('config')
    .description('View or modify gateway configuration')
    .option('--set <KEY=VALUE>', 'Set a configuration value')
    .option('--unset <KEY>', 'Remove a configuration key')
    .option('--edit', 'Open config in $EDITOR')
    .action(async (cmdOpts: { set?: string; unset?: string; edit?: boolean }) => {
      const { runConfig } = await import('./gateway/config.js');
      await runConfig(cmdOpts);
    });

  // ─── logs ───────────────────────────────────────────────────────────────

  gw.command('logs')
    .description('View gateway daemon logs')
    .option('-f, --follow', 'Follow log output')
    .option('-n, --lines <count>', 'Number of lines to show', '50')
    .action(async (cmdOpts: { follow?: boolean; lines?: string }) => {
      const { runLogs } = await import('./gateway/logs.js');
      runLogs({ follow: cmdOpts.follow, lines: parseInt(cmdOpts.lines ?? '50', 10) });
    });

  // ─── uninstall ──────────────────────────────────────────────────────────

  gw.command('uninstall')
    .description('Uninstall the gateway daemon and optionally remove data')
    .action(async () => {
      const { runUninstall } = await import('./gateway/uninstall.js');
      await runUninstall();
    });
}
|
||||
@@ -1,143 +0,0 @@
|
||||
import { existsSync, readFileSync, writeFileSync } from 'node:fs';
|
||||
import { execSync } from 'node:child_process';
|
||||
import { ENV_FILE, getDaemonPid, readMeta, META_FILE, ensureDirs } from './daemon.js';
|
||||
|
||||
// Keys that should be masked in output
|
||||
const SECRET_KEYS = new Set([
|
||||
'BETTER_AUTH_SECRET',
|
||||
'ANTHROPIC_API_KEY',
|
||||
'OPENAI_API_KEY',
|
||||
'ZAI_API_KEY',
|
||||
'OPENROUTER_API_KEY',
|
||||
'DISCORD_BOT_TOKEN',
|
||||
'TELEGRAM_BOT_TOKEN',
|
||||
]);
|
||||
|
||||
function maskValue(key: string, value: string): string {
|
||||
if (SECRET_KEYS.has(key) && value.length > 8) {
|
||||
return value.slice(0, 4) + '…' + value.slice(-4);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
function parseEnvFile(): Map<string, string> {
|
||||
const map = new Map<string, string>();
|
||||
if (!existsSync(ENV_FILE)) return map;
|
||||
|
||||
const lines = readFileSync(ENV_FILE, 'utf-8').split('\n');
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed || trimmed.startsWith('#')) continue;
|
||||
const eqIdx = trimmed.indexOf('=');
|
||||
if (eqIdx === -1) continue;
|
||||
map.set(trimmed.slice(0, eqIdx), trimmed.slice(eqIdx + 1));
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
function writeEnvFile(entries: Map<string, string>): void {
|
||||
ensureDirs();
|
||||
const lines: string[] = [];
|
||||
for (const [key, value] of entries) {
|
||||
lines.push(`${key}=${value}`);
|
||||
}
|
||||
writeFileSync(ENV_FILE, lines.join('\n') + '\n', { mode: 0o600 });
|
||||
}
|
||||
|
||||
/** Flags accepted by `mosaic gateway config`. */
interface ConfigOpts {
  set?: string; // KEY=VALUE pair to write
  unset?: string; // key to remove
  edit?: boolean; // open the env file in $EDITOR
}
|
||||
|
||||
/**
 * Implement `mosaic gateway config`: set, unset, or edit env-file entries,
 * or (with no flags) print the current configuration.
 *
 * Exits the process with code 1 on malformed --set input, unknown --unset
 * key, or --edit with no config file present. After any successful change
 * the user is prompted to restart the daemon via promptRestart().
 */
export async function runConfig(opts: ConfigOpts): Promise<void> {
  // Set a value
  if (opts.set) {
    const eqIdx = opts.set.indexOf('=');
    if (eqIdx === -1) {
      console.error('Usage: mosaic gateway config --set KEY=VALUE');
      process.exit(1);
    }
    // Split on the first '=' only — values may themselves contain '='.
    const key = opts.set.slice(0, eqIdx);
    const value = opts.set.slice(eqIdx + 1);
    const entries = parseEnvFile();
    entries.set(key, value);
    writeEnvFile(entries);
    // Secrets are masked in the confirmation output.
    console.log(`Set ${key}=${maskValue(key, value)}`);
    promptRestart();
    return;
  }

  // Unset a value
  if (opts.unset) {
    const entries = parseEnvFile();
    if (!entries.has(opts.unset)) {
      console.error(`Key not found: ${opts.unset}`);
      process.exit(1);
    }
    entries.delete(opts.unset);
    writeEnvFile(entries);
    console.log(`Removed ${opts.unset}`);
    promptRestart();
    return;
  }

  // Open in editor
  if (opts.edit) {
    if (!existsSync(ENV_FILE)) {
      console.error(`No config file found at ${ENV_FILE}`);
      console.error('Run `mosaic gateway install` first.');
      process.exit(1);
    }
    const editor = process.env['EDITOR'] ?? process.env['VISUAL'] ?? 'vi';
    try {
      // Blocks until the editor exits; stdio is inherited for interactivity.
      execSync(`${editor} "${ENV_FILE}"`, { stdio: 'inherit' });
      promptRestart();
    } catch {
      console.error('Editor exited with error.');
    }
    return;
  }

  // Default: show current config
  showConfig();
}
|
||||
|
||||
/**
 * Print the current gateway configuration: file locations, all .env entries
 * (secret values masked), and the admin token from meta.json when present.
 */
function showConfig(): void {
  if (!existsSync(ENV_FILE)) {
    console.log('No gateway configuration found.');
    console.log('Run `mosaic gateway install` to set up.');
    return;
  }

  const entries = parseEnvFile();
  const meta = readMeta();

  console.log('Mosaic Gateway Configuration');
  console.log('────────────────────────────');
  console.log(` Config file: ${ENV_FILE}`);
  console.log(` Meta file: ${META_FILE}`);
  console.log();

  if (entries.size === 0) {
    console.log(' (empty)');
    return;
  }

  // Pad keys to the longest key so values line up in a column.
  const maxKeyLen = Math.max(...[...entries.keys()].map((k) => k.length));
  for (const [key, value] of entries) {
    const padding = ' '.repeat(maxKeyLen - key.length);
    console.log(` ${key}${padding} ${maskValue(key, value)}`);
  }

  if (meta?.adminToken) {
    console.log();
    // NOTE(review): masking relies on maskValue treating the literal key
    // 'token' as secret — 'token' is not in SECRET_KEYS, so verify maskValue
    // actually masks it; otherwise the admin token prints in full.
    console.log(` Admin token: ${maskValue('token', meta.adminToken)}`);
  }
}
|
||||
|
||||
function promptRestart(): void {
|
||||
if (getDaemonPid() !== null) {
|
||||
console.log('\nGateway is running — restart to apply changes: mosaic gateway restart');
|
||||
}
|
||||
}
|
||||
@@ -1,253 +0,0 @@
|
||||
import { spawn, execSync } from 'node:child_process';
|
||||
import {
|
||||
existsSync,
|
||||
mkdirSync,
|
||||
readFileSync,
|
||||
writeFileSync,
|
||||
unlinkSync,
|
||||
openSync,
|
||||
constants,
|
||||
} from 'node:fs';
|
||||
import { join, resolve } from 'node:path';
|
||||
import { homedir } from 'node:os';
|
||||
import { createRequire } from 'node:module';
|
||||
|
||||
// ─── Paths ──────────────────────────────────────────────────────────────────

// Root directory for all gateway state; MOSAIC_GATEWAY_HOME overrides the
// default ~/.config/mosaic/gateway (useful for tests and containers).
export const GATEWAY_HOME = resolve(
  process.env['MOSAIC_GATEWAY_HOME'] ?? join(homedir(), '.config', 'mosaic', 'gateway'),
);
export const PID_FILE = join(GATEWAY_HOME, 'daemon.pid'); // running daemon's PID
export const LOG_DIR = join(GATEWAY_HOME, 'logs');
export const LOG_FILE = join(LOG_DIR, 'gateway.log'); // daemon stdout+stderr
export const ENV_FILE = join(GATEWAY_HOME, '.env'); // KEY=VALUE runtime env (secrets)
export const META_FILE = join(GATEWAY_HOME, 'meta.json'); // install metadata (GatewayMeta)
|
||||
|
||||
// ─── Meta ───────────────────────────────────────────────────────────────────

/** Shape of META_FILE — metadata recorded by `mosaic gateway install`. */
export interface GatewayMeta {
  // Installed @mosaicstack/gateway package version ('unknown' if undetectable)
  version: string;
  // ISO-8601 timestamp of installation
  installedAt: string;
  // Absolute path to the gateway's dist/main.js
  entryPoint: string;
  // Plaintext admin API token saved during bootstrap (optional)
  adminToken?: string;
  host: string;
  port: number;
}
|
||||
|
||||
export function readMeta(): GatewayMeta | null {
|
||||
if (!existsSync(META_FILE)) return null;
|
||||
try {
|
||||
return JSON.parse(readFileSync(META_FILE, 'utf-8')) as GatewayMeta;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function writeMeta(meta: GatewayMeta): void {
|
||||
ensureDirs();
|
||||
writeFileSync(META_FILE, JSON.stringify(meta, null, 2), { mode: 0o600 });
|
||||
}
|
||||
|
||||
// ─── Directories ────────────────────────────────────────────────────────────

/** Create GATEWAY_HOME and LOG_DIR if missing (mode 0700 — private to the user). */
export function ensureDirs(): void {
  mkdirSync(GATEWAY_HOME, { recursive: true, mode: 0o700 });
  mkdirSync(LOG_DIR, { recursive: true, mode: 0o700 });
}
|
||||
|
||||
// ─── PID management ─────────────────────────────────────────────────────────
|
||||
|
||||
export function readPid(): number | null {
|
||||
if (!existsSync(PID_FILE)) return null;
|
||||
try {
|
||||
const pid = parseInt(readFileSync(PID_FILE, 'utf-8').trim(), 10);
|
||||
return Number.isNaN(pid) ? null : pid;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function isRunning(pid: number): boolean {
|
||||
try {
|
||||
process.kill(pid, 0);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export function getDaemonPid(): number | null {
|
||||
const pid = readPid();
|
||||
if (pid === null) return null;
|
||||
return isRunning(pid) ? pid : null;
|
||||
}
|
||||
|
||||
// ─── Entry point resolution ─────────────────────────────────────────────────
|
||||
|
||||
export function resolveGatewayEntry(): string {
|
||||
// Check meta.json for custom entry point
|
||||
const meta = readMeta();
|
||||
if (meta?.entryPoint && existsSync(meta.entryPoint)) {
|
||||
return meta.entryPoint;
|
||||
}
|
||||
|
||||
// Try to resolve from globally installed @mosaicstack/gateway
|
||||
try {
|
||||
const req = createRequire(import.meta.url);
|
||||
const pkgPath = req.resolve('@mosaicstack/gateway/package.json');
|
||||
const mainEntry = join(resolve(pkgPath, '..'), 'dist', 'main.js');
|
||||
if (existsSync(mainEntry)) return mainEntry;
|
||||
} catch {
|
||||
// Not installed globally via @mosaicstack
|
||||
}
|
||||
|
||||
// Try @mosaic/gateway (workspace / dev)
|
||||
try {
|
||||
const req = createRequire(import.meta.url);
|
||||
const pkgPath = req.resolve('@mosaic/gateway/package.json');
|
||||
const mainEntry = join(resolve(pkgPath, '..'), 'dist', 'main.js');
|
||||
if (existsSync(mainEntry)) return mainEntry;
|
||||
} catch {
|
||||
// Not available
|
||||
}
|
||||
|
||||
throw new Error('Cannot find gateway entry point. Run `mosaic gateway install` first.');
|
||||
}
|
||||
|
||||
// ─── Start / Stop / Health ──────────────────────────────────────────────────

/**
 * Spawn the gateway as a detached background process and record its PID.
 *
 * Environment: the parent's env merged with KEY=VALUE lines from ENV_FILE
 * (ENV_FILE wins on conflict). stdout/stderr are appended to LOG_FILE.
 *
 * @returns the child PID.
 * @throws Error when a daemon is already running, the entry point cannot be
 *         resolved, or the spawn yields no PID.
 */
export function startDaemon(): number {
  const running = getDaemonPid();
  if (running !== null) {
    throw new Error(`Gateway is already running (PID ${running.toString()})`);
  }

  ensureDirs();
  const entryPoint = resolveGatewayEntry();

  // Load env vars from gateway .env
  const env: Record<string, string> = { ...process.env } as Record<string, string>;
  if (existsSync(ENV_FILE)) {
    for (const line of readFileSync(ENV_FILE, 'utf-8').split('\n')) {
      const trimmed = line.trim();
      if (!trimmed || trimmed.startsWith('#')) continue;
      // eqIdx > 0 also rejects lines starting with '=' (empty key).
      const eqIdx = trimmed.indexOf('=');
      if (eqIdx > 0) env[trimmed.slice(0, eqIdx)] = trimmed.slice(eqIdx + 1);
    }
  }

  // Append-mode fd shared by stdout and stderr so logs interleave in order.
  const logFd = openSync(LOG_FILE, constants.O_WRONLY | constants.O_CREAT | constants.O_APPEND);

  const child = spawn('node', [entryPoint], {
    detached: true, // own process group — survives this CLI exiting
    stdio: ['ignore', logFd, logFd],
    env,
    cwd: GATEWAY_HOME,
  });

  if (!child.pid) {
    throw new Error('Failed to spawn gateway process');
  }

  writeFileSync(PID_FILE, child.pid.toString(), { mode: 0o600 });
  // Drop the parent's reference so the event loop can exit.
  child.unref();

  return child.pid;
}
|
||||
|
||||
/**
 * Gracefully stop the daemon: SIGTERM, poll every 250ms until it exits or
 * timeoutMs elapses, then SIGKILL as a last resort. The PID file is removed
 * in either case.
 *
 * @throws Error when no daemon is running.
 */
export async function stopDaemon(timeoutMs = 10_000): Promise<void> {
  const pid = getDaemonPid();
  if (pid === null) {
    throw new Error('Gateway is not running');
  }

  process.kill(pid, 'SIGTERM');

  // Poll for exit
  const start = Date.now();
  while (Date.now() - start < timeoutMs) {
    if (!isRunning(pid)) {
      cleanPidFile();
      return;
    }
    await sleep(250);
  }

  // Force kill
  try {
    process.kill(pid, 'SIGKILL');
  } catch {
    // Already dead
  }
  cleanPidFile();
}
|
||||
|
||||
/** Best-effort removal of the PID file; a missing file is not an error. */
function cleanPidFile(): void {
  try {
    unlinkSync(PID_FILE);
  } catch {
    // Ignore
  }
}
|
||||
|
||||
export async function waitForHealth(
|
||||
host: string,
|
||||
port: number,
|
||||
timeoutMs = 30_000,
|
||||
): Promise<boolean> {
|
||||
const start = Date.now();
|
||||
let delay = 500;
|
||||
|
||||
while (Date.now() - start < timeoutMs) {
|
||||
try {
|
||||
const res = await fetch(`http://${host}:${port.toString()}/health`);
|
||||
if (res.ok) return true;
|
||||
} catch {
|
||||
// Not ready yet
|
||||
}
|
||||
await sleep(delay);
|
||||
delay = Math.min(delay * 1.5, 3000);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function sleep(ms: number): Promise<void> {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
// ─── npm install helper ─────────────────────────────────────────────────────

/**
 * Globally install the latest @mosaicstack/gateway via npm (inherits the
 * terminal for npm's progress output; 2-minute timeout).
 * Throws on non-zero npm exit — callers surface the failure.
 */
export function installGatewayPackage(): void {
  console.log('Installing @mosaicstack/gateway...');
  execSync('npm install -g @mosaicstack/gateway@latest', {
    stdio: 'inherit',
    timeout: 120_000,
  });
}
|
||||
|
||||
/**
 * Globally uninstall @mosaicstack/gateway. Failures are downgraded to a
 * warning so the broader uninstall flow can continue.
 */
export function uninstallGatewayPackage(): void {
  try {
    execSync('npm uninstall -g @mosaicstack/gateway', {
      stdio: 'inherit',
      timeout: 60_000,
    });
  } catch {
    console.warn('Warning: npm uninstall may not have completed cleanly.');
  }
}
|
||||
|
||||
/**
 * Query npm for the globally installed @mosaicstack/gateway version via
 * `npm ls --json`. Returns null when the package is absent or npm
 * fails/times out (npm ls exits non-zero for missing packages, which lands
 * in the catch).
 */
export function getInstalledGatewayVersion(): string | null {
  try {
    const output = execSync('npm ls -g @mosaicstack/gateway --json --depth=0', {
      encoding: 'utf-8',
      timeout: 15_000,
      // Capture all streams so npm noise doesn't hit the user's terminal.
      stdio: ['pipe', 'pipe', 'pipe'],
    });
    const data = JSON.parse(output) as {
      dependencies?: { '@mosaicstack/gateway'?: { version?: string } };
    };
    return data.dependencies?.['@mosaicstack/gateway']?.version ?? null;
  } catch {
    return null;
  }
}
|
||||
@@ -1,259 +0,0 @@
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { createInterface } from 'node:readline';
|
||||
import type { GatewayMeta } from './daemon.js';
|
||||
import {
|
||||
ENV_FILE,
|
||||
GATEWAY_HOME,
|
||||
ensureDirs,
|
||||
installGatewayPackage,
|
||||
readMeta,
|
||||
resolveGatewayEntry,
|
||||
startDaemon,
|
||||
waitForHealth,
|
||||
writeMeta,
|
||||
getInstalledGatewayVersion,
|
||||
} from './daemon.js';
|
||||
|
||||
/** Options for `mosaic gateway install`. */
interface InstallOpts {
  host: string;
  // Port from CLI; 4000 appears to be the default sentinel (see doInstall)
  port: number;
  // Skip the global npm install step (dev/workspace installs)
  skipInstall?: boolean;
}
|
||||
|
||||
function prompt(rl: ReturnType<typeof createInterface>, question: string): Promise<string> {
|
||||
return new Promise((resolve) => rl.question(question, resolve));
|
||||
}
|
||||
|
||||
/**
 * Entry point for `mosaic gateway install`: runs the interactive install,
 * guaranteeing the readline interface is closed even if the install throws
 * (otherwise the open stdin handle keeps the process alive).
 */
export async function runInstall(opts: InstallOpts): Promise<void> {
  const rl = createInterface({ input: process.stdin, output: process.stdout });
  try {
    await doInstall(rl, opts);
  } finally {
    rl.close();
  }
}
|
||||
|
||||
/**
 * Interactive installation flow:
 *   1. npm-install the gateway package (unless --skip-install),
 *   2. prompt for tier/port/URLs/keys,
 *   3. write .env and mosaic.config.json,
 *   4. record meta.json,
 *   5. start the daemon,
 *   6. wait for /health,
 *   7. bootstrap the first admin user.
 * Unrecoverable steps print an error and return early (no throw).
 */
async function doInstall(rl: ReturnType<typeof createInterface>, opts: InstallOpts): Promise<void> {
  // Check existing installation
  const existing = readMeta();
  if (existing) {
    const answer = await prompt(
      rl,
      `Gateway already installed (v${existing.version}). Reinstall? [y/N] `,
    );
    if (answer.toLowerCase() !== 'y') {
      console.log('Aborted.');
      return;
    }
  }

  // Step 1: Install npm package
  if (!opts.skipInstall) {
    installGatewayPackage();
  }

  ensureDirs();

  // Step 2: Collect configuration
  console.log('\n─── Gateway Configuration ───\n');

  // Tier selection
  console.log('Storage tier:');
  console.log(' 1. Local (embedded database, no dependencies)');
  console.log(' 2. Team (PostgreSQL + Valkey required)');
  const tierAnswer = (await prompt(rl, 'Select [1]: ')).trim() || '1';
  // Anything other than '2' falls back to 'local'.
  const tier = tierAnswer === '2' ? 'team' : 'local';

  // Only prompt for a port when the CLI gave the default (4000); an explicit
  // --port wins without prompting.
  // NOTE(review): parseInt of free-form input can yield NaN — presumably
  // acceptable for an interactive flow, but confirm downstream handling.
  const port =
    opts.port !== 4000
      ? opts.port
      : parseInt(
          (await prompt(rl, `Gateway port [${opts.port.toString()}]: `)) || opts.port.toString(),
          10,
        );

  let databaseUrl: string | undefined;
  let valkeyUrl: string | undefined;

  // Team tier needs external PostgreSQL and Valkey endpoints.
  if (tier === 'team') {
    databaseUrl =
      (await prompt(rl, 'DATABASE_URL [postgresql://mosaic:mosaic@localhost:5433/mosaic]: ')) ||
      'postgresql://mosaic:mosaic@localhost:5433/mosaic';

    valkeyUrl =
      (await prompt(rl, 'VALKEY_URL [redis://localhost:6380]: ')) || 'redis://localhost:6380';
  }

  const anthropicKey = await prompt(rl, 'ANTHROPIC_API_KEY (optional, press Enter to skip): ');

  const corsOrigin =
    (await prompt(rl, 'CORS origin [http://localhost:3000]: ')) || 'http://localhost:3000';

  // Generate auth secret (256-bit random, hex-encoded)
  const authSecret = randomBytes(32).toString('hex');

  // Step 3: Write .env
  const envLines = [
    `GATEWAY_PORT=${port.toString()}`,
    `BETTER_AUTH_SECRET=${authSecret}`,
    `BETTER_AUTH_URL=http://${opts.host}:${port.toString()}`,
    `GATEWAY_CORS_ORIGIN=${corsOrigin}`,
    `OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318`,
    `OTEL_SERVICE_NAME=mosaic-gateway`,
  ];

  if (tier === 'team' && databaseUrl && valkeyUrl) {
    envLines.push(`DATABASE_URL=${databaseUrl}`);
    envLines.push(`VALKEY_URL=${valkeyUrl}`);
  }

  if (anthropicKey) {
    envLines.push(`ANTHROPIC_API_KEY=${anthropicKey}`);
  }

  // 0600: the .env holds the auth secret and API keys.
  writeFileSync(ENV_FILE, envLines.join('\n') + '\n', { mode: 0o600 });
  console.log(`\nConfig written to ${ENV_FILE}`);

  // Step 3b: Write mosaic.config.json
  const mosaicConfig =
    tier === 'local'
      ? {
          tier: 'local',
          storage: { type: 'sqlite', path: join(GATEWAY_HOME, 'data.db') },
          queue: { type: 'local', dataDir: join(GATEWAY_HOME, 'queue') },
          memory: { type: 'keyword' },
        }
      : {
          tier: 'team',
          storage: { type: 'postgres', url: databaseUrl },
          queue: { type: 'bullmq', url: valkeyUrl },
          memory: { type: 'pgvector' },
        };

  const configFile = join(GATEWAY_HOME, 'mosaic.config.json');
  writeFileSync(configFile, JSON.stringify(mosaicConfig, null, 2) + '\n', { mode: 0o600 });
  console.log(`Config written to ${configFile}`);

  // Step 4: Write meta.json
  let entryPoint: string;
  try {
    entryPoint = resolveGatewayEntry();
  } catch {
    console.error('Error: Gateway package not found after install.');
    console.error('Check that @mosaicstack/gateway installed correctly.');
    return;
  }

  const version = getInstalledGatewayVersion() ?? 'unknown';

  const meta = {
    version,
    installedAt: new Date().toISOString(),
    entryPoint,
    host: opts.host,
    port,
  };
  writeMeta(meta);

  // Step 5: Start the daemon
  console.log('\nStarting gateway daemon...');
  try {
    const pid = startDaemon();
    console.log(`Gateway started (PID ${pid.toString()})`);
  } catch (err) {
    console.error(`Failed to start: ${err instanceof Error ? err.message : String(err)}`);
    return;
  }

  // Step 6: Wait for health
  console.log('Waiting for gateway to become healthy...');
  const healthy = await waitForHealth(opts.host, port, 30_000);
  if (!healthy) {
    console.error('Gateway did not become healthy within 30 seconds.');
    console.error(`Check logs: mosaic gateway logs`);
    return;
  }
  console.log('Gateway is healthy.\n');

  // Step 7: Bootstrap — first user setup
  await bootstrapFirstUser(rl, opts.host, port, meta);

  console.log('\n─── Installation Complete ───');
  console.log(` Endpoint: http://${opts.host}:${port.toString()}`);
  console.log(` Config: ${GATEWAY_HOME}`);
  console.log(` Logs: mosaic gateway logs`);
  console.log(` Status: mosaic gateway status`);
}
|
||||
|
||||
/**
 * Interactively create the first admin user against a freshly started
 * gateway. Queries /api/bootstrap/status first and only proceeds when the
 * server reports needsSetup. On success, the returned admin API token is
 * stored into `meta` (mutated) and persisted via writeMeta.
 *
 * All failures are reported and swallowed — install continues without an
 * admin user rather than aborting.
 */
async function bootstrapFirstUser(
  rl: ReturnType<typeof createInterface>,
  host: string,
  port: number,
  meta: Omit<GatewayMeta, 'adminToken'> & { adminToken?: string },
): Promise<void> {
  const baseUrl = `http://${host}:${port.toString()}`;

  // Skip silently/with a warning unless the server explicitly needs setup.
  try {
    const statusRes = await fetch(`${baseUrl}/api/bootstrap/status`);
    if (!statusRes.ok) return;

    const status = (await statusRes.json()) as { needsSetup: boolean };
    if (!status.needsSetup) {
      console.log('Admin user already exists — skipping setup.');
      return;
    }
  } catch {
    console.warn('Could not check bootstrap status — skipping first user setup.');
    return;
  }

  console.log('─── Admin User Setup ───\n');

  const name = (await prompt(rl, 'Admin name: ')).trim();
  if (!name) {
    console.error('Name is required.');
    return;
  }

  const email = (await prompt(rl, 'Admin email: ')).trim();
  if (!email) {
    console.error('Email is required.');
    return;
  }

  const password = (await prompt(rl, 'Admin password (min 8 chars): ')).trim();
  if (password.length < 8) {
    console.error('Password must be at least 8 characters.');
    return;
  }

  try {
    const res = await fetch(`${baseUrl}/api/bootstrap/setup`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ name, email, password }),
    });

    if (!res.ok) {
      // Body is best-effort context for the error message.
      const body = await res.text().catch(() => '');
      console.error(`Bootstrap failed (${res.status.toString()}): ${body}`);
      return;
    }

    const result = (await res.json()) as {
      user: { id: string; email: string };
      token: { plaintext: string };
    };

    // Save admin token to meta
    meta.adminToken = result.token.plaintext;
    writeMeta(meta as GatewayMeta);

    console.log(`\nAdmin user created: ${result.user.email}`);
    console.log('Admin API token saved to gateway config.');
  } catch (err) {
    console.error(`Bootstrap error: ${err instanceof Error ? err.message : String(err)}`);
  }
}
|
||||
@@ -1,37 +0,0 @@
|
||||
import { existsSync, readFileSync } from 'node:fs';
|
||||
import { spawn } from 'node:child_process';
|
||||
import { LOG_FILE } from './daemon.js';
|
||||
|
||||
/** Options for `mosaic gateway logs`. */
interface LogsOpts {
  // --follow: stream new log lines (tail -f)
  follow?: boolean;
  // -n/--lines: how many trailing lines to show (default 50)
  lines?: number;
}
|
||||
|
||||
/**
 * Show the gateway log. Without --follow, prints the last N lines. With
 * --follow, delegates to the system `tail -f`; if tail is unavailable
 * (spawn 'error' event), falls back to a one-shot dump.
 */
export function runLogs(opts: LogsOpts): void {
  if (!existsSync(LOG_FILE)) {
    console.log('No log file found. Is the gateway installed?');
    return;
  }

  if (opts.follow) {
    const lines = opts.lines ?? 50;
    // Inherit stdio so tail writes straight to the user's terminal.
    const tail = spawn('tail', ['-n', lines.toString(), '-f', LOG_FILE], {
      stdio: 'inherit',
    });
    tail.on('error', () => {
      // Fallback for systems without tail
      console.log(readLastLines(opts.lines ?? 50));
      console.log('\n(--follow requires `tail` command)');
    });
    return;
  }

  // Just print last N lines
  console.log(readLastLines(opts.lines ?? 50));
}
|
||||
|
||||
function readLastLines(n: number): string {
|
||||
const content = readFileSync(LOG_FILE, 'utf-8');
|
||||
const lines = content.split('\n');
|
||||
return lines.slice(-n).join('\n');
|
||||
}
|
||||
@@ -1,115 +0,0 @@
|
||||
import { getDaemonPid, readMeta, LOG_FILE, GATEWAY_HOME } from './daemon.js';
|
||||
|
||||
/** Connection options for `mosaic gateway status`. */
interface GatewayOpts {
  host: string;
  port: number;
  // Admin API token override; falls back to meta.json's adminToken
  token?: string;
}

/** One row in the rendered services table. */
interface ServiceStatus {
  name: string;
  status: string;
  // Human-readable latency, e.g. "12ms" (optional)
  latency?: string;
}

/** Expected JSON shape of GET /api/admin/health. */
interface AdminHealth {
  status: string;
  services: {
    database: { status: string; latencyMs: number };
    cache: { status: string; latencyMs: number };
  };
  agentPool?: { active: number };
  providers?: Array<{ name: string; available: boolean; models: number }>;
}
|
||||
|
||||
/**
 * Entry point for `mosaic gateway status`.
 *
 * Prints local daemon state (PID/version/paths), then — only when a daemon
 * is running — probes /health, and finally /api/admin/health when an admin
 * token is available (from --token or meta.json). Network failures degrade
 * to "unreachable"/"connection error" lines rather than throwing.
 */
export async function runStatus(opts: GatewayOpts): Promise<void> {
  const meta = readMeta();
  const pid = getDaemonPid();

  console.log('Mosaic Gateway Status');
  console.log('─────────────────────');

  // Daemon status
  if (pid !== null) {
    console.log(` Status: running (PID ${pid.toString()})`);
  } else {
    console.log(' Status: stopped');
  }

  // Version
  console.log(` Version: ${meta?.version ?? 'unknown'}`);

  // Endpoint
  const host = opts.host;
  const port = opts.port;
  console.log(` Endpoint: http://${host}:${port.toString()}`);
  console.log(` Config: ${GATEWAY_HOME}`);
  console.log(` Logs: ${LOG_FILE}`);

  // No daemon → nothing more to probe.
  if (pid === null) return;

  // Health check
  try {
    const healthRes = await fetch(`http://${host}:${port.toString()}/health`);
    if (!healthRes.ok) {
      console.log('\n Health: unreachable');
      return;
    }
  } catch {
    console.log('\n Health: unreachable');
    return;
  }

  // Admin health (requires token)
  const token = opts.token ?? meta?.adminToken;
  if (!token) {
    console.log(
      '\n (No admin token — run `mosaic gateway config` to set one for detailed status)',
    );
    return;
  }

  try {
    const res = await fetch(`http://${host}:${port.toString()}/api/admin/health`, {
      headers: { Authorization: `Bearer ${token}` },
    });

    if (!res.ok) {
      console.log('\n Admin health: unauthorized or unavailable');
      return;
    }

    const health = (await res.json()) as AdminHealth;

    console.log('\n Services:');
    const services: ServiceStatus[] = [
      {
        name: 'Database',
        status: health.services.database.status,
        latency: `${health.services.database.latencyMs.toString()}ms`,
      },
      {
        name: 'Cache',
        status: health.services.cache.status,
        latency: `${health.services.cache.latencyMs.toString()}ms`,
      },
    ];

    for (const svc of services) {
      const latStr = svc.latency ? ` (${svc.latency})` : '';
      // NOTE(review): repeat(10 - len) throws for names longer than 10 chars;
      // fine for the current 'Database'/'Cache' rows, but fragile if extended.
      console.log(` ${svc.name}:${' '.repeat(10 - svc.name.length)}${svc.status}${latStr}`);
    }

    if (health.providers && health.providers.length > 0) {
      const available = health.providers.filter((p) => p.available);
      const names = available.map((p) => p.name).join(', ');
      console.log(`\n Providers: ${available.length.toString()} active (${names})`);
    }

    if (health.agentPool) {
      console.log(` Sessions: ${health.agentPool.active.toString()} active`);
    }
  } catch {
    console.log('\n Admin health: connection error');
  }
}
|
||||
@@ -1,62 +0,0 @@
|
||||
import { existsSync, rmSync } from 'node:fs';
|
||||
import { createInterface } from 'node:readline';
|
||||
import {
|
||||
GATEWAY_HOME,
|
||||
getDaemonPid,
|
||||
readMeta,
|
||||
stopDaemon,
|
||||
uninstallGatewayPackage,
|
||||
} from './daemon.js';
|
||||
|
||||
/**
 * Entry point for `mosaic gateway uninstall`: runs the interactive flow and
 * always closes the readline interface (an open stdin handle would keep the
 * process alive).
 */
export async function runUninstall(): Promise<void> {
  const rl = createInterface({ input: process.stdin, output: process.stdout });
  try {
    await doUninstall(rl);
  } finally {
    rl.close();
  }
}
|
||||
|
||||
function prompt(rl: ReturnType<typeof createInterface>, question: string): Promise<string> {
|
||||
return new Promise((resolve) => rl.question(question, resolve));
|
||||
}
|
||||
|
||||
/**
 * Interactive uninstall: confirm, stop a running daemon (best-effort),
 * optionally wipe GATEWAY_HOME, and remove the global npm package.
 * "Installed" is defined as meta.json being readable.
 */
async function doUninstall(rl: ReturnType<typeof createInterface>): Promise<void> {
  const meta = readMeta();
  if (!meta) {
    console.log('Gateway is not installed.');
    return;
  }

  const answer = await prompt(rl, 'Uninstall Mosaic Gateway? [y/N] ');
  if (answer.toLowerCase() !== 'y') {
    console.log('Aborted.');
    return;
  }

  // Stop if running
  if (getDaemonPid() !== null) {
    console.log('Stopping gateway daemon...');
    try {
      await stopDaemon();
      console.log('Stopped.');
    } catch (err) {
      // Keep going — the daemon may already be gone.
      console.warn(`Warning: ${err instanceof Error ? err.message : String(err)}`);
    }
  }

  // Remove config/data
  const removeData = await prompt(rl, `Remove all gateway data at ${GATEWAY_HOME}? [y/N] `);
  if (removeData.toLowerCase() === 'y') {
    if (existsSync(GATEWAY_HOME)) {
      rmSync(GATEWAY_HOME, { recursive: true, force: true });
      console.log('Gateway data removed.');
    }
  }

  // Uninstall npm package
  console.log('Uninstalling npm package...');
  uninstallGatewayPackage();

  console.log('\nGateway uninstalled.');
}
|
||||
@@ -7,7 +7,6 @@
|
||||
|
||||
import { execFileSync, execSync, spawnSync } from 'node:child_process';
|
||||
import { existsSync, mkdirSync, readFileSync, writeFileSync, readdirSync, rmSync } from 'node:fs';
|
||||
import { createRequire } from 'node:module';
|
||||
import { homedir } from 'node:os';
|
||||
import { join, dirname } from 'node:path';
|
||||
import type { Command } from 'commander';
|
||||
@@ -68,7 +67,7 @@ function checkSoul(): void {
|
||||
}
|
||||
|
||||
// Fallback: legacy bash mosaic-init
|
||||
const initBin = fwScript('mosaic-init');
|
||||
const initBin = join(MOSAIC_HOME, 'tools', '_scripts', 'mosaic-init');
|
||||
if (existsSync(initBin)) {
|
||||
spawnSync(initBin, [], { stdio: 'inherit' });
|
||||
} else {
|
||||
@@ -79,7 +78,7 @@ function checkSoul(): void {
|
||||
}
|
||||
|
||||
function checkSequentialThinking(runtime: string): void {
|
||||
const checker = fwScript('mosaic-ensure-sequential-thinking');
|
||||
const checker = join(MOSAIC_HOME, 'tools', '_scripts', 'mosaic-ensure-sequential-thinking');
|
||||
if (!existsSync(checker)) return; // Skip if checker doesn't exist
|
||||
const result = spawnSync(checker, ['--check', '--runtime', runtime], { stdio: 'ignore' });
|
||||
if (result.status !== 0) {
|
||||
@@ -492,29 +491,12 @@ function delegateToScript(scriptPath: string, args: string[], env?: Record<strin
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Resolve a path under the framework tools directory. Prefers the version
 * bundled in the @mosaic/mosaic npm package (always matches the installed
 * CLI version) over the deployed copy in ~/.config/mosaic/ (may be stale).
 *
 * @param segments - path components below the tools directory
 * @returns bundled path when it exists, otherwise the MOSAIC_HOME fallback
 *          (which may itself not exist — callers check with existsSync)
 */
function resolveTool(...segments: string[]): string {
  try {
    // Locate the installed @mosaic/mosaic package relative to this module.
    const req = createRequire(import.meta.url);
    const mosaicPkg = dirname(req.resolve('@mosaic/mosaic/package.json'));
    const bundled = join(mosaicPkg, 'framework', 'tools', ...segments);
    if (existsSync(bundled)) return bundled;
  } catch {
    // Fall through to deployed copy
  }
  return join(MOSAIC_HOME, 'tools', ...segments);
}
|
||||
|
||||
function fwScript(name: string): string {
|
||||
return resolveTool('_scripts', name);
|
||||
return join(MOSAIC_HOME, 'tools', '_scripts', name);
|
||||
}
|
||||
|
||||
function toolScript(toolDir: string, name: string): string {
|
||||
return resolveTool(toolDir, name);
|
||||
return join(MOSAIC_HOME, 'tools', toolDir, name);
|
||||
}
|
||||
|
||||
// ─── Coord (mission orchestrator) ───────────────────────────────────────────
|
||||
|
||||
@@ -1,40 +0,0 @@
|
||||
{
|
||||
"name": "@mosaic/config",
|
||||
"version": "0.0.1",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/config"
|
||||
},
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"default": "./dist/index.js"
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"lint": "eslint src",
|
||||
"typecheck": "tsc --noEmit",
|
||||
"test": "vitest run --passWithNoTests"
|
||||
},
|
||||
"dependencies": {
|
||||
"@mosaic/memory": "workspace:^",
|
||||
"@mosaic/queue": "workspace:^",
|
||||
"@mosaic/storage": "workspace:^"
|
||||
},
|
||||
"devDependencies": {
|
||||
"typescript": "^5.8.0",
|
||||
"vitest": "^2.0.0"
|
||||
},
|
||||
"publishConfig": {
|
||||
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||
"access": "public"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
]
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
export type { MosaicConfig, StorageTier, MemoryConfigRef } from './mosaic-config.js';
|
||||
export {
|
||||
DEFAULT_LOCAL_CONFIG,
|
||||
DEFAULT_TEAM_CONFIG,
|
||||
loadConfig,
|
||||
validateConfig,
|
||||
} from './mosaic-config.js';
|
||||
@@ -1,140 +0,0 @@
|
||||
import { readFileSync, existsSync } from 'node:fs';
|
||||
import { resolve } from 'node:path';
|
||||
import type { StorageConfig } from '@mosaic/storage';
|
||||
import type { QueueAdapterConfig as QueueConfig } from '@mosaic/queue';
|
||||
|
||||
/* ------------------------------------------------------------------ */
/* Types                                                              */
/* ------------------------------------------------------------------ */

/** Deployment tier: 'local' (embedded, zero deps) or 'team' (Postgres + Valkey). */
export type StorageTier = 'local' | 'team';

/** Memory-backend selector; full backend config lives elsewhere. */
export interface MemoryConfigRef {
  type: 'pgvector' | 'sqlite-vec' | 'keyword';
}

/** Top-level mosaic.config.json shape, combining storage/queue/memory choices. */
export interface MosaicConfig {
  tier: StorageTier;
  storage: StorageConfig;
  queue: QueueConfig;
  memory: MemoryConfigRef;
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
/* Defaults                                                           */
/* ------------------------------------------------------------------ */

// Zero-dependency default: SQLite + filesystem queue + keyword memory,
// all rooted under the project-relative .mosaic/ directory.
export const DEFAULT_LOCAL_CONFIG: MosaicConfig = {
  tier: 'local',
  storage: { type: 'sqlite', path: '.mosaic/data.db' },
  queue: { type: 'local', dataDir: '.mosaic/queue' },
  memory: { type: 'keyword' },
};

// Team default: local Postgres on 5432 plus BullMQ (queue URL supplied
// separately, e.g. via VALKEY_URL) and pgvector memory.
export const DEFAULT_TEAM_CONFIG: MosaicConfig = {
  tier: 'team',
  storage: { type: 'postgres', url: 'postgresql://mosaic:mosaic@localhost:5432/mosaic' },
  queue: { type: 'bullmq' },
  memory: { type: 'pgvector' },
};
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Validation */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
const VALID_TIERS = new Set<string>(['local', 'team']);
|
||||
const VALID_STORAGE_TYPES = new Set<string>(['postgres', 'sqlite', 'files']);
|
||||
const VALID_QUEUE_TYPES = new Set<string>(['bullmq', 'local']);
|
||||
const VALID_MEMORY_TYPES = new Set<string>(['pgvector', 'sqlite-vec', 'keyword']);
|
||||
|
||||
export function validateConfig(raw: unknown): MosaicConfig {
|
||||
if (typeof raw !== 'object' || raw === null) {
|
||||
throw new Error('MosaicConfig must be a non-null object');
|
||||
}
|
||||
|
||||
const obj = raw as Record<string, unknown>;
|
||||
|
||||
// tier
|
||||
const tier = obj['tier'];
|
||||
if (typeof tier !== 'string' || !VALID_TIERS.has(tier)) {
|
||||
throw new Error(`Invalid tier "${String(tier)}" — expected "local" or "team"`);
|
||||
}
|
||||
|
||||
// storage
|
||||
const storage = obj['storage'];
|
||||
if (typeof storage !== 'object' || storage === null) {
|
||||
throw new Error('config.storage must be a non-null object');
|
||||
}
|
||||
const storageType = (storage as Record<string, unknown>)['type'];
|
||||
if (typeof storageType !== 'string' || !VALID_STORAGE_TYPES.has(storageType)) {
|
||||
throw new Error(`Invalid storage.type "${String(storageType)}"`);
|
||||
}
|
||||
|
||||
// queue
|
||||
const queue = obj['queue'];
|
||||
if (typeof queue !== 'object' || queue === null) {
|
||||
throw new Error('config.queue must be a non-null object');
|
||||
}
|
||||
const queueType = (queue as Record<string, unknown>)['type'];
|
||||
if (typeof queueType !== 'string' || !VALID_QUEUE_TYPES.has(queueType)) {
|
||||
throw new Error(`Invalid queue.type "${String(queueType)}"`);
|
||||
}
|
||||
|
||||
// memory
|
||||
const memory = obj['memory'];
|
||||
if (typeof memory !== 'object' || memory === null) {
|
||||
throw new Error('config.memory must be a non-null object');
|
||||
}
|
||||
const memoryType = (memory as Record<string, unknown>)['type'];
|
||||
if (typeof memoryType !== 'string' || !VALID_MEMORY_TYPES.has(memoryType)) {
|
||||
throw new Error(`Invalid memory.type "${String(memoryType)}"`);
|
||||
}
|
||||
|
||||
return {
|
||||
tier: tier as StorageTier,
|
||||
storage: storage as StorageConfig,
|
||||
queue: queue as QueueConfig,
|
||||
memory: memory as MemoryConfigRef,
|
||||
};
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Loader */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
function detectFromEnv(): MosaicConfig {
|
||||
if (process.env['DATABASE_URL']) {
|
||||
return {
|
||||
...DEFAULT_TEAM_CONFIG,
|
||||
storage: {
|
||||
type: 'postgres',
|
||||
url: process.env['DATABASE_URL'],
|
||||
},
|
||||
queue: {
|
||||
type: 'bullmq',
|
||||
url: process.env['VALKEY_URL'],
|
||||
},
|
||||
};
|
||||
}
|
||||
return DEFAULT_LOCAL_CONFIG;
|
||||
}
|
||||
|
||||
export function loadConfig(configPath?: string): MosaicConfig {
|
||||
// 1. Explicit path or default location
|
||||
const paths = configPath
|
||||
? [resolve(configPath)]
|
||||
: [
|
||||
resolve(process.cwd(), 'mosaic.config.json'),
|
||||
resolve(process.cwd(), '../../mosaic.config.json'), // monorepo root when cwd is apps/gateway
|
||||
];
|
||||
|
||||
for (const p of paths) {
|
||||
if (existsSync(p)) {
|
||||
const raw: unknown = JSON.parse(readFileSync(p, 'utf-8'));
|
||||
return validateConfig(raw);
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Fall back to env-var detection
|
||||
return detectFromEnv();
|
||||
}
|
||||
@@ -1,9 +0,0 @@
|
||||
{
|
||||
"extends": "../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "dist",
|
||||
"rootDir": "src"
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/coord",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/coord"
|
||||
},
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"exports": {
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/db",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/db"
|
||||
},
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
@@ -33,7 +28,6 @@
|
||||
"vitest": "^2.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@electric-sql/pglite": "^0.2.17",
|
||||
"drizzle-orm": "^0.45.1",
|
||||
"postgres": "^3.4.8"
|
||||
},
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
import { PGlite } from '@electric-sql/pglite';
|
||||
import { drizzle } from 'drizzle-orm/pglite';
|
||||
import * as schema from './schema.js';
|
||||
import type { DbHandle } from './client.js';
|
||||
|
||||
export function createPgliteDb(dataDir: string): DbHandle {
|
||||
const client = new PGlite(dataDir);
|
||||
const db = drizzle(client, { schema });
|
||||
return {
|
||||
db: db as unknown as DbHandle['db'],
|
||||
close: async () => {
|
||||
await client.close();
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -1,5 +1,4 @@
|
||||
export { createDb, type Db, type DbHandle } from './client.js';
|
||||
export { createPgliteDb } from './client-pglite.js';
|
||||
export { runMigrations } from './migrate.js';
|
||||
export * from './schema.js';
|
||||
export {
|
||||
@@ -17,5 +16,4 @@ export {
|
||||
gte,
|
||||
lte,
|
||||
ilike,
|
||||
count,
|
||||
} from 'drizzle-orm';
|
||||
|
||||
@@ -91,28 +91,6 @@ export const verifications = pgTable('verifications', {
|
||||
updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
|
||||
});
|
||||
|
||||
// ─── Admin API Tokens ───────────────────────────────────────────────────────
|
||||
|
||||
export const adminTokens = pgTable(
|
||||
'admin_tokens',
|
||||
{
|
||||
id: text('id').primaryKey(),
|
||||
userId: text('user_id')
|
||||
.notNull()
|
||||
.references(() => users.id, { onDelete: 'cascade' }),
|
||||
tokenHash: text('token_hash').notNull(),
|
||||
label: text('label').notNull(),
|
||||
scope: text('scope').notNull().default('admin'),
|
||||
expiresAt: timestamp('expires_at', { withTimezone: true }),
|
||||
lastUsedAt: timestamp('last_used_at', { withTimezone: true }),
|
||||
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
|
||||
},
|
||||
(t) => [
|
||||
index('admin_tokens_user_id_idx').on(t.userId),
|
||||
uniqueIndex('admin_tokens_hash_idx').on(t.tokenHash),
|
||||
],
|
||||
);
|
||||
|
||||
// ─── Teams ───────────────────────────────────────────────────────────────────
|
||||
// Declared before projects because projects references teams.
|
||||
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/design-tokens",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/design-tokens"
|
||||
},
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/forge",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/forge"
|
||||
},
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/log",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/log"
|
||||
},
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/macp",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/macp"
|
||||
},
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/memory",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/memory"
|
||||
},
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
@@ -23,7 +18,6 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@mosaic/db": "workspace:*",
|
||||
"@mosaic/storage": "workspace:*",
|
||||
"@mosaic/types": "workspace:*",
|
||||
"drizzle-orm": "^0.45.1"
|
||||
},
|
||||
|
||||
@@ -1,298 +0,0 @@
|
||||
import { describe, it, expect, beforeEach } from 'vitest';
|
||||
import type { StorageAdapter } from '@mosaic/storage';
|
||||
import { KeywordAdapter } from './keyword.js';
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* In-memory mock StorageAdapter */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
function createMockStorage(): StorageAdapter {
|
||||
const collections = new Map<string, Map<string, Record<string, unknown>>>();
|
||||
let idCounter = 0;
|
||||
|
||||
function getCollection(name: string): Map<string, Record<string, unknown>> {
|
||||
if (!collections.has(name)) collections.set(name, new Map());
|
||||
return collections.get(name)!;
|
||||
}
|
||||
|
||||
const adapter: StorageAdapter = {
|
||||
name: 'mock',
|
||||
|
||||
async create<T extends Record<string, unknown>>(
|
||||
collection: string,
|
||||
data: T,
|
||||
): Promise<T & { id: string }> {
|
||||
const id = String(++idCounter);
|
||||
const record = { ...data, id };
|
||||
getCollection(collection).set(id, record);
|
||||
return record as T & { id: string };
|
||||
},
|
||||
|
||||
async read<T extends Record<string, unknown>>(
|
||||
collection: string,
|
||||
id: string,
|
||||
): Promise<T | null> {
|
||||
const record = getCollection(collection).get(id);
|
||||
return (record as T) ?? null;
|
||||
},
|
||||
|
||||
async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
|
||||
const col = getCollection(collection);
|
||||
const existing = col.get(id);
|
||||
if (!existing) return false;
|
||||
col.set(id, { ...existing, ...data });
|
||||
return true;
|
||||
},
|
||||
|
||||
async delete(collection: string, id: string): Promise<boolean> {
|
||||
return getCollection(collection).delete(id);
|
||||
},
|
||||
|
||||
async find<T extends Record<string, unknown>>(
|
||||
collection: string,
|
||||
filter?: Record<string, unknown>,
|
||||
): Promise<T[]> {
|
||||
const col = getCollection(collection);
|
||||
const results: T[] = [];
|
||||
for (const record of col.values()) {
|
||||
if (filter && !matchesFilter(record, filter)) continue;
|
||||
results.push(record as T);
|
||||
}
|
||||
return results;
|
||||
},
|
||||
|
||||
async findOne<T extends Record<string, unknown>>(
|
||||
collection: string,
|
||||
filter: Record<string, unknown>,
|
||||
): Promise<T | null> {
|
||||
const col = getCollection(collection);
|
||||
for (const record of col.values()) {
|
||||
if (matchesFilter(record, filter)) return record as T;
|
||||
}
|
||||
return null;
|
||||
},
|
||||
|
||||
async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
|
||||
const rows = await adapter.find(collection, filter);
|
||||
return rows.length;
|
||||
},
|
||||
|
||||
async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
|
||||
return fn(adapter);
|
||||
},
|
||||
|
||||
async migrate(): Promise<void> {},
|
||||
async close(): Promise<void> {},
|
||||
};
|
||||
|
||||
return adapter;
|
||||
}
|
||||
|
||||
function matchesFilter(record: Record<string, unknown>, filter: Record<string, unknown>): boolean {
|
||||
for (const [key, value] of Object.entries(filter)) {
|
||||
if (record[key] !== value) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Tests */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
describe('KeywordAdapter', () => {
|
||||
let adapter: KeywordAdapter;
|
||||
|
||||
beforeEach(() => {
|
||||
adapter = new KeywordAdapter({ type: 'keyword', storage: createMockStorage() });
|
||||
});
|
||||
|
||||
/* ---- Preferences ---- */
|
||||
|
||||
describe('preferences', () => {
|
||||
it('should set and get a preference', async () => {
|
||||
await adapter.setPreference('u1', 'theme', 'dark');
|
||||
const value = await adapter.getPreference('u1', 'theme');
|
||||
expect(value).toBe('dark');
|
||||
});
|
||||
|
||||
it('should return null for missing preference', async () => {
|
||||
const value = await adapter.getPreference('u1', 'nonexistent');
|
||||
expect(value).toBeNull();
|
||||
});
|
||||
|
||||
it('should upsert an existing preference', async () => {
|
||||
await adapter.setPreference('u1', 'theme', 'dark');
|
||||
await adapter.setPreference('u1', 'theme', 'light');
|
||||
const value = await adapter.getPreference('u1', 'theme');
|
||||
expect(value).toBe('light');
|
||||
});
|
||||
|
||||
it('should delete a preference', async () => {
|
||||
await adapter.setPreference('u1', 'theme', 'dark');
|
||||
const deleted = await adapter.deletePreference('u1', 'theme');
|
||||
expect(deleted).toBe(true);
|
||||
const value = await adapter.getPreference('u1', 'theme');
|
||||
expect(value).toBeNull();
|
||||
});
|
||||
|
||||
it('should return false when deleting nonexistent preference', async () => {
|
||||
const deleted = await adapter.deletePreference('u1', 'nope');
|
||||
expect(deleted).toBe(false);
|
||||
});
|
||||
|
||||
it('should list preferences by userId', async () => {
|
||||
await adapter.setPreference('u1', 'theme', 'dark', 'appearance');
|
||||
await adapter.setPreference('u1', 'lang', 'en', 'locale');
|
||||
await adapter.setPreference('u2', 'theme', 'light', 'appearance');
|
||||
|
||||
const prefs = await adapter.listPreferences('u1');
|
||||
expect(prefs).toHaveLength(2);
|
||||
expect(prefs.map((p) => p.key).sort()).toEqual(['lang', 'theme']);
|
||||
});
|
||||
|
||||
it('should filter preferences by category', async () => {
|
||||
await adapter.setPreference('u1', 'theme', 'dark', 'appearance');
|
||||
await adapter.setPreference('u1', 'lang', 'en', 'locale');
|
||||
|
||||
const prefs = await adapter.listPreferences('u1', 'appearance');
|
||||
expect(prefs).toHaveLength(1);
|
||||
expect(prefs[0]!.key).toBe('theme');
|
||||
});
|
||||
});
|
||||
|
||||
/* ---- Insights ---- */
|
||||
|
||||
describe('insights', () => {
|
||||
it('should store and retrieve an insight', async () => {
|
||||
const insight = await adapter.storeInsight({
|
||||
userId: 'u1',
|
||||
content: 'TypeScript is great for type safety',
|
||||
source: 'chat',
|
||||
category: 'technical',
|
||||
relevanceScore: 0.9,
|
||||
});
|
||||
|
||||
expect(insight.id).toBeDefined();
|
||||
expect(insight.content).toBe('TypeScript is great for type safety');
|
||||
|
||||
const fetched = await adapter.getInsight(insight.id);
|
||||
expect(fetched).not.toBeNull();
|
||||
expect(fetched!.content).toBe('TypeScript is great for type safety');
|
||||
});
|
||||
|
||||
it('should return null for missing insight', async () => {
|
||||
const result = await adapter.getInsight('nonexistent');
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it('should delete an insight', async () => {
|
||||
const insight = await adapter.storeInsight({
|
||||
userId: 'u1',
|
||||
content: 'test',
|
||||
source: 'chat',
|
||||
category: 'general',
|
||||
relevanceScore: 0.5,
|
||||
});
|
||||
|
||||
const deleted = await adapter.deleteInsight(insight.id);
|
||||
expect(deleted).toBe(true);
|
||||
|
||||
const fetched = await adapter.getInsight(insight.id);
|
||||
expect(fetched).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
/* ---- Keyword Search ---- */
|
||||
|
||||
describe('searchInsights', () => {
|
||||
beforeEach(async () => {
|
||||
await adapter.storeInsight({
|
||||
userId: 'u1',
|
||||
content: 'TypeScript provides excellent type safety for JavaScript projects',
|
||||
source: 'chat',
|
||||
category: 'technical',
|
||||
relevanceScore: 0.9,
|
||||
});
|
||||
await adapter.storeInsight({
|
||||
userId: 'u1',
|
||||
content: 'React hooks simplify state management in components',
|
||||
source: 'chat',
|
||||
category: 'technical',
|
||||
relevanceScore: 0.8,
|
||||
});
|
||||
await adapter.storeInsight({
|
||||
userId: 'u1',
|
||||
content: 'TypeScript and React work great together for type safe components',
|
||||
source: 'chat',
|
||||
category: 'technical',
|
||||
relevanceScore: 0.85,
|
||||
});
|
||||
await adapter.storeInsight({
|
||||
userId: 'u2',
|
||||
content: 'TypeScript is popular',
|
||||
source: 'chat',
|
||||
category: 'general',
|
||||
relevanceScore: 0.5,
|
||||
});
|
||||
});
|
||||
|
||||
it('should find insights by exact keyword', async () => {
|
||||
const results = await adapter.searchInsights('u1', 'hooks');
|
||||
expect(results).toHaveLength(1);
|
||||
expect(results[0]!.content).toContain('hooks');
|
||||
});
|
||||
|
||||
it('should be case-insensitive', async () => {
|
||||
const results = await adapter.searchInsights('u1', 'TYPESCRIPT');
|
||||
expect(results.length).toBeGreaterThanOrEqual(1);
|
||||
for (const r of results) {
|
||||
expect(r.content.toLowerCase()).toContain('typescript');
|
||||
}
|
||||
});
|
||||
|
||||
it('should rank multi-word matches higher', async () => {
|
||||
const results = await adapter.searchInsights('u1', 'TypeScript React');
|
||||
// The insight mentioning both "TypeScript" and "React" should rank first (score=2)
|
||||
expect(results[0]!.score).toBe(2);
|
||||
expect(results[0]!.content).toContain('TypeScript');
|
||||
expect(results[0]!.content).toContain('React');
|
||||
});
|
||||
|
||||
it('should return empty for no matches', async () => {
|
||||
const results = await adapter.searchInsights('u1', 'python django');
|
||||
expect(results).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should filter by userId', async () => {
|
||||
const results = await adapter.searchInsights('u2', 'TypeScript');
|
||||
expect(results).toHaveLength(1);
|
||||
expect(results[0]!.content).toBe('TypeScript is popular');
|
||||
});
|
||||
|
||||
it('should respect limit option', async () => {
|
||||
const results = await adapter.searchInsights('u1', 'TypeScript', { limit: 1 });
|
||||
expect(results).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should return empty for empty query', async () => {
|
||||
const results = await adapter.searchInsights('u1', ' ');
|
||||
expect(results).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
/* ---- Lifecycle ---- */
|
||||
|
||||
describe('lifecycle', () => {
|
||||
it('should have name "keyword"', () => {
|
||||
expect(adapter.name).toBe('keyword');
|
||||
});
|
||||
|
||||
it('should have null embedder', () => {
|
||||
expect(adapter.embedder).toBeNull();
|
||||
});
|
||||
|
||||
it('should close without error', async () => {
|
||||
await expect(adapter.close()).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,195 +0,0 @@
|
||||
import type { StorageAdapter } from '@mosaic/storage';
|
||||
import type {
|
||||
MemoryAdapter,
|
||||
MemoryConfig,
|
||||
NewInsight,
|
||||
Insight,
|
||||
InsightSearchResult,
|
||||
} from '../types.js';
|
||||
import type { EmbeddingProvider } from '../vector-store.js';
|
||||
|
||||
type KeywordConfig = Extract<MemoryConfig, { type: 'keyword' }>;
|
||||
|
||||
const PREFERENCES = 'preferences';
|
||||
const INSIGHTS = 'insights';
|
||||
|
||||
type PreferenceRecord = Record<string, unknown> & {
|
||||
id: string;
|
||||
userId: string;
|
||||
key: string;
|
||||
value: unknown;
|
||||
category: string;
|
||||
};
|
||||
|
||||
type InsightRecord = Record<string, unknown> & {
|
||||
id: string;
|
||||
userId: string;
|
||||
content: string;
|
||||
source: string;
|
||||
category: string;
|
||||
relevanceScore: number;
|
||||
metadata: Record<string, unknown>;
|
||||
createdAt: string;
|
||||
updatedAt?: string;
|
||||
decayedAt?: string;
|
||||
};
|
||||
|
||||
export class KeywordAdapter implements MemoryAdapter {
|
||||
readonly name = 'keyword';
|
||||
readonly embedder: EmbeddingProvider | null = null;
|
||||
|
||||
private storage: StorageAdapter;
|
||||
|
||||
constructor(config: KeywordConfig) {
|
||||
this.storage = config.storage;
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Preferences */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
async getPreference(userId: string, key: string): Promise<unknown | null> {
|
||||
const row = await this.storage.findOne<PreferenceRecord>(PREFERENCES, { userId, key });
|
||||
return row?.value ?? null;
|
||||
}
|
||||
|
||||
async setPreference(
|
||||
userId: string,
|
||||
key: string,
|
||||
value: unknown,
|
||||
category?: string,
|
||||
): Promise<void> {
|
||||
const existing = await this.storage.findOne<PreferenceRecord>(PREFERENCES, { userId, key });
|
||||
if (existing) {
|
||||
await this.storage.update(PREFERENCES, existing.id, {
|
||||
value,
|
||||
...(category !== undefined ? { category } : {}),
|
||||
});
|
||||
} else {
|
||||
await this.storage.create(PREFERENCES, {
|
||||
userId,
|
||||
key,
|
||||
value,
|
||||
category: category ?? 'general',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async deletePreference(userId: string, key: string): Promise<boolean> {
|
||||
const existing = await this.storage.findOne<PreferenceRecord>(PREFERENCES, { userId, key });
|
||||
if (!existing) return false;
|
||||
return this.storage.delete(PREFERENCES, existing.id);
|
||||
}
|
||||
|
||||
async listPreferences(
|
||||
userId: string,
|
||||
category?: string,
|
||||
): Promise<Array<{ key: string; value: unknown; category: string }>> {
|
||||
const filter: Record<string, unknown> = { userId };
|
||||
if (category !== undefined) filter.category = category;
|
||||
|
||||
const rows = await this.storage.find<PreferenceRecord>(PREFERENCES, filter);
|
||||
return rows.map((r) => ({ key: r.key, value: r.value, category: r.category }));
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Insights */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
async storeInsight(insight: NewInsight): Promise<Insight> {
|
||||
const now = new Date();
|
||||
const row = await this.storage.create<Record<string, unknown>>(INSIGHTS, {
|
||||
userId: insight.userId,
|
||||
content: insight.content,
|
||||
source: insight.source,
|
||||
category: insight.category,
|
||||
relevanceScore: insight.relevanceScore,
|
||||
metadata: insight.metadata ?? {},
|
||||
createdAt: now.toISOString(),
|
||||
});
|
||||
|
||||
return {
|
||||
id: row.id,
|
||||
userId: insight.userId,
|
||||
content: insight.content,
|
||||
source: insight.source,
|
||||
category: insight.category,
|
||||
relevanceScore: insight.relevanceScore,
|
||||
metadata: insight.metadata,
|
||||
createdAt: now,
|
||||
};
|
||||
}
|
||||
|
||||
async getInsight(id: string): Promise<Insight | null> {
|
||||
const row = await this.storage.read<InsightRecord>(INSIGHTS, id);
|
||||
if (!row) return null;
|
||||
return toInsight(row);
|
||||
}
|
||||
|
||||
async searchInsights(
|
||||
userId: string,
|
||||
query: string,
|
||||
opts?: { limit?: number; embedding?: number[] },
|
||||
): Promise<InsightSearchResult[]> {
|
||||
const limit = opts?.limit ?? 10;
|
||||
const words = query
|
||||
.toLowerCase()
|
||||
.split(/\s+/)
|
||||
.filter((w) => w.length > 0);
|
||||
|
||||
if (words.length === 0) return [];
|
||||
|
||||
const rows = await this.storage.find<InsightRecord>(INSIGHTS, { userId });
|
||||
|
||||
const scored: InsightSearchResult[] = [];
|
||||
for (const row of rows) {
|
||||
const content = row.content.toLowerCase();
|
||||
let score = 0;
|
||||
for (const word of words) {
|
||||
if (content.includes(word)) score++;
|
||||
}
|
||||
if (score > 0) {
|
||||
scored.push({
|
||||
id: row.id,
|
||||
content: row.content,
|
||||
score,
|
||||
metadata: row.metadata ?? undefined,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
scored.sort((a, b) => b.score - a.score);
|
||||
return scored.slice(0, limit);
|
||||
}
|
||||
|
||||
async deleteInsight(id: string): Promise<boolean> {
|
||||
return this.storage.delete(INSIGHTS, id);
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Lifecycle */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
async close(): Promise<void> {
|
||||
// no-op — storage adapter manages its own lifecycle
|
||||
}
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Helpers */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
function toInsight(row: InsightRecord): Insight {
|
||||
return {
|
||||
id: row.id,
|
||||
userId: row.userId,
|
||||
content: row.content,
|
||||
source: row.source,
|
||||
category: row.category,
|
||||
relevanceScore: row.relevanceScore,
|
||||
metadata: row.metadata ?? undefined,
|
||||
createdAt: new Date(row.createdAt),
|
||||
updatedAt: row.updatedAt ? new Date(row.updatedAt) : undefined,
|
||||
decayedAt: row.decayedAt ? new Date(row.decayedAt) : undefined,
|
||||
};
|
||||
}
|
||||
@@ -1,177 +0,0 @@
|
||||
import { createDb, type DbHandle } from '@mosaic/db';
|
||||
import type {
|
||||
MemoryAdapter,
|
||||
MemoryConfig,
|
||||
NewInsight as AdapterNewInsight,
|
||||
Insight as AdapterInsight,
|
||||
InsightSearchResult,
|
||||
} from '../types.js';
|
||||
import type { EmbeddingProvider } from '../vector-store.js';
|
||||
import {
|
||||
createPreferencesRepo,
|
||||
type PreferencesRepo,
|
||||
type Preference,
|
||||
type NewPreference,
|
||||
} from '../preferences.js';
|
||||
import {
|
||||
createInsightsRepo,
|
||||
type InsightsRepo,
|
||||
type NewInsight as DbNewInsight,
|
||||
} from '../insights.js';
|
||||
|
||||
type PgVectorConfig = Extract<MemoryConfig, { type: 'pgvector' }>;
|
||||
|
||||
export class PgVectorAdapter implements MemoryAdapter {
|
||||
readonly name = 'pgvector';
|
||||
readonly embedder: EmbeddingProvider | null;
|
||||
|
||||
private handle: DbHandle;
|
||||
private preferences: PreferencesRepo;
|
||||
private insights: InsightsRepo;
|
||||
|
||||
constructor(config: PgVectorConfig) {
|
||||
this.handle = createDb();
|
||||
this.preferences = createPreferencesRepo(this.handle.db);
|
||||
this.insights = createInsightsRepo(this.handle.db);
|
||||
this.embedder = config.embedder ?? null;
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Preferences */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
async getPreference(userId: string, key: string): Promise<unknown | null> {
|
||||
const row = await this.preferences.findByUserAndKey(userId, key);
|
||||
return row?.value ?? null;
|
||||
}
|
||||
|
||||
async setPreference(
|
||||
userId: string,
|
||||
key: string,
|
||||
value: unknown,
|
||||
category?: string,
|
||||
): Promise<void> {
|
||||
await this.preferences.upsert({
|
||||
userId,
|
||||
key,
|
||||
value,
|
||||
...(category ? { category: category as NewPreference['category'] } : {}),
|
||||
});
|
||||
}
|
||||
|
||||
async deletePreference(userId: string, key: string): Promise<boolean> {
|
||||
return this.preferences.remove(userId, key);
|
||||
}
|
||||
|
||||
async listPreferences(
|
||||
userId: string,
|
||||
category?: string,
|
||||
): Promise<Array<{ key: string; value: unknown; category: string }>> {
|
||||
const rows = category
|
||||
? await this.preferences.findByUserAndCategory(userId, category as Preference['category'])
|
||||
: await this.preferences.findByUser(userId);
|
||||
|
||||
return rows.map((r) => ({ key: r.key, value: r.value, category: r.category }));
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Insights */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
async storeInsight(insight: AdapterNewInsight): Promise<AdapterInsight> {
|
||||
const row = await this.insights.create({
|
||||
userId: insight.userId,
|
||||
content: insight.content,
|
||||
source: insight.source as DbNewInsight['source'],
|
||||
category: insight.category as DbNewInsight['category'],
|
||||
relevanceScore: insight.relevanceScore,
|
||||
metadata: insight.metadata ?? {},
|
||||
embedding: insight.embedding ?? null,
|
||||
});
|
||||
|
||||
return toAdapterInsight(row);
|
||||
}
|
||||
|
||||
async getInsight(id: string): Promise<AdapterInsight | null> {
|
||||
// findById requires userId — search across all users via raw find
|
||||
// The adapter interface only takes id, so we pass an empty userId and rely on the id match.
|
||||
// Since the repo requires userId, we use a two-step approach.
|
||||
const row = await this.insights.findById(id, '');
|
||||
if (!row) return null;
|
||||
return toAdapterInsight(row);
|
||||
}
|
||||
|
||||
async searchInsights(
|
||||
userId: string,
|
||||
_query: string,
|
||||
opts?: { limit?: number; embedding?: number[] },
|
||||
): Promise<InsightSearchResult[]> {
|
||||
if (opts?.embedding) {
|
||||
const results = await this.insights.searchByEmbedding(
|
||||
userId,
|
||||
opts.embedding,
|
||||
opts.limit ?? 10,
|
||||
);
|
||||
return results.map((r) => ({
|
||||
id: r.insight.id,
|
||||
content: r.insight.content,
|
||||
score: 1 - r.distance,
|
||||
metadata: (r.insight.metadata as Record<string, unknown>) ?? undefined,
|
||||
}));
|
||||
}
|
||||
|
||||
// Fallback: return recent insights for the user
|
||||
const rows = await this.insights.findByUser(userId, opts?.limit ?? 10);
|
||||
return rows.map((r) => ({
|
||||
id: r.id,
|
||||
content: r.content,
|
||||
score: Number(r.relevanceScore),
|
||||
metadata: (r.metadata as Record<string, unknown>) ?? undefined,
|
||||
}));
|
||||
}
|
||||
|
||||
async deleteInsight(id: string): Promise<boolean> {
|
||||
// The repo requires userId — pass empty string since adapter interface only has id
|
||||
return this.insights.remove(id, '');
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Lifecycle */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
async close(): Promise<void> {
|
||||
await this.handle.close();
|
||||
}
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Helpers */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
function toAdapterInsight(row: {
|
||||
id: string;
|
||||
userId: string;
|
||||
content: string;
|
||||
source: string;
|
||||
category: string;
|
||||
relevanceScore: number;
|
||||
metadata: unknown;
|
||||
embedding: unknown;
|
||||
createdAt: Date;
|
||||
updatedAt: Date | null;
|
||||
decayedAt: Date | null;
|
||||
}): AdapterInsight {
|
||||
return {
|
||||
id: row.id,
|
||||
userId: row.userId,
|
||||
content: row.content,
|
||||
source: row.source,
|
||||
category: row.category,
|
||||
relevanceScore: row.relevanceScore,
|
||||
metadata: (row.metadata as Record<string, unknown>) ?? undefined,
|
||||
embedding: (row.embedding as number[]) ?? undefined,
|
||||
createdAt: row.createdAt,
|
||||
updatedAt: row.updatedAt ?? undefined,
|
||||
decayedAt: row.decayedAt ?? undefined,
|
||||
};
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
import type { MemoryAdapter, MemoryConfig } from './types.js';
|
||||
|
||||
type MemoryType = MemoryConfig['type'];
|
||||
|
||||
const registry = new Map<MemoryType, (config: MemoryConfig) => MemoryAdapter>();
|
||||
|
||||
export function registerMemoryAdapter(
|
||||
type: MemoryType,
|
||||
factory: (config: MemoryConfig) => MemoryAdapter,
|
||||
): void {
|
||||
registry.set(type, factory);
|
||||
}
|
||||
|
||||
export function createMemoryAdapter(config: MemoryConfig): MemoryAdapter {
|
||||
const factory = registry.get(config.type);
|
||||
if (!factory) throw new Error(`No adapter registered for type: ${config.type}`);
|
||||
return factory(config);
|
||||
}
|
||||
@@ -13,27 +13,3 @@ export {
|
||||
type SearchResult,
|
||||
} from './insights.js';
|
||||
export type { VectorStore, VectorSearchResult, EmbeddingProvider } from './vector-store.js';
|
||||
export type {
|
||||
MemoryAdapter,
|
||||
MemoryConfig,
|
||||
NewInsight as AdapterNewInsight,
|
||||
Insight as AdapterInsight,
|
||||
InsightSearchResult,
|
||||
} from './types.js';
|
||||
export { createMemoryAdapter, registerMemoryAdapter } from './factory.js';
|
||||
export { PgVectorAdapter } from './adapters/pgvector.js';
|
||||
export { KeywordAdapter } from './adapters/keyword.js';
|
||||
|
||||
// Auto-register adapters at module load time
|
||||
import { registerMemoryAdapter } from './factory.js';
|
||||
import { PgVectorAdapter } from './adapters/pgvector.js';
|
||||
import { KeywordAdapter } from './adapters/keyword.js';
|
||||
import type { MemoryConfig } from './types.js';
|
||||
|
||||
registerMemoryAdapter('pgvector', (config: MemoryConfig) => {
|
||||
return new PgVectorAdapter(config as Extract<MemoryConfig, { type: 'pgvector' }>);
|
||||
});
|
||||
|
||||
registerMemoryAdapter('keyword', (config: MemoryConfig) => {
|
||||
return new KeywordAdapter(config as Extract<MemoryConfig, { type: 'keyword' }>);
|
||||
});
|
||||
|
||||
@@ -1,73 +0,0 @@
|
||||
export type { EmbeddingProvider, VectorSearchResult } from './vector-store.js';
|
||||
import type { EmbeddingProvider } from './vector-store.js';
|
||||
import type { StorageAdapter } from '@mosaic/storage';
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Insight types (adapter-level, decoupled from Drizzle schema) */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
export interface NewInsight {
|
||||
userId: string;
|
||||
content: string;
|
||||
source: string;
|
||||
category: string;
|
||||
relevanceScore: number;
|
||||
metadata?: Record<string, unknown>;
|
||||
embedding?: number[];
|
||||
}
|
||||
|
||||
export interface Insight extends NewInsight {
|
||||
id: string;
|
||||
createdAt: Date;
|
||||
updatedAt?: Date;
|
||||
decayedAt?: Date;
|
||||
}
|
||||
|
||||
export interface InsightSearchResult {
|
||||
id: string;
|
||||
content: string;
|
||||
score: number;
|
||||
metadata?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* MemoryAdapter interface */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
export interface MemoryAdapter {
|
||||
readonly name: string;
|
||||
|
||||
// Preferences
|
||||
getPreference(userId: string, key: string): Promise<unknown | null>;
|
||||
setPreference(userId: string, key: string, value: unknown, category?: string): Promise<void>;
|
||||
deletePreference(userId: string, key: string): Promise<boolean>;
|
||||
listPreferences(
|
||||
userId: string,
|
||||
category?: string,
|
||||
): Promise<Array<{ key: string; value: unknown; category: string }>>;
|
||||
|
||||
// Insights
|
||||
storeInsight(insight: NewInsight): Promise<Insight>;
|
||||
getInsight(id: string): Promise<Insight | null>;
|
||||
searchInsights(
|
||||
userId: string,
|
||||
query: string,
|
||||
opts?: { limit?: number; embedding?: number[] },
|
||||
): Promise<InsightSearchResult[]>;
|
||||
deleteInsight(id: string): Promise<boolean>;
|
||||
|
||||
// Embedding
|
||||
readonly embedder: EmbeddingProvider | null;
|
||||
|
||||
// Lifecycle
|
||||
close(): Promise<void>;
|
||||
}
|
||||
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* MemoryConfig */
|
||||
/* ------------------------------------------------------------------ */
|
||||
|
||||
export type MemoryConfig =
|
||||
| { type: 'pgvector'; embedder?: EmbeddingProvider }
|
||||
| { type: 'sqlite-vec'; embedder?: EmbeddingProvider }
|
||||
| { type: 'keyword'; storage: StorageAdapter };
|
||||
@@ -56,66 +56,24 @@ if [[ $fetch -eq 1 ]]; then
|
||||
if [[ -d "$SKILLS_REPO_DIR/.git" ]]; then
|
||||
echo "[mosaic-skills] Updating skills source: $SKILLS_REPO_DIR"
|
||||
|
||||
# ── Detect dirty state ──────────────────────────────────────────────
|
||||
dirty=""
|
||||
dirty="$(git -C "$SKILLS_REPO_DIR" status --porcelain 2>/dev/null || true)"
|
||||
|
||||
if [[ -n "$dirty" ]]; then
|
||||
# ── Auto-migrate customized skills to skills-local/ ─────────────
|
||||
# Instead of stash/pop (fragile, merge conflicts), we:
|
||||
# 1. Identify which skill dirs contain user edits
|
||||
# 2. Copy those full skill dirs into skills-local/ (preserving edits)
|
||||
# 3. Reset the repo clean so pull always succeeds
|
||||
# 4. skills-local/ takes precedence during linking, so edits win
|
||||
|
||||
SOURCE_SKILLS_SUBDIR="$SKILLS_REPO_DIR/skills"
|
||||
migrated=()
|
||||
|
||||
while IFS= read -r line; do
|
||||
# porcelain format: XY <path> — extract the file path
|
||||
file="${line:3}"
|
||||
# Only migrate files under skills/ subdir in the repo
|
||||
if [[ "$file" == skills/* ]]; then
|
||||
# Extract the skill directory name (first path component after skills/)
|
||||
skill_name="${file#skills/}"
|
||||
skill_name="${skill_name%%/*}"
|
||||
|
||||
# Skip if already migrated this skill in this run
|
||||
local_skill_dir="$MOSAIC_LOCAL_SKILLS_DIR/$skill_name"
|
||||
if [[ -d "$local_skill_dir" ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
# Skip if skill_name is empty or hidden
|
||||
if [[ -z "$skill_name" || "$skill_name" == .* ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
# Copy the skill (with user's edits) from repo working tree to skills-local/
|
||||
if [[ -d "$SOURCE_SKILLS_SUBDIR/$skill_name" ]]; then
|
||||
cp -R "$SOURCE_SKILLS_SUBDIR/$skill_name" "$local_skill_dir"
|
||||
migrated+=("$skill_name")
|
||||
fi
|
||||
fi
|
||||
done <<< "$dirty"
|
||||
|
||||
if [[ ${#migrated[@]} -gt 0 ]]; then
|
||||
echo "[mosaic-skills] Migrated ${#migrated[@]} customized skill(s) to skills-local/:"
|
||||
for s in "${migrated[@]}"; do
|
||||
echo " → $MOSAIC_LOCAL_SKILLS_DIR/$s"
|
||||
done
|
||||
echo "[mosaic-skills] Your edits are preserved there and take precedence over canonical."
|
||||
fi
|
||||
|
||||
# Reset repo to clean state so pull always works
|
||||
echo "[mosaic-skills] Resetting source repo to clean state..."
|
||||
git -C "$SKILLS_REPO_DIR" checkout . 2>/dev/null || true
|
||||
git -C "$SKILLS_REPO_DIR" clean -fd 2>/dev/null || true
|
||||
# Stash any local changes (dirty index or worktree) before pulling
|
||||
local_changes=0
|
||||
if ! git -C "$SKILLS_REPO_DIR" diff --quiet 2>/dev/null || \
|
||||
! git -C "$SKILLS_REPO_DIR" diff --cached --quiet 2>/dev/null; then
|
||||
local_changes=1
|
||||
echo "[mosaic-skills] Stashing local changes..."
|
||||
git -C "$SKILLS_REPO_DIR" stash push -q -m "mosaic-sync-skills auto-stash"
|
||||
fi
|
||||
|
||||
if ! git -C "$SKILLS_REPO_DIR" pull --rebase 2>/dev/null; then
|
||||
if ! git -C "$SKILLS_REPO_DIR" pull --rebase; then
|
||||
echo "[mosaic-skills] WARN: pull failed — continuing with existing checkout" >&2
|
||||
git -C "$SKILLS_REPO_DIR" rebase --abort 2>/dev/null || true
|
||||
fi
|
||||
|
||||
# Restore stashed changes
|
||||
if [[ $local_changes -eq 1 ]]; then
|
||||
echo "[mosaic-skills] Restoring local changes..."
|
||||
git -C "$SKILLS_REPO_DIR" stash pop -q 2>/dev/null || \
|
||||
echo "[mosaic-skills] WARN: stash pop had conflicts — check $SKILLS_REPO_DIR" >&2
|
||||
fi
|
||||
else
|
||||
echo "[mosaic-skills] Cloning skills source to: $SKILLS_REPO_DIR"
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/mosaic",
|
||||
"version": "0.0.10",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/mosaic"
|
||||
},
|
||||
"version": "0.0.4",
|
||||
"description": "Mosaic agent framework — installation wizard and meta package",
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
@@ -32,7 +27,7 @@
|
||||
"@mosaic/quality-rails": "workspace:*",
|
||||
"@mosaic/types": "workspace:*",
|
||||
"@clack/prompts": "^0.9.1",
|
||||
"commander": "^13.0.0",
|
||||
"commander": "^12.1.0",
|
||||
"picocolors": "^1.1.1",
|
||||
"yaml": "^2.6.1",
|
||||
"zod": "^3.23.8"
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { readFileSync, existsSync, readdirSync, statSync, copyFileSync } from 'node:fs';
|
||||
import { readFileSync, existsSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import type { ConfigService } from './config-service.js';
|
||||
import type { SoulConfig, UserConfig, ToolsConfig, InstallAction } from '../types.js';
|
||||
@@ -140,23 +140,6 @@ export class FileConfigAdapter implements ConfigService {
|
||||
preserve: preservePaths,
|
||||
excludeGit: true,
|
||||
});
|
||||
|
||||
// Copy default root-level .md files (AGENTS.md, STANDARDS.md, etc.)
|
||||
// from framework/defaults/ into mosaicHome root if they don't exist yet.
|
||||
// These are framework contracts — only written on first install, never
|
||||
// overwritten (user may have customized them).
|
||||
const defaultsDir = join(this.sourceDir, 'defaults');
|
||||
if (existsSync(defaultsDir)) {
|
||||
for (const entry of readdirSync(defaultsDir)) {
|
||||
const dest = join(this.mosaicHome, entry);
|
||||
if (!existsSync(dest)) {
|
||||
const src = join(defaultsDir, entry);
|
||||
if (statSync(src).isFile()) {
|
||||
copyFileSync(src, dest);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
#!/usr/bin/env node
|
||||
import { existsSync } from 'node:fs';
|
||||
import { dirname, resolve } from 'node:path';
|
||||
import { resolve } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
import { Command } from 'commander';
|
||||
@@ -50,14 +49,7 @@ program
|
||||
.action(async (opts: Record<string, string | boolean | undefined>) => {
|
||||
try {
|
||||
const mosaicHome = (opts['mosaicHome'] as string) ?? DEFAULT_MOSAIC_HOME;
|
||||
// Default source to the framework/ dir bundled in this npm package.
|
||||
// This ensures syncFramework copies AGENTS.md, STANDARDS.md, guides/, etc.
|
||||
// Falls back to mosaicHome if the bundled dir doesn't exist (shouldn't happen).
|
||||
const pkgRoot = dirname(fileURLToPath(import.meta.url));
|
||||
const bundledFramework = resolve(pkgRoot, '..', 'framework');
|
||||
const sourceDir =
|
||||
(opts['sourceDir'] as string | undefined) ??
|
||||
(existsSync(bundledFramework) ? bundledFramework : mosaicHome);
|
||||
const sourceDir = (opts['sourceDir'] as string | undefined) ?? mosaicHome;
|
||||
|
||||
const prompter = opts['nonInteractive'] ? new HeadlessPrompter() : new ClackPrompter();
|
||||
|
||||
|
||||
@@ -122,18 +122,10 @@ export function semverLt(a: string, b: string): boolean {
|
||||
|
||||
// ─── Cache ──────────────────────────────────────────────────────────────────
|
||||
|
||||
/** Cache stores only the latest registry version (the expensive network call).
|
||||
* The installed version is always checked fresh — it's a local `npm ls`. */
|
||||
interface RegistryCache {
|
||||
latest: string;
|
||||
checkedAt: string;
|
||||
registry: string;
|
||||
}
|
||||
|
||||
function readCache(): RegistryCache | null {
|
||||
function readCache(): UpdateCheckResult | null {
|
||||
try {
|
||||
if (!existsSync(CACHE_FILE)) return null;
|
||||
const raw = JSON.parse(readFileSync(CACHE_FILE, 'utf-8')) as RegistryCache;
|
||||
const raw = JSON.parse(readFileSync(CACHE_FILE, 'utf-8')) as UpdateCheckResult;
|
||||
const age = Date.now() - new Date(raw.checkedAt).getTime();
|
||||
if (age > CACHE_TTL_MS) return null;
|
||||
return raw;
|
||||
@@ -142,10 +134,10 @@ function readCache(): RegistryCache | null {
|
||||
}
|
||||
}
|
||||
|
||||
function writeCache(entry: RegistryCache): void {
|
||||
function writeCache(result: UpdateCheckResult): void {
|
||||
try {
|
||||
mkdirSync(CACHE_DIR, { recursive: true });
|
||||
writeFileSync(CACHE_FILE, JSON.stringify(entry, null, 2) + '\n', 'utf-8');
|
||||
writeFileSync(CACHE_FILE, JSON.stringify(result, null, 2) + '\n', 'utf-8');
|
||||
} catch {
|
||||
// Best-effort — cache is not critical
|
||||
}
|
||||
@@ -182,40 +174,29 @@ export function getLatestVersion(): string {
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform an update check — uses registry cache when fresh, always checks
|
||||
* installed version fresh (local npm ls is cheap, caching it causes stale
|
||||
* "update available" banners after an upgrade).
|
||||
* Perform an update check — uses cache when fresh, otherwise hits registry.
|
||||
* Never throws.
|
||||
*/
|
||||
export function checkForUpdate(options?: { skipCache?: boolean }): UpdateCheckResult {
|
||||
const current = getInstalledVersion();
|
||||
|
||||
let latest: string;
|
||||
let checkedAt: string;
|
||||
|
||||
if (!options?.skipCache) {
|
||||
const cached = readCache();
|
||||
if (cached) {
|
||||
latest = cached.latest;
|
||||
checkedAt = cached.checkedAt;
|
||||
} else {
|
||||
latest = getLatestVersion();
|
||||
checkedAt = new Date().toISOString();
|
||||
writeCache({ latest, checkedAt, registry: REGISTRY });
|
||||
}
|
||||
} else {
|
||||
latest = getLatestVersion();
|
||||
checkedAt = new Date().toISOString();
|
||||
writeCache({ latest, checkedAt, registry: REGISTRY });
|
||||
if (cached) return cached;
|
||||
}
|
||||
|
||||
return {
|
||||
const current = getInstalledVersion();
|
||||
const latest = getLatestVersion();
|
||||
const updateAvailable = !!(current && latest && semverLt(current, latest));
|
||||
|
||||
const result: UpdateCheckResult = {
|
||||
current,
|
||||
latest,
|
||||
updateAvailable: !!(current && latest && semverLt(current, latest)),
|
||||
checkedAt,
|
||||
updateAvailable,
|
||||
checkedAt: new Date().toISOString(),
|
||||
registry: REGISTRY,
|
||||
};
|
||||
|
||||
writeCache(result);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/prdy",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/prdy"
|
||||
},
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"exports": {
|
||||
@@ -22,7 +17,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@clack/prompts": "^0.9.0",
|
||||
"commander": "^13.0.0",
|
||||
"commander": "^12.0.0",
|
||||
"js-yaml": "^4.1.0",
|
||||
"zod": "^3.22.0"
|
||||
},
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/quality-rails",
|
||||
"version": "0.0.3",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/quality-rails"
|
||||
},
|
||||
"version": "0.0.2",
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
@@ -22,7 +17,7 @@
|
||||
"test": "vitest run --passWithNoTests"
|
||||
},
|
||||
"dependencies": {
|
||||
"commander": "^13.0.0"
|
||||
"commander": "^12.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.0.0",
|
||||
|
||||
@@ -106,26 +106,12 @@ function printScaffoldResult(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Register quality-rails subcommands on an existing Commander program.
|
||||
* This avoids cross-package Commander version mismatches by using the
|
||||
* caller's Command instance directly.
|
||||
*/
|
||||
export function registerQualityRails(parent: Command): void {
|
||||
buildQualityRailsCommand(
|
||||
parent.command('quality-rails').description('Manage quality rails scaffolding'),
|
||||
);
|
||||
}
|
||||
|
||||
export function createQualityRailsCli(): Command {
|
||||
const program = new Command('mosaic');
|
||||
buildQualityRailsCommand(
|
||||
program.command('quality-rails').description('Manage quality rails scaffolding'),
|
||||
);
|
||||
return program;
|
||||
}
|
||||
const qualityRails = program
|
||||
.command('quality-rails')
|
||||
.description('Manage quality rails scaffolding');
|
||||
|
||||
function buildQualityRailsCommand(qualityRails: Command): void {
|
||||
qualityRails
|
||||
.command('init')
|
||||
.requiredOption('--project <path>', 'Project path')
|
||||
@@ -198,6 +184,8 @@ function buildQualityRailsCommand(qualityRails: Command): void {
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
return program;
|
||||
}
|
||||
|
||||
export async function runQualityRailsCli(argv: string[] = process.argv): Promise<void> {
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/queue",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/queue"
|
||||
},
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"exports": {
|
||||
|
||||
@@ -1,50 +0,0 @@
|
||||
import Redis from 'ioredis';
|
||||
|
||||
import type { QueueAdapter, QueueConfig, TaskPayload } from '../types.js';
|
||||
|
||||
const DEFAULT_VALKEY_URL = 'redis://localhost:6380';
|
||||
|
||||
export function createBullMQAdapter(config: QueueConfig): QueueAdapter {
|
||||
if (config.type !== 'bullmq') {
|
||||
throw new Error(`Expected config type "bullmq", got "${config.type}"`);
|
||||
}
|
||||
|
||||
const url = config.url ?? process.env['VALKEY_URL'] ?? DEFAULT_VALKEY_URL;
|
||||
const redis = new Redis(url, { maxRetriesPerRequest: 3 });
|
||||
|
||||
return {
|
||||
name: 'bullmq',
|
||||
|
||||
async enqueue(queueName: string, payload: TaskPayload): Promise<void> {
|
||||
await redis.lpush(queueName, JSON.stringify(payload));
|
||||
},
|
||||
|
||||
async dequeue(queueName: string): Promise<TaskPayload | null> {
|
||||
const item = await redis.rpop(queueName);
|
||||
if (!item) return null;
|
||||
return JSON.parse(item) as TaskPayload;
|
||||
},
|
||||
|
||||
async length(queueName: string): Promise<number> {
|
||||
return redis.llen(queueName);
|
||||
},
|
||||
|
||||
async publish(channel: string, message: string): Promise<void> {
|
||||
await redis.publish(channel, message);
|
||||
},
|
||||
|
||||
subscribe(channel: string, handler: (message: string) => void): () => void {
|
||||
const sub = redis.duplicate();
|
||||
sub.subscribe(channel).catch(() => {});
|
||||
sub.on('message', (_ch: string, msg: string) => handler(msg));
|
||||
return () => {
|
||||
sub.unsubscribe(channel).catch(() => {});
|
||||
sub.disconnect();
|
||||
};
|
||||
},
|
||||
|
||||
async close(): Promise<void> {
|
||||
await redis.quit();
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -1,81 +0,0 @@
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
|
||||
import type { TaskPayload } from '../types.js';
|
||||
import { createLocalAdapter } from './local.js';
|
||||
|
||||
function makePayload(id: string): TaskPayload {
|
||||
return { id, type: 'test', data: { value: id }, createdAt: new Date().toISOString() };
|
||||
}
|
||||
|
||||
describe('LocalAdapter', () => {
|
||||
let dataDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
dataDir = mkdtempSync(join(tmpdir(), 'mosaic-queue-test-'));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(dataDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('enqueue + dequeue in FIFO order', async () => {
|
||||
const adapter = createLocalAdapter({ type: 'local', dataDir });
|
||||
const a = makePayload('a');
|
||||
const b = makePayload('b');
|
||||
const c = makePayload('c');
|
||||
|
||||
await adapter.enqueue('tasks', a);
|
||||
await adapter.enqueue('tasks', b);
|
||||
await adapter.enqueue('tasks', c);
|
||||
|
||||
expect(await adapter.dequeue('tasks')).toEqual(a);
|
||||
expect(await adapter.dequeue('tasks')).toEqual(b);
|
||||
expect(await adapter.dequeue('tasks')).toEqual(c);
|
||||
expect(await adapter.dequeue('tasks')).toBeNull();
|
||||
});
|
||||
|
||||
it('length accuracy', async () => {
|
||||
const adapter = createLocalAdapter({ type: 'local', dataDir });
|
||||
|
||||
expect(await adapter.length('q')).toBe(0);
|
||||
await adapter.enqueue('q', makePayload('1'));
|
||||
await adapter.enqueue('q', makePayload('2'));
|
||||
expect(await adapter.length('q')).toBe(2);
|
||||
await adapter.dequeue('q');
|
||||
expect(await adapter.length('q')).toBe(1);
|
||||
});
|
||||
|
||||
it('publish + subscribe delivery', async () => {
|
||||
const adapter = createLocalAdapter({ type: 'local', dataDir });
|
||||
const received: string[] = [];
|
||||
|
||||
const unsub = adapter.subscribe('chan', (msg) => received.push(msg));
|
||||
await adapter.publish('chan', 'hello');
|
||||
await adapter.publish('chan', 'world');
|
||||
|
||||
expect(received).toEqual(['hello', 'world']);
|
||||
|
||||
unsub();
|
||||
await adapter.publish('chan', 'after-unsub');
|
||||
expect(received).toEqual(['hello', 'world']);
|
||||
});
|
||||
|
||||
it('persistence survives close and re-create', async () => {
|
||||
const p1 = makePayload('x');
|
||||
const p2 = makePayload('y');
|
||||
|
||||
const adapter1 = createLocalAdapter({ type: 'local', dataDir });
|
||||
await adapter1.enqueue('persist-q', p1);
|
||||
await adapter1.enqueue('persist-q', p2);
|
||||
await adapter1.close();
|
||||
|
||||
const adapter2 = createLocalAdapter({ type: 'local', dataDir });
|
||||
expect(await adapter2.length('persist-q')).toBe(2);
|
||||
expect(await adapter2.dequeue('persist-q')).toEqual(p1);
|
||||
expect(await adapter2.dequeue('persist-q')).toEqual(p2);
|
||||
await adapter2.close();
|
||||
});
|
||||
});
|
||||
@@ -1,87 +0,0 @@
|
||||
import { mkdirSync, readFileSync, readdirSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { EventEmitter } from 'node:events';
|
||||
|
||||
import type { QueueAdapter, QueueConfig, TaskPayload } from '../types.js';
|
||||
|
||||
const DEFAULT_DATA_DIR = '.mosaic/queue';
|
||||
|
||||
export function createLocalAdapter(config: QueueConfig): QueueAdapter {
|
||||
if (config.type !== 'local') {
|
||||
throw new Error(`Expected config type "local", got "${config.type}"`);
|
||||
}
|
||||
|
||||
const dataDir = config.dataDir ?? DEFAULT_DATA_DIR;
|
||||
const queues = new Map<string, TaskPayload[]>();
|
||||
const emitter = new EventEmitter();
|
||||
|
||||
mkdirSync(dataDir, { recursive: true });
|
||||
|
||||
// Load existing JSON files on startup
|
||||
for (const file of readdirSync(dataDir)) {
|
||||
if (!file.endsWith('.json')) continue;
|
||||
const queueName = file.slice(0, -5);
|
||||
try {
|
||||
const raw = readFileSync(join(dataDir, file), 'utf-8');
|
||||
const items = JSON.parse(raw) as TaskPayload[];
|
||||
if (Array.isArray(items)) {
|
||||
queues.set(queueName, items);
|
||||
}
|
||||
} catch {
|
||||
// Ignore corrupt files
|
||||
}
|
||||
}
|
||||
|
||||
function persist(queueName: string): void {
|
||||
const items = queues.get(queueName) ?? [];
|
||||
writeFileSync(join(dataDir, `${queueName}.json`), JSON.stringify(items), 'utf-8');
|
||||
}
|
||||
|
||||
function getQueue(queueName: string): TaskPayload[] {
|
||||
let q = queues.get(queueName);
|
||||
if (!q) {
|
||||
q = [];
|
||||
queues.set(queueName, q);
|
||||
}
|
||||
return q;
|
||||
}
|
||||
|
||||
return {
|
||||
name: 'local',
|
||||
|
||||
async enqueue(queueName: string, payload: TaskPayload): Promise<void> {
|
||||
getQueue(queueName).push(payload);
|
||||
persist(queueName);
|
||||
},
|
||||
|
||||
async dequeue(queueName: string): Promise<TaskPayload | null> {
|
||||
const q = getQueue(queueName);
|
||||
const item = q.shift() ?? null;
|
||||
persist(queueName);
|
||||
return item;
|
||||
},
|
||||
|
||||
async length(queueName: string): Promise<number> {
|
||||
return getQueue(queueName).length;
|
||||
},
|
||||
|
||||
async publish(channel: string, message: string): Promise<void> {
|
||||
emitter.emit(channel, message);
|
||||
},
|
||||
|
||||
subscribe(channel: string, handler: (message: string) => void): () => void {
|
||||
emitter.on(channel, handler);
|
||||
return () => {
|
||||
emitter.off(channel, handler);
|
||||
};
|
||||
},
|
||||
|
||||
async close(): Promise<void> {
|
||||
for (const queueName of queues.keys()) {
|
||||
persist(queueName);
|
||||
}
|
||||
queues.clear();
|
||||
emitter.removeAllListeners();
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
import type { QueueAdapter, QueueConfig } from './types.js';
|
||||
|
||||
type QueueType = QueueConfig['type'];
|
||||
|
||||
const registry = new Map<QueueType, (config: QueueConfig) => QueueAdapter>();
|
||||
|
||||
export function registerQueueAdapter(
|
||||
type: QueueType,
|
||||
factory: (config: QueueConfig) => QueueAdapter,
|
||||
): void {
|
||||
registry.set(type, factory);
|
||||
}
|
||||
|
||||
export function createQueueAdapter(config: QueueConfig): QueueAdapter {
|
||||
const factory = registry.get(config.type);
|
||||
if (!factory) throw new Error(`No adapter registered for type: ${config.type}`);
|
||||
return factory(config);
|
||||
}
|
||||
@@ -6,15 +6,3 @@ export {
|
||||
type QueueClient,
|
||||
type TaskPayload,
|
||||
} from './queue.js';
|
||||
|
||||
export { type QueueAdapter, type QueueConfig as QueueAdapterConfig } from './types.js';
|
||||
export { createQueueAdapter, registerQueueAdapter } from './factory.js';
|
||||
export { createBullMQAdapter } from './adapters/bullmq.js';
|
||||
export { createLocalAdapter } from './adapters/local.js';
|
||||
|
||||
import { registerQueueAdapter } from './factory.js';
|
||||
import { createBullMQAdapter } from './adapters/bullmq.js';
|
||||
import { createLocalAdapter } from './adapters/local.js';
|
||||
|
||||
registerQueueAdapter('bullmq', createBullMQAdapter);
|
||||
registerQueueAdapter('local', createLocalAdapter);
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
export interface TaskPayload {
|
||||
id: string;
|
||||
type: string;
|
||||
data: Record<string, unknown>;
|
||||
createdAt: string;
|
||||
}
|
||||
|
||||
export interface QueueAdapter {
|
||||
readonly name: string;
|
||||
enqueue(queueName: string, payload: TaskPayload): Promise<void>;
|
||||
dequeue(queueName: string): Promise<TaskPayload | null>;
|
||||
length(queueName: string): Promise<number>;
|
||||
publish(channel: string, message: string): Promise<void>;
|
||||
subscribe(channel: string, handler: (message: string) => void): () => void;
|
||||
close(): Promise<void>;
|
||||
}
|
||||
|
||||
export type QueueConfig = { type: 'bullmq'; url?: string } | { type: 'local'; dataDir?: string };
|
||||
@@ -1,40 +0,0 @@
|
||||
{
|
||||
"name": "@mosaic/storage",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/storage"
|
||||
},
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"types": "./dist/index.d.ts",
|
||||
"default": "./dist/index.js"
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"lint": "eslint src",
|
||||
"typecheck": "tsc --noEmit",
|
||||
"test": "vitest run --passWithNoTests"
|
||||
},
|
||||
"dependencies": {
|
||||
"@mosaic/db": "workspace:^",
|
||||
"@mosaic/types": "workspace:*",
|
||||
"better-sqlite3": "^12.8.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/better-sqlite3": "^7.6.13",
|
||||
"typescript": "^5.8.0",
|
||||
"vitest": "^2.0.0"
|
||||
},
|
||||
"publishConfig": {
|
||||
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||
"access": "public"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
]
|
||||
}
|
||||
@@ -1,252 +0,0 @@
|
||||
import {
|
||||
createDb,
|
||||
runMigrations,
|
||||
eq,
|
||||
and,
|
||||
asc,
|
||||
desc,
|
||||
sql,
|
||||
type Db,
|
||||
type DbHandle,
|
||||
} from '@mosaic/db';
|
||||
import * as schema from '@mosaic/db';
|
||||
import type { StorageAdapter, StorageConfig } from '../types.js';
|
||||
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
|
||||
/**
|
||||
* Maps collection name → Drizzle table object.
|
||||
* Typed as `any` because the generic StorageAdapter interface erases table
|
||||
* types — all runtime values are still strongly-typed Drizzle table objects.
|
||||
*/
|
||||
const TABLE_MAP: Record<string, any> = {
|
||||
users: schema.users,
|
||||
sessions: schema.sessions,
|
||||
accounts: schema.accounts,
|
||||
verifications: schema.verifications,
|
||||
teams: schema.teams,
|
||||
team_members: schema.teamMembers,
|
||||
projects: schema.projects,
|
||||
missions: schema.missions,
|
||||
tasks: schema.tasks,
|
||||
mission_tasks: schema.missionTasks,
|
||||
events: schema.events,
|
||||
agents: schema.agents,
|
||||
tickets: schema.tickets,
|
||||
appreciations: schema.appreciations,
|
||||
conversations: schema.conversations,
|
||||
messages: schema.messages,
|
||||
preferences: schema.preferences,
|
||||
insights: schema.insights,
|
||||
agent_logs: schema.agentLogs,
|
||||
skills: schema.skills,
|
||||
routing_rules: schema.routingRules,
|
||||
provider_credentials: schema.providerCredentials,
|
||||
summarization_jobs: schema.summarizationJobs,
|
||||
};
|
||||
|
||||
function getTable(collection: string): any {
|
||||
const table = TABLE_MAP[collection];
|
||||
if (!table) throw new Error(`Unknown collection: ${collection}`);
|
||||
return table;
|
||||
}
|
||||
|
||||
function buildWhereClause(table: any, filter?: Record<string, unknown>) {
|
||||
if (!filter || Object.keys(filter).length === 0) return undefined;
|
||||
const conditions = Object.entries(filter).map(([key, value]) => {
|
||||
const column = table[key];
|
||||
if (!column) throw new Error(`Unknown column "${key}" on table`);
|
||||
return eq(column, value);
|
||||
});
|
||||
return conditions.length === 1 ? conditions[0]! : and(...conditions);
|
||||
}
|
||||
|
||||
export class PostgresAdapter implements StorageAdapter {
|
||||
readonly name = 'postgres';
|
||||
private handle: DbHandle;
|
||||
private db: Db;
|
||||
private url: string;
|
||||
|
||||
constructor(config: Extract<StorageConfig, { type: 'postgres' }>) {
|
||||
this.url = config.url;
|
||||
this.handle = createDb(config.url);
|
||||
this.db = this.handle.db;
|
||||
}
|
||||
|
||||
async create<T extends Record<string, unknown>>(
|
||||
collection: string,
|
||||
data: T,
|
||||
): Promise<T & { id: string }> {
|
||||
const table = getTable(collection);
|
||||
const [row] = await (this.db as any).insert(table).values(data).returning();
|
||||
return row as T & { id: string };
|
||||
}
|
||||
|
||||
async read<T extends Record<string, unknown>>(collection: string, id: string): Promise<T | null> {
|
||||
const table = getTable(collection);
|
||||
const [row] = await (this.db as any).select().from(table).where(eq(table.id, id));
|
||||
return (row as T) ?? null;
|
||||
}
|
||||
|
||||
async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
|
||||
const table = getTable(collection);
|
||||
const result = await (this.db as any)
|
||||
.update(table)
|
||||
.set(data)
|
||||
.where(eq(table.id, id))
|
||||
.returning();
|
||||
return result.length > 0;
|
||||
}
|
||||
|
||||
async delete(collection: string, id: string): Promise<boolean> {
|
||||
const table = getTable(collection);
|
||||
const result = await (this.db as any).delete(table).where(eq(table.id, id)).returning();
|
||||
return result.length > 0;
|
||||
}
|
||||
|
||||
async find<T extends Record<string, unknown>>(
|
||||
collection: string,
|
||||
filter?: Record<string, unknown>,
|
||||
opts?: { limit?: number; offset?: number; orderBy?: string; order?: 'asc' | 'desc' },
|
||||
): Promise<T[]> {
|
||||
const table = getTable(collection);
|
||||
let query = (this.db as any).select().from(table);
|
||||
const where = buildWhereClause(table, filter);
|
||||
if (where) query = query.where(where);
|
||||
if (opts?.orderBy) {
|
||||
const col = table[opts.orderBy];
|
||||
if (col) {
|
||||
query = query.orderBy(opts.order === 'desc' ? desc(col) : asc(col));
|
||||
}
|
||||
}
|
||||
if (opts?.limit) query = query.limit(opts.limit);
|
||||
if (opts?.offset) query = query.offset(opts.offset);
|
||||
return (await query) as T[];
|
||||
}
|
||||
|
||||
async findOne<T extends Record<string, unknown>>(
|
||||
collection: string,
|
||||
filter: Record<string, unknown>,
|
||||
): Promise<T | null> {
|
||||
const results = await this.find<T>(collection, filter, { limit: 1 });
|
||||
return results[0] ?? null;
|
||||
}
|
||||
|
||||
async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
|
||||
const table = getTable(collection);
|
||||
let query = (this.db as any).select({ count: sql<number>`count(*)::int` }).from(table);
|
||||
const where = buildWhereClause(table, filter);
|
||||
if (where) query = query.where(where);
|
||||
const [row] = await query;
|
||||
return (row as any)?.count ?? 0;
|
||||
}
|
||||
|
||||
/**
 * Runs fn inside a database transaction. The callback receives a
 * StorageAdapter bound to the Drizzle transaction handle, so every
 * operation performed through it commits or rolls back together.
 */
async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
  return (this.db as any).transaction(async (drizzleTx: any) => {
    const txAdapter = new PostgresTxAdapter(drizzleTx, this.url);
    return fn(txAdapter);
  });
}
|
||||
|
||||
/** Applies pending schema migrations against this adapter's connection URL. */
async migrate(): Promise<void> {
  await runMigrations(this.url);
}
|
||||
|
||||
/** Closes the underlying connection handle/pool. */
async close(): Promise<void> {
  await this.handle.close();
}
|
||||
}
|
||||
|
||||
/**
 * Thin transaction wrapper — delegates to the Drizzle transaction object
 * instead of the top-level db handle. All operations issued through this
 * adapter participate in the transaction that created it.
 */
class PostgresTxAdapter implements StorageAdapter {
  readonly name = 'postgres';
  // Drizzle transaction handle (untyped: drizzle's tx type is generic-heavy).
  private tx: any;
  // Connection URL, retained only so migrate() can be satisfied.
  private url: string;

  constructor(tx: any, url: string) {
    this.tx = tx;
    this.url = url;
  }

  /** Inserts one record within the transaction; returns it with its id. */
  async create<T extends Record<string, unknown>>(
    collection: string,
    data: T,
  ): Promise<T & { id: string }> {
    const table = getTable(collection);
    const [row] = await this.tx.insert(table).values(data).returning();
    return row as T & { id: string };
  }

  /** Primary-key lookup within the transaction; null when absent. */
  async read<T extends Record<string, unknown>>(collection: string, id: string): Promise<T | null> {
    const table = getTable(collection);
    const [row] = await this.tx.select().from(table).where(eq(table.id, id));
    return (row as T) ?? null;
  }

  /** Partial update by id; true when a row was modified. */
  async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
    const table = getTable(collection);
    const result = await this.tx.update(table).set(data).where(eq(table.id, id)).returning();
    return result.length > 0;
  }

  /** Delete by id; true when a row was removed. */
  async delete(collection: string, id: string): Promise<boolean> {
    const table = getTable(collection);
    const result = await this.tx.delete(table).where(eq(table.id, id)).returning();
    return result.length > 0;
  }

  /** Filtered listing with optional ordering and pagination. */
  async find<T extends Record<string, unknown>>(
    collection: string,
    filter?: Record<string, unknown>,
    opts?: { limit?: number; offset?: number; orderBy?: string; order?: 'asc' | 'desc' },
  ): Promise<T[]> {
    const table = getTable(collection);
    let query = this.tx.select().from(table);
    const where = buildWhereClause(table, filter);
    if (where) query = query.where(where);
    if (opts?.orderBy) {
      // Ignore orderBy values that don't match a real column.
      const col = table[opts.orderBy];
      if (col) {
        query = query.orderBy(opts.order === 'desc' ? desc(col) : asc(col));
      }
    }
    if (opts?.limit) query = query.limit(opts.limit);
    if (opts?.offset) query = query.offset(opts.offset);
    return (await query) as T[];
  }

  /** First record matching the filter, or null. */
  async findOne<T extends Record<string, unknown>>(
    collection: string,
    filter: Record<string, unknown>,
  ): Promise<T | null> {
    const results = await this.find<T>(collection, filter, { limit: 1 });
    return results[0] ?? null;
  }

  /** COUNT(*) with the same filter semantics as find(). */
  async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
    const table = getTable(collection);
    let query = this.tx.select({ count: sql<number>`count(*)::int` }).from(table);
    const where = buildWhereClause(table, filter);
    if (where) query = query.where(where);
    const [row] = await query;
    return (row as any)?.count ?? 0;
  }

  /** Nested transaction: delegates to Drizzle's tx.transaction (savepoints). */
  async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
    return this.tx.transaction(async (nestedTx: any) => {
      const nested = new PostgresTxAdapter(nestedTx, this.url);
      return fn(nested);
    });
  }

  // NOTE(review): runs migrations via a fresh connection while this
  // transaction is open — confirm this is intended rather than a no-op.
  async migrate(): Promise<void> {
    await runMigrations(this.url);
  }

  async close(): Promise<void> {
    // No-op inside a transaction
  }
}
|
||||
@@ -1,201 +0,0 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { SqliteAdapter } from './sqlite.js';

// Exercises SqliteAdapter against an in-memory database. Each test gets a
// freshly-migrated adapter (beforeEach) and is torn down afterwards, so
// tests never share state.
describe('SqliteAdapter', () => {
  let adapter: SqliteAdapter;

  beforeEach(async () => {
    adapter = new SqliteAdapter({ type: 'sqlite', path: ':memory:' });
    await adapter.migrate();
  });

  afterEach(async () => {
    await adapter.close();
  });

  describe('CRUD', () => {
    it('creates and reads a record', async () => {
      const created = await adapter.create('users', { name: 'Alice', email: 'alice@test.com' });
      expect(created.id).toBeDefined();
      expect(created.name).toBe('Alice');

      const read = await adapter.read('users', created.id);
      expect(read).not.toBeNull();
      expect(read!.name).toBe('Alice');
      expect(read!.email).toBe('alice@test.com');
    });

    it('returns null for non-existent record', async () => {
      const result = await adapter.read('users', 'does-not-exist');
      expect(result).toBeNull();
    });

    it('updates a record', async () => {
      const created = await adapter.create('users', { name: 'Alice' });
      const updated = await adapter.update('users', created.id, { name: 'Bob' });
      expect(updated).toBe(true);

      const read = await adapter.read('users', created.id);
      expect(read!.name).toBe('Bob');
    });

    it('update returns false for non-existent record', async () => {
      const result = await adapter.update('users', 'does-not-exist', { name: 'X' });
      expect(result).toBe(false);
    });

    it('deletes a record', async () => {
      const created = await adapter.create('users', { name: 'Alice' });
      const deleted = await adapter.delete('users', created.id);
      expect(deleted).toBe(true);

      const read = await adapter.read('users', created.id);
      expect(read).toBeNull();
    });

    it('delete returns false for non-existent record', async () => {
      const result = await adapter.delete('users', 'does-not-exist');
      expect(result).toBe(false);
    });
  });

  describe('find', () => {
    it('finds records with filter', async () => {
      await adapter.create('users', { name: 'Alice', role: 'admin' });
      await adapter.create('users', { name: 'Bob', role: 'user' });
      await adapter.create('users', { name: 'Charlie', role: 'admin' });

      const admins = await adapter.find('users', { role: 'admin' });
      expect(admins).toHaveLength(2);
      expect(admins.map((u) => u.name).sort()).toEqual(['Alice', 'Charlie']);
    });

    it('finds all records without filter', async () => {
      await adapter.create('users', { name: 'Alice' });
      await adapter.create('users', { name: 'Bob' });

      const all = await adapter.find('users');
      expect(all).toHaveLength(2);
    });

    it('supports limit and offset', async () => {
      for (let i = 0; i < 5; i++) {
        await adapter.create('users', { name: `User${i}`, idx: i });
      }

      // Ordering by created_at keeps the page deterministic.
      const page = await adapter.find('users', undefined, {
        limit: 2,
        offset: 1,
        orderBy: 'created_at',
      });
      expect(page).toHaveLength(2);
    });

    it('findOne returns first match', async () => {
      await adapter.create('users', { name: 'Alice', role: 'admin' });
      await adapter.create('users', { name: 'Bob', role: 'user' });

      const found = await adapter.findOne('users', { role: 'user' });
      expect(found).not.toBeNull();
      expect(found!.name).toBe('Bob');
    });

    it('findOne returns null when no match', async () => {
      const result = await adapter.findOne('users', { role: 'nonexistent' });
      expect(result).toBeNull();
    });
  });

  describe('count', () => {
    it('counts all records', async () => {
      await adapter.create('users', { name: 'Alice' });
      await adapter.create('users', { name: 'Bob' });

      const total = await adapter.count('users');
      expect(total).toBe(2);
    });

    it('counts with filter', async () => {
      await adapter.create('users', { name: 'Alice', role: 'admin' });
      await adapter.create('users', { name: 'Bob', role: 'user' });
      await adapter.create('users', { name: 'Charlie', role: 'admin' });

      const adminCount = await adapter.count('users', { role: 'admin' });
      expect(adminCount).toBe(2);
    });

    it('returns 0 for empty collection', async () => {
      const count = await adapter.count('users');
      expect(count).toBe(0);
    });
  });

  describe('transaction', () => {
    it('commits on success', async () => {
      await adapter.transaction(async (tx) => {
        await tx.create('users', { name: 'Alice' });
        await tx.create('users', { name: 'Bob' });
      });

      const count = await adapter.count('users');
      expect(count).toBe(2);
    });

    it('rolls back on error', async () => {
      // A throw inside the callback must undo the create above it.
      await expect(
        adapter.transaction(async (tx) => {
          await tx.create('users', { name: 'Alice' });
          throw new Error('rollback test');
        }),
      ).rejects.toThrow('rollback test');

      const count = await adapter.count('users');
      expect(count).toBe(0);
    });
  });

  describe('migrate', () => {
    it('creates all tables', async () => {
      // migrate() was already called in beforeEach — verify tables exist
      const collections = [
        'users',
        'sessions',
        'accounts',
        'projects',
        'missions',
        'tasks',
        'agents',
        'conversations',
        'messages',
        'preferences',
        'insights',
        'skills',
        'events',
        'routing_rules',
        'provider_credentials',
        'agent_logs',
        'teams',
        'team_members',
        'mission_tasks',
        'tickets',
        'summarization_jobs',
        'appreciations',
        'verifications',
      ];

      for (const collection of collections) {
        // Should not throw
        const count = await adapter.count(collection);
        expect(count).toBe(0);
      }
    });

    it('is idempotent', async () => {
      await adapter.migrate();
      await adapter.migrate();
      // Should not throw
      const count = await adapter.count('users');
      expect(count).toBe(0);
    });
  });
});
|
||||
@@ -1,283 +0,0 @@
|
||||
import Database from 'better-sqlite3';
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import type { StorageAdapter, StorageConfig } from '../types.js';
|
||||
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
|
||||
// Every logical collection gets its own table with the same generic
// (id, data_json, created_at, updated_at) shape — see SqliteAdapter.migrate().
const COLLECTIONS = [
  'users',
  'sessions',
  'accounts',
  'projects',
  'missions',
  'tasks',
  'agents',
  'conversations',
  'messages',
  'preferences',
  'insights',
  'skills',
  'events',
  'routing_rules',
  'provider_credentials',
  'agent_logs',
  'teams',
  'team_members',
  'mission_tasks',
  'tickets',
  'summarization_jobs',
  'appreciations',
  'verifications',
] as const;
|
||||
|
||||
function buildFilterClause(filter?: Record<string, unknown>): {
|
||||
clause: string;
|
||||
params: unknown[];
|
||||
} {
|
||||
if (!filter || Object.keys(filter).length === 0) return { clause: '', params: [] };
|
||||
const conditions: string[] = [];
|
||||
const params: unknown[] = [];
|
||||
for (const [key, value] of Object.entries(filter)) {
|
||||
if (key === 'id') {
|
||||
conditions.push('id = ?');
|
||||
params.push(value);
|
||||
} else {
|
||||
conditions.push(`json_extract(data_json, '$.${key}') = ?`);
|
||||
params.push(typeof value === 'object' ? JSON.stringify(value) : value);
|
||||
}
|
||||
}
|
||||
return { clause: ` WHERE ${conditions.join(' AND ')}`, params };
|
||||
}
|
||||
|
||||
/**
 * SQLite-backed StorageAdapter. Each collection is a table with a generic
 * (id, data_json, created_at, updated_at) shape; record fields other than
 * id live inside the data_json document.
 */
export class SqliteAdapter implements StorageAdapter {
  readonly name = 'sqlite';
  private db: Database.Database;

  constructor(config: Extract<StorageConfig, { type: 'sqlite' }>) {
    this.db = new Database(config.path);
    // WAL improves concurrent read behavior; foreign keys are off by default
    // in SQLite, so enable them explicitly.
    this.db.pragma('journal_mode = WAL');
    this.db.pragma('foreign_keys = ON');
  }

  /**
   * Inserts a record; reuses data.id when provided, otherwise generates a
   * UUID. The id is stored in its own column and stripped from data_json.
   */
  async create<T extends Record<string, unknown>>(
    collection: string,
    data: T,
  ): Promise<T & { id: string }> {
    const id = (data as any).id ?? randomUUID();
    const now = new Date().toISOString();
    const rest = Object.fromEntries(Object.entries(data).filter(([k]) => k !== 'id'));
    this.db
      .prepare(
        `INSERT INTO ${collection} (id, data_json, created_at, updated_at) VALUES (?, ?, ?, ?)`,
      )
      .run(id, JSON.stringify(rest), now, now);
    return { ...data, id } as T & { id: string };
  }

  /** Primary-key lookup; re-assembles the record from id + data_json. */
  async read<T extends Record<string, unknown>>(collection: string, id: string): Promise<T | null> {
    const row = this.db.prepare(`SELECT * FROM ${collection} WHERE id = ?`).get(id) as any;
    if (!row) return null;
    return { id: row.id, ...JSON.parse(row.data_json as string) } as T;
  }

  /**
   * Shallow-merges data into the stored document and bumps updated_at.
   * Returns false when the id does not exist.
   */
  async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
    const existing = this.db
      .prepare(`SELECT data_json FROM ${collection} WHERE id = ?`)
      .get(id) as any;
    if (!existing) return false;
    const merged = { ...JSON.parse(existing.data_json as string), ...data };
    const now = new Date().toISOString();
    const result = this.db
      .prepare(`UPDATE ${collection} SET data_json = ?, updated_at = ? WHERE id = ?`)
      .run(JSON.stringify(merged), now, id);
    return result.changes > 0;
  }

  /** Delete by id; true when a row was removed. */
  async delete(collection: string, id: string): Promise<boolean> {
    const result = this.db.prepare(`DELETE FROM ${collection} WHERE id = ?`).run(id);
    return result.changes > 0;
  }

  /**
   * Filtered listing with optional ordering and pagination.
   * NOTE(review): collection and orderBy are interpolated into SQL text —
   * they must come from trusted code, never user input.
   */
  async find<T extends Record<string, unknown>>(
    collection: string,
    filter?: Record<string, unknown>,
    opts?: { limit?: number; offset?: number; orderBy?: string; order?: 'asc' | 'desc' },
  ): Promise<T[]> {
    const { clause, params } = buildFilterClause(filter);
    let query = `SELECT * FROM ${collection}${clause}`;
    if (opts?.orderBy) {
      const dir = opts.order === 'desc' ? 'DESC' : 'ASC';
      // Real columns sort directly; anything else sorts on the JSON field.
      const col =
        opts.orderBy === 'id' || opts.orderBy === 'created_at' || opts.orderBy === 'updated_at'
          ? opts.orderBy
          : `json_extract(data_json, '$.${opts.orderBy}')`;
      query += ` ORDER BY ${col} ${dir}`;
    }
    if (opts?.limit) {
      query += ` LIMIT ?`;
      params.push(opts.limit);
    }
    if (opts?.offset) {
      query += ` OFFSET ?`;
      params.push(opts.offset);
    }
    const rows = this.db.prepare(query).all(...params) as any[];
    return rows.map((row) => ({ id: row.id, ...JSON.parse(row.data_json as string) }) as T);
  }

  /** First record matching the filter, or null. */
  async findOne<T extends Record<string, unknown>>(
    collection: string,
    filter: Record<string, unknown>,
  ): Promise<T | null> {
    const results = await this.find<T>(collection, filter, { limit: 1 });
    return results[0] ?? null;
  }

  /** COUNT(*) with the same filter semantics as find(). */
  async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
    const { clause, params } = buildFilterClause(filter);
    const row = this.db
      .prepare(`SELECT COUNT(*) as count FROM ${collection}${clause}`)
      .get(...params) as any;
    return row?.count ?? 0;
  }

  /**
   * Runs fn between an explicit BEGIN and COMMIT, rolling back on any throw.
   * better-sqlite3 transactions are connection-level, so every statement the
   * callback issues on this handle participates.
   * NOTE(review): a nested call to this method while a transaction is open
   * would issue a second BEGIN — confirm callers only nest via the tx adapter.
   */
  async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
    const txAdapter = new SqliteTxAdapter(this.db);
    this.db.exec('BEGIN');
    try {
      const result = await fn(txAdapter);
      this.db.exec('COMMIT');
      return result;
    } catch (err) {
      this.db.exec('ROLLBACK');
      throw err;
    }
  }

  /** Creates one table per collection; idempotent via IF NOT EXISTS. */
  async migrate(): Promise<void> {
    const createTable = (name: string) =>
      this.db.exec(`
        CREATE TABLE IF NOT EXISTS ${name} (
          id TEXT PRIMARY KEY,
          data_json TEXT NOT NULL DEFAULT '{}',
          created_at TEXT NOT NULL DEFAULT (datetime('now')),
          updated_at TEXT NOT NULL DEFAULT (datetime('now'))
        )
      `);
    for (const collection of COLLECTIONS) {
      createTable(collection);
    }
  }

  /** Closes the database file handle. */
  async close(): Promise<void> {
    this.db.close();
  }
}
|
||||
|
||||
/**
 * Transaction wrapper that reuses the same db handle — better-sqlite3
 * transactions are connection-level, so every statement issued on this
 * Database instance between SqliteAdapter.transaction()'s BEGIN and its
 * COMMIT/ROLLBACK participates in that transaction.
 */
class SqliteTxAdapter implements StorageAdapter {
  readonly name = 'sqlite';
  private db: Database.Database;

  constructor(db: Database.Database) {
    this.db = db;
  }

  /** Same semantics as SqliteAdapter.create, executed inside the open tx. */
  async create<T extends Record<string, unknown>>(
    collection: string,
    data: T,
  ): Promise<T & { id: string }> {
    const id = (data as any).id ?? randomUUID();
    const now = new Date().toISOString();
    const rest = Object.fromEntries(Object.entries(data).filter(([k]) => k !== 'id'));
    this.db
      .prepare(
        `INSERT INTO ${collection} (id, data_json, created_at, updated_at) VALUES (?, ?, ?, ?)`,
      )
      .run(id, JSON.stringify(rest), now, now);
    return { ...data, id } as T & { id: string };
  }

  /** Primary-key lookup; null when absent. */
  async read<T extends Record<string, unknown>>(collection: string, id: string): Promise<T | null> {
    const row = this.db.prepare(`SELECT * FROM ${collection} WHERE id = ?`).get(id) as any;
    if (!row) return null;
    return { id: row.id, ...JSON.parse(row.data_json as string) } as T;
  }

  /** Shallow-merge update; false when the id does not exist. */
  async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
    const existing = this.db
      .prepare(`SELECT data_json FROM ${collection} WHERE id = ?`)
      .get(id) as any;
    if (!existing) return false;
    const merged = { ...JSON.parse(existing.data_json as string), ...data };
    const now = new Date().toISOString();
    const result = this.db
      .prepare(`UPDATE ${collection} SET data_json = ?, updated_at = ? WHERE id = ?`)
      .run(JSON.stringify(merged), now, id);
    return result.changes > 0;
  }

  /** Delete by id; true when a row was removed. */
  async delete(collection: string, id: string): Promise<boolean> {
    const result = this.db.prepare(`DELETE FROM ${collection} WHERE id = ?`).run(id);
    return result.changes > 0;
  }

  /** Filtered listing — mirrors SqliteAdapter.find (trusted identifiers only). */
  async find<T extends Record<string, unknown>>(
    collection: string,
    filter?: Record<string, unknown>,
    opts?: { limit?: number; offset?: number; orderBy?: string; order?: 'asc' | 'desc' },
  ): Promise<T[]> {
    const { clause, params } = buildFilterClause(filter);
    let query = `SELECT * FROM ${collection}${clause}`;
    if (opts?.orderBy) {
      const dir = opts.order === 'desc' ? 'DESC' : 'ASC';
      const col =
        opts.orderBy === 'id' || opts.orderBy === 'created_at' || opts.orderBy === 'updated_at'
          ? opts.orderBy
          : `json_extract(data_json, '$.${opts.orderBy}')`;
      query += ` ORDER BY ${col} ${dir}`;
    }
    if (opts?.limit) {
      query += ` LIMIT ?`;
      params.push(opts.limit);
    }
    if (opts?.offset) {
      query += ` OFFSET ?`;
      params.push(opts.offset);
    }
    const rows = this.db.prepare(query).all(...params) as any[];
    return rows.map((row) => ({ id: row.id, ...JSON.parse(row.data_json as string) }) as T);
  }

  /** First record matching the filter, or null. */
  async findOne<T extends Record<string, unknown>>(
    collection: string,
    filter: Record<string, unknown>,
  ): Promise<T | null> {
    const results = await this.find<T>(collection, filter, { limit: 1 });
    return results[0] ?? null;
  }

  /** COUNT(*) with the same filter semantics as find(). */
  async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
    const { clause, params } = buildFilterClause(filter);
    const row = this.db
      .prepare(`SELECT COUNT(*) as count FROM ${collection}${clause}`)
      .get(...params) as any;
    return row?.count ?? 0;
  }

  /** Nested "transactions" flatten into the enclosing one: fn runs in-place. */
  async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
    return fn(this);
  }

  async migrate(): Promise<void> {
    // No-op inside transaction
  }

  async close(): Promise<void> {
    // No-op inside transaction
  }
}
|
||||
@@ -1,18 +0,0 @@
|
||||
import type { StorageAdapter, StorageConfig } from './types.js';
|
||||
|
||||
type StorageType = StorageConfig['type'];

// Module-level registry mapping backend type -> adapter factory.
// Populated as a side effect of importing the package index (and by any
// code that calls registerStorageAdapter at runtime).
const registry = new Map<StorageType, (config: StorageConfig) => StorageAdapter>();
|
||||
|
||||
/**
 * Registers the factory used to build adapters of the given storage type.
 * Registering the same type again replaces the previous factory.
 */
export function registerStorageAdapter(
  type: StorageType,
  factory: (config: StorageConfig) => StorageAdapter,
): void {
  registry.set(type, factory);
}
|
||||
|
||||
export function createStorageAdapter(config: StorageConfig): StorageAdapter {
|
||||
const factory = registry.get(config.type);
|
||||
if (!factory) throw new Error(`No adapter registered for type: ${config.type}`);
|
||||
return factory(config);
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
export type { StorageAdapter, StorageConfig } from './types.js';
export { createStorageAdapter, registerStorageAdapter } from './factory.js';
export { PostgresAdapter } from './adapters/postgres.js';
export { SqliteAdapter } from './adapters/sqlite.js';

import { registerStorageAdapter } from './factory.js';
import { PostgresAdapter } from './adapters/postgres.js';
import { SqliteAdapter } from './adapters/sqlite.js';
import type { StorageConfig } from './types.js';

// Self-registration: importing this module makes the built-in adapters
// available to createStorageAdapter(). The casts narrow the StorageConfig
// discriminated union to the variant each adapter's constructor expects.
registerStorageAdapter('postgres', (config: StorageConfig) => {
  return new PostgresAdapter(config as Extract<StorageConfig, { type: 'postgres' }>);
});

registerStorageAdapter('sqlite', (config: StorageConfig) => {
  return new SqliteAdapter(config as Extract<StorageConfig, { type: 'sqlite' }>);
});
|
||||
@@ -1,43 +0,0 @@
|
||||
/**
 * Pluggable persistence backend. Collections are schemaless from the
 * caller's point of view: records are plain objects addressed by a
 * collection name and a string id.
 */
export interface StorageAdapter {
  /** Backend identifier, e.g. 'postgres' or 'sqlite'. */
  readonly name: string;

  /** Inserts a record; returns it including its (possibly generated) id. */
  create<T extends Record<string, unknown>>(
    collection: string,
    data: T,
  ): Promise<T & { id: string }>;

  /** Fetches a record by id, or null when absent. */
  read<T extends Record<string, unknown>>(collection: string, id: string): Promise<T | null>;

  /** Partially updates a record; resolves true when a row was modified. */
  update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean>;

  /** Deletes a record; resolves true when a row was removed. */
  delete(collection: string, id: string): Promise<boolean>;

  /** Lists records matching an equality filter, with optional sort/paging. */
  find<T extends Record<string, unknown>>(
    collection: string,
    filter?: Record<string, unknown>,
    opts?: {
      limit?: number;
      offset?: number;
      orderBy?: string;
      order?: 'asc' | 'desc';
    },
  ): Promise<T[]>;

  /** First record matching the filter, or null. */
  findOne<T extends Record<string, unknown>>(
    collection: string,
    filter: Record<string, unknown>,
  ): Promise<T | null>;

  /** Number of records matching the filter. */
  count(collection: string, filter?: Record<string, unknown>): Promise<number>;

  /** Runs fn atomically; the adapter passed to fn shares one transaction. */
  transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T>;

  /** Applies schema migrations; implementations are expected to be idempotent. */
  migrate(): Promise<void>;

  /** Releases underlying connections/file handles. */
  close(): Promise<void>;
}

/** Discriminated union of supported backend configurations. */
export type StorageConfig =
  | { type: 'postgres'; url: string }
  | { type: 'sqlite'; path: string }
  | { type: 'files'; dataDir: string; format?: 'json' | 'md' };
|
||||
@@ -1,9 +0,0 @@
|
||||
{
|
||||
"extends": "../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "dist",
|
||||
"rootDir": "src"
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/types",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "packages/types"
|
||||
},
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"exports": {
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/discord-plugin",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "plugins/discord"
|
||||
},
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"exports": {
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/oc-macp-plugin",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "plugins/macp"
|
||||
},
|
||||
"type": "module",
|
||||
"main": "src/index.ts",
|
||||
"description": "OpenClaw ACP runtime backend that routes sessions_spawn(runtime:\"macp\") to the Pi MACP runner.",
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/oc-framework-plugin",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "plugins/mosaic-framework"
|
||||
},
|
||||
"type": "module",
|
||||
"main": "src/index.ts",
|
||||
"description": "Injects Mosaic framework rails, runtime contract, and active mission context into all OpenClaw agent sessions and ACP subagent spawns.",
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
{
|
||||
"name": "@mosaic/telegram-plugin",
|
||||
"version": "0.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||
"directory": "plugins/telegram"
|
||||
},
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"exports": {
|
||||
|
||||
312
pnpm-lock.yaml
generated
312
pnpm-lock.yaml
generated
@@ -62,9 +62,6 @@ importers:
|
||||
'@mosaic/brain':
|
||||
specifier: workspace:^
|
||||
version: link:../../packages/brain
|
||||
'@mosaic/config':
|
||||
specifier: workspace:^
|
||||
version: link:../../packages/config
|
||||
'@mosaic/coord':
|
||||
specifier: workspace:^
|
||||
version: link:../../packages/coord
|
||||
@@ -83,9 +80,6 @@ importers:
|
||||
'@mosaic/queue':
|
||||
specifier: workspace:^
|
||||
version: link:../../packages/queue
|
||||
'@mosaic/storage':
|
||||
specifier: workspace:^
|
||||
version: link:../../packages/storage
|
||||
'@mosaic/telegram-plugin':
|
||||
specifier: workspace:^
|
||||
version: link:../../plugins/telegram
|
||||
@@ -136,7 +130,7 @@ importers:
|
||||
version: 0.34.48
|
||||
better-auth:
|
||||
specifier: ^1.5.5
|
||||
version: 1.5.5(better-sqlite3@12.8.0)(drizzle-kit@0.31.9)(drizzle-orm@0.45.1(@electric-sql/pglite@0.2.17)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(better-sqlite3@12.8.0)(kysely@0.28.11)(postgres@3.4.8))(mongodb@7.1.0(socks@2.8.7))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@2.1.9(@types/node@22.19.15)(jsdom@29.0.0(@noble/hashes@2.0.1))(lightningcss@1.31.1))
|
||||
version: 1.5.5(drizzle-kit@0.31.9)(drizzle-orm@0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8))(mongodb@7.1.0(socks@2.8.7))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@2.1.9(@types/node@22.19.15)(jsdom@29.0.0(@noble/hashes@2.0.1))(lightningcss@1.31.1))
|
||||
bullmq:
|
||||
specifier: ^5.71.0
|
||||
version: 5.71.0
|
||||
@@ -200,7 +194,7 @@ importers:
|
||||
version: link:../../packages/design-tokens
|
||||
better-auth:
|
||||
specifier: ^1.5.5
|
||||
version: 1.5.5(better-sqlite3@12.8.0)(drizzle-kit@0.31.9)(drizzle-orm@0.45.1(@electric-sql/pglite@0.2.17)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(better-sqlite3@12.8.0)(kysely@0.28.11)(postgres@3.4.8))(mongodb@7.1.0(socks@2.8.7))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@2.1.9(@types/node@22.19.15)(jsdom@29.0.0(@noble/hashes@2.0.1))(lightningcss@1.31.1))
|
||||
version: 1.5.5(drizzle-kit@0.31.9)(drizzle-orm@0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8))(mongodb@7.1.0(socks@2.8.7))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@2.1.9(@types/node@22.19.15)(jsdom@29.0.0(@noble/hashes@2.0.1))(lightningcss@1.31.1))
|
||||
clsx:
|
||||
specifier: ^2.1.0
|
||||
version: 2.1.1
|
||||
@@ -271,7 +265,7 @@ importers:
|
||||
version: link:../db
|
||||
better-auth:
|
||||
specifier: ^1.5.5
|
||||
version: 1.5.5(better-sqlite3@12.8.0)(drizzle-kit@0.31.9)(drizzle-orm@0.45.1(@electric-sql/pglite@0.2.17)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(better-sqlite3@12.8.0)(kysely@0.28.11)(postgres@3.4.8))(mongodb@7.1.0(socks@2.8.7))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@2.1.9(@types/node@22.19.15)(jsdom@29.0.0(@noble/hashes@2.0.1))(lightningcss@1.31.1))
|
||||
version: 1.5.5(drizzle-kit@0.31.9)(drizzle-orm@0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8))(mongodb@7.1.0(socks@2.8.7))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@2.1.9(@types/node@22.19.15)(jsdom@29.0.0(@noble/hashes@2.0.1))(lightningcss@1.31.1))
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
@@ -307,9 +301,6 @@ importers:
|
||||
'@clack/prompts':
|
||||
specifier: ^0.9.0
|
||||
version: 0.9.1
|
||||
'@mosaic/config':
|
||||
specifier: workspace:^
|
||||
version: link:../config
|
||||
'@mosaic/mosaic':
|
||||
specifier: workspace:^
|
||||
version: link:../mosaic
|
||||
@@ -357,25 +348,6 @@ importers:
|
||||
specifier: ^2.0.0
|
||||
version: 2.1.9(@types/node@22.19.15)(jsdom@29.0.0(@noble/hashes@2.0.1))(lightningcss@1.31.1)
|
||||
|
||||
packages/config:
|
||||
dependencies:
|
||||
'@mosaic/memory':
|
||||
specifier: workspace:^
|
||||
version: link:../memory
|
||||
'@mosaic/queue':
|
||||
specifier: workspace:^
|
||||
version: link:../queue
|
||||
'@mosaic/storage':
|
||||
specifier: workspace:^
|
||||
version: link:../storage
|
||||
devDependencies:
|
||||
typescript:
|
||||
specifier: ^5.8.0
|
||||
version: 5.9.3
|
||||
vitest:
|
||||
specifier: ^2.0.0
|
||||
version: 2.1.9(@types/node@24.12.0)(jsdom@29.0.0(@noble/hashes@2.0.1))(lightningcss@1.31.1)
|
||||
|
||||
packages/coord:
|
||||
dependencies:
|
||||
'@mosaic/types':
|
||||
@@ -394,12 +366,9 @@ importers:
|
||||
|
||||
packages/db:
|
||||
dependencies:
|
||||
'@electric-sql/pglite':
|
||||
specifier: ^0.2.17
|
||||
version: 0.2.17
|
||||
drizzle-orm:
|
||||
specifier: ^0.45.1
|
||||
version: 0.45.1(@electric-sql/pglite@0.2.17)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(better-sqlite3@12.8.0)(kysely@0.28.11)(postgres@3.4.8)
|
||||
version: 0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8)
|
||||
postgres:
|
||||
specifier: ^3.4.8
|
||||
version: 3.4.8
|
||||
@@ -455,7 +424,7 @@ importers:
|
||||
version: link:../db
|
||||
drizzle-orm:
|
||||
specifier: ^0.45.1
|
||||
version: 0.45.1(@electric-sql/pglite@0.2.17)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(better-sqlite3@12.8.0)(kysely@0.28.11)(postgres@3.4.8)
|
||||
version: 0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8)
|
||||
devDependencies:
|
||||
typescript:
|
||||
specifier: ^5.8.0
|
||||
@@ -484,15 +453,12 @@ importers:
|
||||
'@mosaic/db':
|
||||
specifier: workspace:*
|
||||
version: link:../db
|
||||
'@mosaic/storage':
|
||||
specifier: workspace:*
|
||||
version: link:../storage
|
||||
'@mosaic/types':
|
||||
specifier: workspace:*
|
||||
version: link:../types
|
||||
drizzle-orm:
|
||||
specifier: ^0.45.1
|
||||
version: 0.45.1(@electric-sql/pglite@0.2.17)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(better-sqlite3@12.8.0)(kysely@0.28.11)(postgres@3.4.8)
|
||||
version: 0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8)
|
||||
devDependencies:
|
||||
typescript:
|
||||
specifier: ^5.8.0
|
||||
@@ -522,8 +488,8 @@ importers:
|
||||
specifier: workspace:*
|
||||
version: link:../types
|
||||
commander:
|
||||
specifier: ^13.0.0
|
||||
version: 13.1.0
|
||||
specifier: ^12.1.0
|
||||
version: 12.1.0
|
||||
picocolors:
|
||||
specifier: ^1.1.1
|
||||
version: 1.1.1
|
||||
@@ -550,8 +516,8 @@ importers:
|
||||
specifier: ^0.9.0
|
||||
version: 0.9.1
|
||||
commander:
|
||||
specifier: ^13.0.0
|
||||
version: 13.1.0
|
||||
specifier: ^12.0.0
|
||||
version: 12.1.0
|
||||
js-yaml:
|
||||
specifier: ^4.1.0
|
||||
version: 4.1.1
|
||||
@@ -575,8 +541,8 @@ importers:
|
||||
packages/quality-rails:
|
||||
dependencies:
|
||||
commander:
|
||||
specifier: ^13.0.0
|
||||
version: 13.1.0
|
||||
specifier: ^12.0.0
|
||||
version: 12.1.0
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^22.0.0
|
||||
@@ -604,28 +570,6 @@ importers:
|
||||
specifier: ^2.0.0
|
||||
version: 2.1.9(@types/node@24.12.0)(jsdom@29.0.0(@noble/hashes@2.0.1))(lightningcss@1.31.1)
|
||||
|
||||
packages/storage:
|
||||
dependencies:
|
||||
'@mosaic/db':
|
||||
specifier: workspace:^
|
||||
version: link:../db
|
||||
'@mosaic/types':
|
||||
specifier: workspace:*
|
||||
version: link:../types
|
||||
better-sqlite3:
|
||||
specifier: ^12.8.0
|
||||
version: 12.8.0
|
||||
devDependencies:
|
||||
'@types/better-sqlite3':
|
||||
specifier: ^7.6.13
|
||||
version: 7.6.13
|
||||
typescript:
|
||||
specifier: ^5.8.0
|
||||
version: 5.9.3
|
||||
vitest:
|
||||
specifier: ^2.0.0
|
||||
version: 2.1.9(@types/node@24.12.0)(jsdom@29.0.0(@noble/hashes@2.0.1))(lightningcss@1.31.1)
|
||||
|
||||
packages/types:
|
||||
dependencies:
|
||||
class-transformer:
|
||||
@@ -1153,9 +1097,6 @@ packages:
|
||||
'@drizzle-team/brocli@0.10.2':
|
||||
resolution: {integrity: sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w==}
|
||||
|
||||
'@electric-sql/pglite@0.2.17':
|
||||
resolution: {integrity: sha512-qEpKRT2oUaWDH6tjRxLHjdzMqRUGYDnGZlKrnL4dJ77JVMcP2Hpo3NYnOSPKdZdeec57B6QPprCUFg0picx5Pw==}
|
||||
|
||||
'@emnapi/runtime@1.9.0':
|
||||
resolution: {integrity: sha512-QN75eB0IH2ywSpRpNddCRfQIhmJYBCJ1x5Lb3IscKAL8bMnVAKnRg8dCoXbHzVLLH7P38N2Z3mtulB7W0J0FKw==}
|
||||
|
||||
@@ -3458,9 +3399,6 @@ packages:
|
||||
'@types/aws-lambda@8.10.161':
|
||||
resolution: {integrity: sha512-rUYdp+MQwSFocxIOcSsYSF3YYYC/uUpMbCY/mbO21vGqfrEYvNSoPyKYDj6RhXXpPfS0KstW9RwG3qXh9sL7FQ==}
|
||||
|
||||
'@types/better-sqlite3@7.6.13':
|
||||
resolution: {integrity: sha512-NMv9ASNARoKksWtsq/SHakpYAYnhBrQgGD8zkLYk/jaK8jUGn08CfEdTRgYhMypUQAfzSP8W6gNLe0q19/t4VA==}
|
||||
|
||||
'@types/bunyan@1.8.11':
|
||||
resolution: {integrity: sha512-758fRH7umIMk5qt5ELmRMff4mLDlN+xyYzC+dkPTdKwbSkJFvz6xwyScrytPU0QIBbRRwbiE8/BIg8bpajerNQ==}
|
||||
|
||||
@@ -3873,22 +3811,12 @@ packages:
|
||||
zod:
|
||||
optional: true
|
||||
|
||||
better-sqlite3@12.8.0:
|
||||
resolution: {integrity: sha512-RxD2Vd96sQDjQr20kdP+F+dK/1OUNiVOl200vKBZY8u0vTwysfolF6Hq+3ZK2+h8My9YvZhHsF+RSGZW2VYrPQ==}
|
||||
engines: {node: 20.x || 22.x || 23.x || 24.x || 25.x}
|
||||
|
||||
bidi-js@1.0.3:
|
||||
resolution: {integrity: sha512-RKshQI1R3YQ+n9YJz2QQ147P66ELpa1FQEg20Dk8oW9t2KgLbpDLLp9aGZ7y8WHSshDknG0bknqGw5/tyCs5tw==}
|
||||
|
||||
bignumber.js@9.3.1:
|
||||
resolution: {integrity: sha512-Ko0uX15oIUS7wJ3Rb30Fs6SkVbLmPBAKdlm7q9+ak9bbIeFf0MwuBsQV6z7+X768/cHsfg+WlysDWJcmthjsjQ==}
|
||||
|
||||
bindings@1.5.0:
|
||||
resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==}
|
||||
|
||||
bl@4.1.0:
|
||||
resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==}
|
||||
|
||||
body-parser@2.2.2:
|
||||
resolution: {integrity: sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==}
|
||||
engines: {node: '>=18'}
|
||||
@@ -3938,9 +3866,6 @@ packages:
|
||||
buffer-from@1.1.2:
|
||||
resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==}
|
||||
|
||||
buffer@5.7.1:
|
||||
resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==}
|
||||
|
||||
bullmq@5.71.0:
|
||||
resolution: {integrity: sha512-aeNWh4drsafSKnAJeiNH/nZP/5O8ZdtdMbnOPZmpjXj7NZUP5YC901U3bIH41iZValm7d1i3c34ojv7q31m30w==}
|
||||
|
||||
@@ -4002,9 +3927,6 @@ packages:
|
||||
resolution: {integrity: sha512-TQMmc3w+5AxjpL8iIiwebF73dRDF4fBIieAqGn9RGCWaEVwQ6Fb2cGe31Yns0RRIzii5goJ1Y7xbMwo1TxMplw==}
|
||||
engines: {node: '>= 20.19.0'}
|
||||
|
||||
chownr@1.1.4:
|
||||
resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==}
|
||||
|
||||
chownr@3.0.0:
|
||||
resolution: {integrity: sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==}
|
||||
engines: {node: '>=18'}
|
||||
@@ -4082,6 +4004,10 @@ packages:
|
||||
comma-separated-tokens@2.0.3:
|
||||
resolution: {integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==}
|
||||
|
||||
commander@12.1.0:
|
||||
resolution: {integrity: sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
commander@13.1.0:
|
||||
resolution: {integrity: sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==}
|
||||
engines: {node: '>=18'}
|
||||
@@ -4184,18 +4110,10 @@ packages:
|
||||
decode-named-character-reference@1.3.0:
|
||||
resolution: {integrity: sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==}
|
||||
|
||||
decompress-response@6.0.0:
|
||||
resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==}
|
||||
engines: {node: '>=10'}
|
||||
|
||||
deep-eql@5.0.2:
|
||||
resolution: {integrity: sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==}
|
||||
engines: {node: '>=6'}
|
||||
|
||||
deep-extend@0.6.0:
|
||||
resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==}
|
||||
engines: {node: '>=4.0.0'}
|
||||
|
||||
deep-is@0.1.4:
|
||||
resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==}
|
||||
|
||||
@@ -4568,10 +4486,6 @@ packages:
|
||||
resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==}
|
||||
engines: {node: '>=16.17'}
|
||||
|
||||
expand-template@2.0.3:
|
||||
resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==}
|
||||
engines: {node: '>=6'}
|
||||
|
||||
expect-type@1.3.0:
|
||||
resolution: {integrity: sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==}
|
||||
engines: {node: '>=12.0.0'}
|
||||
@@ -4672,9 +4586,6 @@ packages:
|
||||
resolution: {integrity: sha512-cmBmnYo8Zymabm2+qAP7jTFbKF10bQpYmxoGfuZbRFRcq00BRddJdGNH/P7GA1EMpJy5yQbqa9B7yROb3z8Ziw==}
|
||||
engines: {node: '>=22'}
|
||||
|
||||
file-uri-to-path@1.0.0:
|
||||
resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==}
|
||||
|
||||
fill-range@7.1.1:
|
||||
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
|
||||
engines: {node: '>=8'}
|
||||
@@ -4730,9 +4641,6 @@ packages:
|
||||
resolution: {integrity: sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==}
|
||||
engines: {node: '>= 0.8'}
|
||||
|
||||
fs-constants@1.0.0:
|
||||
resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==}
|
||||
|
||||
fsevents@2.3.2:
|
||||
resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==}
|
||||
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
|
||||
@@ -4797,9 +4705,6 @@ packages:
|
||||
resolution: {integrity: sha512-b1O07XYq8eRuVzBNgJLstU6FYc1tS6wnMtF1I1D9lE8LxZSOGZ7LhxN54yPP6mGw5f2CkXY2BQUL9Fx41qvcIg==}
|
||||
engines: {node: '>= 14'}
|
||||
|
||||
github-from-package@0.0.0:
|
||||
resolution: {integrity: sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==}
|
||||
|
||||
glob-parent@6.0.2:
|
||||
resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==}
|
||||
engines: {node: '>=10.13.0'}
|
||||
@@ -4959,9 +4864,6 @@ packages:
|
||||
inherits@2.0.4:
|
||||
resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
|
||||
|
||||
ini@1.3.8:
|
||||
resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==}
|
||||
|
||||
ink-spinner@5.0.0:
|
||||
resolution: {integrity: sha512-EYEasbEjkqLGyPOUc8hBJZNuC5GvXGMLu0w5gdTNskPc7Izc5vO3tdQEYnzvshucyGCBXc86ig0ujXPMWaQCdA==}
|
||||
engines: {node: '>=14.16'}
|
||||
@@ -5529,10 +5431,6 @@ packages:
|
||||
resolution: {integrity: sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
mimic-response@3.1.0:
|
||||
resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==}
|
||||
engines: {node: '>=10'}
|
||||
|
||||
minimatch@10.2.4:
|
||||
resolution: {integrity: sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==}
|
||||
engines: {node: 18 || 20 || >=22}
|
||||
@@ -5544,9 +5442,6 @@ packages:
|
||||
resolution: {integrity: sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==}
|
||||
engines: {node: '>=16 || 14 >=14.17'}
|
||||
|
||||
minimist@1.2.8:
|
||||
resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==}
|
||||
|
||||
minipass@7.1.3:
|
||||
resolution: {integrity: sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==}
|
||||
engines: {node: '>=16 || 14 >=14.17'}
|
||||
@@ -5555,9 +5450,6 @@ packages:
|
||||
resolution: {integrity: sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==}
|
||||
engines: {node: '>= 18'}
|
||||
|
||||
mkdirp-classic@0.5.3:
|
||||
resolution: {integrity: sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==}
|
||||
|
||||
module-details-from-path@1.0.4:
|
||||
resolution: {integrity: sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w==}
|
||||
|
||||
@@ -5618,9 +5510,6 @@ packages:
|
||||
resolution: {integrity: sha512-EYJqS25r2iBeTtGQCHidXl1VfZ1jXM7Q04zXJOrMlxVVmD0ptxJaNux92n1mJ7c5lN3zTq12MhH/8x59nP+qmg==}
|
||||
engines: {node: ^20.0.0 || >=22.0.0}
|
||||
|
||||
napi-build-utils@2.0.0:
|
||||
resolution: {integrity: sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA==}
|
||||
|
||||
natural-compare@1.4.0:
|
||||
resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==}
|
||||
|
||||
@@ -5657,10 +5546,6 @@ packages:
|
||||
sass:
|
||||
optional: true
|
||||
|
||||
node-abi@3.89.0:
|
||||
resolution: {integrity: sha512-6u9UwL0HlAl21+agMN3YAMXcKByMqwGx+pq+P76vii5f7hTPtKDp08/H9py6DY+cfDw7kQNTGEj/rly3IgbNQA==}
|
||||
engines: {node: '>=10'}
|
||||
|
||||
node-abort-controller@3.1.1:
|
||||
resolution: {integrity: sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ==}
|
||||
|
||||
@@ -5984,12 +5869,6 @@ packages:
|
||||
resolution: {integrity: sha512-d+JFcLM17njZaOLkv6SCev7uoLaBtfK86vMUXhW1Z4glPWh4jozno9APvW/XKFJ3CCxVoC7OL38BqRydtu5nGg==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
prebuild-install@7.1.3:
|
||||
resolution: {integrity: sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug==}
|
||||
engines: {node: '>=10'}
|
||||
deprecated: No longer maintained. Please contact the author of the relevant native addon; alternatives are available.
|
||||
hasBin: true
|
||||
|
||||
prelude-ls@1.2.1:
|
||||
resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==}
|
||||
engines: {node: '>= 0.8.0'}
|
||||
@@ -6063,10 +5942,6 @@ packages:
|
||||
resolution: {integrity: sha512-K5zQjDllxWkf7Z5xJdV0/B0WTNqx6vxG70zJE4N0kBs4LovmEYWJzQGxC9bS9RAKu3bgM40lrd5zoLJ12MQ5BA==}
|
||||
engines: {node: '>= 0.10'}
|
||||
|
||||
rc@1.2.8:
|
||||
resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==}
|
||||
hasBin: true
|
||||
|
||||
react-dom@19.2.4:
|
||||
resolution: {integrity: sha512-AXJdLo8kgMbimY95O2aKQqsz2iWi9jMgKJhRBAxECE4IFxfcazB2LmzloIoibJI3C12IlY20+KFaLv+71bUJeQ==}
|
||||
peerDependencies:
|
||||
@@ -6095,10 +5970,6 @@ packages:
|
||||
readable-stream@2.3.8:
|
||||
resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==}
|
||||
|
||||
readable-stream@3.6.2:
|
||||
resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==}
|
||||
engines: {node: '>= 6'}
|
||||
|
||||
readdirp@5.0.0:
|
||||
resolution: {integrity: sha512-9u/XQ1pvrQtYyMpZe7DXKv2p5CNvyVwzUB6uhLAnQwHMSgKMBR62lc7AHljaeteeHXn11XTAaLLUVZYVZyuRBQ==}
|
||||
engines: {node: '>= 20.19.0'}
|
||||
@@ -6293,12 +6164,6 @@ packages:
|
||||
resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==}
|
||||
engines: {node: '>=14'}
|
||||
|
||||
simple-concat@1.0.1:
|
||||
resolution: {integrity: sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==}
|
||||
|
||||
simple-get@4.0.1:
|
||||
resolution: {integrity: sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==}
|
||||
|
||||
sisteransi@1.0.5:
|
||||
resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==}
|
||||
|
||||
@@ -6440,10 +6305,6 @@ packages:
|
||||
resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
strip-json-comments@2.0.1:
|
||||
resolution: {integrity: sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
|
||||
strip-json-comments@3.1.1:
|
||||
resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==}
|
||||
engines: {node: '>=8'}
|
||||
@@ -6495,13 +6356,6 @@ packages:
|
||||
resolution: {integrity: sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==}
|
||||
engines: {node: '>=6'}
|
||||
|
||||
tar-fs@2.1.4:
|
||||
resolution: {integrity: sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==}
|
||||
|
||||
tar-stream@2.2.0:
|
||||
resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==}
|
||||
engines: {node: '>=6'}
|
||||
|
||||
tar@7.5.13:
|
||||
resolution: {integrity: sha512-tOG/7GyXpFevhXVh8jOPJrmtRpOTsYqUIkVdVooZYJS/z8WhfQUX8RJILmeuJNinGAMSu1veBr4asSHFt5/hng==}
|
||||
engines: {node: '>=18'}
|
||||
@@ -6616,9 +6470,6 @@ packages:
|
||||
engines: {node: '>=18.0.0'}
|
||||
hasBin: true
|
||||
|
||||
tunnel-agent@0.6.0:
|
||||
resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==}
|
||||
|
||||
turbo-darwin-64@2.8.16:
|
||||
resolution: {integrity: sha512-KWa4hUMWrpADC6Q/wIHRkBLw6X6MV9nx6X7hSXbTrrMz0KdaKhmfudUZ3sS76bJFmgArBU25cSc0AUyyrswYxg==}
|
||||
cpu: [x64]
|
||||
@@ -7767,12 +7618,12 @@ snapshots:
|
||||
nanostores: 1.1.1
|
||||
zod: 4.3.6
|
||||
|
||||
'@better-auth/drizzle-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(better-call@1.3.2(zod@4.3.6))(jose@6.2.1)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(drizzle-orm@0.45.1(@electric-sql/pglite@0.2.17)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(better-sqlite3@12.8.0)(kysely@0.28.11)(postgres@3.4.8))':
|
||||
'@better-auth/drizzle-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(better-call@1.3.2(zod@4.3.6))(jose@6.2.1)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(drizzle-orm@0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8))':
|
||||
dependencies:
|
||||
'@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(better-call@1.3.2(zod@4.3.6))(jose@6.2.1)(kysely@0.28.11)(nanostores@1.1.1)
|
||||
'@better-auth/utils': 0.3.1
|
||||
optionalDependencies:
|
||||
drizzle-orm: 0.45.1(@electric-sql/pglite@0.2.17)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(better-sqlite3@12.8.0)(kysely@0.28.11)(postgres@3.4.8)
|
||||
drizzle-orm: 0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8)
|
||||
|
||||
'@better-auth/kysely-adapter@1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(better-call@1.3.2(zod@4.3.6))(jose@6.2.1)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(kysely@0.28.11)':
|
||||
dependencies:
|
||||
@@ -7907,8 +7758,6 @@ snapshots:
|
||||
|
||||
'@drizzle-team/brocli@0.10.2': {}
|
||||
|
||||
'@electric-sql/pglite@0.2.17': {}
|
||||
|
||||
'@emnapi/runtime@1.9.0':
|
||||
dependencies:
|
||||
tslib: 2.8.1
|
||||
@@ -10325,10 +10174,6 @@ snapshots:
|
||||
|
||||
'@types/aws-lambda@8.10.161': {}
|
||||
|
||||
'@types/better-sqlite3@7.6.13':
|
||||
dependencies:
|
||||
'@types/node': 22.19.15
|
||||
|
||||
'@types/bunyan@1.8.11':
|
||||
dependencies:
|
||||
'@types/node': 22.19.15
|
||||
@@ -10715,10 +10560,10 @@ snapshots:
|
||||
|
||||
basic-ftp@5.2.0: {}
|
||||
|
||||
better-auth@1.5.5(better-sqlite3@12.8.0)(drizzle-kit@0.31.9)(drizzle-orm@0.45.1(@electric-sql/pglite@0.2.17)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(better-sqlite3@12.8.0)(kysely@0.28.11)(postgres@3.4.8))(mongodb@7.1.0(socks@2.8.7))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@2.1.9(@types/node@22.19.15)(jsdom@29.0.0(@noble/hashes@2.0.1))(lightningcss@1.31.1)):
|
||||
better-auth@1.5.5(drizzle-kit@0.31.9)(drizzle-orm@0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8))(mongodb@7.1.0(socks@2.8.7))(next@16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(vitest@2.1.9(@types/node@22.19.15)(jsdom@29.0.0(@noble/hashes@2.0.1))(lightningcss@1.31.1)):
|
||||
dependencies:
|
||||
'@better-auth/core': 1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(better-call@1.3.2(zod@4.3.6))(jose@6.2.1)(kysely@0.28.11)(nanostores@1.1.1)
|
||||
'@better-auth/drizzle-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(better-call@1.3.2(zod@4.3.6))(jose@6.2.1)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(drizzle-orm@0.45.1(@electric-sql/pglite@0.2.17)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(better-sqlite3@12.8.0)(kysely@0.28.11)(postgres@3.4.8))
|
||||
'@better-auth/drizzle-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(better-call@1.3.2(zod@4.3.6))(jose@6.2.1)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(drizzle-orm@0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8))
|
||||
'@better-auth/kysely-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(better-call@1.3.2(zod@4.3.6))(jose@6.2.1)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(kysely@0.28.11)
|
||||
'@better-auth/memory-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(better-call@1.3.2(zod@4.3.6))(jose@6.2.1)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)
|
||||
'@better-auth/mongo-adapter': 1.5.5(@better-auth/core@1.5.5(@better-auth/utils@0.3.1)(@better-fetch/fetch@1.1.21)(better-call@1.3.2(zod@4.3.6))(jose@6.2.1)(kysely@0.28.11)(nanostores@1.1.1))(@better-auth/utils@0.3.1)(mongodb@7.1.0(socks@2.8.7))
|
||||
@@ -10735,9 +10580,8 @@ snapshots:
|
||||
nanostores: 1.1.1
|
||||
zod: 4.3.6
|
||||
optionalDependencies:
|
||||
better-sqlite3: 12.8.0
|
||||
drizzle-kit: 0.31.9
|
||||
drizzle-orm: 0.45.1(@electric-sql/pglite@0.2.17)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(better-sqlite3@12.8.0)(kysely@0.28.11)(postgres@3.4.8)
|
||||
drizzle-orm: 0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8)
|
||||
mongodb: 7.1.0(socks@2.8.7)
|
||||
next: 16.1.6(@opentelemetry/api@1.9.0)(@playwright/test@1.58.2)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
|
||||
react: 19.2.4
|
||||
@@ -10755,27 +10599,12 @@ snapshots:
|
||||
optionalDependencies:
|
||||
zod: 4.3.6
|
||||
|
||||
better-sqlite3@12.8.0:
|
||||
dependencies:
|
||||
bindings: 1.5.0
|
||||
prebuild-install: 7.1.3
|
||||
|
||||
bidi-js@1.0.3:
|
||||
dependencies:
|
||||
require-from-string: 2.0.2
|
||||
|
||||
bignumber.js@9.3.1: {}
|
||||
|
||||
bindings@1.5.0:
|
||||
dependencies:
|
||||
file-uri-to-path: 1.0.0
|
||||
|
||||
bl@4.1.0:
|
||||
dependencies:
|
||||
buffer: 5.7.1
|
||||
inherits: 2.0.4
|
||||
readable-stream: 3.6.2
|
||||
|
||||
body-parser@2.2.2:
|
||||
dependencies:
|
||||
bytes: 3.1.2
|
||||
@@ -10832,11 +10661,6 @@ snapshots:
|
||||
|
||||
buffer-from@1.1.2: {}
|
||||
|
||||
buffer@5.7.1:
|
||||
dependencies:
|
||||
base64-js: 1.5.1
|
||||
ieee754: 1.2.1
|
||||
|
||||
bullmq@5.71.0:
|
||||
dependencies:
|
||||
cron-parser: 4.9.0
|
||||
@@ -10898,8 +10722,6 @@ snapshots:
|
||||
dependencies:
|
||||
readdirp: 5.0.0
|
||||
|
||||
chownr@1.1.4: {}
|
||||
|
||||
chownr@3.0.0: {}
|
||||
|
||||
cjs-module-lexer@2.2.0: {}
|
||||
@@ -10975,6 +10797,8 @@ snapshots:
|
||||
|
||||
comma-separated-tokens@2.0.3: {}
|
||||
|
||||
commander@12.1.0: {}
|
||||
|
||||
commander@13.1.0: {}
|
||||
|
||||
commander@14.0.3: {}
|
||||
@@ -11054,14 +10878,8 @@ snapshots:
|
||||
dependencies:
|
||||
character-entities: 2.0.2
|
||||
|
||||
decompress-response@6.0.0:
|
||||
dependencies:
|
||||
mimic-response: 3.1.0
|
||||
|
||||
deep-eql@5.0.2: {}
|
||||
|
||||
deep-extend@0.6.0: {}
|
||||
|
||||
deep-is@0.1.4: {}
|
||||
|
||||
defu@6.1.4: {}
|
||||
@@ -11142,13 +10960,10 @@ snapshots:
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
drizzle-orm@0.45.1(@electric-sql/pglite@0.2.17)(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(better-sqlite3@12.8.0)(kysely@0.28.11)(postgres@3.4.8):
|
||||
drizzle-orm@0.45.1(@opentelemetry/api@1.9.0)(@types/pg@8.15.6)(kysely@0.28.11)(postgres@3.4.8):
|
||||
optionalDependencies:
|
||||
'@electric-sql/pglite': 0.2.17
|
||||
'@opentelemetry/api': 1.9.0
|
||||
'@types/better-sqlite3': 7.6.13
|
||||
'@types/pg': 8.15.6
|
||||
better-sqlite3: 12.8.0
|
||||
kysely: 0.28.11
|
||||
postgres: 3.4.8
|
||||
|
||||
@@ -11478,8 +11293,6 @@ snapshots:
|
||||
signal-exit: 4.1.0
|
||||
strip-final-newline: 3.0.0
|
||||
|
||||
expand-template@2.0.3: {}
|
||||
|
||||
expect-type@1.3.0: {}
|
||||
|
||||
express-rate-limit@8.3.1(express@5.2.1):
|
||||
@@ -11653,8 +11466,6 @@ snapshots:
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
file-uri-to-path@1.0.0: {}
|
||||
|
||||
fill-range@7.1.1:
|
||||
dependencies:
|
||||
to-regex-range: 5.0.1
|
||||
@@ -11715,8 +11526,6 @@ snapshots:
|
||||
|
||||
fresh@2.0.0: {}
|
||||
|
||||
fs-constants@1.0.0: {}
|
||||
|
||||
fsevents@2.3.2:
|
||||
optional: true
|
||||
|
||||
@@ -11810,8 +11619,6 @@ snapshots:
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
github-from-package@0.0.0: {}
|
||||
|
||||
glob-parent@6.0.2:
|
||||
dependencies:
|
||||
is-glob: 4.0.3
|
||||
@@ -11996,8 +11803,6 @@ snapshots:
|
||||
|
||||
inherits@2.0.4: {}
|
||||
|
||||
ini@1.3.8: {}
|
||||
|
||||
ink-spinner@5.0.0(ink@5.2.1(@types/react@18.3.28)(react@18.3.1))(react@18.3.1):
|
||||
dependencies:
|
||||
cli-spinners: 2.9.2
|
||||
@@ -12706,8 +12511,6 @@ snapshots:
|
||||
|
||||
mimic-function@5.0.1: {}
|
||||
|
||||
mimic-response@3.1.0: {}
|
||||
|
||||
minimatch@10.2.4:
|
||||
dependencies:
|
||||
brace-expansion: 5.0.4
|
||||
@@ -12720,16 +12523,12 @@ snapshots:
|
||||
dependencies:
|
||||
brace-expansion: 2.0.2
|
||||
|
||||
minimist@1.2.8: {}
|
||||
|
||||
minipass@7.1.3: {}
|
||||
|
||||
minizlib@3.1.0:
|
||||
dependencies:
|
||||
minipass: 7.1.3
|
||||
|
||||
mkdirp-classic@0.5.3: {}
|
||||
|
||||
module-details-from-path@1.0.4: {}
|
||||
|
||||
mongodb-connection-string-url@7.0.1:
|
||||
@@ -12775,8 +12574,6 @@ snapshots:
|
||||
|
||||
nanostores@1.1.1: {}
|
||||
|
||||
napi-build-utils@2.0.0: {}
|
||||
|
||||
natural-compare@1.4.0: {}
|
||||
|
||||
negotiator@0.6.3: {}
|
||||
@@ -12811,10 +12608,6 @@ snapshots:
|
||||
- '@babel/core'
|
||||
- babel-plugin-macros
|
||||
|
||||
node-abi@3.89.0:
|
||||
dependencies:
|
||||
semver: 7.7.4
|
||||
|
||||
node-abort-controller@3.1.1: {}
|
||||
|
||||
node-cron@4.2.1: {}
|
||||
@@ -13165,21 +12958,6 @@ snapshots:
|
||||
|
||||
postgres@3.4.8: {}
|
||||
|
||||
prebuild-install@7.1.3:
|
||||
dependencies:
|
||||
detect-libc: 2.1.2
|
||||
expand-template: 2.0.3
|
||||
github-from-package: 0.0.0
|
||||
minimist: 1.2.8
|
||||
mkdirp-classic: 0.5.3
|
||||
napi-build-utils: 2.0.0
|
||||
node-abi: 3.89.0
|
||||
pump: 3.0.4
|
||||
rc: 1.2.8
|
||||
simple-get: 4.0.1
|
||||
tar-fs: 2.1.4
|
||||
tunnel-agent: 0.6.0
|
||||
|
||||
prelude-ls@1.2.1: {}
|
||||
|
||||
prettier@3.8.1: {}
|
||||
@@ -13262,13 +13040,6 @@ snapshots:
|
||||
iconv-lite: 0.7.2
|
||||
unpipe: 1.0.0
|
||||
|
||||
rc@1.2.8:
|
||||
dependencies:
|
||||
deep-extend: 0.6.0
|
||||
ini: 1.3.8
|
||||
minimist: 1.2.8
|
||||
strip-json-comments: 2.0.1
|
||||
|
||||
react-dom@19.2.4(react@19.2.4):
|
||||
dependencies:
|
||||
react: 19.2.4
|
||||
@@ -13314,12 +13085,6 @@ snapshots:
|
||||
string_decoder: 1.1.1
|
||||
util-deprecate: 1.0.2
|
||||
|
||||
readable-stream@3.6.2:
|
||||
dependencies:
|
||||
inherits: 2.0.4
|
||||
string_decoder: 1.1.1
|
||||
util-deprecate: 1.0.2
|
||||
|
||||
readdirp@5.0.0: {}
|
||||
|
||||
real-require@0.2.0: {}
|
||||
@@ -13573,14 +13338,6 @@ snapshots:
|
||||
|
||||
signal-exit@4.1.0: {}
|
||||
|
||||
simple-concat@1.0.1: {}
|
||||
|
||||
simple-get@4.0.1:
|
||||
dependencies:
|
||||
decompress-response: 6.0.0
|
||||
once: 1.4.0
|
||||
simple-concat: 1.0.1
|
||||
|
||||
sisteransi@1.0.5: {}
|
||||
|
||||
slice-ansi@5.0.0:
|
||||
@@ -13744,8 +13501,6 @@ snapshots:
|
||||
|
||||
strip-final-newline@3.0.0: {}
|
||||
|
||||
strip-json-comments@2.0.1: {}
|
||||
|
||||
strip-json-comments@3.1.1: {}
|
||||
|
||||
strnum@2.2.0: {}
|
||||
@@ -13783,21 +13538,6 @@ snapshots:
|
||||
|
||||
tapable@2.3.0: {}
|
||||
|
||||
tar-fs@2.1.4:
|
||||
dependencies:
|
||||
chownr: 1.1.4
|
||||
mkdirp-classic: 0.5.3
|
||||
pump: 3.0.4
|
||||
tar-stream: 2.2.0
|
||||
|
||||
tar-stream@2.2.0:
|
||||
dependencies:
|
||||
bl: 4.1.0
|
||||
end-of-stream: 1.4.5
|
||||
fs-constants: 1.0.0
|
||||
inherits: 2.0.4
|
||||
readable-stream: 3.6.2
|
||||
|
||||
tar@7.5.13:
|
||||
dependencies:
|
||||
'@isaacs/fs-minipass': 4.0.1
|
||||
@@ -13910,10 +13650,6 @@ snapshots:
|
||||
optionalDependencies:
|
||||
fsevents: 2.3.3
|
||||
|
||||
tunnel-agent@0.6.0:
|
||||
dependencies:
|
||||
safe-buffer: 5.2.1
|
||||
|
||||
turbo-darwin-64@2.8.16:
|
||||
optional: true
|
||||
|
||||
|
||||
@@ -1,165 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# Publish @mosaic/* packages to npmjs.org as @mosaicstack/*
|
||||
#
|
||||
# This script patches each package.json to:
|
||||
# 1. Rename @mosaic/X → @mosaicstack/X
|
||||
# 2. Replace workspace:^ deps with resolved versions using @mosaicstack/* names
|
||||
# 3. Run npm publish
|
||||
# 4. Restore original package.json
|
||||
#
|
||||
# Usage:
|
||||
# scripts/publish-npmjs.sh [--dry-run] [--filter <package-name>]
|
||||
#
|
||||
# Requirements:
|
||||
# - NPM_TOKEN env var set (npmjs.org auth token)
|
||||
# - jq installed
|
||||
# - Run from monorepo root after `pnpm build`
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
DRY_RUN=false
|
||||
FILTER=""
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--dry-run) DRY_RUN=true; shift ;;
|
||||
--filter) FILTER="$2"; shift 2 ;;
|
||||
*) echo "Unknown option: $1"; exit 1 ;;
|
||||
esac
|
||||
done
|
||||
|
||||
REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
|
||||
|
||||
# Collect all publishable package directories (non-private, has publishConfig)
|
||||
PACKAGE_DIRS=()
|
||||
for pkg_json in "$REPO_ROOT"/packages/*/package.json "$REPO_ROOT"/plugins/*/package.json "$REPO_ROOT"/apps/gateway/package.json; do
|
||||
[[ -f "$pkg_json" ]] || continue
|
||||
is_private=$(jq -r '.private // false' "$pkg_json")
|
||||
[[ "$is_private" == "true" ]] && continue
|
||||
PACKAGE_DIRS+=("$(dirname "$pkg_json")")
|
||||
done
|
||||
|
||||
echo "Found ${#PACKAGE_DIRS[@]} publishable packages"
|
||||
|
||||
# Build a version map: @mosaic/X → version
|
||||
declare -A VERSION_MAP
|
||||
for dir in "${PACKAGE_DIRS[@]}"; do
|
||||
name=$(jq -r '.name' "$dir/package.json")
|
||||
version=$(jq -r '.version' "$dir/package.json")
|
||||
VERSION_MAP["$name"]="$version"
|
||||
done
|
||||
|
||||
# Configure npmjs auth
|
||||
if [[ -z "${NPM_TOKEN:-}" ]] && [[ "$DRY_RUN" == "false" ]]; then
|
||||
echo "ERROR: NPM_TOKEN is required for publishing. Set it or use --dry-run."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
publish_package() {
|
||||
local dir="$1"
|
||||
local orig_json="$dir/package.json"
|
||||
local backup="$dir/package.json.bak"
|
||||
|
||||
local name
|
||||
name=$(jq -r '.name' "$orig_json")
|
||||
|
||||
# Apply filter if set
|
||||
if [[ -n "$FILTER" ]] && [[ "$name" != "$FILTER" ]]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
local new_name="${name/@mosaic\//@mosaicstack/}"
|
||||
echo ""
|
||||
echo "━━━ Publishing $name → $new_name ━━━"
|
||||
|
||||
# Backup original
|
||||
cp "$orig_json" "$backup"
|
||||
|
||||
# Patch: rename package
|
||||
local patched
|
||||
patched=$(jq --arg new_name "$new_name" '.name = $new_name' "$orig_json")
|
||||
|
||||
# Patch: publishConfig to npmjs
|
||||
patched=$(echo "$patched" | jq '.publishConfig = {"registry": "https://registry.npmjs.org/", "access": "public"}')
|
||||
|
||||
# Patch: replace workspace:^ dependencies with @mosaicstack/* and resolved versions
|
||||
for dep_field in dependencies devDependencies peerDependencies; do
|
||||
if echo "$patched" | jq -e ".$dep_field" > /dev/null 2>&1; then
|
||||
local deps
|
||||
deps=$(echo "$patched" | jq -r ".$dep_field // {} | keys[]")
|
||||
for dep in $deps; do
|
||||
local dep_version
|
||||
dep_version=$(echo "$patched" | jq -r ".$dep_field[\"$dep\"]")
|
||||
|
||||
# Only transform @mosaic/* workspace deps
|
||||
if [[ "$dep" == @mosaic/* ]] && [[ "$dep_version" == workspace:* ]]; then
|
||||
local new_dep="${dep/@mosaic\//@mosaicstack/}"
|
||||
local resolved="${VERSION_MAP[$dep]:-}"
|
||||
|
||||
if [[ -z "$resolved" ]]; then
|
||||
echo " WARNING: No version found for $dep — using '*'"
|
||||
resolved="*"
|
||||
else
|
||||
# workspace:^ means ^version, workspace:* means *
|
||||
if [[ "$dep_version" == "workspace:^" ]]; then
|
||||
resolved="^$resolved"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Rename the dep key and set the resolved version
|
||||
patched=$(echo "$patched" | jq \
|
||||
--arg field "$dep_field" \
|
||||
--arg old_dep "$dep" \
|
||||
--arg new_dep "$new_dep" \
|
||||
--arg version "$resolved" \
|
||||
'.[$field] |= (del(.[$old_dep]) | .[$new_dep] = $version)')
|
||||
fi
|
||||
done
|
||||
fi
|
||||
done
|
||||
|
||||
# Write patched package.json
|
||||
echo "$patched" > "$orig_json"
|
||||
|
||||
echo " Patched: $new_name"
|
||||
|
||||
# Publish
|
||||
if [[ "$DRY_RUN" == "true" ]]; then
|
||||
echo " [DRY RUN] npm publish --dry-run"
|
||||
(cd "$dir" && npm publish --dry-run 2>&1) || true
|
||||
else
|
||||
echo " Publishing to npmjs..."
|
||||
(cd "$dir" && npm publish 2>&1) || echo " WARNING: Publish failed (may already exist at this version)"
|
||||
fi
|
||||
|
||||
# Restore original
|
||||
mv "$backup" "$orig_json"
|
||||
echo " Restored original package.json"
|
||||
}
|
||||
|
||||
# Set up npmrc for npmjs
|
||||
if [[ -n "${NPM_TOKEN:-}" ]]; then
|
||||
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" > ~/.npmrc
|
||||
fi
|
||||
|
||||
# Publish in dependency order: packages first, then plugins, then apps
|
||||
echo ""
|
||||
echo "=== Publishing packages ==="
|
||||
for dir in "$REPO_ROOT"/packages/*/; do
|
||||
[[ -f "$dir/package.json" ]] || continue
|
||||
publish_package "$dir"
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo "=== Publishing plugins ==="
|
||||
for dir in "$REPO_ROOT"/plugins/*/; do
|
||||
[[ -f "$dir/package.json" ]] || continue
|
||||
publish_package "$dir"
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo "=== Publishing apps ==="
|
||||
publish_package "$REPO_ROOT/apps/gateway"
|
||||
|
||||
echo ""
|
||||
echo "Done."
|
||||
Reference in New Issue
Block a user