refactor(storage): replace better-sqlite3 with PGlite adapter

Eliminates the `prebuild-install` deprecation warning that appears during
`mosaic gateway install` by removing `better-sqlite3` from the dependency
graph entirely and replacing the SqliteAdapter with a PGlite-based adapter
that mirrors the same interface and test coverage.

- New `PgliteAdapter` in `packages/storage/src/adapters/pglite.ts` using
  `@electric-sql/pglite` with JSONB columns and pg-style `$N` placeholders
- Ported all 18 adapter tests to `pglite.test.ts` (in-memory PGlite instances)
- Deleted `sqlite.ts` and `sqlite.test.ts`
- Updated `StorageConfig` discriminated union: `sqlite` → `pglite` with optional
  `dataDir` (absent = in-memory)
- `packages/storage/index.ts` re-exports `PgliteAdapter` and registers it
- CLI install wizard writes `{ type: 'pglite', dataDir: '…/storage-pglite' }`
- Removed `better-sqlite3` and `@types/better-sqlite3` from storage deps;
  added `@electric-sql/pglite@^0.2.17`
- Removed `pnpm.onlyBuiltDependencies: ["better-sqlite3"]` from root package.json
- Bump `@mosaic/storage` 0.0.2 → 0.0.3
- Bump `@mosaic/gateway` 0.0.3 → 0.1.0 (minor — storage dialect change)

Breaking: local-tier `data.db` SQLite files are not migrated. Clean break.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Jarvis
2026-04-04 16:44:48 -05:00
parent 30c0fb1308
commit 72e365438c
10 changed files with 349 additions and 323 deletions

View File

@@ -1,6 +1,6 @@
{ {
"name": "@mosaic/gateway", "name": "@mosaic/gateway",
"version": "0.0.3", "version": "0.1.0",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git", "url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",

View File

@@ -23,10 +23,5 @@
"turbo": "^2.0.0", "turbo": "^2.0.0",
"typescript": "^5.8.0", "typescript": "^5.8.0",
"vitest": "^2.0.0" "vitest": "^2.0.0"
},
"pnpm": {
"onlyBuiltDependencies": [
"better-sqlite3"
]
} }
} }

View File

@@ -121,7 +121,7 @@ async function doInstall(rl: ReturnType<typeof createInterface>, opts: InstallOp
tier === 'local' tier === 'local'
? { ? {
tier: 'local', tier: 'local',
storage: { type: 'sqlite', path: join(GATEWAY_HOME, 'data.db') }, storage: { type: 'pglite', dataDir: join(GATEWAY_HOME, 'storage-pglite') },
queue: { type: 'local', dataDir: join(GATEWAY_HOME, 'queue') }, queue: { type: 'local', dataDir: join(GATEWAY_HOME, 'queue') },
memory: { type: 'keyword' }, memory: { type: 'keyword' },
} }

View File

@@ -1,6 +1,6 @@
{ {
"name": "@mosaic/storage", "name": "@mosaic/storage",
"version": "0.0.2", "version": "0.0.3",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git", "url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
@@ -21,12 +21,11 @@
"test": "vitest run --passWithNoTests" "test": "vitest run --passWithNoTests"
}, },
"dependencies": { "dependencies": {
"@electric-sql/pglite": "^0.2.17",
"@mosaic/db": "workspace:^", "@mosaic/db": "workspace:^",
"@mosaic/types": "workspace:*", "@mosaic/types": "workspace:*"
"better-sqlite3": "^12.8.0"
}, },
"devDependencies": { "devDependencies": {
"@types/better-sqlite3": "^7.6.13",
"typescript": "^5.8.0", "typescript": "^5.8.0",
"vitest": "^2.0.0" "vitest": "^2.0.0"
}, },

View File

@@ -1,11 +1,12 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest'; import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { SqliteAdapter } from './sqlite.js'; import { PgliteAdapter } from './pglite.js';
describe('SqliteAdapter', () => { describe('PgliteAdapter', () => {
let adapter: SqliteAdapter; let adapter: PgliteAdapter;
beforeEach(async () => { beforeEach(async () => {
adapter = new SqliteAdapter({ type: 'sqlite', path: ':memory:' }); // In-memory PGlite instance — no dataDir = memory mode
adapter = new PgliteAdapter({ type: 'pglite' });
await adapter.migrate(); await adapter.migrate();
}); });
@@ -80,7 +81,7 @@ describe('SqliteAdapter', () => {
it('supports limit and offset', async () => { it('supports limit and offset', async () => {
for (let i = 0; i < 5; i++) { for (let i = 0; i < 5; i++) {
await adapter.create('users', { name: `User${i}`, idx: i }); await adapter.create('users', { name: `User${i.toString()}`, idx: i });
} }
const page = await adapter.find('users', undefined, { const page = await adapter.find('users', undefined, {

View File

@@ -0,0 +1,290 @@
import { PGlite } from '@electric-sql/pglite';
import { randomUUID } from 'node:crypto';
import type { StorageAdapter, StorageConfig } from '../types.js';
/* eslint-disable @typescript-eslint/no-explicit-any */
// Logical collections managed by the adapter. `migrate()` creates one table
// per entry. These names are interpolated directly into SQL, so they must
// remain trusted, code-supplied identifiers — never user input.
const COLLECTIONS = [
'users',
'sessions',
'accounts',
'projects',
'missions',
'tasks',
'agents',
'conversations',
'messages',
'preferences',
'insights',
'skills',
'events',
'routing_rules',
'provider_credentials',
'agent_logs',
'teams',
'team_members',
'mission_tasks',
'tickets',
'summarization_jobs',
'appreciations',
'verifications',
] as const;
/**
 * Builds a parameterized WHERE clause from a simple equality filter.
 *
 * `id` matches the primary-key column; every other key is compared against
 * the JSONB `data` column via the `->>` (extract-as-text) operator. Both the
 * JSON key AND the value are bound as `$N` parameters, so untrusted filter
 * keys cannot inject SQL (the previous version interpolated the key into the
 * statement text).
 *
 * @param filter - map of field name → expected value; object values are
 *   compared by their JSON serialization, matching how they are stored
 * @returns the clause (including a leading ` WHERE `, or `''` when the
 *   filter is empty) and the ordered parameter list
 */
function buildFilterClause(filter?: Record<string, unknown>): {
  clause: string;
  params: unknown[];
} {
  if (!filter || Object.keys(filter).length === 0) return { clause: '', params: [] };
  const conditions: string[] = [];
  const params: unknown[] = [];
  for (const [key, value] of Object.entries(filter)) {
    if (key === 'id') {
      params.push(value);
      conditions.push(`id = $${params.length.toString()}`);
    } else {
      // data->>$N extracts the field named by the bound parameter as text.
      params.push(key);
      const keyIdx = params.length;
      params.push(typeof value === 'object' ? JSON.stringify(value) : value);
      conditions.push(`data->>$${keyIdx.toString()} = $${params.length.toString()}`);
    }
  }
  return { clause: ` WHERE ${conditions.join(' AND ')}`, params };
}
type PgClient = PGlite | { query: PGlite['query'] };
/**
 * Inserts a document into `collection`, storing all non-id fields as JSONB.
 * A caller-supplied `id` is honored; otherwise a random UUID is generated.
 *
 * Uses rest-destructuring to split the id from the payload instead of the
 * previous `(data as any).id` cast + manual `Object.entries` filtering,
 * removing the `any` escape hatch while preserving behavior.
 *
 * @returns the input document with its (possibly generated) id merged in
 */
async function pgCreate<T extends Record<string, unknown>>(
  pg: PgClient,
  collection: string,
  data: T,
): Promise<T & { id: string }> {
  // The id lives in its own column, so it is stripped from the JSONB payload.
  const { id: givenId, ...rest } = data;
  const id = (givenId ?? randomUUID()) as string;
  await pg.query(`INSERT INTO ${collection} (id, data) VALUES ($1, $2::jsonb)`, [
    id,
    JSON.stringify(rest),
  ]);
  return { ...data, id } as T & { id: string };
}
/**
 * Loads one document by primary key.
 *
 * @returns the document with its id merged in, or null when no row matches
 */
async function pgRead<T extends Record<string, unknown>>(
  pg: PgClient,
  collection: string,
  id: string,
): Promise<T | null> {
  const lookup = await pg.query<{ id: string; data: Record<string, unknown> }>(
    `SELECT id, data FROM ${collection} WHERE id = $1`,
    [id],
  );
  const [hit] = lookup.rows;
  if (!hit) return null;
  // Rehydrate: id column first, then the stored JSONB fields.
  return { id: hit.id, ...(hit.data as object) } as unknown as T;
}
/**
 * Shallow-merges `data` into the stored JSONB document for `id`.
 *
 * Uses the JSONB concatenation operator (`||`), which overwrites matching
 * top-level keys — the same shallow-merge semantics as the previous
 * SELECT-then-UPDATE approach, but in a single atomic statement: no lost
 * updates between concurrent writers and one round-trip instead of two.
 *
 * @returns true when a row was updated, false when `id` does not exist
 */
async function pgUpdate(
  pg: PgClient,
  collection: string,
  id: string,
  data: Record<string, unknown>,
): Promise<boolean> {
  const result = await pg.query(
    `UPDATE ${collection} SET data = data || $1::jsonb, updated_at = now() WHERE id = $2`,
    [JSON.stringify(data), id],
  );
  // affectedRows doubles as the existence check the old pre-SELECT provided.
  return (result.affectedRows ?? 0) > 0;
}
/**
 * Removes the document with the given id.
 *
 * @returns true when a row was actually deleted
 */
async function pgDelete(pg: PgClient, collection: string, id: string): Promise<boolean> {
  const outcome = await pg.query(`DELETE FROM ${collection} WHERE id = $1`, [id]);
  const removed = outcome.affectedRows ?? 0;
  return removed > 0;
}
/**
 * Queries `collection` with an optional equality filter, ordering and paging.
 *
 * `orderBy` may be `id`, `created_at`, `updated_at` (real columns) or any
 * JSON field name. Because ORDER BY targets cannot be bound as parameters,
 * the field name is validated as a plain identifier before being spliced
 * into the SQL — previously it was interpolated raw, an injection vector.
 *
 * @throws Error when `opts.orderBy` is not a plain identifier
 * @returns matching documents with their id merged in
 */
async function pgFind<T extends Record<string, unknown>>(
  pg: PgClient,
  collection: string,
  filter?: Record<string, unknown>,
  opts?: { limit?: number; offset?: number; orderBy?: string; order?: 'asc' | 'desc' },
): Promise<T[]> {
  const { clause, params } = buildFilterClause(filter);
  let query = `SELECT id, data FROM ${collection}${clause}`;
  if (opts?.orderBy) {
    if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(opts.orderBy)) {
      throw new Error(`invalid orderBy field: ${opts.orderBy}`);
    }
    const dir = opts.order === 'desc' ? 'DESC' : 'ASC';
    // Real columns sort directly; any other name sorts on the JSON field.
    const col =
      opts.orderBy === 'id' || opts.orderBy === 'created_at' || opts.orderBy === 'updated_at'
        ? opts.orderBy
        : `data->>'${opts.orderBy}'`;
    query += ` ORDER BY ${col} ${dir}`;
  }
  // Explicit undefined checks so limit/offset of 0 are respected.
  if (opts?.limit !== undefined) {
    params.push(opts.limit);
    query += ` LIMIT $${params.length.toString()}`;
  }
  if (opts?.offset !== undefined) {
    params.push(opts.offset);
    query += ` OFFSET $${params.length.toString()}`;
  }
  const result = await pg.query<{ id: string; data: Record<string, unknown> }>(query, params);
  return result.rows.map((row) => ({ id: row.id, ...(row.data as object) }) as unknown as T);
}
/**
 * Counts documents in `collection`, optionally restricted by an equality
 * filter. COUNT(*) arrives from Postgres as text, hence the parseInt.
 */
async function pgCount(
  pg: PgClient,
  collection: string,
  filter?: Record<string, unknown>,
): Promise<number> {
  const { clause, params } = buildFilterClause(filter);
  const sql = `SELECT COUNT(*) as count FROM ${collection}${clause}`;
  const tally = await pg.query<{ count: string }>(sql, params);
  const [row] = tally.rows;
  return parseInt(row?.count ?? '0', 10);
}
/**
 * StorageAdapter backed by an embedded PGlite (Postgres-in-WASM) database.
 *
 * Each collection is a table of (id TEXT PRIMARY KEY, data JSONB,
 * created_at, updated_at); all document work is delegated to the shared
 * pg* helper functions. When `config.dataDir` is undefined, PGlite is
 * constructed without a path and runs in memory.
 */
export class PgliteAdapter implements StorageAdapter {
  readonly name = 'pglite';
  private readonly pg: PGlite;

  constructor(config: Extract<StorageConfig, { type: 'pglite' }>) {
    this.pg = new PGlite(config.dataDir);
  }

  /** Insert a document; generates an id when none is supplied. */
  async create<T extends Record<string, unknown>>(
    collection: string,
    data: T,
  ): Promise<T & { id: string }> {
    return pgCreate(this.pg, collection, data);
  }

  /** Fetch a document by id, or null when absent. */
  async read<T extends Record<string, unknown>>(collection: string, id: string): Promise<T | null> {
    return pgRead(this.pg, collection, id);
  }

  /** Shallow-merge fields into an existing document; false when id is unknown. */
  async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
    return pgUpdate(this.pg, collection, id, data);
  }

  /** Remove a document; false when nothing matched. */
  async delete(collection: string, id: string): Promise<boolean> {
    return pgDelete(this.pg, collection, id);
  }

  /** List documents matching an equality filter, with ordering and paging. */
  async find<T extends Record<string, unknown>>(
    collection: string,
    filter?: Record<string, unknown>,
    opts?: { limit?: number; offset?: number; orderBy?: string; order?: 'asc' | 'desc' },
  ): Promise<T[]> {
    return pgFind(this.pg, collection, filter, opts);
  }

  /** First document matching the filter, or null. */
  async findOne<T extends Record<string, unknown>>(
    collection: string,
    filter: Record<string, unknown>,
  ): Promise<T | null> {
    const [first] = await this.find<T>(collection, filter, { limit: 1 });
    return first ?? null;
  }

  /** Count documents matching the filter. */
  async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
    return pgCount(this.pg, collection, filter);
  }

  /** Run `fn` against a transaction-scoped adapter on this database. */
  async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
    return this.pg.transaction((tx) => fn(new PgliteTxAdapter(tx as unknown as PgClient)));
  }

  /** Create one table per known collection (idempotent). */
  async migrate(): Promise<void> {
    for (const table of COLLECTIONS) {
      await this.pg.query(`
        CREATE TABLE IF NOT EXISTS ${table} (
          id TEXT PRIMARY KEY,
          data JSONB NOT NULL DEFAULT '{}'::jsonb,
          created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
          updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
        )
      `);
    }
  }

  /** Shut down the embedded database. */
  async close(): Promise<void> {
    await this.pg.close();
  }
}
/**
 * Adapter facade over a PGlite transaction handle.
 *
 * Delegates to the same pg* helpers as the top-level adapter; a nested
 * `transaction` call simply runs in the already-open transaction, and
 * `migrate`/`close` are no-ops because the outer adapter owns both schema
 * and connection lifetime.
 */
class PgliteTxAdapter implements StorageAdapter {
  readonly name = 'pglite';

  constructor(private readonly pg: PgClient) {}

  /** Insert a document; generates an id when none is supplied. */
  async create<T extends Record<string, unknown>>(
    collection: string,
    data: T,
  ): Promise<T & { id: string }> {
    return pgCreate(this.pg, collection, data);
  }

  /** Fetch a document by id, or null when absent. */
  async read<T extends Record<string, unknown>>(collection: string, id: string): Promise<T | null> {
    return pgRead(this.pg, collection, id);
  }

  /** Shallow-merge fields into an existing document; false when id is unknown. */
  async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
    return pgUpdate(this.pg, collection, id, data);
  }

  /** Remove a document; false when nothing matched. */
  async delete(collection: string, id: string): Promise<boolean> {
    return pgDelete(this.pg, collection, id);
  }

  /** List documents matching an equality filter, with ordering and paging. */
  async find<T extends Record<string, unknown>>(
    collection: string,
    filter?: Record<string, unknown>,
    opts?: { limit?: number; offset?: number; orderBy?: string; order?: 'asc' | 'desc' },
  ): Promise<T[]> {
    return pgFind(this.pg, collection, filter, opts);
  }

  /** First document matching the filter, or null. */
  async findOne<T extends Record<string, unknown>>(
    collection: string,
    filter: Record<string, unknown>,
  ): Promise<T | null> {
    const [first] = await this.find<T>(collection, filter, { limit: 1 });
    return first ?? null;
  }

  /** Count documents matching the filter. */
  async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
    return pgCount(this.pg, collection, filter);
  }

  /** Nested transactions are flattened into the enclosing one. */
  async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
    return fn(this);
  }

  async migrate(): Promise<void> {
    // No-op: schema management happens outside transactions.
  }

  async close(): Promise<void> {
    // No-op: connection lifetime is owned by the outer adapter.
  }
}

View File

@@ -1,283 +0,0 @@
import Database from 'better-sqlite3';
import { randomUUID } from 'node:crypto';
import type { StorageAdapter, StorageConfig } from '../types.js';
/* eslint-disable @typescript-eslint/no-explicit-any */
// Collections persisted by the adapter; migrate() creates one table per name.
// Trusted, code-supplied identifiers — they are interpolated into SQL.
const COLLECTIONS = [
'users',
'sessions',
'accounts',
'projects',
'missions',
'tasks',
'agents',
'conversations',
'messages',
'preferences',
'insights',
'skills',
'events',
'routing_rules',
'provider_credentials',
'agent_logs',
'teams',
'team_members',
'mission_tasks',
'tickets',
'summarization_jobs',
'appreciations',
'verifications',
] as const;
/**
 * Builds a SQLite WHERE clause from a simple equality filter.
 *
 * `id` compares against the primary-key column; any other key compares
 * against the serialized document via json_extract on data_json. Object
 * values are matched by their JSON serialization.
 *
 * NOTE(review): filter keys are interpolated into the SQL text, so keys must
 * come from trusted code, never user input.
 */
function buildFilterClause(filter?: Record<string, unknown>): {
clause: string;
params: unknown[];
} {
if (!filter || Object.keys(filter).length === 0) return { clause: '', params: [] };
const conditions: string[] = [];
const params: unknown[] = [];
for (const [key, value] of Object.entries(filter)) {
if (key === 'id') {
conditions.push('id = ?');
params.push(value);
} else {
conditions.push(`json_extract(data_json, '$.${key}') = ?`);
params.push(typeof value === 'object' ? JSON.stringify(value) : value);
}
}
return { clause: ` WHERE ${conditions.join(' AND ')}`, params };
}
/**
 * StorageAdapter backed by better-sqlite3 (synchronous driver; the async
 * method signatures exist only to satisfy the StorageAdapter interface).
 * Each collection is a table of (id TEXT PK, data_json TEXT, created_at,
 * updated_at); documents are stored as JSON text and rehydrated on read.
 */
export class SqliteAdapter implements StorageAdapter {
readonly name = 'sqlite';
private db: Database.Database;
constructor(config: Extract<StorageConfig, { type: 'sqlite' }>) {
this.db = new Database(config.path);
// WAL journaling; foreign-key enforcement must be enabled explicitly.
this.db.pragma('journal_mode = WAL');
this.db.pragma('foreign_keys = ON');
}
/** Insert a document; honors a caller-supplied id, else generates a UUID. */
async create<T extends Record<string, unknown>>(
collection: string,
data: T,
): Promise<T & { id: string }> {
const id = (data as any).id ?? randomUUID();
const now = new Date().toISOString();
// id has its own column, so strip it from the JSON payload.
const rest = Object.fromEntries(Object.entries(data).filter(([k]) => k !== 'id'));
this.db
.prepare(
`INSERT INTO ${collection} (id, data_json, created_at, updated_at) VALUES (?, ?, ?, ?)`,
)
.run(id, JSON.stringify(rest), now, now);
return { ...data, id } as T & { id: string };
}
/** Fetch a document by id, or null when absent. */
async read<T extends Record<string, unknown>>(collection: string, id: string): Promise<T | null> {
const row = this.db.prepare(`SELECT * FROM ${collection} WHERE id = ?`).get(id) as any;
if (!row) return null;
return { id: row.id, ...JSON.parse(row.data_json as string) } as T;
}
/** Shallow-merge fields into an existing document; false when id is unknown. */
async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
const existing = this.db
.prepare(`SELECT data_json FROM ${collection} WHERE id = ?`)
.get(id) as any;
if (!existing) return false;
const merged = { ...JSON.parse(existing.data_json as string), ...data };
const now = new Date().toISOString();
const result = this.db
.prepare(`UPDATE ${collection} SET data_json = ?, updated_at = ? WHERE id = ?`)
.run(JSON.stringify(merged), now, id);
return result.changes > 0;
}
/** Delete by id; false when nothing matched. */
async delete(collection: string, id: string): Promise<boolean> {
const result = this.db.prepare(`DELETE FROM ${collection} WHERE id = ?`).run(id);
return result.changes > 0;
}
/** List documents matching an equality filter, with optional order/paging. */
async find<T extends Record<string, unknown>>(
collection: string,
filter?: Record<string, unknown>,
opts?: { limit?: number; offset?: number; orderBy?: string; order?: 'asc' | 'desc' },
): Promise<T[]> {
const { clause, params } = buildFilterClause(filter);
let query = `SELECT * FROM ${collection}${clause}`;
if (opts?.orderBy) {
const dir = opts.order === 'desc' ? 'DESC' : 'ASC';
// Real columns sort directly; anything else sorts on the JSON field.
const col =
opts.orderBy === 'id' || opts.orderBy === 'created_at' || opts.orderBy === 'updated_at'
? opts.orderBy
: `json_extract(data_json, '$.${opts.orderBy}')`;
query += ` ORDER BY ${col} ${dir}`;
}
// NOTE(review): truthiness checks mean limit/offset of 0 are ignored.
if (opts?.limit) {
query += ` LIMIT ?`;
params.push(opts.limit);
}
if (opts?.offset) {
query += ` OFFSET ?`;
params.push(opts.offset);
}
const rows = this.db.prepare(query).all(...params) as any[];
return rows.map((row) => ({ id: row.id, ...JSON.parse(row.data_json as string) }) as T);
}
/** First document matching the filter, or null. */
async findOne<T extends Record<string, unknown>>(
collection: string,
filter: Record<string, unknown>,
): Promise<T | null> {
const results = await this.find<T>(collection, filter, { limit: 1 });
return results[0] ?? null;
}
/** Count documents matching the filter. */
async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
const { clause, params } = buildFilterClause(filter);
const row = this.db
.prepare(`SELECT COUNT(*) as count FROM ${collection}${clause}`)
.get(...params) as any;
return row?.count ?? 0;
}
/** Run `fn` inside BEGIN/COMMIT, rolling back if it throws. */
async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
const txAdapter = new SqliteTxAdapter(this.db);
this.db.exec('BEGIN');
try {
const result = await fn(txAdapter);
this.db.exec('COMMIT');
return result;
} catch (err) {
this.db.exec('ROLLBACK');
throw err;
}
}
/** Create one table per known collection (idempotent). */
async migrate(): Promise<void> {
const createTable = (name: string) =>
this.db.exec(`
CREATE TABLE IF NOT EXISTS ${name} (
id TEXT PRIMARY KEY,
data_json TEXT NOT NULL DEFAULT '{}',
created_at TEXT NOT NULL DEFAULT (datetime('now')),
updated_at TEXT NOT NULL DEFAULT (datetime('now'))
)
`);
for (const collection of COLLECTIONS) {
createTable(collection);
}
}
/** Close the underlying database handle. */
async close(): Promise<void> {
this.db.close();
}
}
/**
 * Transaction wrapper that uses the same db handle — better-sqlite3 transactions
 * are connection-level, so all statements on the same Database instance within
 * a db.transaction() callback participate in the transaction.
 * Methods mirror SqliteAdapter; `migrate`/`close` are no-ops here because the
 * outer adapter owns schema and connection lifetime.
 */
class SqliteTxAdapter implements StorageAdapter {
readonly name = 'sqlite';
private db: Database.Database;
constructor(db: Database.Database) {
this.db = db;
}
/** Insert a document; honors a caller-supplied id, else generates a UUID. */
async create<T extends Record<string, unknown>>(
collection: string,
data: T,
): Promise<T & { id: string }> {
const id = (data as any).id ?? randomUUID();
const now = new Date().toISOString();
const rest = Object.fromEntries(Object.entries(data).filter(([k]) => k !== 'id'));
this.db
.prepare(
`INSERT INTO ${collection} (id, data_json, created_at, updated_at) VALUES (?, ?, ?, ?)`,
)
.run(id, JSON.stringify(rest), now, now);
return { ...data, id } as T & { id: string };
}
/** Fetch a document by id, or null when absent. */
async read<T extends Record<string, unknown>>(collection: string, id: string): Promise<T | null> {
const row = this.db.prepare(`SELECT * FROM ${collection} WHERE id = ?`).get(id) as any;
if (!row) return null;
return { id: row.id, ...JSON.parse(row.data_json as string) } as T;
}
/** Shallow-merge fields into an existing document; false when id is unknown. */
async update(collection: string, id: string, data: Record<string, unknown>): Promise<boolean> {
const existing = this.db
.prepare(`SELECT data_json FROM ${collection} WHERE id = ?`)
.get(id) as any;
if (!existing) return false;
const merged = { ...JSON.parse(existing.data_json as string), ...data };
const now = new Date().toISOString();
const result = this.db
.prepare(`UPDATE ${collection} SET data_json = ?, updated_at = ? WHERE id = ?`)
.run(JSON.stringify(merged), now, id);
return result.changes > 0;
}
/** Delete by id; false when nothing matched. */
async delete(collection: string, id: string): Promise<boolean> {
const result = this.db.prepare(`DELETE FROM ${collection} WHERE id = ?`).run(id);
return result.changes > 0;
}
/** List documents matching an equality filter, with optional order/paging. */
async find<T extends Record<string, unknown>>(
collection: string,
filter?: Record<string, unknown>,
opts?: { limit?: number; offset?: number; orderBy?: string; order?: 'asc' | 'desc' },
): Promise<T[]> {
const { clause, params } = buildFilterClause(filter);
let query = `SELECT * FROM ${collection}${clause}`;
if (opts?.orderBy) {
const dir = opts.order === 'desc' ? 'DESC' : 'ASC';
const col =
opts.orderBy === 'id' || opts.orderBy === 'created_at' || opts.orderBy === 'updated_at'
? opts.orderBy
: `json_extract(data_json, '$.${opts.orderBy}')`;
query += ` ORDER BY ${col} ${dir}`;
}
if (opts?.limit) {
query += ` LIMIT ?`;
params.push(opts.limit);
}
if (opts?.offset) {
query += ` OFFSET ?`;
params.push(opts.offset);
}
const rows = this.db.prepare(query).all(...params) as any[];
return rows.map((row) => ({ id: row.id, ...JSON.parse(row.data_json as string) }) as T);
}
/** First document matching the filter, or null. */
async findOne<T extends Record<string, unknown>>(
collection: string,
filter: Record<string, unknown>,
): Promise<T | null> {
const results = await this.find<T>(collection, filter, { limit: 1 });
return results[0] ?? null;
}
/** Count documents matching the filter. */
async count(collection: string, filter?: Record<string, unknown>): Promise<number> {
const { clause, params } = buildFilterClause(filter);
const row = this.db
.prepare(`SELECT COUNT(*) as count FROM ${collection}${clause}`)
.get(...params) as any;
return row?.count ?? 0;
}
/** Already inside a transaction — run `fn` against this same wrapper. */
async transaction<T>(fn: (tx: StorageAdapter) => Promise<T>): Promise<T> {
return fn(this);
}
async migrate(): Promise<void> {
// No-op inside transaction
}
async close(): Promise<void> {
// No-op inside transaction
}
}

View File

@@ -1,17 +1,17 @@
export type { StorageAdapter, StorageConfig } from './types.js'; export type { StorageAdapter, StorageConfig } from './types.js';
export { createStorageAdapter, registerStorageAdapter } from './factory.js'; export { createStorageAdapter, registerStorageAdapter } from './factory.js';
export { PostgresAdapter } from './adapters/postgres.js'; export { PostgresAdapter } from './adapters/postgres.js';
export { SqliteAdapter } from './adapters/sqlite.js'; export { PgliteAdapter } from './adapters/pglite.js';
import { registerStorageAdapter } from './factory.js'; import { registerStorageAdapter } from './factory.js';
import { PostgresAdapter } from './adapters/postgres.js'; import { PostgresAdapter } from './adapters/postgres.js';
import { SqliteAdapter } from './adapters/sqlite.js'; import { PgliteAdapter } from './adapters/pglite.js';
import type { StorageConfig } from './types.js'; import type { StorageConfig } from './types.js';
registerStorageAdapter('postgres', (config: StorageConfig) => { registerStorageAdapter('postgres', (config: StorageConfig) => {
return new PostgresAdapter(config as Extract<StorageConfig, { type: 'postgres' }>); return new PostgresAdapter(config as Extract<StorageConfig, { type: 'postgres' }>);
}); });
registerStorageAdapter('sqlite', (config: StorageConfig) => { registerStorageAdapter('pglite', (config: StorageConfig) => {
return new SqliteAdapter(config as Extract<StorageConfig, { type: 'sqlite' }>); return new PgliteAdapter(config as Extract<StorageConfig, { type: 'pglite' }>);
}); });

View File

@@ -39,5 +39,5 @@ export interface StorageAdapter {
export type StorageConfig = export type StorageConfig =
| { type: 'postgres'; url: string } | { type: 'postgres'; url: string }
| { type: 'sqlite'; path: string } | { type: 'pglite'; dataDir?: string }
| { type: 'files'; dataDir: string; format?: 'json' | 'md' }; | { type: 'files'; dataDir: string; format?: 'json' | 'md' };

62
pnpm-lock.yaml generated
View File

@@ -606,19 +606,16 @@ importers:
packages/storage: packages/storage:
dependencies: dependencies:
'@electric-sql/pglite':
specifier: ^0.2.17
version: 0.2.17
'@mosaic/db': '@mosaic/db':
specifier: workspace:^ specifier: workspace:^
version: link:../db version: link:../db
'@mosaic/types': '@mosaic/types':
specifier: workspace:* specifier: workspace:*
version: link:../types version: link:../types
better-sqlite3:
specifier: ^12.8.0
version: 12.8.0
devDependencies: devDependencies:
'@types/better-sqlite3':
specifier: ^7.6.13
version: 7.6.13
typescript: typescript:
specifier: ^5.8.0 specifier: ^5.8.0
version: 5.9.3 version: 5.9.3
@@ -10236,6 +10233,7 @@ snapshots:
'@types/better-sqlite3@7.6.13': '@types/better-sqlite3@7.6.13':
dependencies: dependencies:
'@types/node': 22.19.15 '@types/node': 22.19.15
optional: true
'@types/bunyan@1.8.11': '@types/bunyan@1.8.11':
dependencies: dependencies:
@@ -10667,6 +10665,7 @@ snapshots:
dependencies: dependencies:
bindings: 1.5.0 bindings: 1.5.0
prebuild-install: 7.1.3 prebuild-install: 7.1.3
optional: true
bidi-js@1.0.3: bidi-js@1.0.3:
dependencies: dependencies:
@@ -10677,12 +10676,14 @@ snapshots:
bindings@1.5.0: bindings@1.5.0:
dependencies: dependencies:
file-uri-to-path: 1.0.0 file-uri-to-path: 1.0.0
optional: true
bl@4.1.0: bl@4.1.0:
dependencies: dependencies:
buffer: 5.7.1 buffer: 5.7.1
inherits: 2.0.4 inherits: 2.0.4
readable-stream: 3.6.2 readable-stream: 3.6.2
optional: true
body-parser@2.2.2: body-parser@2.2.2:
dependencies: dependencies:
@@ -10744,6 +10745,7 @@ snapshots:
dependencies: dependencies:
base64-js: 1.5.1 base64-js: 1.5.1
ieee754: 1.2.1 ieee754: 1.2.1
optional: true
bullmq@5.71.0: bullmq@5.71.0:
dependencies: dependencies:
@@ -10806,7 +10808,8 @@ snapshots:
dependencies: dependencies:
readdirp: 5.0.0 readdirp: 5.0.0
chownr@1.1.4: {} chownr@1.1.4:
optional: true
chownr@3.0.0: {} chownr@3.0.0: {}
@@ -10965,10 +10968,12 @@ snapshots:
decompress-response@6.0.0: decompress-response@6.0.0:
dependencies: dependencies:
mimic-response: 3.1.0 mimic-response: 3.1.0
optional: true
deep-eql@5.0.2: {} deep-eql@5.0.2: {}
deep-extend@0.6.0: {} deep-extend@0.6.0:
optional: true
deep-is@0.1.4: {} deep-is@0.1.4: {}
@@ -11386,7 +11391,8 @@ snapshots:
signal-exit: 4.1.0 signal-exit: 4.1.0
strip-final-newline: 3.0.0 strip-final-newline: 3.0.0
expand-template@2.0.3: {} expand-template@2.0.3:
optional: true
expect-type@1.3.0: {} expect-type@1.3.0: {}
@@ -11552,7 +11558,8 @@ snapshots:
transitivePeerDependencies: transitivePeerDependencies:
- supports-color - supports-color
file-uri-to-path@1.0.0: {} file-uri-to-path@1.0.0:
optional: true
fill-range@7.1.1: fill-range@7.1.1:
dependencies: dependencies:
@@ -11614,7 +11621,8 @@ snapshots:
fresh@2.0.0: {} fresh@2.0.0: {}
fs-constants@1.0.0: {} fs-constants@1.0.0:
optional: true
fsevents@2.3.2: fsevents@2.3.2:
optional: true optional: true
@@ -11709,7 +11717,8 @@ snapshots:
transitivePeerDependencies: transitivePeerDependencies:
- supports-color - supports-color
github-from-package@0.0.0: {} github-from-package@0.0.0:
optional: true
glob-parent@6.0.2: glob-parent@6.0.2:
dependencies: dependencies:
@@ -11895,7 +11904,8 @@ snapshots:
inherits@2.0.4: {} inherits@2.0.4: {}
ini@1.3.8: {} ini@1.3.8:
optional: true
ink-spinner@5.0.0(ink@5.2.1(@types/react@18.3.28)(react@18.3.1))(react@18.3.1): ink-spinner@5.0.0(ink@5.2.1(@types/react@18.3.28)(react@18.3.1))(react@18.3.1):
dependencies: dependencies:
@@ -12603,7 +12613,8 @@ snapshots:
mimic-function@5.0.1: {} mimic-function@5.0.1: {}
mimic-response@3.1.0: {} mimic-response@3.1.0:
optional: true
minimatch@10.2.4: minimatch@10.2.4:
dependencies: dependencies:
@@ -12617,7 +12628,8 @@ snapshots:
dependencies: dependencies:
brace-expansion: 2.0.2 brace-expansion: 2.0.2
minimist@1.2.8: {} minimist@1.2.8:
optional: true
minipass@7.1.3: {} minipass@7.1.3: {}
@@ -12625,7 +12637,8 @@ snapshots:
dependencies: dependencies:
minipass: 7.1.3 minipass: 7.1.3
mkdirp-classic@0.5.3: {} mkdirp-classic@0.5.3:
optional: true
module-details-from-path@1.0.4: {} module-details-from-path@1.0.4: {}
@@ -12672,7 +12685,8 @@ snapshots:
nanostores@1.1.1: {} nanostores@1.1.1: {}
napi-build-utils@2.0.0: {} napi-build-utils@2.0.0:
optional: true
natural-compare@1.4.0: {} natural-compare@1.4.0: {}
@@ -12711,6 +12725,7 @@ snapshots:
node-abi@3.89.0: node-abi@3.89.0:
dependencies: dependencies:
semver: 7.7.4 semver: 7.7.4
optional: true
node-abort-controller@3.1.1: {} node-abort-controller@3.1.1: {}
@@ -13074,6 +13089,7 @@ snapshots:
simple-get: 4.0.1 simple-get: 4.0.1
tar-fs: 2.1.4 tar-fs: 2.1.4
tunnel-agent: 0.6.0 tunnel-agent: 0.6.0
optional: true
prelude-ls@1.2.1: {} prelude-ls@1.2.1: {}
@@ -13163,6 +13179,7 @@ snapshots:
ini: 1.3.8 ini: 1.3.8
minimist: 1.2.8 minimist: 1.2.8
strip-json-comments: 2.0.1 strip-json-comments: 2.0.1
optional: true
react-dom@19.2.4(react@19.2.4): react-dom@19.2.4(react@19.2.4):
dependencies: dependencies:
@@ -13214,6 +13231,7 @@ snapshots:
inherits: 2.0.4 inherits: 2.0.4
string_decoder: 1.1.1 string_decoder: 1.1.1
util-deprecate: 1.0.2 util-deprecate: 1.0.2
optional: true
readdirp@5.0.0: {} readdirp@5.0.0: {}
@@ -13468,13 +13486,15 @@ snapshots:
signal-exit@4.1.0: {} signal-exit@4.1.0: {}
simple-concat@1.0.1: {} simple-concat@1.0.1:
optional: true
simple-get@4.0.1: simple-get@4.0.1:
dependencies: dependencies:
decompress-response: 6.0.0 decompress-response: 6.0.0
once: 1.4.0 once: 1.4.0
simple-concat: 1.0.1 simple-concat: 1.0.1
optional: true
sisteransi@1.0.5: {} sisteransi@1.0.5: {}
@@ -13639,7 +13659,8 @@ snapshots:
strip-final-newline@3.0.0: {} strip-final-newline@3.0.0: {}
strip-json-comments@2.0.1: {} strip-json-comments@2.0.1:
optional: true
strip-json-comments@3.1.1: {} strip-json-comments@3.1.1: {}
@@ -13684,6 +13705,7 @@ snapshots:
mkdirp-classic: 0.5.3 mkdirp-classic: 0.5.3
pump: 3.0.4 pump: 3.0.4
tar-stream: 2.2.0 tar-stream: 2.2.0
optional: true
tar-stream@2.2.0: tar-stream@2.2.0:
dependencies: dependencies:
@@ -13692,6 +13714,7 @@ snapshots:
fs-constants: 1.0.0 fs-constants: 1.0.0
inherits: 2.0.4 inherits: 2.0.4
readable-stream: 3.6.2 readable-stream: 3.6.2
optional: true
tar@7.5.13: tar@7.5.13:
dependencies: dependencies:
@@ -13808,6 +13831,7 @@ snapshots:
tunnel-agent@0.6.0: tunnel-agent@0.6.0:
dependencies: dependencies:
safe-buffer: 5.2.1 safe-buffer: 5.2.1
optional: true
turbo-darwin-64@2.8.16: turbo-darwin-64@2.8.16:
optional: true optional: true