import type { Command } from 'commander';
|
|
import type { MigrationSource } from './migrate-tier.js';
|
|
|
|
/**
|
|
* Reads the DATABASE_URL environment variable and redacts the password portion.
|
|
*/
|
|
function redactedConnectionString(): string | null {
|
|
const url = process.env['DATABASE_URL'];
|
|
if (!url) return null;
|
|
try {
|
|
const parsed = new URL(url);
|
|
if (parsed.password) {
|
|
parsed.password = '***';
|
|
}
|
|
return parsed.toString();
|
|
} catch {
|
|
// Not a valid URL — redact anything that looks like :password@
|
|
return url.replace(/:([^@/]+)@/, ':***@');
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Determine the active storage tier from the environment.
|
|
* Looks at DATABASE_URL; if absent or set to a pglite path, treats tier as pglite.
|
|
*/
|
|
function activeTier(): 'postgres' | 'pglite' {
|
|
const url = process.env['DATABASE_URL'];
|
|
if (url && url.startsWith('postgres')) return 'postgres';
|
|
return 'pglite';
|
|
}
|
|
|
|
/**
|
|
* Return a human-readable config source description.
|
|
*/
|
|
function configSource(): string {
|
|
if (process.env['DATABASE_URL']) return 'env:DATABASE_URL';
|
|
const pgliteDir = process.env['PGLITE_DATA_DIR'];
|
|
if (pgliteDir) return `env:PGLITE_DATA_DIR (${pgliteDir})`;
|
|
return 'default (no DATABASE_URL set)';
|
|
}
|
|
|
|
/**
 * Register storage subcommands on an existing Commander program.
 * Follows the registerQualityRails pattern — uses the caller's Command
 * instance to avoid cross-package Commander version mismatches.
 *
 * Registered subcommands:
 *   storage status         — show tier + reachability probe
 *   storage tier show      — print active tier and config source
 *   storage tier switch    — print manual instructions for switching tiers
 *   storage export/import  — print manual dump/restore instructions
 *   storage migrate-tier   — migrate data to the federated Postgres tier
 *   storage migrate        — run (or print) the db package's migrate script
 */
export function registerStorageCommand(parent: Command): void {
  const storage = parent
    .command('storage')
    .description('Inspect and manage Mosaic storage configuration');

  // ── storage status ───────────────────────────────────────────────────────

  storage
    .command('status')
    .description('Show the configured storage tier and whether the adapter is reachable')
    .action(async () => {
      const tier = activeTier();
      const source = configSource();
      // Only postgres has a connection string worth showing; pglite is local.
      const connStr = tier === 'postgres' ? redactedConnectionString() : null;

      console.log(`[storage] tier: ${tier}`);
      console.log(`[storage] config source: ${source}`);

      if (tier === 'postgres' && connStr) {
        console.log(`[storage] connection: ${connStr}`);
        // Connectivity probe: lazily import the db package, open a real
        // connection, run SELECT 1, and close — failure is reported, not thrown.
        try {
          const { createDb, sql } = await import('@mosaicstack/db');
          const url = process.env['DATABASE_URL'] ?? '';
          const handle = createDb(url);
          await handle.db.execute(sql`SELECT 1`);
          await handle.close();
          console.log('[storage] reachable: yes');
        } catch (err) {
          console.log(
            `[storage] reachable: no (${err instanceof Error ? err.message : String(err)})`,
          );
        }
      } else {
        // pglite tier: report the data directory (in-memory when unset).
        const dataDir = process.env['PGLITE_DATA_DIR'] ?? ':memory:';
        console.log(`[storage] data dir: ${dataDir}`);
        console.log('[storage] reachable: pglite is always local — no network check needed');
      }
    });

  // ── storage tier ─────────────────────────────────────────────────────────

  const tier = storage.command('tier').description('Inspect or switch the storage tier');

  tier
    .command('show')
    .description('Print the active storage tier and its config source')
    .action(() => {
      const activeTierValue = activeTier();
      const source = configSource();
      console.log(`[storage] active tier: ${activeTierValue}`);
      console.log(`[storage] config source: ${source}`);
    });

  tier
    .command('switch <tier>')
    .description('Switch storage tier between pglite and postgres')
    .action((newTier: string) => {
      const validTiers = ['pglite', 'postgres'];
      if (!validTiers.includes(newTier)) {
        console.error(
          `[storage] unknown tier: ${newTier}. Valid options: ${validTiers.join(', ')}`,
        );
        process.exitCode = 1;
        return;
      }

      // Deliberately read-only: the tier is env-var driven, so this command
      // prints the manual steps instead of mutating any config file.
      console.log(`[storage] tier switch requested: ${newTier}`);
      console.log('');
      console.log('Mosaic storage tier is controlled by environment variables.');
      console.log('Automatic config-file mutation is not supported — set the variable manually.');
      console.log('');

      if (newTier === 'postgres') {
        console.log('To switch to postgres:');
        console.log(' 1. Set DATABASE_URL in your environment or .env file:');
        console.log(' export DATABASE_URL="postgresql://user:pass@localhost:5432/mosaic"');
        console.log(' 2. Run migrations:');
        console.log(' pnpm --filter @mosaicstack/db db:migrate');
        console.log(' 3. Restart the gateway.');
      } else {
        console.log('To switch to pglite:');
        console.log(' 1. Unset DATABASE_URL (or set it to a pglite path):');
        console.log(' unset DATABASE_URL');
        console.log(' # optionally: export PGLITE_DATA_DIR=/path/to/pglite/data');
        console.log(' 2. Restart the gateway.');
        console.log(' Note: pglite uses an in-process database — no migrations needed.');
      }
    });

  // ── storage export ───────────────────────────────────────────────────────

  storage
    .command('export <path>')
    .description('Dump the active storage contents to a file')
    .action((outputPath: string) => {
      const currentTier = activeTier();

      if (currentTier === 'postgres') {
        // Export is not implemented in-CLI yet; print a copy-pasteable pg_dump
        // command instead. The connection string is redacted, so the user must
        // substitute the real password before running it.
        const redacted = redactedConnectionString() ?? '<DATABASE_URL>';
        console.log('[storage] export for postgres tier');
        console.log('');
        console.log('postgres export is not yet wired in the CLI — use pg_dump directly:');
        console.log('');
        console.log(` pg_dump "${redacted}" > ${outputPath}`);
        console.log('');
        console.log('Or with Docker:');
        console.log(
          ` docker exec <postgres-container> pg_dump -U <user> <dbname> > ${outputPath}`,
        );
        process.exitCode = 0;
      } else {
        const dataDir = process.env['PGLITE_DATA_DIR'];
        console.log('[storage] export for pglite tier');
        console.log('');
        console.log(
          'pglite export is not yet wired in the CLI — copy the data directory directly:',
        );
        console.log('');
        if (dataDir) {
          console.log(` cp -r ${dataDir} ${outputPath}`);
        } else {
          // An in-memory pglite database has no on-disk directory to copy.
          console.log(
            ' PGLITE_DATA_DIR is not set; the database is in-memory and cannot be exported.',
          );
          console.log(' Set PGLITE_DATA_DIR to a persistent path before running export.');
        }
        process.exitCode = 0;
      }
    });

  // ── storage import ───────────────────────────────────────────────────────

  storage
    .command('import <path>')
    .description('Restore storage contents from a previously exported file')
    .action((inputPath: string) => {
      const currentTier = activeTier();

      if (currentTier === 'postgres') {
        // Mirrors the export command: print the manual psql restore command
        // with a redacted connection string.
        const redacted = redactedConnectionString() ?? '<DATABASE_URL>';
        console.log('[storage] import for postgres tier');
        console.log('');
        console.log('postgres import is not yet wired in the CLI — use psql directly:');
        console.log('');
        console.log(` psql "${redacted}" < ${inputPath}`);
        process.exitCode = 0;
      } else {
        const dataDir = process.env['PGLITE_DATA_DIR'];
        console.log('[storage] import for pglite tier');
        console.log('');
        console.log(
          'pglite import is not yet wired in the CLI — restore the data directory directly:',
        );
        console.log('');
        if (dataDir) {
          console.log(` rm -rf ${dataDir} && cp -r ${inputPath} ${dataDir}`);
          console.log(' Then restart the gateway.');
        } else {
          console.log(
            ' PGLITE_DATA_DIR is not set; set it to a persistent path before running import.',
          );
        }
        process.exitCode = 0;
      }
    });

  // ── storage migrate-tier ─────────────────────────────────────────────────

  storage
    .command('migrate-tier')
    .description('Migrate data from tier: local/standalone → tier: federated (Postgres + pgvector)')
    .requiredOption(
      '--to <tier>',
      'Target tier to migrate to (only "federated" is supported)',
      'federated',
    )
    .requiredOption('--target-url <url>', 'Target federated Postgres connection string (required)')
    .option(
      '--source-config <path>',
      'Path to mosaic.config.json (default: cwd/mosaic.config.json)',
    )
    .option('--dry-run', 'Print what would be migrated without writing anything')
    .option('--yes', 'Skip interactive confirmation prompt (required for non-TTY environments)')
    .option('--batch-size <n>', 'Rows per transaction batch', '1000')
    .option('--allow-non-empty', 'Allow writing to a non-empty target (upsert — idempotent)')
    .action(
      async (opts: {
        to: string;
        targetUrl: string;
        sourceConfig?: string;
        dryRun?: boolean;
        yes?: boolean;
        batchSize?: string;
        allowNonEmpty?: boolean;
      }) => {
        // --to has a default of 'federated'; anything else is rejected up front.
        if (opts.to !== 'federated') {
          console.error(
            `[migrate-tier] --to "${opts.to}" is not supported. Only "federated" is allowed.`,
          );
          process.exitCode = 1;
          return;
        }

        const batchSize = parseInt(opts.batchSize ?? '1000', 10);
        if (isNaN(batchSize) || batchSize < 1) {
          console.error('[migrate-tier] --batch-size must be a positive integer.');
          process.exitCode = 1;
          return;
        }

        // Redact target URL password for display.
        // NOTE(review): duplicates redactedConnectionString's logic — consider
        // sharing one helper so the two redaction paths cannot drift apart.
        function redactUrl(url: string): string {
          try {
            const parsed = new URL(url);
            if (parsed.password) parsed.password = '***';
            return parsed.toString();
          } catch {
            return url.replace(/:([^@/]+)@/, ':***@');
          }
        }

        const redactedTarget = redactUrl(opts.targetUrl);
        const isDryRun = opts.dryRun ?? false;
        const allowNonEmpty = opts.allowNonEmpty ?? false;

        // Determine source tier from environment.
        const sourceTier = activeTier();
        const sourceDesc = configSource();

        // Summary banner shown before any prompt or work.
        console.log('');
        console.log('[migrate-tier] ─────────────────────────────────────────');
        console.log(`[migrate-tier] Source tier: ${sourceTier}`);
        console.log(`[migrate-tier] Source: ${sourceDesc}`);
        console.log(`[migrate-tier] Target tier: federated (Postgres + pgvector)`);
        console.log(`[migrate-tier] Target: ${redactedTarget}`);
        console.log(`[migrate-tier] Batch size: ${batchSize.toString()}`);
        console.log(`[migrate-tier] Dry run: ${isDryRun.toString()}`);
        console.log(`[migrate-tier] Allow non-empty: ${allowNonEmpty.toString()}`);
        console.log('[migrate-tier] ─────────────────────────────────────────');
        console.log('');

        // Lazy-import core migration logic to keep the CLI thin.
        const {
          runMigrateTier,
          PostgresMigrationTarget,
          DrizzleMigrationSource,
          getMigrationOrder,
        } = await import('./migrate-tier.js');

        // Build source adapter using Drizzle-backed DrizzleMigrationSource.
        // Both local (PGlite) and standalone (Postgres) sources expose the same
        // normalized Drizzle schema — this is where the actual domain data lives.
        let sourceAdapter: MigrationSource;
        if (sourceTier === 'pglite') {
          const { createPgliteDb } = await import('@mosaicstack/db');
          const pgliteDataDir = process.env['PGLITE_DATA_DIR'];
          // An in-memory pglite source cannot be migrated; require a data dir.
          if (!pgliteDataDir) {
            console.error(
              '[migrate-tier] PGLITE_DATA_DIR is not set. ' +
                'Cannot open PGlite source — set it to the data directory path.',
            );
            process.exitCode = 1;
            return;
          }
          const handle = createPgliteDb(pgliteDataDir);
          // Local/PGlite sources do not have pgvector registered — the embedding
          // column is omitted from the insights SELECT and set to null on target.
          sourceAdapter = new DrizzleMigrationSource(handle.db, /* sourceHasVector= */ false);
        } else {
          const { createDb } = await import('@mosaicstack/db');
          const url = process.env['DATABASE_URL'];
          if (!url) {
            console.error('[migrate-tier] DATABASE_URL is not set for postgres source.');
            process.exitCode = 1;
            return;
          }
          const handle = createDb(url);
          // Standalone Postgres may or may not have pgvector — assume it does not
          // (it is a non-federated tier) so embedding is treated as null.
          sourceAdapter = new DrizzleMigrationSource(handle.db, /* sourceHasVector= */ false);
        }

        // Print per-table row counts for the confirmation prompt.
        // NOTE(review): if count() throws here, sourceAdapter is never closed —
        // consider wrapping this loop (and the prompt below) in try/finally.
        const tablesToMigrate = getMigrationOrder();
        const counts: Array<{ table: string; count: number }> = [];
        for (const table of tablesToMigrate) {
          const n = await sourceAdapter.count(table);
          counts.push({ table, count: n });
        }

        console.log('[migrate-tier] Source row counts:');
        for (const { table, count } of counts) {
          console.log(` ${table}: ${count.toString()}`);
        }
        // These tables are intentionally never migrated.
        console.log(' sessions: SKIPPED (ephemeral)');
        console.log(' verifications: SKIPPED (ephemeral)');
        console.log(' admin_tokens: SKIPPED (environment-specific)');
        console.log('');

        // Interactive confirmation unless --yes or dry-run.
        const isTTY = process.stdin.isTTY;
        if (!isDryRun) {
          // Headless runs must opt in explicitly — a prompt would hang forever.
          if (!opts.yes && !isTTY) {
            console.error(
              '[migrate-tier] Not running in a TTY and --yes was not passed. ' +
                'Pass --yes to confirm in headless environments.',
            );
            process.exitCode = 1;
            await sourceAdapter.close();
            return;
          }

          if (!opts.yes) {
            const { createInterface } = await import('node:readline');
            const rl = createInterface({ input: process.stdin, output: process.stdout });
            // Promisify the callback-style readline question.
            const answer = await new Promise<string>((resolve) => {
              rl.question(`This will WRITE to ${redactedTarget}. Continue? [y/N] `, (ans) => {
                rl.close();
                resolve(ans);
              });
            });
            // Anything other than an explicit 'y' (case-insensitive) aborts.
            if (answer.trim().toLowerCase() !== 'y') {
              console.log('[migrate-tier] Aborted.');
              await sourceAdapter.close();
              return;
            }
          }
        }

        const target = new PostgresMigrationTarget(opts.targetUrl);

        try {
          const result = await runMigrateTier(
            sourceAdapter,
            target,
            {
              targetUrl: opts.targetUrl,
              dryRun: isDryRun,
              allowNonEmpty,
              batchSize,
              onProgress: (msg) => console.log(msg),
            },
            // NOTE(review): the adapters above are both constructed with
            // sourceHasVector=false, yet runMigrateTier is told the postgres
            // source HAS vectors — confirm which flag runMigrateTier honors.
            /* sourceHasVector= */ sourceTier === 'postgres',
          );

          if (result.dryRun) {
            console.log('[migrate-tier] Dry run complete. No data was written.');
          } else {
            console.log(
              `[migrate-tier] Migration complete. ${result.totalRows.toString()} rows migrated.`,
            );
          }
        } catch (err) {
          console.error(
            `[migrate-tier] ERROR: ${err instanceof Error ? err.message : String(err)}`,
          );
          process.exitCode = 1;
        } finally {
          // Always release both connections, even on failure.
          await Promise.all([sourceAdapter.close(), target.close()]);
        }
      },
    );

  // ── storage migrate ──────────────────────────────────────────────────────

  storage
    .command('migrate')
    .description(
      'Run database migrations (thin wrapper — delegates to pnpm db:migrate or prints the command)',
    )
    .option('--run', 'Actually execute the migration command via shell')
    .action(async (opts: { run?: boolean }) => {
      const currentTier = activeTier();

      if (currentTier === 'pglite') {
        console.log('[storage] pglite tier detected');
        console.log(
          'pglite runs schema setup automatically on first connection via adapter.migrate().',
        );
        console.log('No separate migration step is required.');
        return;
      }

      const migrateCmd = 'pnpm --filter @mosaicstack/db db:migrate';
      console.log('[storage] postgres tier detected');
      console.log(`Migration command: ${migrateCmd}`);
      console.log('');

      if (opts.run) {
        // Fixed command string (no user input interpolated), run with inherited
        // stdio so migration output streams straight to the user's terminal.
        console.log('Running migrations...');
        const { execSync } = await import('node:child_process');
        try {
          execSync(migrateCmd, { stdio: 'inherit' });
          console.log('[storage] migrations complete.');
        } catch (err) {
          console.error(
            `[storage] migration failed: ${err instanceof Error ? err.message : String(err)}`,
          );
          process.exitCode = 1;
        }
      } else {
        // Default is print-only; --run opts in to actually executing it.
        console.log('To run migrations, execute:');
        console.log(` ${migrateCmd}`);
        console.log('');
        console.log('Or pass --run to have this command execute it for you.');
      }
    });
}
|