Compare commits
103 Commits
v0.2.0
...
feat/gatew
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9f73d3f5db | ||
|
|
6954e43bbb | ||
| 86d6c214fe | |||
| 39ccba95d0 | |||
| 202e375f41 | |||
|
|
d0378c5723 | ||
| d6f04a0757 | |||
| afedb8697e | |||
|
|
1274df7ffc | ||
|
|
1b4767bd8b | ||
| 0b0fe10b37 | |||
| acfb31f8f6 | |||
|
|
fd83bd4f2d | ||
|
|
ce3ca1dbd1 | ||
|
|
95e7b071d4 | ||
| d4c5797a65 | |||
| 70a51ba711 | |||
| db8023bdbb | |||
| 9e597ecf87 | |||
| a23c117ea4 | |||
| 0cf80dab8c | |||
|
|
04a80fb9ba | ||
|
|
626adac363 | ||
|
|
35fbd88a1d | ||
| 381b0eed7b | |||
|
|
25383ea645 | ||
|
|
e7db9ddf98 | ||
|
|
7bb878718d | ||
|
|
46a31d4e71 | ||
|
|
e128a7a322 | ||
|
|
27b1898ec6 | ||
|
|
d19ef45bb0 | ||
|
|
5e852df6c3 | ||
|
|
e0eca771c6 | ||
|
|
9d22ef4cc9 | ||
|
|
41961a6980 | ||
|
|
e797676a02 | ||
|
|
05d61e62be | ||
|
|
73043773d8 | ||
| 0be9729e40 | |||
|
|
e83674ac51 | ||
|
|
a6e59bf829 | ||
| e46f0641f6 | |||
|
|
07efaa9580 | ||
|
|
361fece023 | ||
| 80e69016b0 | |||
|
|
e084a88a9d | ||
| 990a88362f | |||
|
|
ea9782b2dc | ||
| 8efbaf100e | |||
|
|
15830e2f2a | ||
| 04db8591af | |||
|
|
785d30e065 | ||
| e57a10913d | |||
| 0d12471868 | |||
| ea371d760d | |||
|
|
3b9104429b | ||
|
|
8a83aed9b1 | ||
|
|
2f68237046 | ||
|
|
45f5b9062e | ||
| 147f5f1bec | |||
|
|
f05b198882 | ||
| d0a484cbb7 | |||
|
|
6e6ee37da0 | ||
| 53199122d8 | |||
|
|
b38cfac760 | ||
| f3cb3e6852 | |||
|
|
e599f5fe38 | ||
| 6357a3fc9c | |||
|
|
92998e6e65 | ||
| 2394a2a0dd | |||
|
|
13934d4879 | ||
| aa80013811 | |||
|
|
2ee7206c3a | ||
| be74ca3cf9 | |||
| 35123b21ce | |||
| 492dc18e14 | |||
|
|
a824a43ed1 | ||
|
|
9b72f0ea14 | ||
|
|
d367f00077 | ||
| 31a5751c6c | |||
| fa43989cd5 | |||
| 1b317e8a0a | |||
| 316807581c | |||
|
|
3321d4575a | ||
|
|
85d4527701 | ||
|
|
47b7509288 | ||
|
|
34fad9da81 | ||
|
|
48be0aa195 | ||
|
|
f544cc65d2 | ||
|
|
41e8f91b2d | ||
|
|
f161e3cb62 | ||
| da41724490 | |||
|
|
281e636e4d | ||
| 87dcd12a65 | |||
|
|
d3fdc4ff54 | ||
| 9690aba0f5 | |||
|
|
10689a30d2 | ||
| 40c068fcbc | |||
|
|
a9340adad7 | ||
| 5cb72e8ca6 | |||
|
|
48323e7d6e | ||
|
|
01259f56cd |
@@ -15,6 +15,7 @@ steps:
|
|||||||
image: *node_image
|
image: *node_image
|
||||||
commands:
|
commands:
|
||||||
- corepack enable
|
- corepack enable
|
||||||
|
- apk add --no-cache python3 make g++
|
||||||
- pnpm install --frozen-lockfile
|
- pnpm install --frozen-lockfile
|
||||||
|
|
||||||
typecheck:
|
typecheck:
|
||||||
@@ -44,18 +45,30 @@ steps:
|
|||||||
|
|
||||||
test:
|
test:
|
||||||
image: *node_image
|
image: *node_image
|
||||||
|
environment:
|
||||||
|
DATABASE_URL: postgresql://mosaic:mosaic@postgres:5432/mosaic
|
||||||
commands:
|
commands:
|
||||||
- *enable_pnpm
|
- *enable_pnpm
|
||||||
|
# Install postgresql-client for pg_isready
|
||||||
|
- apk add --no-cache postgresql-client
|
||||||
|
# Wait up to 30s for postgres to be ready
|
||||||
|
- |
|
||||||
|
for i in $(seq 1 30); do
|
||||||
|
pg_isready -h postgres -p 5432 -U mosaic && break
|
||||||
|
echo "Waiting for postgres ($i/30)..."
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
# Run migrations (DATABASE_URL is set in environment above)
|
||||||
|
- pnpm --filter @mosaic/db run db:migrate
|
||||||
|
# Run all tests
|
||||||
- pnpm test
|
- pnpm test
|
||||||
depends_on:
|
depends_on:
|
||||||
- typecheck
|
- typecheck
|
||||||
|
|
||||||
build:
|
services:
|
||||||
image: *node_image
|
postgres:
|
||||||
commands:
|
image: pgvector/pgvector:pg17
|
||||||
- *enable_pnpm
|
environment:
|
||||||
- pnpm build
|
POSTGRES_USER: mosaic
|
||||||
depends_on:
|
POSTGRES_PASSWORD: mosaic
|
||||||
- lint
|
POSTGRES_DB: mosaic
|
||||||
- format
|
|
||||||
- test
|
|
||||||
|
|||||||
111
.woodpecker/publish.yml
Normal file
111
.woodpecker/publish.yml
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
# Build, publish npm packages, and push Docker images
|
||||||
|
# Runs only on main branch push/tag
|
||||||
|
|
||||||
|
variables:
|
||||||
|
- &node_image 'node:22-alpine'
|
||||||
|
- &enable_pnpm 'corepack enable'
|
||||||
|
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
install:
|
||||||
|
image: *node_image
|
||||||
|
commands:
|
||||||
|
- corepack enable
|
||||||
|
- pnpm install --frozen-lockfile
|
||||||
|
|
||||||
|
build:
|
||||||
|
image: *node_image
|
||||||
|
commands:
|
||||||
|
- *enable_pnpm
|
||||||
|
- pnpm build
|
||||||
|
depends_on:
|
||||||
|
- install
|
||||||
|
|
||||||
|
publish-npm:
|
||||||
|
image: *node_image
|
||||||
|
environment:
|
||||||
|
NPM_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
commands:
|
||||||
|
- *enable_pnpm
|
||||||
|
# Configure auth for Gitea npm registry
|
||||||
|
- |
|
||||||
|
echo "//git.mosaicstack.dev/api/packages/mosaic/npm/:_authToken=$NPM_TOKEN" > ~/.npmrc
|
||||||
|
echo "@mosaic:registry=https://git.mosaicstack.dev/api/packages/mosaic/npm/" >> ~/.npmrc
|
||||||
|
# Publish non-private packages to Gitea (--no-git-checks skips dirty/branch checks in CI)
|
||||||
|
# --filter excludes web (private)
|
||||||
|
- >
|
||||||
|
pnpm --filter "@mosaic/*"
|
||||||
|
--filter "!@mosaic/web"
|
||||||
|
publish --no-git-checks --access public
|
||||||
|
|| echo "[publish] Some packages may already exist at this version — continuing"
|
||||||
|
depends_on:
|
||||||
|
- build
|
||||||
|
|
||||||
|
# TODO: Uncomment when ready to publish to npmjs.org
|
||||||
|
# publish-npmjs:
|
||||||
|
# image: *node_image
|
||||||
|
# environment:
|
||||||
|
# NPM_TOKEN:
|
||||||
|
# from_secret: npmjs_token
|
||||||
|
# commands:
|
||||||
|
# - *enable_pnpm
|
||||||
|
# - apk add --no-cache jq bash
|
||||||
|
# - bash scripts/publish-npmjs.sh
|
||||||
|
# depends_on:
|
||||||
|
# - build
|
||||||
|
# when:
|
||||||
|
# - event: [tag]
|
||||||
|
|
||||||
|
build-gateway:
|
||||||
|
image: gcr.io/kaniko-project/executor:debug
|
||||||
|
environment:
|
||||||
|
REGISTRY_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
REGISTRY_PASS:
|
||||||
|
from_secret: gitea_password
|
||||||
|
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||||
|
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||||
|
CI_COMMIT_SHA: ${CI_COMMIT_SHA}
|
||||||
|
commands:
|
||||||
|
- mkdir -p /kaniko/.docker
|
||||||
|
- echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$REGISTRY_USER\",\"password\":\"$REGISTRY_PASS\"}}}" > /kaniko/.docker/config.json
|
||||||
|
- |
|
||||||
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/mosaic-stack/gateway:sha-${CI_COMMIT_SHA:0:7}"
|
||||||
|
if [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
|
DESTINATIONS="$DESTINATIONS --destination git.mosaicstack.dev/mosaic/mosaic-stack/gateway:latest"
|
||||||
|
fi
|
||||||
|
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||||
|
DESTINATIONS="$DESTINATIONS --destination git.mosaicstack.dev/mosaic/mosaic-stack/gateway:$CI_COMMIT_TAG"
|
||||||
|
fi
|
||||||
|
/kaniko/executor --context . --dockerfile docker/gateway.Dockerfile $DESTINATIONS
|
||||||
|
depends_on:
|
||||||
|
- build
|
||||||
|
|
||||||
|
build-web:
|
||||||
|
image: gcr.io/kaniko-project/executor:debug
|
||||||
|
environment:
|
||||||
|
REGISTRY_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
REGISTRY_PASS:
|
||||||
|
from_secret: gitea_password
|
||||||
|
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||||
|
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||||
|
CI_COMMIT_SHA: ${CI_COMMIT_SHA}
|
||||||
|
commands:
|
||||||
|
- mkdir -p /kaniko/.docker
|
||||||
|
- echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$REGISTRY_USER\",\"password\":\"$REGISTRY_PASS\"}}}" > /kaniko/.docker/config.json
|
||||||
|
- |
|
||||||
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/mosaic-stack/web:sha-${CI_COMMIT_SHA:0:7}"
|
||||||
|
if [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
|
DESTINATIONS="$DESTINATIONS --destination git.mosaicstack.dev/mosaic/mosaic-stack/web:latest"
|
||||||
|
fi
|
||||||
|
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||||
|
DESTINATIONS="$DESTINATIONS --destination git.mosaicstack.dev/mosaic/mosaic-stack/web:$CI_COMMIT_TAG"
|
||||||
|
fi
|
||||||
|
/kaniko/executor --context . --dockerfile docker/web.Dockerfile $DESTINATIONS
|
||||||
|
depends_on:
|
||||||
|
- build
|
||||||
244
README.md
Normal file
244
README.md
Normal file
@@ -0,0 +1,244 @@
|
|||||||
|
# Mosaic Stack
|
||||||
|
|
||||||
|
Self-hosted, multi-user AI agent platform. One config, every runtime, same standards.
|
||||||
|
|
||||||
|
Mosaic gives you a unified launcher for Claude Code, Codex, OpenCode, and Pi — injecting consistent system prompts, guardrails, skills, and mission context into every session. A NestJS gateway provides the API surface, a Next.js dashboard gives you the UI, and a plugin system connects Discord, Telegram, and more.
|
||||||
|
|
||||||
|
## Quick Install
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bash <(curl -fsSL https://git.mosaicstack.dev/mosaic/mosaic-stack/raw/branch/main/tools/install.sh)
|
||||||
|
```
|
||||||
|
|
||||||
|
This installs both components:
|
||||||
|
|
||||||
|
| Component | What | Where |
|
||||||
|
| --------------- | ----------------------------------------------------- | -------------------- |
|
||||||
|
| **Framework** | Bash launcher, guides, runtime configs, tools, skills | `~/.config/mosaic/` |
|
||||||
|
| **@mosaic/cli** | TUI, gateway client, wizard, auto-updater | `~/.npm-global/bin/` |
|
||||||
|
|
||||||
|
After install, set up your agent identity:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mosaic init # Interactive wizard
|
||||||
|
```
|
||||||
|
|
||||||
|
### Requirements
|
||||||
|
|
||||||
|
- Node.js ≥ 20
|
||||||
|
- npm (for global @mosaic/cli install)
|
||||||
|
- One or more runtimes: [Claude Code](https://docs.anthropic.com/en/docs/claude-code), [Codex](https://github.com/openai/codex), [OpenCode](https://opencode.ai), or [Pi](https://github.com/mariozechner/pi-coding-agent)
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
### Launching Agent Sessions
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mosaic pi # Launch Pi with Mosaic injection
|
||||||
|
mosaic claude # Launch Claude Code with Mosaic injection
|
||||||
|
mosaic codex # Launch Codex with Mosaic injection
|
||||||
|
mosaic opencode # Launch OpenCode with Mosaic injection
|
||||||
|
|
||||||
|
mosaic yolo claude # Claude with dangerous-permissions mode
|
||||||
|
mosaic yolo pi # Pi in yolo mode
|
||||||
|
```
|
||||||
|
|
||||||
|
The launcher verifies your config, checks for `SOUL.md`, injects your `AGENTS.md` standards into the runtime, and forwards all arguments.
|
||||||
|
|
||||||
|
### TUI & Gateway
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mosaic tui # Interactive TUI connected to the gateway
|
||||||
|
mosaic login # Authenticate with a gateway instance
|
||||||
|
mosaic sessions list # List active agent sessions
|
||||||
|
```
|
||||||
|
|
||||||
|
### Management
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mosaic doctor # Health audit — detect drift and missing files
|
||||||
|
mosaic sync # Sync skills from canonical source
|
||||||
|
mosaic update # Check for and install CLI updates
|
||||||
|
mosaic wizard # Full guided setup wizard
|
||||||
|
mosaic bootstrap <path> # Bootstrap a repo with Mosaic standards
|
||||||
|
mosaic coord init # Initialize a new orchestration mission
|
||||||
|
mosaic prdy init # Create a PRD via guided session
|
||||||
|
```
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
### Prerequisites
|
||||||
|
|
||||||
|
- Node.js ≥ 20
|
||||||
|
- pnpm 10.6+
|
||||||
|
- Docker & Docker Compose
|
||||||
|
|
||||||
|
### Setup
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git clone git@git.mosaicstack.dev:mosaic/mosaic-stack.git
|
||||||
|
cd mosaic-stack
|
||||||
|
|
||||||
|
# Start infrastructure (Postgres, Valkey, Jaeger)
|
||||||
|
docker compose up -d
|
||||||
|
|
||||||
|
# Install dependencies
|
||||||
|
pnpm install
|
||||||
|
|
||||||
|
# Run migrations
|
||||||
|
pnpm --filter @mosaic/db run db:migrate
|
||||||
|
|
||||||
|
# Start all services in dev mode
|
||||||
|
pnpm dev
|
||||||
|
```
|
||||||
|
|
||||||
|
### Infrastructure
|
||||||
|
|
||||||
|
Docker Compose provides:
|
||||||
|
|
||||||
|
| Service | Port | Purpose |
|
||||||
|
| --------------------- | --------- | ---------------------- |
|
||||||
|
| PostgreSQL (pgvector) | 5433 | Primary database |
|
||||||
|
| Valkey | 6380 | Task queue + caching |
|
||||||
|
| Jaeger | 16686 | Distributed tracing UI |
|
||||||
|
| OTEL Collector | 4317/4318 | Telemetry ingestion |
|
||||||
|
|
||||||
|
### Quality Gates
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pnpm typecheck # TypeScript type checking (all packages)
|
||||||
|
pnpm lint # ESLint (all packages)
|
||||||
|
pnpm test # Vitest (all packages)
|
||||||
|
pnpm format:check # Prettier check
|
||||||
|
pnpm format # Prettier auto-fix
|
||||||
|
```
|
||||||
|
|
||||||
|
### CI
|
||||||
|
|
||||||
|
Woodpecker CI runs on every push:
|
||||||
|
|
||||||
|
- `pnpm install --frozen-lockfile`
|
||||||
|
- Database migration against a fresh Postgres
|
||||||
|
- `pnpm test` (Turbo-orchestrated across all packages)
|
||||||
|
|
||||||
|
npm packages are published to the Gitea package registry on main merges.
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
```
|
||||||
|
mosaic-stack/
|
||||||
|
├── apps/
|
||||||
|
│ ├── gateway/ NestJS API + WebSocket hub (Fastify, Socket.IO, OTEL)
|
||||||
|
│ └── web/ Next.js dashboard (React 19, Tailwind)
|
||||||
|
├── packages/
|
||||||
|
│ ├── cli/ Mosaic CLI — TUI, gateway client, wizard
|
||||||
|
│ ├── mosaic/ Framework — wizard, runtime detection, update checker
|
||||||
|
│ ├── types/ Shared TypeScript contracts (Socket.IO typed events)
|
||||||
|
│ ├── db/ Drizzle ORM schema + migrations (pgvector)
|
||||||
|
│ ├── auth/ BetterAuth configuration
|
||||||
|
│ ├── brain/ Data layer (PG-backed)
|
||||||
|
│ ├── queue/ Valkey task queue + MCP
|
||||||
|
│ ├── coord/ Mission coordination
|
||||||
|
│ ├── forge/ Multi-stage AI pipeline (intake → board → plan → code → review)
|
||||||
|
│ ├── macp/ MACP protocol — credential resolution, gate runner, events
|
||||||
|
│ ├── agent/ Agent session management
|
||||||
|
│ ├── memory/ Agent memory layer
|
||||||
|
│ ├── log/ Structured logging
|
||||||
|
│ ├── prdy/ PRD creation and validation
|
||||||
|
│ ├── quality-rails/ Quality templates (TypeScript, Next.js, monorepo)
|
||||||
|
│ └── design-tokens/ Shared design tokens
|
||||||
|
├── plugins/
|
||||||
|
│ ├── discord/ Discord channel plugin (discord.js)
|
||||||
|
│ ├── telegram/ Telegram channel plugin (Telegraf)
|
||||||
|
│ ├── macp/ OpenClaw MACP runtime plugin
|
||||||
|
│ └── mosaic-framework/ OpenClaw framework injection plugin
|
||||||
|
├── tools/
|
||||||
|
│ └── install.sh Unified installer (framework + npm CLI)
|
||||||
|
├── scripts/agent/ Agent session lifecycle scripts
|
||||||
|
├── docker-compose.yml Dev infrastructure
|
||||||
|
└── .woodpecker/ CI pipeline configs
|
||||||
|
```
|
||||||
|
|
||||||
|
### Key Design Decisions
|
||||||
|
|
||||||
|
- **Gateway is the single API surface** — all clients (TUI, web, Discord, Telegram) connect through it
|
||||||
|
- **ESM everywhere** — `"type": "module"`, `.js` extensions in imports, NodeNext resolution
|
||||||
|
- **Socket.IO typed events** — defined in `@mosaic/types`, enforced at compile time
|
||||||
|
- **OTEL auto-instrumentation** — loads before NestJS bootstrap
|
||||||
|
- **Explicit `@Inject()` decorators** — required since tsx/esbuild doesn't emit decorator metadata
|
||||||
|
|
||||||
|
### Framework (`~/.config/mosaic/`)
|
||||||
|
|
||||||
|
The framework is the bash-based standards layer installed to every developer machine:
|
||||||
|
|
||||||
|
```
|
||||||
|
~/.config/mosaic/
|
||||||
|
├── AGENTS.md ← Central standards (loaded into every runtime)
|
||||||
|
├── SOUL.md ← Agent identity (name, style, guardrails)
|
||||||
|
├── USER.md ← User profile (name, timezone, preferences)
|
||||||
|
├── TOOLS.md ← Machine-level tool reference
|
||||||
|
├── bin/mosaic ← Unified launcher (claude, codex, opencode, pi, yolo)
|
||||||
|
├── guides/ ← E2E delivery, orchestrator protocol, PRD, etc.
|
||||||
|
├── runtime/ ← Per-runtime configs (claude/, codex/, opencode/, pi/)
|
||||||
|
├── skills/ ← Universal skills (synced from agent-skills repo)
|
||||||
|
├── tools/ ← Tool suites (orchestrator, git, quality, prdy, etc.)
|
||||||
|
└── memory/ ← Persistent agent memory (preserved across upgrades)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Forge Pipeline
|
||||||
|
|
||||||
|
Forge is a multi-stage AI pipeline for autonomous feature delivery:
|
||||||
|
|
||||||
|
```
|
||||||
|
Intake → Discovery → Board Review → Planning (3 stages) → Coding → Review → Remediation → Test → Deploy
|
||||||
|
```
|
||||||
|
|
||||||
|
Each stage has a dispatch mode (`exec` for research/review, `yolo` for coding), quality gates, and timeouts. The board review uses multiple AI personas (CEO, CTO, CFO, COO + specialists) to evaluate briefs before committing resources.
|
||||||
|
|
||||||
|
## Upgrading
|
||||||
|
|
||||||
|
Run the installer again — it handles upgrades automatically:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bash <(curl -fsSL https://git.mosaicstack.dev/mosaic/mosaic-stack/raw/branch/main/tools/install.sh)
|
||||||
|
```
|
||||||
|
|
||||||
|
Or use the CLI:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
mosaic update # Check + install CLI updates
|
||||||
|
mosaic update --check # Check only, don't install
|
||||||
|
```
|
||||||
|
|
||||||
|
The CLI also performs a background update check on every invocation (cached for 1 hour).
|
||||||
|
|
||||||
|
### Installer Flags
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bash tools/install.sh --check # Version check only
|
||||||
|
bash tools/install.sh --framework # Framework only (skip npm CLI)
|
||||||
|
bash tools/install.sh --cli # npm CLI only (skip framework)
|
||||||
|
bash tools/install.sh --ref v1.0 # Install from a specific git ref
|
||||||
|
```
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Create a feature branch
|
||||||
|
git checkout -b feat/my-feature
|
||||||
|
|
||||||
|
# Make changes, then verify
|
||||||
|
pnpm typecheck && pnpm lint && pnpm test && pnpm format:check
|
||||||
|
|
||||||
|
# Commit (husky runs lint-staged automatically)
|
||||||
|
git commit -m "feat: description of change"
|
||||||
|
|
||||||
|
# Push and create PR
|
||||||
|
git push -u origin feat/my-feature
|
||||||
|
```
|
||||||
|
|
||||||
|
DTOs go in `*.dto.ts` files at module boundaries. Scratchpads (`docs/scratchpads/`) are mandatory for non-trivial tasks. See `AGENTS.md` for the full standards reference.
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
Proprietary — all rights reserved.
|
||||||
@@ -1,9 +1,23 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/gateway",
|
"name": "@mosaic/gateway",
|
||||||
"version": "0.0.0",
|
"version": "0.0.2",
|
||||||
"private": true,
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||||
|
"directory": "apps/gateway"
|
||||||
|
},
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/main.js",
|
"main": "dist/main.js",
|
||||||
|
"bin": {
|
||||||
|
"mosaic-gateway": "dist/main.js"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"publishConfig": {
|
||||||
|
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||||
|
"access": "public"
|
||||||
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "tsc",
|
"build": "tsc",
|
||||||
"dev": "tsx watch src/main.ts",
|
"dev": "tsx watch src/main.ts",
|
||||||
@@ -19,12 +33,14 @@
|
|||||||
"@modelcontextprotocol/sdk": "^1.27.1",
|
"@modelcontextprotocol/sdk": "^1.27.1",
|
||||||
"@mosaic/auth": "workspace:^",
|
"@mosaic/auth": "workspace:^",
|
||||||
"@mosaic/brain": "workspace:^",
|
"@mosaic/brain": "workspace:^",
|
||||||
|
"@mosaic/config": "workspace:^",
|
||||||
"@mosaic/coord": "workspace:^",
|
"@mosaic/coord": "workspace:^",
|
||||||
"@mosaic/db": "workspace:^",
|
"@mosaic/db": "workspace:^",
|
||||||
"@mosaic/discord-plugin": "workspace:^",
|
"@mosaic/discord-plugin": "workspace:^",
|
||||||
"@mosaic/log": "workspace:^",
|
"@mosaic/log": "workspace:^",
|
||||||
"@mosaic/memory": "workspace:^",
|
"@mosaic/memory": "workspace:^",
|
||||||
"@mosaic/queue": "workspace:^",
|
"@mosaic/queue": "workspace:^",
|
||||||
|
"@mosaic/storage": "workspace:^",
|
||||||
"@mosaic/telegram-plugin": "workspace:^",
|
"@mosaic/telegram-plugin": "workspace:^",
|
||||||
"@mosaic/types": "workspace:^",
|
"@mosaic/types": "workspace:^",
|
||||||
"@nestjs/common": "^11.0.0",
|
"@nestjs/common": "^11.0.0",
|
||||||
|
|||||||
90
apps/gateway/src/admin/admin-tokens.controller.ts
Normal file
90
apps/gateway/src/admin/admin-tokens.controller.ts
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
import {
|
||||||
|
Body,
|
||||||
|
Controller,
|
||||||
|
Delete,
|
||||||
|
Get,
|
||||||
|
HttpCode,
|
||||||
|
HttpStatus,
|
||||||
|
Inject,
|
||||||
|
Param,
|
||||||
|
Post,
|
||||||
|
UseGuards,
|
||||||
|
} from '@nestjs/common';
|
||||||
|
import { randomBytes, createHash } from 'node:crypto';
|
||||||
|
import { eq, type Db, adminTokens } from '@mosaic/db';
|
||||||
|
import { v4 as uuid } from 'uuid';
|
||||||
|
import { DB } from '../database/database.module.js';
|
||||||
|
import { AdminGuard } from './admin.guard.js';
|
||||||
|
import { CurrentUser } from '../auth/current-user.decorator.js';
|
||||||
|
import type {
|
||||||
|
CreateTokenDto,
|
||||||
|
TokenCreatedDto,
|
||||||
|
TokenDto,
|
||||||
|
TokenListDto,
|
||||||
|
} from './admin-tokens.dto.js';
|
||||||
|
|
||||||
|
function hashToken(plaintext: string): string {
|
||||||
|
return createHash('sha256').update(plaintext).digest('hex');
|
||||||
|
}
|
||||||
|
|
||||||
|
function toTokenDto(row: typeof adminTokens.$inferSelect): TokenDto {
|
||||||
|
return {
|
||||||
|
id: row.id,
|
||||||
|
label: row.label,
|
||||||
|
scope: row.scope,
|
||||||
|
expiresAt: row.expiresAt?.toISOString() ?? null,
|
||||||
|
lastUsedAt: row.lastUsedAt?.toISOString() ?? null,
|
||||||
|
createdAt: row.createdAt.toISOString(),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
@Controller('api/admin/tokens')
|
||||||
|
@UseGuards(AdminGuard)
|
||||||
|
export class AdminTokensController {
|
||||||
|
constructor(@Inject(DB) private readonly db: Db) {}
|
||||||
|
|
||||||
|
@Post()
|
||||||
|
async create(
|
||||||
|
@Body() dto: CreateTokenDto,
|
||||||
|
@CurrentUser() user: { id: string },
|
||||||
|
): Promise<TokenCreatedDto> {
|
||||||
|
const plaintext = randomBytes(32).toString('hex');
|
||||||
|
const tokenHash = hashToken(plaintext);
|
||||||
|
const id = uuid();
|
||||||
|
|
||||||
|
const expiresAt = dto.expiresInDays
|
||||||
|
? new Date(Date.now() + dto.expiresInDays * 24 * 60 * 60 * 1000)
|
||||||
|
: null;
|
||||||
|
|
||||||
|
const [row] = await this.db
|
||||||
|
.insert(adminTokens)
|
||||||
|
.values({
|
||||||
|
id,
|
||||||
|
userId: user.id,
|
||||||
|
tokenHash,
|
||||||
|
label: dto.label ?? 'CLI token',
|
||||||
|
scope: dto.scope ?? 'admin',
|
||||||
|
expiresAt,
|
||||||
|
})
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
return { ...toTokenDto(row!), plaintext };
|
||||||
|
}
|
||||||
|
|
||||||
|
@Get()
|
||||||
|
async list(@CurrentUser() user: { id: string }): Promise<TokenListDto> {
|
||||||
|
const rows = await this.db
|
||||||
|
.select()
|
||||||
|
.from(adminTokens)
|
||||||
|
.where(eq(adminTokens.userId, user.id))
|
||||||
|
.orderBy(adminTokens.createdAt);
|
||||||
|
|
||||||
|
return { tokens: rows.map(toTokenDto), total: rows.length };
|
||||||
|
}
|
||||||
|
|
||||||
|
@Delete(':id')
|
||||||
|
@HttpCode(HttpStatus.NO_CONTENT)
|
||||||
|
async revoke(@Param('id') id: string, @CurrentUser() _user: { id: string }): Promise<void> {
|
||||||
|
await this.db.delete(adminTokens).where(eq(adminTokens.id, id));
|
||||||
|
}
|
||||||
|
}
|
||||||
33
apps/gateway/src/admin/admin-tokens.dto.ts
Normal file
33
apps/gateway/src/admin/admin-tokens.dto.ts
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
import { IsString, IsOptional, IsInt, Min } from 'class-validator';
|
||||||
|
|
||||||
|
export class CreateTokenDto {
|
||||||
|
@IsString()
|
||||||
|
label!: string;
|
||||||
|
|
||||||
|
@IsOptional()
|
||||||
|
@IsString()
|
||||||
|
scope?: string;
|
||||||
|
|
||||||
|
@IsOptional()
|
||||||
|
@IsInt()
|
||||||
|
@Min(1)
|
||||||
|
expiresInDays?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TokenDto {
|
||||||
|
id: string;
|
||||||
|
label: string;
|
||||||
|
scope: string;
|
||||||
|
expiresAt: string | null;
|
||||||
|
lastUsedAt: string | null;
|
||||||
|
createdAt: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TokenCreatedDto extends TokenDto {
|
||||||
|
plaintext: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TokenListDto {
|
||||||
|
tokens: TokenDto[];
|
||||||
|
total: number;
|
||||||
|
}
|
||||||
@@ -6,10 +6,11 @@ import {
|
|||||||
Injectable,
|
Injectable,
|
||||||
UnauthorizedException,
|
UnauthorizedException,
|
||||||
} from '@nestjs/common';
|
} from '@nestjs/common';
|
||||||
|
import { createHash } from 'node:crypto';
|
||||||
import { fromNodeHeaders } from 'better-auth/node';
|
import { fromNodeHeaders } from 'better-auth/node';
|
||||||
import type { Auth } from '@mosaic/auth';
|
import type { Auth } from '@mosaic/auth';
|
||||||
import type { Db } from '@mosaic/db';
|
import type { Db } from '@mosaic/db';
|
||||||
import { eq, users as usersTable } from '@mosaic/db';
|
import { eq, adminTokens, users as usersTable } from '@mosaic/db';
|
||||||
import type { FastifyRequest } from 'fastify';
|
import type { FastifyRequest } from 'fastify';
|
||||||
import { AUTH } from '../auth/auth.tokens.js';
|
import { AUTH } from '../auth/auth.tokens.js';
|
||||||
import { DB } from '../database/database.module.js';
|
import { DB } from '../database/database.module.js';
|
||||||
@@ -19,6 +20,8 @@ interface UserWithRole {
|
|||||||
role?: string;
|
role?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type AuthenticatedRequest = FastifyRequest & { user: unknown; session: unknown };
|
||||||
|
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class AdminGuard implements CanActivate {
|
export class AdminGuard implements CanActivate {
|
||||||
constructor(
|
constructor(
|
||||||
@@ -28,8 +31,64 @@ export class AdminGuard implements CanActivate {
|
|||||||
|
|
||||||
async canActivate(context: ExecutionContext): Promise<boolean> {
|
async canActivate(context: ExecutionContext): Promise<boolean> {
|
||||||
const request = context.switchToHttp().getRequest<FastifyRequest>();
|
const request = context.switchToHttp().getRequest<FastifyRequest>();
|
||||||
const headers = fromNodeHeaders(request.raw.headers);
|
|
||||||
|
|
||||||
|
// Try bearer token auth first
|
||||||
|
const authHeader = request.raw.headers['authorization'];
|
||||||
|
if (authHeader?.startsWith('Bearer ')) {
|
||||||
|
return this.validateBearerToken(request, authHeader.slice(7));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fall back to BetterAuth session
|
||||||
|
return this.validateSession(request);
|
||||||
|
}
|
||||||
|
|
||||||
|
private async validateBearerToken(request: FastifyRequest, plaintext: string): Promise<boolean> {
|
||||||
|
const tokenHash = createHash('sha256').update(plaintext).digest('hex');
|
||||||
|
|
||||||
|
const [row] = await this.db
|
||||||
|
.select({
|
||||||
|
tokenId: adminTokens.id,
|
||||||
|
userId: adminTokens.userId,
|
||||||
|
scope: adminTokens.scope,
|
||||||
|
expiresAt: adminTokens.expiresAt,
|
||||||
|
userName: usersTable.name,
|
||||||
|
userEmail: usersTable.email,
|
||||||
|
userRole: usersTable.role,
|
||||||
|
})
|
||||||
|
.from(adminTokens)
|
||||||
|
.innerJoin(usersTable, eq(adminTokens.userId, usersTable.id))
|
||||||
|
.where(eq(adminTokens.tokenHash, tokenHash))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!row) {
|
||||||
|
throw new UnauthorizedException('Invalid API token');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (row.expiresAt && row.expiresAt < new Date()) {
|
||||||
|
throw new UnauthorizedException('API token expired');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (row.userRole !== 'admin') {
|
||||||
|
throw new ForbiddenException('Admin access required');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update last-used timestamp (fire-and-forget)
|
||||||
|
this.db
|
||||||
|
.update(adminTokens)
|
||||||
|
.set({ lastUsedAt: new Date() })
|
||||||
|
.where(eq(adminTokens.id, row.tokenId))
|
||||||
|
.then(() => {})
|
||||||
|
.catch(() => {});
|
||||||
|
|
||||||
|
const req = request as AuthenticatedRequest;
|
||||||
|
req.user = { id: row.userId, name: row.userName, email: row.userEmail, role: row.userRole };
|
||||||
|
req.session = { id: `token:${row.tokenId}`, userId: row.userId };
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private async validateSession(request: FastifyRequest): Promise<boolean> {
|
||||||
|
const headers = fromNodeHeaders(request.raw.headers);
|
||||||
const result = await this.auth.api.getSession({ headers });
|
const result = await this.auth.api.getSession({ headers });
|
||||||
|
|
||||||
if (!result) {
|
if (!result) {
|
||||||
@@ -38,8 +97,6 @@ export class AdminGuard implements CanActivate {
|
|||||||
|
|
||||||
const user = result.user as UserWithRole;
|
const user = result.user as UserWithRole;
|
||||||
|
|
||||||
// Ensure the role field is populated. better-auth should include additionalFields
|
|
||||||
// in the session, but as a fallback, fetch the role from the database if needed.
|
|
||||||
let userRole = user.role;
|
let userRole = user.role;
|
||||||
if (!userRole) {
|
if (!userRole) {
|
||||||
const [dbUser] = await this.db
|
const [dbUser] = await this.db
|
||||||
@@ -48,7 +105,6 @@ export class AdminGuard implements CanActivate {
|
|||||||
.where(eq(usersTable.id, user.id))
|
.where(eq(usersTable.id, user.id))
|
||||||
.limit(1);
|
.limit(1);
|
||||||
userRole = dbUser?.role ?? 'member';
|
userRole = dbUser?.role ?? 'member';
|
||||||
// Update the session user object with the fetched role
|
|
||||||
(user as UserWithRole).role = userRole;
|
(user as UserWithRole).role = userRole;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -56,8 +112,9 @@ export class AdminGuard implements CanActivate {
|
|||||||
throw new ForbiddenException('Admin access required');
|
throw new ForbiddenException('Admin access required');
|
||||||
}
|
}
|
||||||
|
|
||||||
(request as FastifyRequest & { user: unknown; session: unknown }).user = result.user;
|
const req = request as AuthenticatedRequest;
|
||||||
(request as FastifyRequest & { user: unknown; session: unknown }).session = result.session;
|
req.user = result.user;
|
||||||
|
req.session = result.session;
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,10 +2,18 @@ import { Module } from '@nestjs/common';
|
|||||||
import { AdminController } from './admin.controller.js';
|
import { AdminController } from './admin.controller.js';
|
||||||
import { AdminHealthController } from './admin-health.controller.js';
|
import { AdminHealthController } from './admin-health.controller.js';
|
||||||
import { AdminJobsController } from './admin-jobs.controller.js';
|
import { AdminJobsController } from './admin-jobs.controller.js';
|
||||||
|
import { AdminTokensController } from './admin-tokens.controller.js';
|
||||||
|
import { BootstrapController } from './bootstrap.controller.js';
|
||||||
import { AdminGuard } from './admin.guard.js';
|
import { AdminGuard } from './admin.guard.js';
|
||||||
|
|
||||||
@Module({
|
@Module({
|
||||||
controllers: [AdminController, AdminHealthController, AdminJobsController],
|
controllers: [
|
||||||
|
AdminController,
|
||||||
|
AdminHealthController,
|
||||||
|
AdminJobsController,
|
||||||
|
AdminTokensController,
|
||||||
|
BootstrapController,
|
||||||
|
],
|
||||||
providers: [AdminGuard],
|
providers: [AdminGuard],
|
||||||
})
|
})
|
||||||
export class AdminModule {}
|
export class AdminModule {}
|
||||||
|
|||||||
101
apps/gateway/src/admin/bootstrap.controller.ts
Normal file
101
apps/gateway/src/admin/bootstrap.controller.ts
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
import {
|
||||||
|
Body,
|
||||||
|
Controller,
|
||||||
|
ForbiddenException,
|
||||||
|
Get,
|
||||||
|
Inject,
|
||||||
|
InternalServerErrorException,
|
||||||
|
Post,
|
||||||
|
} from '@nestjs/common';
|
||||||
|
import { randomBytes, createHash } from 'node:crypto';
|
||||||
|
import { count, eq, type Db, users as usersTable, adminTokens } from '@mosaic/db';
|
||||||
|
import type { Auth } from '@mosaic/auth';
|
||||||
|
import { v4 as uuid } from 'uuid';
|
||||||
|
import { AUTH } from '../auth/auth.tokens.js';
|
||||||
|
import { DB } from '../database/database.module.js';
|
||||||
|
import type { BootstrapSetupDto, BootstrapStatusDto, BootstrapResultDto } from './bootstrap.dto.js';
|
||||||
|
|
||||||
|
@Controller('api/bootstrap')
|
||||||
|
export class BootstrapController {
|
||||||
|
constructor(
|
||||||
|
@Inject(AUTH) private readonly auth: Auth,
|
||||||
|
@Inject(DB) private readonly db: Db,
|
||||||
|
) {}
|
||||||
|
|
||||||
|
@Get('status')
|
||||||
|
async status(): Promise<BootstrapStatusDto> {
|
||||||
|
const [result] = await this.db.select({ total: count() }).from(usersTable);
|
||||||
|
return { needsSetup: (result?.total ?? 0) === 0 };
|
||||||
|
}
|
||||||
|
|
||||||
|
@Post('setup')
|
||||||
|
async setup(@Body() dto: BootstrapSetupDto): Promise<BootstrapResultDto> {
|
||||||
|
// Only allow setup when zero users exist
|
||||||
|
const [result] = await this.db.select({ total: count() }).from(usersTable);
|
||||||
|
if ((result?.total ?? 0) > 0) {
|
||||||
|
throw new ForbiddenException('Setup already completed — users exist');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create admin user via BetterAuth API
|
||||||
|
const authApi = this.auth.api as unknown as {
|
||||||
|
createUser: (opts: {
|
||||||
|
body: { name: string; email: string; password: string; role?: string };
|
||||||
|
}) => Promise<{
|
||||||
|
user: { id: string; name: string; email: string };
|
||||||
|
}>;
|
||||||
|
};
|
||||||
|
|
||||||
|
const created = await authApi.createUser({
|
||||||
|
body: {
|
||||||
|
name: dto.name,
|
||||||
|
email: dto.email,
|
||||||
|
password: dto.password,
|
||||||
|
role: 'admin',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Verify user was created
|
||||||
|
const [user] = await this.db
|
||||||
|
.select()
|
||||||
|
.from(usersTable)
|
||||||
|
.where(eq(usersTable.id, created.user.id))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!user) throw new InternalServerErrorException('User created but not found');
|
||||||
|
|
||||||
|
// Ensure role is admin (createUser may not set it via BetterAuth)
|
||||||
|
if (user.role !== 'admin') {
|
||||||
|
await this.db.update(usersTable).set({ role: 'admin' }).where(eq(usersTable.id, user.id));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate admin API token
|
||||||
|
const plaintext = randomBytes(32).toString('hex');
|
||||||
|
const tokenHash = createHash('sha256').update(plaintext).digest('hex');
|
||||||
|
const tokenId = uuid();
|
||||||
|
|
||||||
|
const [token] = await this.db
|
||||||
|
.insert(adminTokens)
|
||||||
|
.values({
|
||||||
|
id: tokenId,
|
||||||
|
userId: user.id,
|
||||||
|
tokenHash,
|
||||||
|
label: 'Initial setup token',
|
||||||
|
scope: 'admin',
|
||||||
|
})
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
return {
|
||||||
|
user: {
|
||||||
|
id: user.id,
|
||||||
|
name: user.name,
|
||||||
|
email: user.email,
|
||||||
|
role: 'admin',
|
||||||
|
},
|
||||||
|
token: {
|
||||||
|
id: token!.id,
|
||||||
|
plaintext,
|
||||||
|
label: token!.label,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
31
apps/gateway/src/admin/bootstrap.dto.ts
Normal file
31
apps/gateway/src/admin/bootstrap.dto.ts
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
import { IsString, IsEmail, MinLength } from 'class-validator';
|
||||||
|
|
||||||
|
export class BootstrapSetupDto {
|
||||||
|
@IsString()
|
||||||
|
name!: string;
|
||||||
|
|
||||||
|
@IsEmail()
|
||||||
|
email!: string;
|
||||||
|
|
||||||
|
@IsString()
|
||||||
|
@MinLength(8)
|
||||||
|
password!: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BootstrapStatusDto {
|
||||||
|
needsSetup: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BootstrapResultDto {
|
||||||
|
user: {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
email: string;
|
||||||
|
role: string;
|
||||||
|
};
|
||||||
|
token: {
|
||||||
|
id: string;
|
||||||
|
plaintext: string;
|
||||||
|
label: string;
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -23,6 +23,7 @@ import { createFileTools } from './tools/file-tools.js';
|
|||||||
import { createGitTools } from './tools/git-tools.js';
|
import { createGitTools } from './tools/git-tools.js';
|
||||||
import { createShellTools } from './tools/shell-tools.js';
|
import { createShellTools } from './tools/shell-tools.js';
|
||||||
import { createWebTools } from './tools/web-tools.js';
|
import { createWebTools } from './tools/web-tools.js';
|
||||||
|
import { createSearchTools } from './tools/search-tools.js';
|
||||||
import type { SessionInfoDto, SessionMetrics } from './session.dto.js';
|
import type { SessionInfoDto, SessionMetrics } from './session.dto.js';
|
||||||
import { SystemOverrideService } from '../preferences/system-override.service.js';
|
import { SystemOverrideService } from '../preferences/system-override.service.js';
|
||||||
import { PreferencesService } from '../preferences/preferences.service.js';
|
import { PreferencesService } from '../preferences/preferences.service.js';
|
||||||
@@ -146,6 +147,7 @@ export class AgentService implements OnModuleDestroy {
|
|||||||
...createGitTools(sandboxDir),
|
...createGitTools(sandboxDir),
|
||||||
...createShellTools(sandboxDir),
|
...createShellTools(sandboxDir),
|
||||||
...createWebTools(),
|
...createWebTools(),
|
||||||
|
...createSearchTools(),
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -190,5 +190,169 @@ export function createFileTools(baseDir: string): ToolDefinition[] {
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
return [readFileTool, writeFileTool, listDirectoryTool];
|
const editFileTool: ToolDefinition = {
|
||||||
|
name: 'fs_edit_file',
|
||||||
|
label: 'Edit File',
|
||||||
|
description:
|
||||||
|
'Make targeted text replacements in a file. Each edit replaces an exact match of oldText with newText. ' +
|
||||||
|
'All edits are matched against the original file content (not incrementally). ' +
|
||||||
|
'Each oldText must be unique in the file and edits must not overlap.',
|
||||||
|
parameters: Type.Object({
|
||||||
|
path: Type.String({
|
||||||
|
description: 'File path (relative to sandbox base or absolute within it)',
|
||||||
|
}),
|
||||||
|
edits: Type.Array(
|
||||||
|
Type.Object({
|
||||||
|
oldText: Type.String({
|
||||||
|
description: 'Exact text to find and replace (must be unique in the file)',
|
||||||
|
}),
|
||||||
|
newText: Type.String({ description: 'Replacement text' }),
|
||||||
|
}),
|
||||||
|
{ description: 'One or more targeted replacements', minItems: 1 },
|
||||||
|
),
|
||||||
|
}),
|
||||||
|
async execute(_toolCallId, params) {
|
||||||
|
const { path, edits } = params as {
|
||||||
|
path: string;
|
||||||
|
edits: Array<{ oldText: string; newText: string }>;
|
||||||
|
};
|
||||||
|
|
||||||
|
let safePath: string;
|
||||||
|
try {
|
||||||
|
safePath = guardPath(path, baseDir);
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof SandboxEscapeError) {
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text' as const, text: `Error: ${err.message}` }],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text' as const, text: `Error: ${String(err)}` }],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const info = await stat(safePath);
|
||||||
|
if (!info.isFile()) {
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text' as const, text: `Error: path is not a file: ${path}` }],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
if (info.size > MAX_READ_BYTES) {
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text' as const,
|
||||||
|
text: `Error: file too large for editing (${info.size} bytes, limit ${MAX_READ_BYTES} bytes)`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text' as const, text: `Error reading file: ${String(err)}` }],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
let content: string;
|
||||||
|
try {
|
||||||
|
content = await readFile(safePath, { encoding: 'utf8' });
|
||||||
|
} catch (err) {
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text' as const, text: `Error reading file: ${String(err)}` }],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate all edits before applying any
|
||||||
|
const errors: string[] = [];
|
||||||
|
for (let i = 0; i < edits.length; i++) {
|
||||||
|
const edit = edits[i]!;
|
||||||
|
const occurrences = content.split(edit.oldText).length - 1;
|
||||||
|
if (occurrences === 0) {
|
||||||
|
errors.push(`Edit ${i + 1}: oldText not found in file`);
|
||||||
|
} else if (occurrences > 1) {
|
||||||
|
errors.push(`Edit ${i + 1}: oldText matches ${occurrences} locations (must be unique)`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for overlapping edits
|
||||||
|
if (errors.length === 0) {
|
||||||
|
const positions = edits.map((edit, i) => ({
|
||||||
|
index: i,
|
||||||
|
start: content.indexOf(edit.oldText),
|
||||||
|
end: content.indexOf(edit.oldText) + edit.oldText.length,
|
||||||
|
}));
|
||||||
|
positions.sort((a, b) => a.start - b.start);
|
||||||
|
for (let i = 1; i < positions.length; i++) {
|
||||||
|
if (positions[i]!.start < positions[i - 1]!.end) {
|
||||||
|
errors.push(
|
||||||
|
`Edits ${positions[i - 1]!.index + 1} and ${positions[i]!.index + 1} overlap`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (errors.length > 0) {
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text' as const,
|
||||||
|
text: `Edit validation failed:\n${errors.join('\n')}`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply edits: process from end to start to preserve positions
|
||||||
|
const positions = edits.map((edit) => ({
|
||||||
|
edit,
|
||||||
|
start: content.indexOf(edit.oldText),
|
||||||
|
}));
|
||||||
|
positions.sort((a, b) => b.start - a.start); // reverse order
|
||||||
|
|
||||||
|
let result = content;
|
||||||
|
for (const { edit } of positions) {
|
||||||
|
result = result.replace(edit.oldText, edit.newText);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Buffer.byteLength(result, 'utf8') > MAX_WRITE_BYTES) {
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text' as const,
|
||||||
|
text: `Error: resulting file too large (limit ${MAX_WRITE_BYTES} bytes)`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await writeFile(safePath, result, { encoding: 'utf8' });
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text' as const,
|
||||||
|
text: `File edited successfully: ${path} (${edits.length} edit(s) applied)`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
} catch (err) {
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text' as const, text: `Error writing file: ${String(err)}` }],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
return [readFileTool, writeFileTool, listDirectoryTool, editFileTool];
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ export { createBrainTools } from './brain-tools.js';
|
|||||||
export { createCoordTools } from './coord-tools.js';
|
export { createCoordTools } from './coord-tools.js';
|
||||||
export { createFileTools } from './file-tools.js';
|
export { createFileTools } from './file-tools.js';
|
||||||
export { createGitTools } from './git-tools.js';
|
export { createGitTools } from './git-tools.js';
|
||||||
|
export { createSearchTools } from './search-tools.js';
|
||||||
export { createShellTools } from './shell-tools.js';
|
export { createShellTools } from './shell-tools.js';
|
||||||
export { createWebTools } from './web-tools.js';
|
export { createWebTools } from './web-tools.js';
|
||||||
export { createSkillTools } from './skill-tools.js';
|
export { createSkillTools } from './skill-tools.js';
|
||||||
|
|||||||
496
apps/gateway/src/agent/tools/search-tools.ts
Normal file
496
apps/gateway/src/agent/tools/search-tools.ts
Normal file
@@ -0,0 +1,496 @@
|
|||||||
|
import { Type } from '@sinclair/typebox';
|
||||||
|
import type { ToolDefinition } from '@mariozechner/pi-coding-agent';
|
||||||
|
|
||||||
|
const DEFAULT_TIMEOUT_MS = 15_000;
|
||||||
|
const MAX_RESULTS = 10;
|
||||||
|
const MAX_RESPONSE_BYTES = 256 * 1024; // 256 KB
|
||||||
|
|
||||||
|
// ─── Provider helpers ────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
interface SearchResult {
|
||||||
|
title: string;
|
||||||
|
url: string;
|
||||||
|
snippet: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface SearchResponse {
|
||||||
|
provider: string;
|
||||||
|
query: string;
|
||||||
|
results: SearchResult[];
|
||||||
|
error?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function fetchWithTimeout(
|
||||||
|
url: string,
|
||||||
|
init: RequestInit,
|
||||||
|
timeoutMs: number,
|
||||||
|
): Promise<Response> {
|
||||||
|
const controller = new AbortController();
|
||||||
|
const timer = setTimeout(() => controller.abort(), timeoutMs);
|
||||||
|
try {
|
||||||
|
return await fetch(url, { ...init, signal: controller.signal });
|
||||||
|
} finally {
|
||||||
|
clearTimeout(timer);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function readLimited(response: Response): Promise<string> {
|
||||||
|
const reader = response.body?.getReader();
|
||||||
|
if (!reader) return '';
|
||||||
|
const chunks: Uint8Array[] = [];
|
||||||
|
let total = 0;
|
||||||
|
while (true) {
|
||||||
|
const { done, value } = await reader.read();
|
||||||
|
if (done) break;
|
||||||
|
total += value.length;
|
||||||
|
if (total > MAX_RESPONSE_BYTES) {
|
||||||
|
chunks.push(value.subarray(0, MAX_RESPONSE_BYTES - (total - value.length)));
|
||||||
|
reader.cancel();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
chunks.push(value);
|
||||||
|
}
|
||||||
|
const combined = new Uint8Array(chunks.reduce((a, c) => a + c.length, 0));
|
||||||
|
let offset = 0;
|
||||||
|
for (const chunk of chunks) {
|
||||||
|
combined.set(chunk, offset);
|
||||||
|
offset += chunk.length;
|
||||||
|
}
|
||||||
|
return new TextDecoder().decode(combined);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Brave Search ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
async function searchBrave(query: string, limit: number): Promise<SearchResponse> {
|
||||||
|
const apiKey = process.env['BRAVE_API_KEY'];
|
||||||
|
if (!apiKey) return { provider: 'brave', query, results: [], error: 'BRAVE_API_KEY not set' };
|
||||||
|
|
||||||
|
try {
|
||||||
|
const params = new URLSearchParams({
|
||||||
|
q: query,
|
||||||
|
count: String(Math.min(limit, 20)),
|
||||||
|
});
|
||||||
|
const res = await fetchWithTimeout(
|
||||||
|
`https://api.search.brave.com/res/v1/web/search?${params}`,
|
||||||
|
{ headers: { 'X-Subscription-Token': apiKey, Accept: 'application/json' } },
|
||||||
|
DEFAULT_TIMEOUT_MS,
|
||||||
|
);
|
||||||
|
if (!res.ok) {
|
||||||
|
const body = await res.text().catch(() => '');
|
||||||
|
return { provider: 'brave', query, results: [], error: `HTTP ${res.status}: ${body}` };
|
||||||
|
}
|
||||||
|
const data = (await res.json()) as {
|
||||||
|
web?: { results?: Array<{ title: string; url: string; description: string }> };
|
||||||
|
};
|
||||||
|
const results: SearchResult[] = (data.web?.results ?? []).slice(0, limit).map((r) => ({
|
||||||
|
title: r.title,
|
||||||
|
url: r.url,
|
||||||
|
snippet: r.description,
|
||||||
|
}));
|
||||||
|
return { provider: 'brave', query, results };
|
||||||
|
} catch (err) {
|
||||||
|
return {
|
||||||
|
provider: 'brave',
|
||||||
|
query,
|
||||||
|
results: [],
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Tavily Search ───────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
async function searchTavily(query: string, limit: number): Promise<SearchResponse> {
|
||||||
|
const apiKey = process.env['TAVILY_API_KEY'];
|
||||||
|
if (!apiKey) return { provider: 'tavily', query, results: [], error: 'TAVILY_API_KEY not set' };
|
||||||
|
|
||||||
|
try {
|
||||||
|
const res = await fetchWithTimeout(
|
||||||
|
'https://api.tavily.com/search',
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({
|
||||||
|
api_key: apiKey,
|
||||||
|
query,
|
||||||
|
max_results: Math.min(limit, 10),
|
||||||
|
include_answer: false,
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
DEFAULT_TIMEOUT_MS,
|
||||||
|
);
|
||||||
|
if (!res.ok) {
|
||||||
|
const body = await res.text().catch(() => '');
|
||||||
|
return { provider: 'tavily', query, results: [], error: `HTTP ${res.status}: ${body}` };
|
||||||
|
}
|
||||||
|
const data = (await res.json()) as {
|
||||||
|
results?: Array<{ title: string; url: string; content: string }>;
|
||||||
|
};
|
||||||
|
const results: SearchResult[] = (data.results ?? []).slice(0, limit).map((r) => ({
|
||||||
|
title: r.title,
|
||||||
|
url: r.url,
|
||||||
|
snippet: r.content,
|
||||||
|
}));
|
||||||
|
return { provider: 'tavily', query, results };
|
||||||
|
} catch (err) {
|
||||||
|
return {
|
||||||
|
provider: 'tavily',
|
||||||
|
query,
|
||||||
|
results: [],
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── SearXNG (self-hosted) ───────────────────────────────────────────────────
|
||||||
|
|
||||||
|
async function searchSearxng(query: string, limit: number): Promise<SearchResponse> {
|
||||||
|
const baseUrl = process.env['SEARXNG_URL'];
|
||||||
|
if (!baseUrl) return { provider: 'searxng', query, results: [], error: 'SEARXNG_URL not set' };
|
||||||
|
|
||||||
|
try {
|
||||||
|
const params = new URLSearchParams({
|
||||||
|
q: query,
|
||||||
|
format: 'json',
|
||||||
|
pageno: '1',
|
||||||
|
});
|
||||||
|
const res = await fetchWithTimeout(
|
||||||
|
`${baseUrl.replace(/\/$/, '')}/search?${params}`,
|
||||||
|
{ headers: { Accept: 'application/json' } },
|
||||||
|
DEFAULT_TIMEOUT_MS,
|
||||||
|
);
|
||||||
|
if (!res.ok) {
|
||||||
|
const body = await res.text().catch(() => '');
|
||||||
|
return { provider: 'searxng', query, results: [], error: `HTTP ${res.status}: ${body}` };
|
||||||
|
}
|
||||||
|
const data = (await res.json()) as {
|
||||||
|
results?: Array<{ title: string; url: string; content: string }>;
|
||||||
|
};
|
||||||
|
const results: SearchResult[] = (data.results ?? []).slice(0, limit).map((r) => ({
|
||||||
|
title: r.title,
|
||||||
|
url: r.url,
|
||||||
|
snippet: r.content,
|
||||||
|
}));
|
||||||
|
return { provider: 'searxng', query, results };
|
||||||
|
} catch (err) {
|
||||||
|
return {
|
||||||
|
provider: 'searxng',
|
||||||
|
query,
|
||||||
|
results: [],
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── DuckDuckGo (lite HTML endpoint) ─────────────────────────────────────────
|
||||||
|
|
||||||
|
async function searchDuckDuckGo(query: string, limit: number): Promise<SearchResponse> {
|
||||||
|
try {
|
||||||
|
// Use the DuckDuckGo Instant Answer API (JSON, free, no key)
|
||||||
|
const params = new URLSearchParams({
|
||||||
|
q: query,
|
||||||
|
format: 'json',
|
||||||
|
no_html: '1',
|
||||||
|
skip_disambig: '1',
|
||||||
|
});
|
||||||
|
const res = await fetchWithTimeout(
|
||||||
|
`https://api.duckduckgo.com/?${params}`,
|
||||||
|
{ headers: { Accept: 'application/json' } },
|
||||||
|
DEFAULT_TIMEOUT_MS,
|
||||||
|
);
|
||||||
|
if (!res.ok) {
|
||||||
|
return {
|
||||||
|
provider: 'duckduckgo',
|
||||||
|
query,
|
||||||
|
results: [],
|
||||||
|
error: `HTTP ${res.status}`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
const text = await readLimited(res);
|
||||||
|
const data = JSON.parse(text) as {
|
||||||
|
AbstractText?: string;
|
||||||
|
AbstractURL?: string;
|
||||||
|
AbstractSource?: string;
|
||||||
|
RelatedTopics?: Array<{
|
||||||
|
Text?: string;
|
||||||
|
FirstURL?: string;
|
||||||
|
Result?: string;
|
||||||
|
Topics?: Array<{ Text?: string; FirstURL?: string }>;
|
||||||
|
}>;
|
||||||
|
};
|
||||||
|
|
||||||
|
const results: SearchResult[] = [];
|
||||||
|
|
||||||
|
// Main abstract result
|
||||||
|
if (data.AbstractText && data.AbstractURL) {
|
||||||
|
results.push({
|
||||||
|
title: data.AbstractSource ?? 'DuckDuckGo Abstract',
|
||||||
|
url: data.AbstractURL,
|
||||||
|
snippet: data.AbstractText,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Related topics
|
||||||
|
for (const topic of data.RelatedTopics ?? []) {
|
||||||
|
if (results.length >= limit) break;
|
||||||
|
if (topic.Text && topic.FirstURL) {
|
||||||
|
results.push({
|
||||||
|
title: topic.Text.slice(0, 120),
|
||||||
|
url: topic.FirstURL,
|
||||||
|
snippet: topic.Text,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
// Sub-topics
|
||||||
|
for (const sub of topic.Topics ?? []) {
|
||||||
|
if (results.length >= limit) break;
|
||||||
|
if (sub.Text && sub.FirstURL) {
|
||||||
|
results.push({
|
||||||
|
title: sub.Text.slice(0, 120),
|
||||||
|
url: sub.FirstURL,
|
||||||
|
snippet: sub.Text,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { provider: 'duckduckgo', query, results: results.slice(0, limit) };
|
||||||
|
} catch (err) {
|
||||||
|
return {
|
||||||
|
provider: 'duckduckgo',
|
||||||
|
query,
|
||||||
|
results: [],
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Provider resolution ─────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
type SearchProvider = 'brave' | 'tavily' | 'searxng' | 'duckduckgo' | 'auto';
|
||||||
|
|
||||||
|
function getAvailableProviders(): SearchProvider[] {
|
||||||
|
const available: SearchProvider[] = [];
|
||||||
|
if (process.env['BRAVE_API_KEY']) available.push('brave');
|
||||||
|
if (process.env['TAVILY_API_KEY']) available.push('tavily');
|
||||||
|
if (process.env['SEARXNG_URL']) available.push('searxng');
|
||||||
|
// DuckDuckGo is always available (no API key needed)
|
||||||
|
available.push('duckduckgo');
|
||||||
|
return available;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function executeSearch(
|
||||||
|
provider: SearchProvider,
|
||||||
|
query: string,
|
||||||
|
limit: number,
|
||||||
|
): Promise<SearchResponse> {
|
||||||
|
switch (provider) {
|
||||||
|
case 'brave':
|
||||||
|
return searchBrave(query, limit);
|
||||||
|
case 'tavily':
|
||||||
|
return searchTavily(query, limit);
|
||||||
|
case 'searxng':
|
||||||
|
return searchSearxng(query, limit);
|
||||||
|
case 'duckduckgo':
|
||||||
|
return searchDuckDuckGo(query, limit);
|
||||||
|
case 'auto': {
|
||||||
|
// Try providers in priority order: Brave > Tavily > SearXNG > DuckDuckGo
|
||||||
|
const available = getAvailableProviders();
|
||||||
|
for (const p of available) {
|
||||||
|
const result = await executeSearch(p, query, limit);
|
||||||
|
if (!result.error && result.results.length > 0) return result;
|
||||||
|
}
|
||||||
|
// Fall back to DuckDuckGo if everything failed
|
||||||
|
return searchDuckDuckGo(query, limit);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatSearchResults(response: SearchResponse): string {
|
||||||
|
const lines: string[] = [];
|
||||||
|
lines.push(`Search provider: ${response.provider}`);
|
||||||
|
lines.push(`Query: "${response.query}"`);
|
||||||
|
|
||||||
|
if (response.error) {
|
||||||
|
lines.push(`Error: ${response.error}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (response.results.length === 0) {
|
||||||
|
lines.push('No results found.');
|
||||||
|
} else {
|
||||||
|
lines.push(`Results (${response.results.length}):\n`);
|
||||||
|
for (let i = 0; i < response.results.length; i++) {
|
||||||
|
const r = response.results[i]!;
|
||||||
|
lines.push(`${i + 1}. ${r.title}`);
|
||||||
|
lines.push(` URL: ${r.url}`);
|
||||||
|
lines.push(` ${r.snippet}`);
|
||||||
|
lines.push('');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return lines.join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Tool exports ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
export function createSearchTools(): ToolDefinition[] {
|
||||||
|
const webSearch: ToolDefinition = {
|
||||||
|
name: 'web_search',
|
||||||
|
label: 'Web Search',
|
||||||
|
description:
|
||||||
|
'Search the web using configured search providers. ' +
|
||||||
|
'Supports Brave, Tavily, SearXNG, and DuckDuckGo. ' +
|
||||||
|
'Use "auto" provider to pick the best available. ' +
|
||||||
|
'DuckDuckGo is always available as a fallback (no API key needed).',
|
||||||
|
parameters: Type.Object({
|
||||||
|
query: Type.String({ description: 'Search query' }),
|
||||||
|
provider: Type.Optional(
|
||||||
|
Type.String({
|
||||||
|
description:
|
||||||
|
'Search provider: "auto" (default), "brave", "tavily", "searxng", or "duckduckgo"',
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
limit: Type.Optional(
|
||||||
|
Type.Number({ description: `Max results to return (default 5, max ${MAX_RESULTS})` }),
|
||||||
|
),
|
||||||
|
}),
|
||||||
|
async execute(_toolCallId, params) {
|
||||||
|
const { query, provider, limit } = params as {
|
||||||
|
query: string;
|
||||||
|
provider?: string;
|
||||||
|
limit?: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
const effectiveProvider = (provider ?? 'auto') as SearchProvider;
|
||||||
|
const validProviders = ['auto', 'brave', 'tavily', 'searxng', 'duckduckgo'];
|
||||||
|
if (!validProviders.includes(effectiveProvider)) {
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text' as const,
|
||||||
|
text: `Invalid provider "${provider}". Valid: ${validProviders.join(', ')}`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const effectiveLimit = Math.min(Math.max(limit ?? 5, 1), MAX_RESULTS);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await executeSearch(effectiveProvider, query, effectiveLimit);
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text' as const, text: formatSearchResults(response) }],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
} catch (err) {
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text' as const,
|
||||||
|
text: `Search failed: ${err instanceof Error ? err.message : String(err)}`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const webSearchNews: ToolDefinition = {
|
||||||
|
name: 'web_search_news',
|
||||||
|
label: 'Web Search (News)',
|
||||||
|
description:
|
||||||
|
'Search for recent news articles. Uses Brave News API if available, falls back to standard search with news keywords.',
|
||||||
|
parameters: Type.Object({
|
||||||
|
query: Type.String({ description: 'News search query' }),
|
||||||
|
limit: Type.Optional(
|
||||||
|
Type.Number({ description: `Max results (default 5, max ${MAX_RESULTS})` }),
|
||||||
|
),
|
||||||
|
}),
|
||||||
|
async execute(_toolCallId, params) {
|
||||||
|
const { query, limit } = params as { query: string; limit?: number };
|
||||||
|
const effectiveLimit = Math.min(Math.max(limit ?? 5, 1), MAX_RESULTS);
|
||||||
|
|
||||||
|
// Try Brave News API first (dedicated news endpoint)
|
||||||
|
const braveKey = process.env['BRAVE_API_KEY'];
|
||||||
|
if (braveKey) {
|
||||||
|
try {
|
||||||
|
const newsParams = new URLSearchParams({
|
||||||
|
q: query,
|
||||||
|
count: String(effectiveLimit),
|
||||||
|
});
|
||||||
|
const res = await fetchWithTimeout(
|
||||||
|
`https://api.search.brave.com/res/v1/news/search?${newsParams}`,
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
'X-Subscription-Token': braveKey,
|
||||||
|
Accept: 'application/json',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
DEFAULT_TIMEOUT_MS,
|
||||||
|
);
|
||||||
|
if (res.ok) {
|
||||||
|
const data = (await res.json()) as {
|
||||||
|
results?: Array<{
|
||||||
|
title: string;
|
||||||
|
url: string;
|
||||||
|
description: string;
|
||||||
|
age?: string;
|
||||||
|
}>;
|
||||||
|
};
|
||||||
|
const results: SearchResult[] = (data.results ?? [])
|
||||||
|
.slice(0, effectiveLimit)
|
||||||
|
.map((r) => ({
|
||||||
|
title: r.title + (r.age ? ` (${r.age})` : ''),
|
||||||
|
url: r.url,
|
||||||
|
snippet: r.description,
|
||||||
|
}));
|
||||||
|
const response: SearchResponse = { provider: 'brave-news', query, results };
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text' as const, text: formatSearchResults(response) }],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Fall through to generic search
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback: standard search with "news" appended
|
||||||
|
const newsQuery = `${query} news latest`;
|
||||||
|
const response = await executeSearch('auto', newsQuery, effectiveLimit);
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text' as const, text: formatSearchResults(response) }],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const searchProviders: ToolDefinition = {
|
||||||
|
name: 'web_search_providers',
|
||||||
|
label: 'List Search Providers',
|
||||||
|
description: 'List the currently available and configured web search providers.',
|
||||||
|
parameters: Type.Object({}),
|
||||||
|
async execute() {
|
||||||
|
const available = getAvailableProviders();
|
||||||
|
const allProviders = [
|
||||||
|
{ name: 'brave', configured: !!process.env['BRAVE_API_KEY'], envVar: 'BRAVE_API_KEY' },
|
||||||
|
{ name: 'tavily', configured: !!process.env['TAVILY_API_KEY'], envVar: 'TAVILY_API_KEY' },
|
||||||
|
{ name: 'searxng', configured: !!process.env['SEARXNG_URL'], envVar: 'SEARXNG_URL' },
|
||||||
|
{ name: 'duckduckgo', configured: true, envVar: '(none — always available)' },
|
||||||
|
];
|
||||||
|
|
||||||
|
const lines = ['Search providers:\n'];
|
||||||
|
for (const p of allProviders) {
|
||||||
|
const status = p.configured ? '✓ configured' : '✗ not configured';
|
||||||
|
lines.push(` ${p.name}: ${status} (${p.envVar})`);
|
||||||
|
}
|
||||||
|
lines.push(`\nActive providers for "auto" mode: ${available.join(', ')}`);
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text' as const, text: lines.join('\n') }],
|
||||||
|
details: undefined,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
return [webSearch, webSearchNews, searchProviders];
|
||||||
|
}
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
import { Module } from '@nestjs/common';
|
import { Module } from '@nestjs/common';
|
||||||
import { APP_GUARD } from '@nestjs/core';
|
import { APP_GUARD } from '@nestjs/core';
|
||||||
import { HealthController } from './health/health.controller.js';
|
import { HealthController } from './health/health.controller.js';
|
||||||
|
import { ConfigModule } from './config/config.module.js';
|
||||||
import { DatabaseModule } from './database/database.module.js';
|
import { DatabaseModule } from './database/database.module.js';
|
||||||
import { AuthModule } from './auth/auth.module.js';
|
import { AuthModule } from './auth/auth.module.js';
|
||||||
import { BrainModule } from './brain/brain.module.js';
|
import { BrainModule } from './brain/brain.module.js';
|
||||||
@@ -28,6 +29,7 @@ import { ThrottlerGuard, ThrottlerModule } from '@nestjs/throttler';
|
|||||||
@Module({
|
@Module({
|
||||||
imports: [
|
imports: [
|
||||||
ThrottlerModule.forRoot([{ name: 'default', ttl: 60_000, limit: 60 }]),
|
ThrottlerModule.forRoot([{ name: 'default', ttl: 60_000, limit: 60 }]),
|
||||||
|
ConfigModule,
|
||||||
DatabaseModule,
|
DatabaseModule,
|
||||||
AuthModule,
|
AuthModule,
|
||||||
BrainModule,
|
BrainModule,
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ import type {
|
|||||||
SlashCommandPayload,
|
SlashCommandPayload,
|
||||||
SystemReloadPayload,
|
SystemReloadPayload,
|
||||||
RoutingDecisionInfo,
|
RoutingDecisionInfo,
|
||||||
|
AbortPayload,
|
||||||
} from '@mosaic/types';
|
} from '@mosaic/types';
|
||||||
import { AgentService, type ConversationHistoryMessage } from '../agent/agent.service.js';
|
import { AgentService, type ConversationHistoryMessage } from '../agent/agent.service.js';
|
||||||
import { AUTH } from '../auth/auth.tokens.js';
|
import { AUTH } from '../auth/auth.tokens.js';
|
||||||
@@ -325,6 +326,38 @@ export class ChatGateway implements OnGatewayInit, OnGatewayConnection, OnGatewa
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@SubscribeMessage('abort')
|
||||||
|
async handleAbort(
|
||||||
|
@ConnectedSocket() client: Socket,
|
||||||
|
@MessageBody() data: AbortPayload,
|
||||||
|
): Promise<void> {
|
||||||
|
const conversationId = data.conversationId;
|
||||||
|
this.logger.log(`Abort requested by ${client.id} for conversation ${conversationId}`);
|
||||||
|
|
||||||
|
const session = this.agentService.getSession(conversationId);
|
||||||
|
if (!session) {
|
||||||
|
client.emit('error', {
|
||||||
|
conversationId,
|
||||||
|
error: 'No active session to abort.',
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await session.piSession.abort();
|
||||||
|
this.logger.log(`Agent session ${conversationId} aborted successfully`);
|
||||||
|
} catch (err) {
|
||||||
|
this.logger.error(
|
||||||
|
`Failed to abort session ${conversationId}`,
|
||||||
|
err instanceof Error ? err.stack : String(err),
|
||||||
|
);
|
||||||
|
client.emit('error', {
|
||||||
|
conversationId,
|
||||||
|
error: 'Failed to abort the agent operation.',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
@SubscribeMessage('command:execute')
|
@SubscribeMessage('command:execute')
|
||||||
async handleCommandExecute(
|
async handleCommandExecute(
|
||||||
@ConnectedSocket() client: Socket,
|
@ConnectedSocket() client: Socket,
|
||||||
|
|||||||
@@ -82,6 +82,7 @@ function buildService(): CommandExecutorService {
|
|||||||
mockBrain as never,
|
mockBrain as never,
|
||||||
null,
|
null,
|
||||||
mockChatGateway as never,
|
mockChatGateway as never,
|
||||||
|
null,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ import { ChatGateway } from '../chat/chat.gateway.js';
|
|||||||
import { SessionGCService } from '../gc/session-gc.service.js';
|
import { SessionGCService } from '../gc/session-gc.service.js';
|
||||||
import { SystemOverrideService } from '../preferences/system-override.service.js';
|
import { SystemOverrideService } from '../preferences/system-override.service.js';
|
||||||
import { ReloadService } from '../reload/reload.service.js';
|
import { ReloadService } from '../reload/reload.service.js';
|
||||||
|
import { McpClientService } from '../mcp-client/mcp-client.service.js';
|
||||||
import { BRAIN } from '../brain/brain.tokens.js';
|
import { BRAIN } from '../brain/brain.tokens.js';
|
||||||
import { COMMANDS_REDIS } from './commands.tokens.js';
|
import { COMMANDS_REDIS } from './commands.tokens.js';
|
||||||
import { CommandRegistryService } from './command-registry.service.js';
|
import { CommandRegistryService } from './command-registry.service.js';
|
||||||
@@ -28,6 +29,9 @@ export class CommandExecutorService {
|
|||||||
@Optional()
|
@Optional()
|
||||||
@Inject(forwardRef(() => ChatGateway))
|
@Inject(forwardRef(() => ChatGateway))
|
||||||
private readonly chatGateway: ChatGateway | null,
|
private readonly chatGateway: ChatGateway | null,
|
||||||
|
@Optional()
|
||||||
|
@Inject(McpClientService)
|
||||||
|
private readonly mcpClient: McpClientService | null,
|
||||||
) {}
|
) {}
|
||||||
|
|
||||||
async execute(payload: SlashCommandPayload, userId: string): Promise<SlashCommandResultPayload> {
|
async execute(payload: SlashCommandPayload, userId: string): Promise<SlashCommandResultPayload> {
|
||||||
@@ -105,6 +109,8 @@ export class CommandExecutorService {
|
|||||||
};
|
};
|
||||||
case 'tools':
|
case 'tools':
|
||||||
return await this.handleTools(conversationId, userId);
|
return await this.handleTools(conversationId, userId);
|
||||||
|
case 'mcp':
|
||||||
|
return await this.handleMcp(args ?? null, conversationId);
|
||||||
case 'reload': {
|
case 'reload': {
|
||||||
if (!this.reloadService) {
|
if (!this.reloadService) {
|
||||||
return {
|
return {
|
||||||
@@ -489,4 +495,92 @@ export class CommandExecutorService {
|
|||||||
conversationId,
|
conversationId,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private async handleMcp(
|
||||||
|
args: string | null,
|
||||||
|
conversationId: string,
|
||||||
|
): Promise<SlashCommandResultPayload> {
|
||||||
|
if (!this.mcpClient) {
|
||||||
|
return {
|
||||||
|
command: 'mcp',
|
||||||
|
conversationId,
|
||||||
|
success: false,
|
||||||
|
message: 'MCP client service is not available.',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const action = args?.trim().split(/\s+/)[0] ?? 'status';
|
||||||
|
|
||||||
|
switch (action) {
|
||||||
|
case 'status':
|
||||||
|
case 'servers': {
|
||||||
|
const statuses = this.mcpClient.getServerStatuses();
|
||||||
|
if (statuses.length === 0) {
|
||||||
|
return {
|
||||||
|
command: 'mcp',
|
||||||
|
conversationId,
|
||||||
|
success: true,
|
||||||
|
message:
|
||||||
|
'No MCP servers configured. Set MCP_SERVERS env var to connect external tool servers.',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
const lines = ['MCP Server Status:\n'];
|
||||||
|
for (const s of statuses) {
|
||||||
|
const status = s.connected ? '✓ connected' : '✗ disconnected';
|
||||||
|
lines.push(` ${s.name}: ${status}`);
|
||||||
|
lines.push(` URL: ${s.url}`);
|
||||||
|
lines.push(` Tools: ${s.toolCount}`);
|
||||||
|
if (s.error) lines.push(` Error: ${s.error}`);
|
||||||
|
lines.push('');
|
||||||
|
}
|
||||||
|
const tools = this.mcpClient.getToolDefinitions();
|
||||||
|
if (tools.length > 0) {
|
||||||
|
lines.push(`Total bridged tools: ${tools.length}`);
|
||||||
|
lines.push(`Tool names: ${tools.map((t) => t.name).join(', ')}`);
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
command: 'mcp',
|
||||||
|
conversationId,
|
||||||
|
success: true,
|
||||||
|
message: lines.join('\n'),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'reconnect': {
|
||||||
|
const serverName = args?.trim().split(/\s+/).slice(1).join(' ');
|
||||||
|
if (!serverName) {
|
||||||
|
return {
|
||||||
|
command: 'mcp',
|
||||||
|
conversationId,
|
||||||
|
success: false,
|
||||||
|
message: 'Usage: /mcp reconnect <server-name>',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
await this.mcpClient.reconnectServer(serverName);
|
||||||
|
return {
|
||||||
|
command: 'mcp',
|
||||||
|
conversationId,
|
||||||
|
success: true,
|
||||||
|
message: `MCP server "${serverName}" reconnected successfully.`,
|
||||||
|
};
|
||||||
|
} catch (err) {
|
||||||
|
return {
|
||||||
|
command: 'mcp',
|
||||||
|
conversationId,
|
||||||
|
success: false,
|
||||||
|
message: `Failed to reconnect MCP server "${serverName}": ${err instanceof Error ? err.message : String(err)}`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
return {
|
||||||
|
command: 'mcp',
|
||||||
|
conversationId,
|
||||||
|
success: false,
|
||||||
|
message: `Unknown MCP action: "${action}". Use: /mcp status, /mcp servers, /mcp reconnect <name>`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -260,6 +260,23 @@ export class CommandRegistryService implements OnModuleInit {
|
|||||||
execution: 'socket',
|
execution: 'socket',
|
||||||
available: true,
|
available: true,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
name: 'mcp',
|
||||||
|
description: 'Manage MCP server connections (status/reconnect/servers)',
|
||||||
|
aliases: [],
|
||||||
|
args: [
|
||||||
|
{
|
||||||
|
name: 'action',
|
||||||
|
type: 'enum',
|
||||||
|
optional: true,
|
||||||
|
values: ['status', 'reconnect', 'servers'],
|
||||||
|
description: 'Action: status (default), reconnect <name>, servers',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
scope: 'agent',
|
||||||
|
execution: 'socket',
|
||||||
|
available: true,
|
||||||
|
},
|
||||||
{
|
{
|
||||||
name: 'reload',
|
name: 'reload',
|
||||||
description: 'Soft-reload gateway plugins and command manifest (admin)',
|
description: 'Soft-reload gateway plugins and command manifest (admin)',
|
||||||
|
|||||||
@@ -65,6 +65,7 @@ function buildExecutor(registry: CommandRegistryService): CommandExecutorService
|
|||||||
mockBrain as never,
|
mockBrain as never,
|
||||||
null, // reloadService (optional)
|
null, // reloadService (optional)
|
||||||
null, // chatGateway (optional)
|
null, // chatGateway (optional)
|
||||||
|
null, // mcpClient (optional)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
16
apps/gateway/src/config/config.module.ts
Normal file
16
apps/gateway/src/config/config.module.ts
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
import { Global, Module } from '@nestjs/common';
|
||||||
|
import { loadConfig, type MosaicConfig } from '@mosaic/config';
|
||||||
|
|
||||||
|
export const MOSAIC_CONFIG = 'MOSAIC_CONFIG';
|
||||||
|
|
||||||
|
@Global()
|
||||||
|
@Module({
|
||||||
|
providers: [
|
||||||
|
{
|
||||||
|
provide: MOSAIC_CONFIG,
|
||||||
|
useFactory: (): MosaicConfig => loadConfig(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
exports: [MOSAIC_CONFIG],
|
||||||
|
})
|
||||||
|
export class ConfigModule {}
|
||||||
@@ -1,28 +1,51 @@
|
|||||||
|
import { mkdirSync } from 'node:fs';
|
||||||
|
import { homedir } from 'node:os';
|
||||||
|
import { join } from 'node:path';
|
||||||
import { Global, Inject, Module, type OnApplicationShutdown } from '@nestjs/common';
|
import { Global, Inject, Module, type OnApplicationShutdown } from '@nestjs/common';
|
||||||
import { createDb, type Db, type DbHandle } from '@mosaic/db';
|
import { createDb, createPgliteDb, type Db, type DbHandle } from '@mosaic/db';
|
||||||
|
import { createStorageAdapter, type StorageAdapter } from '@mosaic/storage';
|
||||||
|
import type { MosaicConfig } from '@mosaic/config';
|
||||||
|
import { MOSAIC_CONFIG } from '../config/config.module.js';
|
||||||
|
|
||||||
export const DB_HANDLE = 'DB_HANDLE';
|
export const DB_HANDLE = 'DB_HANDLE';
|
||||||
export const DB = 'DB';
|
export const DB = 'DB';
|
||||||
|
export const STORAGE_ADAPTER = 'STORAGE_ADAPTER';
|
||||||
|
|
||||||
@Global()
|
@Global()
|
||||||
@Module({
|
@Module({
|
||||||
providers: [
|
providers: [
|
||||||
{
|
{
|
||||||
provide: DB_HANDLE,
|
provide: DB_HANDLE,
|
||||||
useFactory: (): DbHandle => createDb(),
|
useFactory: (config: MosaicConfig): DbHandle => {
|
||||||
|
if (config.tier === 'local') {
|
||||||
|
const dataDir = join(homedir(), '.config', 'mosaic', 'gateway', 'pglite');
|
||||||
|
mkdirSync(dataDir, { recursive: true });
|
||||||
|
return createPgliteDb(dataDir);
|
||||||
|
}
|
||||||
|
return createDb(config.storage.type === 'postgres' ? config.storage.url : undefined);
|
||||||
|
},
|
||||||
|
inject: [MOSAIC_CONFIG],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
provide: DB,
|
provide: DB,
|
||||||
useFactory: (handle: DbHandle): Db => handle.db,
|
useFactory: (handle: DbHandle): Db => handle.db,
|
||||||
inject: [DB_HANDLE],
|
inject: [DB_HANDLE],
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
provide: STORAGE_ADAPTER,
|
||||||
|
useFactory: (config: MosaicConfig): StorageAdapter => createStorageAdapter(config.storage),
|
||||||
|
inject: [MOSAIC_CONFIG],
|
||||||
|
},
|
||||||
],
|
],
|
||||||
exports: [DB],
|
exports: [DB, STORAGE_ADAPTER],
|
||||||
})
|
})
|
||||||
export class DatabaseModule implements OnApplicationShutdown {
|
export class DatabaseModule implements OnApplicationShutdown {
|
||||||
constructor(@Inject(DB_HANDLE) private readonly handle: DbHandle) {}
|
constructor(
|
||||||
|
@Inject(DB_HANDLE) private readonly handle: DbHandle,
|
||||||
|
@Inject(STORAGE_ADAPTER) private readonly storageAdapter: StorageAdapter,
|
||||||
|
) {}
|
||||||
|
|
||||||
async onApplicationShutdown(): Promise<void> {
|
async onApplicationShutdown(): Promise<void> {
|
||||||
await this.handle.close();
|
await Promise.all([this.handle.close(), this.storageAdapter.close()]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,13 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
import { config } from 'dotenv';
|
import { config } from 'dotenv';
|
||||||
import { resolve } from 'node:path';
|
import { existsSync } from 'node:fs';
|
||||||
|
import { resolve, join } from 'node:path';
|
||||||
|
import { homedir } from 'node:os';
|
||||||
|
|
||||||
|
// Load .env from daemon config dir (global install / daemon mode).
|
||||||
|
// Loaded first so monorepo .env can override for local dev.
|
||||||
|
const daemonEnv = join(homedir(), '.config', 'mosaic', 'gateway', '.env');
|
||||||
|
if (existsSync(daemonEnv)) config({ path: daemonEnv });
|
||||||
|
|
||||||
// Load .env from monorepo root (cwd is apps/gateway when run via pnpm filter)
|
// Load .env from monorepo root (cwd is apps/gateway when run via pnpm filter)
|
||||||
config({ path: resolve(process.cwd(), '../../.env') });
|
config({ path: resolve(process.cwd(), '../../.env') });
|
||||||
|
|||||||
@@ -1,11 +1,29 @@
|
|||||||
import { Global, Module } from '@nestjs/common';
|
import { Global, Module } from '@nestjs/common';
|
||||||
import { createMemory, type Memory } from '@mosaic/memory';
|
import {
|
||||||
|
createMemory,
|
||||||
|
type Memory,
|
||||||
|
createMemoryAdapter,
|
||||||
|
type MemoryAdapter,
|
||||||
|
type MemoryConfig,
|
||||||
|
} from '@mosaic/memory';
|
||||||
import type { Db } from '@mosaic/db';
|
import type { Db } from '@mosaic/db';
|
||||||
import { DB } from '../database/database.module.js';
|
import type { StorageAdapter } from '@mosaic/storage';
|
||||||
|
import type { MosaicConfig } from '@mosaic/config';
|
||||||
|
import { MOSAIC_CONFIG } from '../config/config.module.js';
|
||||||
|
import { DB, STORAGE_ADAPTER } from '../database/database.module.js';
|
||||||
import { MEMORY } from './memory.tokens.js';
|
import { MEMORY } from './memory.tokens.js';
|
||||||
import { MemoryController } from './memory.controller.js';
|
import { MemoryController } from './memory.controller.js';
|
||||||
import { EmbeddingService } from './embedding.service.js';
|
import { EmbeddingService } from './embedding.service.js';
|
||||||
|
|
||||||
|
export const MEMORY_ADAPTER = 'MEMORY_ADAPTER';
|
||||||
|
|
||||||
|
function buildMemoryConfig(config: MosaicConfig, storageAdapter: StorageAdapter): MemoryConfig {
|
||||||
|
if (config.memory.type === 'keyword') {
|
||||||
|
return { type: 'keyword', storage: storageAdapter };
|
||||||
|
}
|
||||||
|
return { type: config.memory.type };
|
||||||
|
}
|
||||||
|
|
||||||
@Global()
|
@Global()
|
||||||
@Module({
|
@Module({
|
||||||
providers: [
|
providers: [
|
||||||
@@ -14,9 +32,15 @@ import { EmbeddingService } from './embedding.service.js';
|
|||||||
useFactory: (db: Db): Memory => createMemory(db),
|
useFactory: (db: Db): Memory => createMemory(db),
|
||||||
inject: [DB],
|
inject: [DB],
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
provide: MEMORY_ADAPTER,
|
||||||
|
useFactory: (config: MosaicConfig, storageAdapter: StorageAdapter): MemoryAdapter =>
|
||||||
|
createMemoryAdapter(buildMemoryConfig(config, storageAdapter)),
|
||||||
|
inject: [MOSAIC_CONFIG, STORAGE_ADAPTER],
|
||||||
|
},
|
||||||
EmbeddingService,
|
EmbeddingService,
|
||||||
],
|
],
|
||||||
controllers: [MemoryController],
|
controllers: [MemoryController],
|
||||||
exports: [MEMORY, EmbeddingService],
|
exports: [MEMORY, MEMORY_ADAPTER, EmbeddingService],
|
||||||
})
|
})
|
||||||
export class MemoryModule {}
|
export class MemoryModule {}
|
||||||
|
|||||||
@@ -1,9 +1,21 @@
|
|||||||
import { Global, Module } from '@nestjs/common';
|
import { Global, Module } from '@nestjs/common';
|
||||||
|
import { createQueueAdapter, type QueueAdapter } from '@mosaic/queue';
|
||||||
|
import type { MosaicConfig } from '@mosaic/config';
|
||||||
|
import { MOSAIC_CONFIG } from '../config/config.module.js';
|
||||||
import { QueueService } from './queue.service.js';
|
import { QueueService } from './queue.service.js';
|
||||||
|
|
||||||
|
export const QUEUE_ADAPTER = 'QUEUE_ADAPTER';
|
||||||
|
|
||||||
@Global()
|
@Global()
|
||||||
@Module({
|
@Module({
|
||||||
providers: [QueueService],
|
providers: [
|
||||||
exports: [QueueService],
|
QueueService,
|
||||||
|
{
|
||||||
|
provide: QUEUE_ADAPTER,
|
||||||
|
useFactory: (config: MosaicConfig): QueueAdapter => createQueueAdapter(config.queue),
|
||||||
|
inject: [MOSAIC_CONFIG],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
exports: [QueueService, QUEUE_ADAPTER],
|
||||||
})
|
})
|
||||||
export class QueueModule {}
|
export class QueueModule {}
|
||||||
|
|||||||
@@ -51,16 +51,42 @@ export interface QueueHealthStatus {
|
|||||||
// Constants
|
// Constants
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
export const QUEUE_SUMMARIZATION = 'mosaic:summarization';
|
export const QUEUE_SUMMARIZATION = 'mosaic-summarization';
|
||||||
export const QUEUE_GC = 'mosaic:gc';
|
export const QUEUE_GC = 'mosaic-gc';
|
||||||
export const QUEUE_TIER_MANAGEMENT = 'mosaic:tier-management';
|
export const QUEUE_TIER_MANAGEMENT = 'mosaic-tier-management';
|
||||||
|
|
||||||
const DEFAULT_VALKEY_URL = 'redis://localhost:6380';
|
const DEFAULT_VALKEY_URL = 'redis://localhost:6380';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a Redis URL string into a BullMQ-compatible ConnectionOptions object.
|
||||||
|
*
|
||||||
|
* BullMQ v5 does `Object.assign({ port: 6379, host: '127.0.0.1' }, opts)` in
|
||||||
|
* its RedisConnection constructor. If opts is a URL string, Object.assign only
|
||||||
|
* copies character-index properties and the defaults survive — so 6379 wins.
|
||||||
|
* We must parse the URL ourselves and return a plain RedisOptions object.
|
||||||
|
*/
|
||||||
function getConnection(): ConnectionOptions {
|
function getConnection(): ConnectionOptions {
|
||||||
const url = process.env['VALKEY_URL'] ?? DEFAULT_VALKEY_URL;
|
const url = process.env['VALKEY_URL'] ?? DEFAULT_VALKEY_URL;
|
||||||
// BullMQ ConnectionOptions accepts a URL string (ioredis-compatible)
|
try {
|
||||||
return url as unknown as ConnectionOptions;
|
const parsed = new URL(url);
|
||||||
|
const opts: ConnectionOptions = {
|
||||||
|
host: parsed.hostname || '127.0.0.1',
|
||||||
|
port: parsed.port ? parseInt(parsed.port, 10) : 6380,
|
||||||
|
};
|
||||||
|
if (parsed.password) {
|
||||||
|
(opts as Record<string, unknown>)['password'] = decodeURIComponent(parsed.password);
|
||||||
|
}
|
||||||
|
if (parsed.pathname && parsed.pathname.length > 1) {
|
||||||
|
const db = parseInt(parsed.pathname.slice(1), 10);
|
||||||
|
if (!isNaN(db)) {
|
||||||
|
(opts as Record<string, unknown>)['db'] = db;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return opts;
|
||||||
|
} catch {
|
||||||
|
// Fallback: hope the value is already a host string ioredis understands
|
||||||
|
return { host: '127.0.0.1', port: 6380 } as ConnectionOptions;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/web",
|
"name": "@mosaic/web",
|
||||||
"version": "0.0.0",
|
"version": "0.0.2",
|
||||||
"private": true,
|
"private": true,
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "next build",
|
"build": "next build",
|
||||||
|
|||||||
0
apps/web/public/.gitkeep
Normal file
0
apps/web/public/.gitkeep
Normal file
231
briefs/monorepo-consolidation.md
Normal file
231
briefs/monorepo-consolidation.md
Normal file
@@ -0,0 +1,231 @@
|
|||||||
|
# Brief: Monorepo Consolidation — mosaic/stack → mosaic/mosaic-stack
|
||||||
|
|
||||||
|
## Source
|
||||||
|
|
||||||
|
Architecture consolidation — merge the mosaic/stack repo (Forge pipeline, MACP protocol, framework tools) into mosaic/mosaic-stack (Harness Foundation platform). Two repos doing related work that need to converge.
|
||||||
|
|
||||||
|
## Context
|
||||||
|
|
||||||
|
**mosaic/stack** (OLD) contains:
|
||||||
|
|
||||||
|
- Forge progressive refinement pipeline (stages, agents, personas, rails, debate protocol, brief classification)
|
||||||
|
- MACP protocol (JSON schemas, deterministic Python controller, dispatcher, event system, gate runner)
|
||||||
|
- Credential resolver (Python — OC config, mosaic files, ambient env, JSON5 parser)
|
||||||
|
- OC framework plugin (injects Mosaic rails into all agent sessions)
|
||||||
|
- Profiles (runtime-neutral context packs for tech stacks and domains)
|
||||||
|
- Stage adapter (Forge→MACP bridge)
|
||||||
|
- Board tasks (multi-agent board evaluation)
|
||||||
|
- OpenBrain specialist memory (learning capture/recall)
|
||||||
|
- 17 guides, 5 universal skills
|
||||||
|
|
||||||
|
**mosaic/mosaic-stack** (NEW) contains:
|
||||||
|
|
||||||
|
- Harness Foundation platform (NestJS gateway, Next.js web, Drizzle ORM, Pi SDK runtime)
|
||||||
|
- 5 provider adapters, task classifier, routing rules, model capability matrix
|
||||||
|
- MACP OC plugin (ACP runtime backend with Pi bridge)
|
||||||
|
- TS coord package (mission runner, tasks file manager, status tracker — 1635 lines)
|
||||||
|
- BullMQ job queue, OTEL telemetry, channel plugins (Discord, Telegram)
|
||||||
|
- CLI with TUI, 65/65 tasks done, v0.2.0
|
||||||
|
|
||||||
|
**Decision:** NEW repo is the base. All unique work from OLD gets ported into NEW as packages.
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
### Work Package 1: Forge Pipeline Package (`packages/forge`)
|
||||||
|
|
||||||
|
Port the entire Forge progressive refinement pipeline as a TypeScript package.
|
||||||
|
|
||||||
|
**From OLD:**
|
||||||
|
|
||||||
|
- `forge/pipeline/stages/*.md` — 11 stage definitions
|
||||||
|
- `forge/pipeline/agents/{board,generalists,specialists,cross-cutting}/*.md` — all persona definitions
|
||||||
|
- `forge/pipeline/rails/*.md` — debate protocol, dynamic composition, worker rails
|
||||||
|
- `forge/pipeline/gates/` — gate reviewer definitions
|
||||||
|
- `forge/pipeline/orchestrator/run-structure.md` — file-based observability spec
|
||||||
|
- `forge/templates/` — brief and PRD templates
|
||||||
|
- `forge/pipeline/orchestrator/board_tasks.py` → rewrite in TS
|
||||||
|
- `forge/pipeline/orchestrator/stage_adapter.py` → rewrite in TS
|
||||||
|
- `forge/pipeline/orchestrator/pipeline_runner.py` → rewrite in TS
|
||||||
|
- `forge/forge` CLI (Python) → rewrite in TS, integrate with `packages/cli`
|
||||||
|
|
||||||
|
**Package structure:**
|
||||||
|
|
||||||
|
```
|
||||||
|
packages/forge/
|
||||||
|
├── src/
|
||||||
|
│ ├── index.ts # Public API
|
||||||
|
│ ├── pipeline-runner.ts # Orchestrates full pipeline run
|
||||||
|
│ ├── stage-adapter.ts # Maps stages to MACP/coord tasks
|
||||||
|
│ ├── board-tasks.ts # Multi-agent board evaluation task generator
|
||||||
|
│ ├── brief-classifier.ts # strategic/technical/hotfix classification
|
||||||
|
│ ├── types.ts # Stage specs, run manifest, gate results
|
||||||
|
│ └── constants.ts # Stage sequence, timeouts, labels
|
||||||
|
├── pipeline/
|
||||||
|
│ ├── stages/ # .md stage definitions (copied)
|
||||||
|
│ ├── agents/ # .md persona definitions (copied)
|
||||||
|
│ │ ├── board/
|
||||||
|
│ │ ├── cross-cutting/
|
||||||
|
│ │ ├── generalists/
|
||||||
|
│ │ └── specialists/
|
||||||
|
│ │ ├── language/
|
||||||
|
│ │ └── domain/
|
||||||
|
│ ├── rails/ # .md rails (copied)
|
||||||
|
│ ├── gates/ # .md gate definitions (copied)
|
||||||
|
│ └── templates/ # brief + PRD templates (copied)
|
||||||
|
└── package.json
|
||||||
|
```
|
||||||
|
|
||||||
|
**Key design decisions:**
|
||||||
|
|
||||||
|
- Pipeline markdown assets are runtime data, not compiled — ship as-is in the package
|
||||||
|
- `pipeline-runner.ts` calls into `packages/coord` for task execution (not a separate controller)
|
||||||
|
- Stage adapter generates coord-compatible tasks, not MACP JSON directly
|
||||||
|
- Board tasks use `depends_on_policy: "all_terminal"` for synthesis
|
||||||
|
- Per-stage timeouts from `STAGE_TIMEOUTS` map
|
||||||
|
- Brief classifier supports CLI flag, YAML frontmatter, and keyword auto-detection
|
||||||
|
- Run output goes to project-scoped `.forge/runs/{run-id}/` (not inside the Forge package)
|
||||||
|
|
||||||
|
**Persona override system (new):**
|
||||||
|
|
||||||
|
- Base personas ship with the package (read-only)
|
||||||
|
- Project-level overrides in `.forge/personas/{role}.md` extend (not replace) base personas
|
||||||
|
- Board composition configurable via `.forge/config.yaml`:
|
||||||
|
```yaml
|
||||||
|
board:
|
||||||
|
additional_members:
|
||||||
|
- compliance-officer.md
|
||||||
|
skip_members: []
|
||||||
|
specialists:
|
||||||
|
always_include:
|
||||||
|
- proxmox-expert
|
||||||
|
```
|
||||||
|
- OpenBrain integration for cross-run specialist memory (when enabled)
|
||||||
|
|
||||||
|
### Work Package 2: MACP Protocol Package (`packages/macp`)
|
||||||
|
|
||||||
|
Port the MACP protocol layer, event system, and gate runner as a TypeScript package.
|
||||||
|
|
||||||
|
**From OLD:**
|
||||||
|
|
||||||
|
- `tools/macp/protocol/task.schema.json` — task JSON schema
|
||||||
|
- `tools/macp/protocol/` — event schemas
|
||||||
|
- `tools/macp/controller/gate_runner.py` → rewrite in TS as `gate-runner.ts`
|
||||||
|
- `tools/macp/events/` — event watcher, webhook adapter, Discord formatter → rewrite in TS
|
||||||
|
- `tools/macp/dispatcher/credential_resolver.py` → rewrite in TS as `credential-resolver.ts`
|
||||||
|
- `tools/macp/memory/learning_capture.py` + `learning_recall.py` → rewrite in TS
|
||||||
|
|
||||||
|
**Package structure:**
|
||||||
|
|
||||||
|
```
|
||||||
|
packages/macp/
|
||||||
|
├── src/
|
||||||
|
│ ├── index.ts # Public API
|
||||||
|
│ ├── types.ts # Task, event, result, gate types
|
||||||
|
│ ├── schemas/ # JSON schemas (copied)
|
||||||
|
│ ├── gate-runner.ts # Mechanical + AI review quality gates
|
||||||
|
│ ├── credential-resolver.ts # Provider credential resolution (mosaic files, OC config, ambient)
|
||||||
|
│ ├── event-emitter.ts # Append events to ndjson, structured event types
|
||||||
|
│ ├── event-watcher.ts # Poll events.ndjson with cursor persistence
|
||||||
|
│ ├── webhook-adapter.ts # POST events to configurable URL
|
||||||
|
│ ├── discord-formatter.ts # Human-readable event messages
|
||||||
|
│ └── learning.ts # OpenBrain capture + recall
|
||||||
|
└── package.json
|
||||||
|
```
|
||||||
|
|
||||||
|
**Integration with existing packages:**
|
||||||
|
|
||||||
|
- `packages/coord` uses `packages/macp` for event emission, gate running, and credential resolution
|
||||||
|
- `plugins/macp` uses `packages/macp` for protocol types and credential resolution
|
||||||
|
- `packages/forge` uses `packages/macp` gate types for stage gates
|
||||||
|
|
||||||
|
### Work Package 3: OC Framework Plugin (`plugins/mosaic-framework`)
|
||||||
|
|
||||||
|
Port the OC framework plugin that injects Mosaic rails into all agent sessions.
|
||||||
|
|
||||||
|
**From OLD:**
|
||||||
|
|
||||||
|
- `oc-plugins/mosaic-framework/index.ts` — `before_agent_start` + `subagent_spawning` hooks
|
||||||
|
- `oc-plugins/mosaic-framework/openclaw.plugin.json`
|
||||||
|
|
||||||
|
**Structure:**
|
||||||
|
|
||||||
|
```
|
||||||
|
plugins/mosaic-framework/
|
||||||
|
├── src/
|
||||||
|
│ └── index.ts # Plugin hooks
|
||||||
|
└── package.json
|
||||||
|
```
|
||||||
|
|
||||||
|
**This is separate from `plugins/macp`:**
|
||||||
|
|
||||||
|
- `mosaic-framework` = injects Mosaic rails/contracts into every OC session (passive enforcement)
|
||||||
|
- `macp` = provides an ACP runtime backend for MACP task execution (active runtime)
|
||||||
|
|
||||||
|
### Work Package 4: Profiles + Guides + Skills
|
||||||
|
|
||||||
|
Port reference content as a documentation/config package or top-level directories.
|
||||||
|
|
||||||
|
**From OLD:**
|
||||||
|
|
||||||
|
- `profiles/domains/*.json` — HIPAA, fintech, crypto context packs
|
||||||
|
- `profiles/tech-stacks/*.json` — NestJS, Next.js, FastAPI, React conventions
|
||||||
|
- `profiles/workflows/*.json` — API development, frontend component, testing workflows
|
||||||
|
- `guides/*.md` — 17 guides (auth, backend, QA, orchestrator, PRD, etc.)
|
||||||
|
- `skills-universal/` — jarvis, macp, mosaic-standards, prd, setup-cicd skills
|
||||||
|
|
||||||
|
**Destination:**
|
||||||
|
|
||||||
|
```
|
||||||
|
profiles/ # Top-level (same as OLD)
|
||||||
|
guides/ # Top-level (same as OLD)
|
||||||
|
skills/ # Top-level (renamed from skills-universal)
|
||||||
|
```
|
||||||
|
|
||||||
|
These are runtime-neutral assets consumed by any agent or profile loader — they don't belong in a compiled package.
|
||||||
|
|
||||||
|
## Out of Scope
|
||||||
|
|
||||||
|
- Rewriting the NestJS orchestrator app from OLD (`apps/orchestrator/`) — its functionality is subsumed by `packages/coord` + `apps/gateway`
|
||||||
|
- Porting the FastAPI coordinator from OLD (`apps/coordinator/`) — its functionality (webhook receiver, issue parser, quality orchestrator) is handled by `packages/coord` + `apps/gateway` in the new architecture
|
||||||
|
- Porting the Prisma schema or OLD's `apps/api` — Drizzle migration is complete
|
||||||
|
- Old Docker Compose configs (Traefik, Matrix, OpenBao) — NEW has its own infra setup
|
||||||
|
|
||||||
|
## Success Criteria
|
||||||
|
|
||||||
|
1. `packages/forge` exists with all 11 stage definitions, all persona markdowns, all rails, and TS implementations of pipeline-runner, stage-adapter, board-tasks, and brief-classifier
|
||||||
|
2. `packages/macp` exists with gate-runner, credential-resolver, event system, and learning capture/recall — all in TypeScript
|
||||||
|
3. `plugins/mosaic-framework` exists and registers OC hooks for rails injection
|
||||||
|
4. Profiles, guides, and skills are present at top-level
|
||||||
|
5. `packages/forge` integrates with `packages/coord` for task execution
|
||||||
|
6. `packages/macp` credential-resolver is used by `plugins/macp` Pi bridge
|
||||||
|
7. All existing tests pass (no regressions)
|
||||||
|
8. New packages have test coverage ≥85%
|
||||||
|
9. `pnpm lint && pnpm typecheck && pnpm build` passes
|
||||||
|
10. `.forge/runs/` project-scoped output directory works for at least one test run
|
||||||
|
|
||||||
|
## Technical Constraints
|
||||||
|
|
||||||
|
- All new code is ESM with NodeNext module resolution
|
||||||
|
- No Python in the new repo — everything rewrites to TypeScript
|
||||||
|
- Pipeline markdown assets (stages, personas, rails) are shipped as package data, not compiled
|
||||||
|
- Credential resolver must support: mosaic credential files, OC config (JSON5), ambient environment — same resolution order as the Python version
|
||||||
|
- Must preserve `depends_on_policy` semantics (all, any, all_terminal)
|
||||||
|
- Per-stage timeouts must be preserved
|
||||||
|
- JSON5 stripping must use the placeholder-extraction approach (not naive regex on string content)
|
||||||
|
|
||||||
|
## Estimated Complexity
|
||||||
|
|
||||||
|
High — crosses 4 work packages with protocol porting, TS rewrites, and integration wiring. Each work package is independently shippable.
|
||||||
|
|
||||||
|
**Suggested execution order:**
|
||||||
|
|
||||||
|
1. WP4 (profiles/guides/skills) — pure copy, no code, fast win
|
||||||
|
2. WP2 (packages/macp) — protocol foundation, needed by WP1 and WP3
|
||||||
|
3. WP1 (packages/forge) — the big one, depends on WP2
|
||||||
|
4. WP3 (plugins/mosaic-framework) — OC integration, can parallel with WP1
|
||||||
|
|
||||||
|
## Dependencies
|
||||||
|
|
||||||
|
- `packages/coord` must be stable (it is — WP1 integrates with it)
|
||||||
|
- `plugins/macp` must be stable (it is — WP2 provides types/credentials to it)
|
||||||
|
- Pi SDK (`@mariozechner/pi-agent-core`) already in the dependency tree
|
||||||
@@ -1,73 +1,30 @@
|
|||||||
# Tasks — Harness Foundation
|
# Tasks — Storage Abstraction Retrofit
|
||||||
|
|
||||||
> Single-writer: orchestrator only. Workers read but never modify.
|
> Single-writer: orchestrator only. Workers read but never modify.
|
||||||
>
|
>
|
||||||
|
> **Mission:** Decouple gateway from hardcoded Postgres/Valkey backends. Introduce interface-driven middleware so the gateway is backend-agnostic. Default to local tier (SQLite + JSON) for zero-dependency installs.
|
||||||
|
>
|
||||||
> **`agent` column values:** `codex` | `sonnet` | `haiku` | `glm-5` | `opus` | `—` (auto/default)
|
> **`agent` column values:** `codex` | `sonnet` | `haiku` | `glm-5` | `opus` | `—` (auto/default)
|
||||||
|
|
||||||
| id | status | agent | milestone | description | pr | notes |
|
| id | status | agent | description | tokens |
|
||||||
| ------ | ------ | ------ | ------------------ | ------------------------------------------------------------------ | ---- | ----------- |
|
| --------- | ----------- | ------ | ---------------------------------------------------------------- | ------ |
|
||||||
| M1-001 | done | sonnet | M1: Persistence | Wire ChatGateway → ConversationsRepo for user messages | #292 | #224 closed |
|
| SA-P1-001 | done | sonnet | Define QueueAdapter interface in packages/queue/src/types.ts | 3K |
|
||||||
| M1-002 | done | sonnet | M1: Persistence | Wire agent event relay → ConversationsRepo for assistant responses | #292 | #225 closed |
|
| SA-P1-002 | done | sonnet | Define StorageAdapter interface in packages/storage/src/types.ts | 3K |
|
||||||
| M1-003 | done | sonnet | M1: Persistence | Store message metadata: model, provider, tokens, tool calls | #292 | #226 closed |
|
| SA-P1-003 | done | sonnet | Define MemoryAdapter interface in packages/memory/src/types.ts | 3K |
|
||||||
| M1-004 | done | sonnet | M1: Persistence | Load message history into Pi session on resume | #301 | #227 closed |
|
| SA-P1-004 | done | sonnet | Create adapter factory pattern + config types | 3K |
|
||||||
| M1-005 | done | sonnet | M1: Persistence | Context window management: summarize when >80% | #301 | #228 closed |
|
| SA-P2-001 | done | sonnet | Refactor @mosaic/queue: wrap ioredis as BullMQ adapter | 3K |
|
||||||
| M1-006 | done | sonnet | M1: Persistence | Conversation search endpoint | #299 | #229 closed |
|
| SA-P2-002 | done | sonnet | Create @mosaic/storage: wrap Drizzle as Postgres adapter | 6K |
|
||||||
| M1-007 | done | sonnet | M1: Persistence | TUI /history command | #297 | #230 closed |
|
| SA-P2-003 | done | sonnet | Refactor @mosaic/memory: extract pgvector adapter | 4K |
|
||||||
| M1-008 | done | sonnet | M1: Persistence | Verify persistence — 20 tests | #304 | #231 closed |
|
| SA-P2-004 | done | sonnet | Update gateway modules to use factories + DI tokens | 5K |
|
||||||
| M2-001 | done | sonnet | M2: Security | InsightsRepo userId on searchByEmbedding | #290 | #232 closed |
|
| SA-P2-005 | done | opus | Verify Phase 2: all tests pass, typecheck clean | — |
|
||||||
| M2-002 | done | sonnet | M2: Security | InsightsRepo userId on findByUser/decay | #290 | #233 closed |
|
| SA-P3-001 | done | sonnet | Implement local queue adapter: JSON file persistence | 5K |
|
||||||
| M2-003 | done | sonnet | M2: Security | PreferencesRepo userId verified | #294 | #234 closed |
|
| SA-P3-002 | done | sonnet | Implement SQLite storage adapter with better-sqlite3 | 8K |
|
||||||
| M2-004 | done | sonnet | M2: Security | Memory tools userId injection fixed | #294 | #235 closed |
|
| SA-P3-003 | done | sonnet | Implement keyword memory adapter — no vector dependency | 4K |
|
||||||
| M2-005 | done | sonnet | M2: Security | ConversationsRepo ownership checks | #293 | #236 closed |
|
| SA-P3-004 | done | opus | Verify Phase 3: 42 new tests, 347 total passing | — |
|
||||||
| M2-006 | done | sonnet | M2: Security | AgentsRepo findAccessible scoped | #293 | #237 closed |
|
| SA-P4-001 | done | sonnet | MosaicConfig schema + loader with tier auto-detection | 6K |
|
||||||
| M2-007 | done | sonnet | M2: Security | Cross-user isolation — 28 tests | #305 | #238 closed |
|
| SA-P4-002 | done | sonnet | CLI: mosaic gateway init — interactive wizard | 4K |
|
||||||
| M2-008 | done | sonnet | M2: Security | Valkey SCAN + /gc admin-only | #298 | #239 closed |
|
| SA-P4-003 | done | sonnet | CLI: mosaic gateway start/stop/status lifecycle | 5K |
|
||||||
| M3-001 | done | sonnet | M3: Providers | IProviderAdapter + OllamaAdapter | #306 | #240 closed |
|
| SA-P4-004 | done | opus | Verify Phase 4: 381 tests passing, 40/40 tasks clean | — |
|
||||||
| M3-002 | done | sonnet | M3: Providers | AnthropicAdapter | #309 | #241 closed |
|
| SA-P5-001 | not-started | codex | Migration tooling: mosaic storage export/import | — |
|
||||||
| M3-003 | done | sonnet | M3: Providers | OpenAIAdapter | #310 | #242 closed |
|
| SA-P5-002 | not-started | codex | Docker Compose profiles: local vs team | — |
|
||||||
| M3-004 | done | sonnet | M3: Providers | OpenRouterAdapter | #311 | #243 closed |
|
| SA-P5-003 | not-started | codex | Final verification + docs: README, architecture diagram | — |
|
||||||
| M3-005 | done | sonnet | M3: Providers | ZaiAdapter (GLM-5) | #314 | #244 closed |
|
|
||||||
| M3-006 | done | sonnet | M3: Providers | Ollama embedding support | #311 | #245 closed |
|
|
||||||
| M3-007 | done | sonnet | M3: Providers | Provider health checks | #308 | #246 closed |
|
|
||||||
| M3-008 | done | sonnet | M3: Providers | Model capability matrix | #303 | #247 closed |
|
|
||||||
| M3-009 | done | sonnet | M3: Providers | EmbeddingService → Ollama default | #308 | #248 closed |
|
|
||||||
| M3-010 | done | sonnet | M3: Providers | OAuth token storage (AES-256-GCM) | #317 | #249 closed |
|
|
||||||
| M3-011 | done | sonnet | M3: Providers | Provider credentials CRUD | #317 | #250 closed |
|
|
||||||
| M3-012 | done | sonnet | M3: Providers | Verify providers — 40 tests | #319 | #251 closed |
|
|
||||||
| M4-001 | done | sonnet | M4: Routing | routing_rules DB schema | #315 | #252 closed |
|
|
||||||
| M4-002 | done | sonnet | M4: Routing | Condition types | #315 | #253 closed |
|
|
||||||
| M4-003 | done | sonnet | M4: Routing | Action types | #315 | #254 closed |
|
|
||||||
| M4-004 | done | sonnet | M4: Routing | Default routing rules (11 seeds) | #316 | #255 closed |
|
|
||||||
| M4-005 | done | sonnet | M4: Routing | Task classifier (60+ tests) | #316 | #256 closed |
|
|
||||||
| M4-006 | done | sonnet | M4: Routing | Routing decision pipeline | #318 | #257 closed |
|
|
||||||
| M4-007 | done | sonnet | M4: Routing | /model override | #323 | #258 closed |
|
|
||||||
| M4-008 | done | sonnet | M4: Routing | Routing transparency in session:info | #323 | #259 closed |
|
|
||||||
| M4-009 | done | sonnet | M4: Routing | Routing rules CRUD API | #320 | #260 closed |
|
|
||||||
| M4-010 | done | sonnet | M4: Routing | Per-user routing overrides | #320 | #261 closed |
|
|
||||||
| M4-011 | done | sonnet | M4: Routing | Agent specialization capabilities | #320 | #262 closed |
|
|
||||||
| M4-012 | done | sonnet | M4: Routing | Routing wired into ChatGateway | #323 | #263 closed |
|
|
||||||
| M4-013 | done | sonnet | M4: Routing | Verify routing — 9 E2E tests | #323 | #264 closed |
|
|
||||||
| M5-001 | done | sonnet | M5: Sessions | Agent config loaded on session create | #323 | #265 closed |
|
|
||||||
| M5-002 | done | sonnet | M5: Sessions | /model command end-to-end | #323 | #266 closed |
|
|
||||||
| M5-003 | done | sonnet | M5: Sessions | /agent command mid-session | #323 | #267 closed |
|
|
||||||
| M5-004 | done | sonnet | M5: Sessions | Session ↔ conversation binding | #321 | #268 closed |
|
|
||||||
| M5-005 | done | sonnet | M5: Sessions | Session info broadcast | #321 | #269 closed |
|
|
||||||
| M5-006 | done | sonnet | M5: Sessions | /agent new from TUI | #321 | #270 closed |
|
|
||||||
| M5-007 | done | sonnet | M5: Sessions | Session metrics | #321 | #271 closed |
|
|
||||||
| M5-008 | done | sonnet | M5: Sessions | Verify sessions — 28 tests | #324 | #272 closed |
|
|
||||||
| M6-001 | done | sonnet | M6: Jobs | BullMQ + Valkey config | #324 | #273 closed |
|
|
||||||
| M6-002 | done | sonnet | M6: Jobs | Queue service with typed jobs | #324 | #274 closed |
|
|
||||||
| M6-003 | done | sonnet | M6: Jobs | Summarization → BullMQ | #324 | #275 closed |
|
|
||||||
| M6-004 | done | sonnet | M6: Jobs | GC → BullMQ | #324 | #276 closed |
|
|
||||||
| M6-005 | done | sonnet | M6: Jobs | Tier management → BullMQ | #324 | #277 closed |
|
|
||||||
| M6-006 | done | sonnet | M6: Jobs | Admin jobs API | #325 | #278 closed |
|
|
||||||
| M6-007 | done | sonnet | M6: Jobs | Job event logging | #325 | #279 closed |
|
|
||||||
| M6-008 | done | sonnet | M6: Jobs | Verify jobs | #324 | #280 closed |
|
|
||||||
| M7-001 | done | sonnet | M7: Channel Design | IChannelAdapter interface | #325 | #281 closed |
|
|
||||||
| M7-002 | done | sonnet | M7: Channel Design | Channel message protocol | #325 | #282 closed |
|
|
||||||
| M7-003 | done | sonnet | M7: Channel Design | Matrix integration design | #326 | #283 closed |
|
|
||||||
| M7-004 | done | sonnet | M7: Channel Design | Conversation multiplexing | #326 | #284 closed |
|
|
||||||
| M7-005 | done | sonnet | M7: Channel Design | Remote auth bridging | #326 | #285 closed |
|
|
||||||
| M7-006 | done | sonnet | M7: Channel Design | Agent-to-agent via Matrix | #326 | #286 closed |
|
|
||||||
| M7-007 | done | sonnet | M7: Channel Design | Multi-user isolation in Matrix | #326 | #287 closed |
|
|
||||||
| M7-008 | done | sonnet | M7: Channel Design | channel-protocol.md published | #326 | #288 closed |
|
|
||||||
|
|||||||
555
docs/design/storage-abstraction-middleware.md
Normal file
555
docs/design/storage-abstraction-middleware.md
Normal file
@@ -0,0 +1,555 @@
|
|||||||
|
# Storage & Queue Abstraction — Middleware Architecture
|
||||||
|
|
||||||
|
Design
|
||||||
|
Status: Design (retrofit required)
|
||||||
|
Date: 2026-04-02
|
||||||
|
Context: Agents coupled directly to infrastructure backends, bypassing intended middleware layer
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## The Problem
|
||||||
|
|
||||||
|
Current packages are **direct adapters**, not **middleware**:
|
||||||
|
| Package | Current State | Intended Design |
|
||||||
|
|---------|---------------|-----------------|
|
||||||
|
| `@mosaic/queue` | `ioredis` hardcoded | Interface → BullMQ OR local-files |
|
||||||
|
| `@mosaic/db` | Drizzle + Postgres hardcoded | Interface → Postgres OR SQLite OR JSON/MD |
|
||||||
|
| `@mosaic/memory` | pgvector required | Interface → pgvector OR sqlite-vec OR keyword-search |
|
||||||
|
|
||||||
|
The gateway and TUI import these packages directly, which means they're coupled to specific infrastructure. Users cannot run Mosaic Stack without Postgres + Valkey.
|
||||||
|
|
||||||
|
## The Intended Architecture
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────────────────────────────────────────────────────────┐
|
||||||
|
│ Gateway / TUI / CLI │
|
||||||
|
│ (agnostic of storage backend, talks to middleware) │
|
||||||
|
└───────────────────────────┬─────────────────────────────────────┘
|
||||||
|
│
|
||||||
|
┌───────────────────┼───────────────────┐
|
||||||
|
│ │ │
|
||||||
|
▼─────────────────┴─────────────────┴─────────────────┘
|
||||||
|
| | | |
|
||||||
|
▼─────────────────┴───────────────────┴─────────────────┘
|
||||||
|
| | | |
|
||||||
|
Queue Storage Memory
|
||||||
|
| | | |
|
||||||
|
┌─────────┬─────────┬─────────┬─────────────────────────────────┐
|
||||||
|
| BullMQ | | Local | | Postgres | SQLite | JSON/MD | pgvector | sqlite-vec | keyword |
|
||||||
|
|(Valkey)| |(files) | | | | | |
|
||||||
|
└─────────┴─────────┴─────────┴─────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
The gateway imports the interface, not the backend. At startup it reads config and instantiates the correct adapter.
|
||||||
|
|
||||||
|
## The Drift
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// What should have happened:
|
||||||
|
gateway/queue.service.ts → @mosaic/queue (interface) → queue.adapter.ts
|
||||||
|
|
||||||
|
// What actually happened:
|
||||||
|
gateway/queue.service.ts → @mosaic/queue → ioredis (hardcoded)
|
||||||
|
```
|
||||||
|
|
||||||
|
## The Current State Analysis
|
||||||
|
|
||||||
|
### `@mosaic/queue` (packages/queue/src/queue.ts)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import Redis from 'ioredis'; // ← Direct import of backend
|
||||||
|
|
||||||
|
export function createQueue(config?: QueueConfig): QueueHandle {
|
||||||
|
const url = config?.url ?? process.env['VALKEY_URL'] ?? DEFAULT_VALKEY_URL;
|
||||||
|
const redis = new Redis(url, { maxRetriesPerRequest: 3 });
|
||||||
|
// ...queue ops directly on redis...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Problem:** `ioredis` is imported in the package, not the adapter interface. Consumers cannot swap backends.
|
||||||
|
|
||||||
|
### `@mosaic/db` (packages/db/src/client.ts)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { drizzle, type PostgresJsDatabase } from 'drizzle-orm/postgres-js';
|
||||||
|
import postgres from 'postgres';
|
||||||
|
|
||||||
|
export function createDb(url?: string): DbHandle {
|
||||||
|
const connectionString = url ?? process.env['DATABASE_URL'] ?? DEFAULT_DATABASE_URL;
|
||||||
|
const sql = postgres(connectionString, { max: 20, idle_timeout: 30, connect_timeout: 5 });
|
||||||
|
const db = drizzle(sql, { schema });
|
||||||
|
// ...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Problem:** Drizzle + Postgres is hardcoded. No SQLite, JSON, or file-based options.
|
||||||
|
|
||||||
|
### `@mosaic/memory` (packages/memory/src/memory.ts)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import type { Db } from '@mosaic/db'; // ← Depends on Drizzle/PG
|
||||||
|
|
||||||
|
export function createMemory(db: Db): Memory {
|
||||||
|
return {
|
||||||
|
preferences: createPreferencesRepo(db),
|
||||||
|
insights: createInsightsRepo(db),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Problem:** Memory package is tightly coupled to `@mosaic/db` (which is Postgres-only). No alternative storage backends.
|
||||||
|
|
||||||
|
## The Target Interfaces
|
||||||
|
|
||||||
|
### Queue Interface
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// packages/queue/src/types.ts
|
||||||
|
export interface QueueAdapter {
|
||||||
|
readonly name: string;
|
||||||
|
|
||||||
|
enqueue(queueName: string, payload: TaskPayload): Promise<void>;
|
||||||
|
dequeue(queueName: string): Promise<TaskPayload | null>;
|
||||||
|
length(queueName: string): Promise<number>;
|
||||||
|
publish(channel: string, message: string): Promise<void>;
|
||||||
|
subscribe(channel: string, handler: (message: string) => void): () => void;
|
||||||
|
close(): Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TaskPayload {
|
||||||
|
id: string;
|
||||||
|
type: string;
|
||||||
|
data: Record<string, unknown>;
|
||||||
|
createdAt: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface QueueConfig {
|
||||||
|
type: 'bullmq' | 'local';
|
||||||
|
url?: string; // For bullmq: Valkey/Redis URL
|
||||||
|
dataDir?: string; // For local: directory for JSON persistence
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Storage Interface
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// packages/storage/src/types.ts
|
||||||
|
export interface StorageAdapter {
|
||||||
|
readonly name: string;
|
||||||
|
|
||||||
|
// Entity CRUD
|
||||||
|
  create<T>(collection: string, data: T): Promise<T>;
|
||||||
|
read<T>(collection: string, id: string): Promise<T | null>;
|
||||||
|
  update<T>(collection: string, id: string, data: Partial<T>): Promise<T | null>;
|
||||||
|
delete(collection: string, id: string): Promise<boolean>;
|
||||||
|
|
||||||
|
// Queries
|
||||||
|
find<T>(collection: string, filter: Record<string, unknown>): Promise<T[]>;
|
||||||
|
  findOne<T>(collection: string, filter: Record<string, unknown>): Promise<T | null>;
|
||||||
|
|
||||||
|
// Bulk operations
|
||||||
|
  createMany<T>(collection: string, items: T[]): Promise<T[]>;
|
||||||
|
  updateMany<T>(collection: string, ids: string[], data: Partial<T>): Promise<number>;
|
||||||
|
deleteMany(collection: string, ids: string[]): Promise<number>;
|
||||||
|
|
||||||
|
// Raw queries (for complex queries)
|
||||||
|
query<T>(collection: string, query: string, params?: unknown[]): Promise<T[]>;
|
||||||
|
|
||||||
|
// Transaction support
|
||||||
|
transaction<T>(fn: (tx: StorageTransaction) => Promise<T>): Promise<T>;
|
||||||
|
|
||||||
|
close(): Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface StorageTransaction {
|
||||||
|
commit(): Promise<void>;
|
||||||
|
rollback(): Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface StorageConfig {
|
||||||
|
type: 'postgres' | 'sqlite' | 'files';
|
||||||
|
url?: string; // For postgres
|
||||||
|
path?: string; // For sqlite/files
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Memory Interface (Vector + Preferences)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// packages/memory/src/types.ts
|
||||||
|
export interface MemoryAdapter {
|
||||||
|
readonly name: string;
|
||||||
|
|
||||||
|
// Preferences (key-value storage)
|
||||||
|
getPreference(userId: string, key: string): Promise<unknown | null>;
|
||||||
|
setPreference(userId: string, key: string, value: unknown): Promise<void>;
|
||||||
|
deletePreference(userId: string, key: string): Promise<boolean>;
|
||||||
|
listPreferences(
|
||||||
|
userId: string,
|
||||||
|
category?: string,
|
||||||
|
): Promise<Array<{ key: string; value: unknown }>>;
|
||||||
|
|
||||||
|
// Insights (with optional vector search)
|
||||||
|
storeInsight(insight: NewInsight): Promise<Insight>;
|
||||||
|
getInsight(id: string): Promise<Insight | null>;
|
||||||
|
searchInsights(query: string, limit?: number, filter?: InsightFilter): Promise<SearchResult[]>;
|
||||||
|
deleteInsight(id: string): Promise<boolean>;
|
||||||
|
|
||||||
|
// Embedding provider (optional, null = no vector search)
|
||||||
|
readonly embedder?: EmbeddingProvider | null;
|
||||||
|
|
||||||
|
close(): Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface NewInsight {
|
||||||
|
id: string;
|
||||||
|
userId: string;
|
||||||
|
content: string;
|
||||||
|
embedding?: number[]; // If embedder is available
|
||||||
|
source: 'agent' | 'user' | 'summarization' | 'system';
|
||||||
|
category: 'decision' | 'learning' | 'preference' | 'fact' | 'pattern' | 'general';
|
||||||
|
relevanceScore: number;
|
||||||
|
metadata?: Record<string, unknown>;
|
||||||
|
createdAt: Date;
|
||||||
|
decayedAt?: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface InsightFilter {
|
||||||
|
userId?: string;
|
||||||
|
category?: string;
|
||||||
|
source?: string;
|
||||||
|
minRelevance?: number;
|
||||||
|
fromDate?: Date;
|
||||||
|
toDate?: Date;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SearchResult {
|
||||||
|
documentId: string;
|
||||||
|
content: string;
|
||||||
|
distance: number;
|
||||||
|
metadata?: Record<string, unknown>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface MemoryConfig {
|
||||||
|
type: 'pgvector' | 'sqlite-vec' | 'keyword';
|
||||||
|
storage: StorageAdapter;
|
||||||
|
embedder?: EmbeddingProvider;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface EmbeddingProvider {
|
||||||
|
embed(text: string): Promise<number[]>;
|
||||||
|
embedBatch(texts: string[]): Promise<number[][]>;
|
||||||
|
readonly dimensions: number;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Three Tiers
|
||||||
|
|
||||||
|
### Tier 1: Local (Zero Dependencies)
|
||||||
|
|
||||||
|
**Target:** Single user, single machine, no external services
|
||||||
|
|
||||||
|
| Component | Backend | Storage |
|
||||||
|
| --------- | --------------------------------------------- | ------------ |
|
||||||
|
| Queue | In-process queue | JSON files in `~/.mosaic/queue/` |
|
||||||
|
| Storage | SQLite (better-sqlite3) | `~/.mosaic/data.db` |
|
||||||
|
| Memory | Keyword search | SQLite table |
|
||||||
|
| Vector | None | N/A |
|
||||||
|
|
||||||
|
**Dependencies:**
|
||||||
|
|
||||||
|
- `better-sqlite3` (bundled)
|
||||||
|
- No Postgres, No Valkey, No pgvector
|
||||||
|
|
||||||
|
**Upgrade path:**
|
||||||
|
|
||||||
|
1. Run `mosaic gateway configure` → select "local" tier
|
||||||
|
2. Gateway starts with SQLite database
|
||||||
|
3. Optional: run `mosaic gateway upgrade --tier team` to migrate to Postgres
|
||||||
|
|
||||||
|
### Tier 2: Team (Postgres + Valkey)
|
||||||
|
|
||||||
|
**Target:** Multiple users, shared server, CI/CD environments
|
||||||
|
|
||||||
|
| Component | Backend | Storage |
|
||||||
|
| --------- | -------------- | ------------------------------ |
|
||||||
|
| Queue | BullMQ | Valkey |
|
||||||
|
| Storage | Postgres | Shared PG instance |
|
||||||
|
| Memory | pgvector | Postgres with vector extension |
|
||||||
|
| Vector | LLM embeddings | Configured provider |
|
||||||
|
|
||||||
|
**Dependencies:**
|
||||||
|
|
||||||
|
- PostgreSQL 17+ with pgvector extension
|
||||||
|
- Valkey (Redis-compatible)
|
||||||
|
- LLM provider for embeddings
|
||||||
|
|
||||||
|
**Migration from Local → Team:**
|
||||||
|
|
||||||
|
1. `mosaic gateway backup` → creates dump of SQLite database
|
||||||
|
2. `mosaic gateway upgrade --tier team` → restores to Postgres
|
||||||
|
3. Queue replays from BullMQ (may need manual reconciliation for in-flight jobs)
|
||||||
|
4. Memory embeddings are regenerated if vector search is newly enabled by the upgrade
|
||||||
|
|
||||||
|
### Tier 3: Enterprise (Clustered)
|
||||||
|
|
||||||
|
**Target:** Large teams, multi-region, high availability
|
||||||
|
|
||||||
|
| Component | Backend | Storage |
|
||||||
|
| --------- | --------------------------- | ----------------------------- |
|
||||||
|
| Queue | BullMQ cluster | Multiple Valkey nodes |
|
||||||
|
| Storage | Postgres cluster | Primary + replicas |
|
||||||
|
| Memory | Dedicated vector DB | Qdrant, Pinecone, or pgvector |
|
||||||
|
| Vector | Dedicated embedding service | Separate microservice |
|
||||||
|
|
||||||
|
## MarkdownDB Integration
|
||||||
|
|
||||||
|
For file-based storage, we use [MarkdownDB](https://markdowndb.com) to parse MD files into queryable data.
|
||||||
|
|
||||||
|
**What it provides:**
|
||||||
|
|
||||||
|
- Parses frontmatter (YAML/JSON/TOML)
|
||||||
|
- Extracts links, tags, metadata
|
||||||
|
- Builds index in JSON or SQLite
|
||||||
|
- Queryable via SQL-like interface
|
||||||
|
|
||||||
|
**Usage in Mosaic:**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Local tier with MD files for documents
|
||||||
|
const storage = createStorageAdapter({
|
||||||
|
type: 'files',
|
||||||
|
path: path.join(mosaicHome, 'docs'),
|
||||||
|
markdowndb: {
|
||||||
|
parseFrontmatter: true,
|
||||||
|
extractLinks: true,
|
||||||
|
indexFile: 'index.json',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Dream Mode — Memory Consolidation
|
||||||
|
|
||||||
|
Automated equivalent to Claude Code's "Dream: Memory Consolidation" cycle.
|
||||||
|
|
||||||
|
**Trigger:** Every 24 hours (if 5+ sessions active)
|
||||||
|
|
||||||
|
**Phases:**
|
||||||
|
|
||||||
|
1. **Orient** — What happened, what's the current state
|
||||||
|
- Scan recent session logs
|
||||||
|
- Identify active tasks, missions, conversations
|
||||||
|
- Calculate time window (last 24h)
|
||||||
|
|
||||||
|
2. **Gather** — Pull in relevant context
|
||||||
|
- Load conversations, decisions, agent logs
|
||||||
|
- Extract key interactions and outcomes
|
||||||
|
- Identify patterns and learnings
|
||||||
|
|
||||||
|
3. **Consolidate** — Summarize and compress
|
||||||
|
- Generate summary of the last 24h
|
||||||
|
- Extract key decisions and their rationale
|
||||||
|
- Identify recurring patterns
|
||||||
|
- Compress verbose logs into concise insights
|
||||||
|
|
||||||
|
4. **Prune** — Archive and cleanup
|
||||||
|
- Archive raw session files to dated folders
|
||||||
|
- Delete redundant/temporary data
|
||||||
|
- Update MEMORY.md with consolidated content
|
||||||
|
- Update insight relevance scores
|
||||||
|
|
||||||
|
**Implementation:**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// In @mosaic/dream (new package)
|
||||||
|
export async function runDreamCycle(config: DreamConfig): Promise<DreamResult> {
|
||||||
|
const memory = await loadMemoryAdapter(config.storage);
|
||||||
|
|
||||||
|
// Orient
|
||||||
|
const sessions = await memory.getRecentSessions(24 * 60 * 60 * 1000);
|
||||||
|
if (sessions.length < 5) return { skipped: true, reason: 'insufficient_sessions' };
|
||||||
|
|
||||||
|
// Gather
|
||||||
|
const context = await gatherContext(memory, sessions);
|
||||||
|
|
||||||
|
// Consolidate
|
||||||
|
const consolidated = await consolidateWithLLM(context, config.llm);
|
||||||
|
|
||||||
|
// Prune
|
||||||
|
await pruneArchivedData(memory, config.retention);
|
||||||
|
|
||||||
|
// Store consolidated insights
|
||||||
|
await memory.storeInsights(consolidated.insights);
|
||||||
|
|
||||||
|
return {
|
||||||
|
sessionsProcessed: sessions.length,
|
||||||
|
insightsCreated: consolidated.insights.length,
|
||||||
|
bytesPruned: consolidated.bytesRemoved,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Retrofit Plan
|
||||||
|
|
||||||
|
### Phase 1: Interface Extraction (2-3 days)
|
||||||
|
|
||||||
|
**Goal:** Define interfaces without changing existing behavior
|
||||||
|
|
||||||
|
1. Create `packages/queue/src/types.ts` with `QueueAdapter` interface
|
||||||
|
2. Create `packages/storage/src/types.ts` with `StorageAdapter` interface
|
||||||
|
3. Create `packages/memory/src/types.ts` with `MemoryAdapter` interface (refactor existing)
|
||||||
|
4. Add adapter registry pattern to each package
|
||||||
|
5. No breaking changes — existing code continues to work
|
||||||
|
|
||||||
|
### Phase 2: Refactor Existing to Adapters (3-5 days)
|
||||||
|
|
||||||
|
**Goal:** Move existing implementations behind adapters
|
||||||
|
|
||||||
|
#### 2.1 Queue Refactor
|
||||||
|
|
||||||
|
1. Rename `packages/queue/src/queue.ts` → `packages/queue/src/adapters/bullmq.ts`
|
||||||
|
2. Create `packages/queue/src/index.ts` to export factory function
|
||||||
|
3. Factory function reads config, instantiates correct adapter
|
||||||
|
4. Update gateway imports to use factory
|
||||||
|
|
||||||
|
#### 2.2 Storage Refactor
|
||||||
|
|
||||||
|
1. Create `packages/storage/` (new package)
|
||||||
|
2. Move Drizzle logic to `packages/storage/src/adapters/postgres.ts`
|
||||||
|
3. Create SQLite adapter in `packages/storage/src/adapters/sqlite.ts`
|
||||||
|
4. Update gateway to use storage factory
|
||||||
|
5. Deprecate direct `@mosaic/db` imports
|
||||||
|
|
||||||
|
#### 2.3 Memory Refactor
|
||||||
|
|
||||||
|
1. Extract existing logic to `packages/memory/src/adapters/pgvector.ts`
|
||||||
|
2. Create keyword adapter in `packages/memory/src/adapters/keyword.ts`
|
||||||
|
3. Update vector-store.ts to be adapter-agnostic
|
||||||
|
|
||||||
|
### Phase 3: Local Tier Implementation (2-3 days)
|
||||||
|
|
||||||
|
**Goal:** Zero-dependency baseline
|
||||||
|
|
||||||
|
1. Implement `packages/queue/src/adapters/local.ts` (in-process + JSON persistence)
|
||||||
|
2. Implement `packages/storage/src/adapters/files.ts` (JSON + MD via MarkdownDB)
|
||||||
|
3. Implement `packages/memory/src/adapters/keyword.ts` (TF-IDF search)
|
||||||
|
4. Add `packages/dream/` for consolidation cycle
|
||||||
|
5. Wire up local tier in gateway startup
|
||||||
|
|
||||||
|
### Phase 4: Configuration System (1-2 days)
|
||||||
|
|
||||||
|
**Goal:** Runtime backend selection
|
||||||
|
|
||||||
|
1. Create `packages/config/src/storage.ts` for storage configuration
|
||||||
|
2. Add `mosaic.config.ts` schema with storage tier settings
|
||||||
|
3. Update gateway to read config on startup
|
||||||
|
4. Add `mosaic gateway configure` CLI command
|
||||||
|
5. Add tier migration commands (`mosaic gateway upgrade`)
|
||||||
|
|
||||||
|
### Phase 5: Testing & Documentation (2-3 days)
|
||||||
|
|
||||||
|
1. Unit tests for each adapter
|
||||||
|
2. Integration tests for factory pattern
|
||||||
|
3. Migration tests (local → team)
|
||||||
|
4. Update README and architecture docs
|
||||||
|
5. Add configuration guide
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## File Changes Summary
|
||||||
|
|
||||||
|
### New Files
|
||||||
|
|
||||||
|
```
|
||||||
|
packages/
|
||||||
|
├── config/
|
||||||
|
│ └── src/
|
||||||
|
│ ├── storage.ts # Storage config schema
|
||||||
|
│ └── index.ts
|
||||||
|
├── dream/ # NEW: Dream mode consolidation
|
||||||
|
│ ├── src/
|
||||||
|
│ │ ├── index.ts
|
||||||
|
│ │ ├── orient.ts
|
||||||
|
│ │ ├── gather.ts
|
||||||
|
│ │ ├── consolidate.ts
|
||||||
|
│ │ └── prune.ts
|
||||||
|
│ └── package.json
|
||||||
|
├── queue/
|
||||||
|
│ └── src/
|
||||||
|
│ ├── types.ts # NEW: QueueAdapter interface
|
||||||
|
│ ├── index.ts # NEW: Factory function
|
||||||
|
│ └── adapters/
|
||||||
|
│ ├── bullmq.ts # MOVED from queue.ts
|
||||||
|
│ └── local.ts # NEW: In-process adapter
|
||||||
|
├── storage/ # NEW: Storage abstraction
|
||||||
|
│ ├── src/
|
||||||
|
│ │ ├── types.ts # StorageAdapter interface
|
||||||
|
│ │ ├── index.ts # Factory function
|
||||||
|
│ │ └── adapters/
|
||||||
|
│ │ ├── postgres.ts # MOVED from @mosaic/db
|
||||||
|
│ │ ├── sqlite.ts # NEW: SQLite adapter
|
||||||
|
│ │ └── files.ts # NEW: JSON/MD adapter
|
||||||
|
│ └── package.json
|
||||||
|
└── memory/
|
||||||
|
└── src/
|
||||||
|
├── types.ts # UPDATED: MemoryAdapter interface
|
||||||
|
├── index.ts # UPDATED: Factory function
|
||||||
|
└── adapters/
|
||||||
|
├── pgvector.ts # EXTRACTED from existing code
|
||||||
|
├── sqlite-vec.ts # NEW: SQLite with vectors
|
||||||
|
└── keyword.ts # NEW: TF-IDF search
|
||||||
|
```
|
||||||
|
|
||||||
|
### Modified Files
|
||||||
|
|
||||||
|
```
|
||||||
|
packages/
|
||||||
|
├── db/ # DEPRECATED: Logic moved to storage adapters
|
||||||
|
├── queue/
|
||||||
|
│ └── src/
|
||||||
|
│ └── queue.ts # → adapters/bullmq.ts
|
||||||
|
├── memory/
|
||||||
|
│ ├── src/
|
||||||
|
│ │ ├── memory.ts # → use factory
|
||||||
|
│ │ ├── insights.ts # → use factory
|
||||||
|
│ │ └── preferences.ts # → use factory
|
||||||
|
│ └── package.json # Remove pgvector from dependencies
|
||||||
|
└── gateway/
|
||||||
|
└── src/
|
||||||
|
├── database/
|
||||||
|
│ └── database.module.ts # Update to use storage factory
|
||||||
|
├── memory/
|
||||||
|
│ └── memory.module.ts # Update to use memory factory
|
||||||
|
└── queue/
|
||||||
|
└── queue.module.ts # Update to use queue factory
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Breaking Changes
|
||||||
|
|
||||||
|
1. **`@mosaic/db`** → **`@mosaic/storage`** (with migration guide)
|
||||||
|
2. Direct `ioredis` imports → Use `@mosaic/queue` factory
|
||||||
|
3. Direct `pgvector` queries → Use `@mosaic/memory` factory
|
||||||
|
4. Gateway startup now requires storage config (defaults to local)
|
||||||
|
|
||||||
|
## Non-Breaking Migration Path
|
||||||
|
|
||||||
|
1. Existing deployments with Postgres/Valkey continue to work (default config)
|
||||||
|
2. New deployments can choose local tier
|
||||||
|
3. Migration commands available when ready to upgrade
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Success Criteria
|
||||||
|
|
||||||
|
- [ ] Local tier runs with zero external dependencies
|
||||||
|
- [ ] All three tiers (local, team, enterprise) work correctly
|
||||||
|
- [ ] Factory pattern correctly selects backend at runtime
|
||||||
|
- [ ] Migration from local → team preserves all data
|
||||||
|
- [ ] Dream mode consolidates 24h of sessions
|
||||||
|
- [ ] Documentation covers all three tiers and migration paths
|
||||||
|
- [ ] All existing tests pass
|
||||||
|
- [ ] New adapters have >80% coverage
|
||||||
1256
docs/reviews/consolidation-board-memo.md
Normal file
1256
docs/reviews/consolidation-board-memo.md
Normal file
File diff suppressed because it is too large
Load Diff
30
docs/scratchpads/ci-docker-publish-20260330.md
Normal file
30
docs/scratchpads/ci-docker-publish-20260330.md
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
# Scratchpad: CI Docker Publish (2026-03-30)
|
||||||
|
|
||||||
|
- Objective: Add Woodpecker Docker build+push steps for gateway and web images on `main` pushes.
|
||||||
|
- Scope: `.woodpecker/ci.yml`.
|
||||||
|
- Constraints:
|
||||||
|
- Use existing Dockerfiles at `docker/gateway.Dockerfile` and `docker/web.Dockerfile`.
|
||||||
|
- Publish to `git.mosaicstack.dev` with `from_secret` credentials.
|
||||||
|
- Tag both `latest` and `${CI_COMMIT_SHA}`.
|
||||||
|
- Do not run publish steps on pull requests.
|
||||||
|
- ASSUMPTION: Publishing `latest` is required by the task for registry convenience, even though immutable tags remain the safer deployment reference.
|
||||||
|
- Findings:
|
||||||
|
- Existing pipeline already has `build` after `lint`, `format`, and `test`.
|
||||||
|
- `apps/gateway/package.json` uses `tsc` for `build`; no Prisma dependency or `prisma generate` hook is present.
|
||||||
|
- Plan:
|
||||||
|
1. Patch `.woodpecker/ci.yml` to keep `build` as the quality gate successor and add `publish-gateway` plus `publish-web`.
|
||||||
|
2. Validate YAML and run repo quality gates relevant to the change.
|
||||||
|
3. Review the diff, then commit/push/PR if validation passes.
|
||||||
|
- Verification:
|
||||||
|
- `python3 -c "import yaml; yaml.safe_load(open('.woodpecker/ci.yml'))" && echo "YAML valid"`
|
||||||
|
- `pnpm lint`
|
||||||
|
- `pnpm typecheck`
|
||||||
|
- `pnpm format:check`
|
||||||
|
- `docker compose up -d`
|
||||||
|
- `pnpm --filter @mosaic/db db:push`
|
||||||
|
- `pnpm test`
|
||||||
|
- `pnpm build`
|
||||||
|
- Manual review of `.woodpecker/ci.yml` diff: publish steps are main-only, depend on `build`, and use secret-backed registry auth plus dual tags.
|
||||||
|
- Risks:
|
||||||
|
- Pipeline behavior beyond YAML validation cannot be fully proven locally; remote Woodpecker execution will be the final situational check after push.
|
||||||
|
- Repo baseline required two existing `plugins/macp` files to be reformatted before `pnpm format:check` would pass.
|
||||||
20
docs/scratchpads/fix-ci-migrations-20260330.md
Normal file
20
docs/scratchpads/fix-ci-migrations-20260330.md
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# Scratchpad — fix-ci-migrations-20260330
|
||||||
|
|
||||||
|
## Objective
|
||||||
|
|
||||||
|
- Fix Drizzle migration journal ordering so fresh Postgres instances apply all schema migrations in CI.
|
||||||
|
|
||||||
|
## Plan
|
||||||
|
|
||||||
|
- Inspect migration SQL, journal, and snapshot chain.
|
||||||
|
- Patch metadata consistently.
|
||||||
|
- Run full test suite.
|
||||||
|
- Record result and risks.
|
||||||
|
|
||||||
|
## Progress
|
||||||
|
|
||||||
|
- Inspected migration files and confirmed 0001_cynical_ultimatum.sql is missing from journal while 0002 depends on it.
|
||||||
|
- Patched `packages/db/drizzle/meta/_journal.json` to insert `0001_cynical_ultimatum` at idx 1 and shift later entries to idx 2-7.
|
||||||
|
- Verified snapshot content is already semantically aligned with the fixed ordering: the missing tables exist in `0001_snapshot.json`, and `mutable` appears in later snapshots.
|
||||||
|
- Installed workspace dependencies in this worktree to make the requested test command runnable.
|
||||||
|
- Ran `pnpm test` successfully; gateway reported `347 passed (347)` and Turbo finished with `35 successful, 35 total`.
|
||||||
41
docs/scratchpads/fix-turbo-env-passthrough.md
Normal file
41
docs/scratchpads/fix-turbo-env-passthrough.md
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
# Task Scratchpad — Turbo DATABASE_URL passthrough
|
||||||
|
|
||||||
|
## Objective
|
||||||
|
|
||||||
|
- Fix CI test execution under Turborepo by passing DB-related environment variables through the `test` task.
|
||||||
|
- Preserve the repo's documented local Postgres fallback on host port `5433`.
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
- Edit `turbo.json`
|
||||||
|
- Run baseline checks requested by the user and repo rules where feasible
|
||||||
|
- Publish branch and open PR if checks are acceptable
|
||||||
|
|
||||||
|
## Assumptions
|
||||||
|
|
||||||
|
- ASSUMPTION: No PRD update is required because this is a narrow CI/config bug fix that does not change product requirements.
|
||||||
|
- ASSUMPTION: `docs/TASKS.md` remains untouched because project guidance marks it orchestrator-only.
|
||||||
|
- ASSUMPTION: Local Postgres-backed tests are not runnable in this worktree, so baseline verification will rely on static gates unless the environment already provides DB access.
|
||||||
|
|
||||||
|
## Plan
|
||||||
|
|
||||||
|
- Record current repo state and avoid touching `.mosaic/orchestrator/session.lock`
|
||||||
|
- Add Turbo DB env declarations for test execution and cache invalidation
|
||||||
|
- Run `pnpm format:check`, `pnpm typecheck`, and `pnpm lint`
|
||||||
|
- Review the diff, then commit/push/PR via provider-appropriate flow
|
||||||
|
|
||||||
|
## Verification Log
|
||||||
|
|
||||||
|
- `pnpm install` completed in this worktree so repo tooling is available
|
||||||
|
- `pnpm format:check` initially failed on `turbo.json`; fixed by formatting the file
|
||||||
|
- `pnpm format:check` passed after formatting
|
||||||
|
- `pnpm typecheck` passed
|
||||||
|
- `pnpm lint` passed
|
||||||
|
- Independent review flagged two important adjustments:
|
||||||
|
- keep `DEFAULT_DATABASE_URL` on `5433` because local Docker Compose maps host `5433 -> container 5432`
|
||||||
|
- prefer Turbo `env` over `passThroughEnv` so DB config changes also affect the test cache key
|
||||||
|
|
||||||
|
## Risks
|
||||||
|
|
||||||
|
- Existing mission/orchestrator files are dirty in the worktree and must be excluded from the scoped change.
|
||||||
|
- Local tests were not run because no Postgres service is available in this worktree; CI remains the primary situational validation for the test-task behavior.
|
||||||
16
docs/scratchpads/macp-oc-bridge-20260330.md
Normal file
16
docs/scratchpads/macp-oc-bridge-20260330.md
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
# Scratchpad: MACP OC Bridge (2026-03-30)
|
||||||
|
|
||||||
|
- Objective: Replace the OpenClaw MACP plugin's Pi-direct `runTurn` with the MACP controller queue bridge.
|
||||||
|
- Scope: `plugins/macp/src/macp-runtime.ts`, `plugins/macp/src/index.ts`, `plugins/macp/openclaw.plugin.json`, `plugins/macp/README.md`.
|
||||||
|
- Plan:
|
||||||
|
1. Read controller/dispatcher/plugin docs and confirm queue/result contract.
|
||||||
|
2. Queue tasks through `.mosaic/orchestrator/tasks.json` using a brief file and controller trigger.
|
||||||
|
3. Poll result JSON, stream output back to ACP, then validate with typecheck/format checks.
|
||||||
|
- Risks:
|
||||||
|
- The repo orchestrator must be enabled in `.mosaic/orchestrator/config.json`.
|
||||||
|
- Result JSON does not always embed worker output, so the runtime falls back to metadata-linked output files or a formatted result summary.
|
||||||
|
- Verification:
|
||||||
|
- `npx tsc --noEmit --target es2022 --module nodenext --moduleResolution nodenext --skipLibCheck plugins/macp/src/macp-runtime.ts plugins/macp/src/index.ts`
|
||||||
|
- `pnpm prettier --write "plugins/macp/**/*.{ts,json,md}"`
|
||||||
|
- `pnpm format:check`
|
||||||
|
- `npx tsc --noEmit -p plugins/macp/tsconfig.json` still fails in this branch because `plugins/macp/tsconfig.json` extends a missing `packages/config/typescript/library.json` file and also pulls in pre-existing external OpenClaw type noise.
|
||||||
265
docs/tasks/WP1-forge-package.md
Normal file
265
docs/tasks/WP1-forge-package.md
Normal file
@@ -0,0 +1,265 @@
|
|||||||
|
# WP1: packages/forge — Forge Pipeline Package
|
||||||
|
|
||||||
|
## Context
|
||||||
|
|
||||||
|
Port the Forge progressive refinement pipeline from Python (~/src/mosaic-stack/forge/) to TypeScript as `packages/forge` in this monorepo. The pipeline markdown assets (stages, agents, personas, rails, gates, templates) are already copied to `packages/forge/pipeline/`. This task is the TypeScript implementation layer.
|
||||||
|
|
||||||
|
**Board decisions that constrain this work:**
|
||||||
|
|
||||||
|
- Abstract TaskExecutor interface — packages/forge must NOT hard-import packages/coord. Define an abstract interface; coord satisfies it.
|
||||||
|
- Clean index.ts exports, no internal path leakage, no hardcoded paths
|
||||||
|
- 85% test coverage on TS implementation files (markdown assets excluded)
|
||||||
|
- Test strategy for non-deterministic AI orchestration: fixture-based integration tests
|
||||||
|
- OpenBrain is OUT OF SCOPE
|
||||||
|
- ESM only, zero Python
|
||||||
|
|
||||||
|
**Dependencies available:**
|
||||||
|
|
||||||
|
- `@mosaic/macp` (packages/macp) is built and provides: GateEntry, GateResult, Task types, credential resolution, gate running, event emission
|
||||||
|
|
||||||
|
## Source Files (Python → TypeScript)
|
||||||
|
|
||||||
|
### 1. types.ts
|
||||||
|
|
||||||
|
Define all Forge-specific types:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Stage specification
|
||||||
|
interface StageSpec {
|
||||||
|
number: string;
|
||||||
|
title: string;
|
||||||
|
dispatch: 'exec' | 'yolo' | 'pi';
|
||||||
|
type: 'research' | 'review' | 'coding' | 'deploy';
|
||||||
|
gate: string;
|
||||||
|
promptFile: string;
|
||||||
|
qualityGates: (string | GateEntry)[];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Brief classification
|
||||||
|
type BriefClass = 'strategic' | 'technical' | 'hotfix';
|
||||||
|
type ClassSource = 'cli' | 'frontmatter' | 'auto';
|
||||||
|
|
||||||
|
// Run manifest (persisted to disk)
|
||||||
|
interface RunManifest {
|
||||||
|
runId: string;
|
||||||
|
brief: string;
|
||||||
|
codebase: string;
|
||||||
|
briefClass: BriefClass;
|
||||||
|
classSource: ClassSource;
|
||||||
|
forceBoard: boolean;
|
||||||
|
createdAt: string;
|
||||||
|
updatedAt: string;
|
||||||
|
currentStage: string;
|
||||||
|
status: 'in_progress' | 'completed' | 'failed' | 'interrupted' | 'rejected';
|
||||||
|
stages: Record<string, StageStatus>;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Abstract task executor (decouples from packages/coord)
|
||||||
|
interface TaskExecutor {
|
||||||
|
submitTask(task: ForgeTask): Promise<void>;
|
||||||
|
waitForCompletion(taskId: string, timeoutMs: number): Promise<TaskResult>;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Persona override config
|
||||||
|
interface ForgeConfig {
|
||||||
|
board?: {
|
||||||
|
additionalMembers?: string[];
|
||||||
|
skipMembers?: string[];
|
||||||
|
};
|
||||||
|
specialists?: {
|
||||||
|
alwaysInclude?: string[];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. constants.ts
|
||||||
|
|
||||||
|
**Source:** Top of `~/src/mosaic-stack/forge/lib` (ALL_STAGES, LABELS, STAGE_SPECS equivalent) + `~/src/mosaic-stack/forge/pipeline/orchestrator/stage_adapter.py` (STAGE_TIMEOUTS)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export const STAGE_SEQUENCE = [
|
||||||
|
'00-intake',
|
||||||
|
'00b-discovery',
|
||||||
|
'01-board',
|
||||||
|
'01b-brief-analyzer',
|
||||||
|
'02-planning-1',
|
||||||
|
'03-planning-2',
|
||||||
|
'04-planning-3',
|
||||||
|
'05-coding',
|
||||||
|
'06-review',
|
||||||
|
'07-remediate',
|
||||||
|
'08-test',
|
||||||
|
'09-deploy',
|
||||||
|
];
|
||||||
|
|
||||||
|
export const STAGE_TIMEOUTS: Record<string, number> = {
|
||||||
|
'00-intake': 120,
|
||||||
|
'00b-discovery': 300,
|
||||||
|
'01-board': 120,
|
||||||
|
'02-planning-1': 600,
|
||||||
|
// ... etc
|
||||||
|
};
|
||||||
|
|
||||||
|
export const STAGE_LABELS: Record<string, string> = {
|
||||||
|
'00-intake': 'INTAKE',
|
||||||
|
// ... etc
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
Also: STRATEGIC_KEYWORDS, TECHNICAL_KEYWORDS for brief classification.
|
||||||
|
|
||||||
|
### 3. brief-classifier.ts
|
||||||
|
|
||||||
|
**Source:** `classify_brief()`, `parse_brief_frontmatter()`, `stages_for_class()` from `~/src/mosaic-stack/forge/lib`
|
||||||
|
|
||||||
|
- Auto-classify brief by keyword analysis (strategic vs technical)
|
||||||
|
- Parse YAML frontmatter for explicit `class:` field
|
||||||
|
- CLI flag override
|
||||||
|
- Return stage list based on classification (strategic = full pipeline, technical = skip board, hotfix = skip board + brief analyzer)
|
||||||
|
|
||||||
|
### 4. stage-adapter.ts
|
||||||
|
|
||||||
|
**Source:** `~/src/mosaic-stack/forge/pipeline/orchestrator/stage_adapter.py`
|
||||||
|
|
||||||
|
- `mapStageToTask()`: Convert a Forge stage into a task compatible with TaskExecutor
|
||||||
|
- Stage briefs written to `{runDir}/{stageName}/brief.md`
|
||||||
|
- Result paths at `{runDir}/{stageName}/result.json`
|
||||||
|
- Previous results read from disk at runtime (not baked into brief)
|
||||||
|
- Per-stage timeouts from STAGE_TIMEOUTS
|
||||||
|
- depends_on chain built from stage sequence
|
||||||
|
|
||||||
|
### 5. board-tasks.ts
|
||||||
|
|
||||||
|
**Source:** `~/src/mosaic-stack/forge/pipeline/orchestrator/board_tasks.py`
|
||||||
|
|
||||||
|
- `loadBoardPersonas()`: Read all .md files from `pipeline/agents/board/`
|
||||||
|
- `generateBoardTasks()`: One task per persona + synthesis task
|
||||||
|
- Synthesis depends on all persona tasks with `depends_on_policy: 'all_terminal'`
|
||||||
|
- Persona briefs include role description + brief under review
|
||||||
|
- Synthesis script merges independent reviews into board memo
|
||||||
|
|
||||||
|
### 6. pipeline-runner.ts
|
||||||
|
|
||||||
|
**Source:** `~/src/mosaic-stack/forge/pipeline/orchestrator/pipeline_runner.py` + `~/src/mosaic-stack/forge/lib` (cmd_run, cmd_resume, cmd_status)
|
||||||
|
|
||||||
|
- `runPipeline(briefPath, projectRoot, options)`: Full pipeline execution
|
||||||
|
- Creates run directory at `{projectRoot}/.forge/runs/{runId}/`
|
||||||
|
- Generates tasks for all stages, submits to TaskExecutor
|
||||||
|
- Tracks manifest.json with stage statuses
|
||||||
|
- `resumePipeline(runDir)`: Pick up from last incomplete stage
|
||||||
|
- `getPipelineStatus(runDir)`: Read manifest and report
|
||||||
|
|
||||||
|
**Key difference from Python:** Run output goes to PROJECT-scoped `.forge/runs/`, not inside the Forge package.
|
||||||
|
|
||||||
|
### 7. Persona Override System (NEW — not in Python)
|
||||||
|
|
||||||
|
- Base personas read from `packages/forge/pipeline/agents/`
|
||||||
|
- Project overrides read from `{projectRoot}/.forge/personas/{role}.md`
|
||||||
|
- Merge strategy: project persona content APPENDED to base persona (not replaced)
|
||||||
|
- Board composition configurable via `{projectRoot}/.forge/config.yaml`
|
||||||
|
- If no project config exists, use defaults (all base personas, no overrides)
|
||||||
|
|
||||||
|
## Package Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
packages/forge/
|
||||||
|
├── src/
|
||||||
|
│ ├── index.ts
|
||||||
|
│ ├── types.ts
|
||||||
|
│ ├── constants.ts
|
||||||
|
│ ├── brief-classifier.ts
|
||||||
|
│ ├── stage-adapter.ts
|
||||||
|
│ ├── board-tasks.ts
|
||||||
|
│ ├── pipeline-runner.ts
|
||||||
|
│ └── persona-loader.ts
|
||||||
|
├── pipeline/ # Already copied (WP4) — markdown assets
|
||||||
|
│ ├── stages/
|
||||||
|
│ ├── agents/
|
||||||
|
│ ├── rails/
|
||||||
|
│ ├── gates/
|
||||||
|
│ └── templates/
|
||||||
|
├── __tests__/
|
||||||
|
│ ├── brief-classifier.test.ts
|
||||||
|
│ ├── stage-adapter.test.ts
|
||||||
|
│ ├── board-tasks.test.ts
|
||||||
|
│ ├── pipeline-runner.test.ts
|
||||||
|
│ └── persona-loader.test.ts
|
||||||
|
├── package.json
|
||||||
|
├── tsconfig.json
|
||||||
|
└── vitest.config.ts
|
||||||
|
```
|
||||||
|
|
||||||
|
## Package.json
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "@mosaic/forge",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"type": "module",
|
||||||
|
"exports": {
|
||||||
|
".": "./src/index.ts"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@mosaic/macp": "workspace:*"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"vitest": "workspace:*",
|
||||||
|
"typescript": "workspace:*"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Only dependency: @mosaic/macp (for gate types, event emission).
|
||||||
|
|
||||||
|
## Test Strategy (Board requirement)
|
||||||
|
|
||||||
|
**Deterministic code (brief-classifier, stage-adapter, board-tasks, persona-loader, constants):**
|
||||||
|
|
||||||
|
- Standard unit tests with known inputs/outputs
|
||||||
|
- 100% of classification logic, stage mapping, persona loading covered
|
||||||
|
|
||||||
|
**Non-deterministic code (pipeline-runner):**
|
||||||
|
|
||||||
|
- Fixture-based integration tests using a mock TaskExecutor
|
||||||
|
- Mock executor returns pre-recorded results for each stage
|
||||||
|
- Tests verify: manifest progression, stage ordering, dependency enforcement, resume behavior, error handling
|
||||||
|
- NO real AI calls in tests
|
||||||
|
|
||||||
|
**Markdown assets:** Excluded from coverage measurement (configure vitest to exclude `pipeline/` directory).
|
||||||
|
|
||||||
|
## ESM Requirements
|
||||||
|
|
||||||
|
- `"type": "module"` in package.json
|
||||||
|
- NodeNext module resolution in tsconfig
|
||||||
|
- `.js` extensions in all imports
|
||||||
|
- No CommonJS
|
||||||
|
|
||||||
|
## Key Design: Abstract TaskExecutor
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// In packages/forge/src/types.ts
|
||||||
|
export interface TaskExecutor {
|
||||||
|
submitTask(task: ForgeTask): Promise<void>;
|
||||||
|
waitForCompletion(taskId: string, timeoutMs: number): Promise<TaskResult>;
|
||||||
|
getTaskStatus(taskId: string): Promise<TaskStatus>;
|
||||||
|
}
|
||||||
|
|
||||||
|
// In packages/coord (or wherever the concrete impl lives)
|
||||||
|
export class CoordTaskExecutor implements TaskExecutor {
|
||||||
|
// ... uses packages/coord runner
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This means packages/forge can be tested with a mock executor and deployed with any backend.
|
||||||
|
|
||||||
|
## Asset Resolution
|
||||||
|
|
||||||
|
Pipeline markdown assets (stages, personas, rails) must be resolved relative to the package installation, NOT hardcoded paths:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Use import.meta.url to find package root
|
||||||
|
const PACKAGE_ROOT = new URL('..', import.meta.url).pathname;
|
||||||
|
const PIPELINE_DIR = path.join(PACKAGE_ROOT, 'pipeline');
|
||||||
|
```
|
||||||
|
|
||||||
|
Project-level overrides resolved relative to projectRoot parameter.
|
||||||
150
docs/tasks/WP2-macp-package.md
Normal file
150
docs/tasks/WP2-macp-package.md
Normal file
@@ -0,0 +1,150 @@
|
|||||||
|
# WP2: packages/macp — MACP Protocol Package
|
||||||
|
|
||||||
|
## Context
|
||||||
|
|
||||||
|
Port the MACP protocol layer from Python (in ~/src/mosaic-stack/tools/macp/) to TypeScript as `packages/macp` in this monorepo. This package provides the foundational protocol types, quality gate execution, credential resolution, and event system that `packages/coord` and `plugins/macp` depend on.
|
||||||
|
|
||||||
|
**Board decisions that constrain this work:**
|
||||||
|
|
||||||
|
- No Python in the new repo — everything rewrites to TypeScript
|
||||||
|
- OpenBrain learning capture/recall is OUT OF SCOPE (deferred to future brief)
|
||||||
|
- 85% test coverage on TS implementation files
|
||||||
|
- Credential resolver behavior must be captured as test fixtures BEFORE rewrite
|
||||||
|
- Clean index.ts exports, no internal path leakage
|
||||||
|
|
||||||
|
## Source Files (Python → TypeScript)
|
||||||
|
|
||||||
|
### 1. credential-resolver.ts
|
||||||
|
|
||||||
|
**Source:** `~/src/mosaic-stack/tools/macp/dispatcher/credential_resolver.py`
|
||||||
|
|
||||||
|
Resolution order (MUST preserve exactly):
|
||||||
|
|
||||||
|
1. Mosaic credential files (`~/.config/mosaic/credentials/{provider}.env`)
|
||||||
|
2. OpenClaw config (`~/.openclaw/openclaw.json`) — env block + models.providers.{provider}.apiKey
|
||||||
|
3. Ambient environment variables
|
||||||
|
4. CredentialError (failure)
|
||||||
|
|
||||||
|
Key behaviors to preserve:
|
||||||
|
|
||||||
|
- Provider registry: anthropic, openai, zai → env var names + credential file paths + OC config paths
|
||||||
|
- Dotenv parser: handles single/double quotes, comments, blank lines
|
||||||
|
- JSON5 stripping: placeholder-extraction approach (NOT naive regex) — protects URLs and timestamps inside string values
|
||||||
|
- OC config permission check: warn on world-readable, skip if wrong owner
|
||||||
|
- Redacted marker detection: `__OPENCLAW_REDACTED__` values skipped
|
||||||
|
- Task-level override via `credentials.provider_key_env`
|
||||||
|
|
||||||
|
### 2. gate-runner.ts
|
||||||
|
|
||||||
|
**Source:** `~/src/mosaic-stack/tools/macp/controller/gate_runner.py`
|
||||||
|
|
||||||
|
Three gate types:
|
||||||
|
|
||||||
|
- `mechanical`: shell command, pass = exit code 0
|
||||||
|
- `ai-review`: shell command producing JSON, parse findings, fail on blockers
|
||||||
|
- `ci-pipeline`: placeholder (always passes for now)
|
||||||
|
|
||||||
|
Key behaviors:
|
||||||
|
|
||||||
|
- `normalize_gate()`: accepts string or dict, normalizes to gate entry
|
||||||
|
- `run_gate()`: executes single gate, returns result with pass/fail
|
||||||
|
- `run_gates()`: executes all gates, emits events, returns (all_passed, results)
|
||||||
|
- AI review parsing: `_count_ai_findings()` reads stats.blockers or findings[].severity
|
||||||
|
- `fail_on` modes: "blocker" (default) or "any"
|
||||||
|
|
||||||
|
### 3. event-emitter.ts
|
||||||
|
|
||||||
|
**Source:** `~/src/mosaic-stack/tools/macp/controller/gate_runner.py` (emit_event, append_event functions) + `~/src/mosaic-stack/tools/macp/events/`
|
||||||
|
|
||||||
|
- Append structured events to ndjson file
|
||||||
|
- Event types: task.assigned, task.started, task.completed, task.failed, task.escalated, task.gated, task.retry.scheduled, rail.check.started, rail.check.passed, rail.check.failed
|
||||||
|
- Each event: event_id (uuid), event_type, task_id, status, timestamp, source, message, metadata
|
||||||
|
|
||||||
|
### 4. types.ts
|
||||||
|
|
||||||
|
**Source:** `~/src/mosaic-stack/tools/macp/protocol/task.schema.json`
|
||||||
|
|
||||||
|
TypeScript types for:
|
||||||
|
|
||||||
|
- Task (id, title, status, dispatch, runtime, depends_on, depends_on_policy, quality_gates, timeout_seconds, metadata, etc.)
|
||||||
|
- Event (event_id, event_type, task_id, status, timestamp, source, message, metadata)
|
||||||
|
- GateResult (command, exit_code, type, passed, output, findings, blockers)
|
||||||
|
- TaskResult (task_id, status, completed_at, exit_code, gate_results, files_changed, etc.)
|
||||||
|
- CredentialError, ProviderRegistry
|
||||||
|
|
||||||
|
### 5. schemas/ (copy)
|
||||||
|
|
||||||
|
Copy `~/src/mosaic-stack/tools/macp/protocol/task.schema.json` as-is.
|
||||||
|
|
||||||
|
## Package Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
packages/macp/
|
||||||
|
├── src/
|
||||||
|
│ ├── index.ts
|
||||||
|
│ ├── types.ts
|
||||||
|
│ ├── credential-resolver.ts
|
||||||
|
│ ├── gate-runner.ts
|
||||||
|
│ ├── event-emitter.ts
|
||||||
|
│ └── schemas/
|
||||||
|
│ └── task.schema.json
|
||||||
|
├── __tests__/
|
||||||
|
│ ├── credential-resolver.test.ts
|
||||||
|
│ ├── gate-runner.test.ts
|
||||||
|
│ └── event-emitter.test.ts
|
||||||
|
├── package.json
|
||||||
|
├── tsconfig.json
|
||||||
|
└── vitest.config.ts
|
||||||
|
```
|
||||||
|
|
||||||
|
## Package.json
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "@mosaic/macp",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"type": "module",
|
||||||
|
"exports": {
|
||||||
|
".": "./src/index.ts"
|
||||||
|
},
|
||||||
|
"dependencies": {},
|
||||||
|
"devDependencies": {
|
||||||
|
"vitest": "workspace:*",
|
||||||
|
"typescript": "workspace:*"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Zero external dependencies. Uses node:fs, node:path, node:child_process, node:crypto only.
|
||||||
|
|
||||||
|
## Test Requirements
|
||||||
|
|
||||||
|
Port ALL existing Python tests as TypeScript equivalents:
|
||||||
|
|
||||||
|
- `test_resolve_from_file` → credential file resolution
|
||||||
|
- `test_resolve_from_ambient` → ambient env resolution
|
||||||
|
- `test_resolve_from_oc_config_env_block` → OC config env block
|
||||||
|
- `test_resolve_from_oc_config_provider_apikey` → OC config provider
|
||||||
|
- `test_oc_config_precedence` → mosaic file wins over OC config
|
||||||
|
- `test_oc_config_missing_file` → graceful fallback
|
||||||
|
- `test_json5_strip` → structural transforms
|
||||||
|
- `test_json5_strip_urls_and_timestamps` → URLs/timestamps survive
|
||||||
|
- `test_redacted_values_skipped` → redacted marker detection
|
||||||
|
- `test_oc_config_permission_warning` → file permission check
|
||||||
|
- `test_resolve_missing_raises` → CredentialError thrown
|
||||||
|
- Gate runner: mechanical pass/fail, AI review parsing, ci-pipeline placeholder
|
||||||
|
- Event emitter: append to ndjson, event structure validation
|
||||||
|
|
||||||
|
## ESM Requirements
|
||||||
|
|
||||||
|
- `"type": "module"` in package.json
|
||||||
|
- NodeNext module resolution in tsconfig
|
||||||
|
- `.js` extensions in all imports
|
||||||
|
- No CommonJS (`require`, `module.exports`)
|
||||||
|
|
||||||
|
## Integration Points
|
||||||
|
|
||||||
|
After this package is built:
|
||||||
|
|
||||||
|
- `packages/coord` should import `@mosaic/macp` for event emission and gate types
|
||||||
|
- `plugins/macp` should import `@mosaic/macp` for credential resolution and protocol types
|
||||||
63
docs/tasks/WP3-mosaic-framework-plugin.md
Normal file
63
docs/tasks/WP3-mosaic-framework-plugin.md
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
# WP3: plugins/mosaic-framework — OC Rails Injection Plugin
|
||||||
|
|
||||||
|
## Context
|
||||||
|
|
||||||
|
Port the OpenClaw framework plugin from ~/src/mosaic-stack/oc-plugins/mosaic-framework/ to `plugins/mosaic-framework` in this monorepo. This plugin injects Mosaic framework contracts (rails, completion gates, worktree requirements) into every OpenClaw agent session.
|
||||||
|
|
||||||
|
**This is SEPARATE from plugins/macp:**
|
||||||
|
|
||||||
|
- `mosaic-framework` = passive enforcement — injects rails into all OC sessions
|
||||||
|
- `macp` = active runtime — provides ACP backend for MACP task execution
|
||||||
|
|
||||||
|
## Source Files
|
||||||
|
|
||||||
|
**Source:** `~/src/mosaic-stack/oc-plugins/mosaic-framework/`
|
||||||
|
|
||||||
|
- `index.ts` — plugin hooks (before_agent_start, subagent_spawning)
|
||||||
|
- `openclaw.plugin.json` — plugin manifest
|
||||||
|
- `package.json`
|
||||||
|
|
||||||
|
## What It Does
|
||||||
|
|
||||||
|
### For OC native agents (before_agent_start hook):
|
||||||
|
|
||||||
|
- Injects Mosaic global hard rules via `appendSystemContext`
|
||||||
|
- Completion gates: code review ✓ | security review ✓ | tests GREEN ✓ | CI green ✓
|
||||||
|
- Worker completion protocol: open PR → fire system event → EXIT — never merge
|
||||||
|
- Worktree requirement: `~/src/{repo}-worktrees/{task-slug}`, never `/tmp`
|
||||||
|
- Injects dynamic mission state via `prependContext` (reads from project's `.mosaic/orchestrator/mission.json`)
|
||||||
|
|
||||||
|
### For ACP coding workers (subagent_spawning hook):
|
||||||
|
|
||||||
|
- Writes `~/.codex/instructions.md` or `~/.claude/CLAUDE.md` BEFORE the process starts
|
||||||
|
- Full runtime contract: mandatory load order, hard gates, mode declaration
|
||||||
|
- Global framework rules + worktree + completion gate requirements
|
||||||
|
|
||||||
|
## Implementation
|
||||||
|
|
||||||
|
Port the TypeScript source, updating hardcoded paths to be configurable. The OC plugin SDK imports should reference the installed OpenClaw location dynamically (not hardcoded `/home/jarvis/` paths like the OLD version).
|
||||||
|
|
||||||
|
**Structure:**
|
||||||
|
|
||||||
|
```
|
||||||
|
plugins/mosaic-framework/
|
||||||
|
├── src/
|
||||||
|
│ └── index.ts
|
||||||
|
├── openclaw.plugin.json
|
||||||
|
├── package.json
|
||||||
|
└── tsconfig.json
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Constraint
|
||||||
|
|
||||||
|
The plugin SDK imports in the OLD version use absolute paths:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import type { OpenClawPluginApi } from '/home/jarvis/.npm-global/lib/node_modules/openclaw/dist/plugin-sdk/index.js';
|
||||||
|
```
|
||||||
|
|
||||||
|
This must be resolved dynamically or via a peer dependency. Check how `plugins/macp` handles this in the new repo and follow the same pattern.
|
||||||
|
|
||||||
|
## Tests
|
||||||
|
|
||||||
|
Minimal — plugin hooks are integration-tested against OC runtime. Unit test the context string builders and config resolution.
|
||||||
@@ -10,6 +10,8 @@ export default tseslint.config(
|
|||||||
'**/.next/**',
|
'**/.next/**',
|
||||||
'**/coverage/**',
|
'**/coverage/**',
|
||||||
'**/drizzle.config.ts',
|
'**/drizzle.config.ts',
|
||||||
|
'**/framework/**',
|
||||||
|
'packages/mosaic/__tests__/**',
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -25,6 +27,7 @@ export default tseslint.config(
|
|||||||
'apps/web/e2e/*.ts',
|
'apps/web/e2e/*.ts',
|
||||||
'apps/web/e2e/helpers/*.ts',
|
'apps/web/e2e/helpers/*.ts',
|
||||||
'apps/web/playwright.config.ts',
|
'apps/web/playwright.config.ts',
|
||||||
|
'packages/mosaic/__tests__/*.ts',
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|||||||
193
guides/AUTHENTICATION.md
Normal file
193
guides/AUTHENTICATION.md
Normal file
@@ -0,0 +1,193 @@
|
|||||||
|
# Authentication & Authorization Guide
|
||||||
|
|
||||||
|
## Before Starting
|
||||||
|
|
||||||
|
1. Check assigned issue: `~/.config/mosaic/tools/git/issue-list.sh -a @me`
|
||||||
|
2. Review existing auth implementation in codebase
|
||||||
|
3. Review Vault secrets structure: `docs/vault-secrets-structure.md`
|
||||||
|
|
||||||
|
## Authentication Patterns
|
||||||
|
|
||||||
|
### JWT (JSON Web Tokens)
|
||||||
|
|
||||||
|
```
|
||||||
|
Vault Path: secret-{env}/backend-api/jwt/signing-key
|
||||||
|
Fields: key, algorithm, expiry_seconds
|
||||||
|
```
|
||||||
|
|
||||||
|
**Best Practices:**
|
||||||
|
|
||||||
|
- Use RS256 or ES256 (asymmetric) for distributed systems
|
||||||
|
- Use HS256 (symmetric) only for single-service auth
|
||||||
|
- Set reasonable expiry (15min-1hr for access tokens)
|
||||||
|
- Include minimal claims (sub, exp, iat, roles)
|
||||||
|
- Never store sensitive data in JWT payload
|
||||||
|
|
||||||
|
### Session-Based
|
||||||
|
|
||||||
|
```
|
||||||
|
Vault Path: secret-{env}/{service}/session/secret
|
||||||
|
Fields: secret, cookie_name, max_age
|
||||||
|
```
|
||||||
|
|
||||||
|
**Best Practices:**
|
||||||
|
|
||||||
|
- Use secure, httpOnly, sameSite cookies
|
||||||
|
- Regenerate session ID on privilege change
|
||||||
|
- Implement session timeout
|
||||||
|
- Store sessions server-side (Redis/database)
|
||||||
|
|
||||||
|
### OAuth2/OIDC
|
||||||
|
|
||||||
|
```
|
||||||
|
Vault Paths:
|
||||||
|
- secret-{env}/{service}/oauth/{provider}/client_id
|
||||||
|
- secret-{env}/{service}/oauth/{provider}/client_secret
|
||||||
|
```
|
||||||
|
|
||||||
|
**Best Practices:**
|
||||||
|
|
||||||
|
- Use PKCE for public clients
|
||||||
|
- Validate state parameter
|
||||||
|
- Verify token signatures
|
||||||
|
- Check issuer and audience claims
|
||||||
|
|
||||||
|
## Authorization Patterns
|
||||||
|
|
||||||
|
### Role-Based Access Control (RBAC)
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Example middleware
|
||||||
|
def require_role(roles: list):
|
||||||
|
def decorator(handler):
|
||||||
|
def wrapper(request):
|
||||||
|
user_roles = get_user_roles(request.user_id)
|
||||||
|
if not any(role in user_roles for role in roles):
|
||||||
|
raise ForbiddenError()
|
||||||
|
return handler(request)
|
||||||
|
return wrapper
|
||||||
|
return decorator
|
||||||
|
|
||||||
|
@require_role(['admin', 'moderator'])
|
||||||
|
def delete_user(request):
|
||||||
|
pass
|
||||||
|
```
|
||||||
|
|
||||||
|
### Permission-Based
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Check specific permissions
|
||||||
|
def check_permission(user_id, resource, action):
|
||||||
|
permissions = get_user_permissions(user_id)
|
||||||
|
return f"{resource}:{action}" in permissions
|
||||||
|
```
|
||||||
|
|
||||||
|
## Security Requirements
|
||||||
|
|
||||||
|
### Password Handling
|
||||||
|
|
||||||
|
- Use bcrypt, scrypt, or Argon2 for hashing
|
||||||
|
- Enforce a minimum 12-character password length
|
||||||
|
- Check against breached password lists
|
||||||
|
- Implement account lockout after failed attempts
|
||||||
|
|
||||||
|
### Token Security
|
||||||
|
|
||||||
|
- Rotate secrets regularly
|
||||||
|
- Implement token revocation
|
||||||
|
- Use short-lived access tokens with refresh tokens
|
||||||
|
- Store refresh tokens securely (httpOnly cookies or encrypted storage)
|
||||||
|
|
||||||
|
### Multi-Factor Authentication
|
||||||
|
|
||||||
|
- Support TOTP (Google Authenticator compatible)
|
||||||
|
- Consider WebAuthn for passwordless
|
||||||
|
- Require MFA for sensitive operations
|
||||||
|
|
||||||
|
## Testing Authentication
|
||||||
|
|
||||||
|
### Test Cases Required
|
||||||
|
|
||||||
|
```python
|
||||||
|
class TestAuthentication:
|
||||||
|
def test_login_success_returns_token(self):
|
||||||
|
pass
|
||||||
|
def test_login_failure_returns_401(self):
|
||||||
|
pass
|
||||||
|
def test_invalid_token_returns_401(self):
|
||||||
|
pass
|
||||||
|
def test_expired_token_returns_401(self):
|
||||||
|
pass
|
||||||
|
def test_missing_token_returns_401(self):
|
||||||
|
pass
|
||||||
|
def test_insufficient_permissions_returns_403(self):
|
||||||
|
pass
|
||||||
|
def test_token_refresh_works(self):
|
||||||
|
pass
|
||||||
|
def test_logout_invalidates_token(self):
|
||||||
|
pass
|
||||||
|
```
|
||||||
|
|
||||||
|
## Authentik SSO Administration
|
||||||
|
|
||||||
|
Authentik is the identity provider for the Mosaic Stack. Use the Authentik tool suite for administration.
|
||||||
|
|
||||||
|
### Tool Suite
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# System health
|
||||||
|
~/.config/mosaic/tools/authentik/admin-status.sh
|
||||||
|
|
||||||
|
# User management
|
||||||
|
~/.config/mosaic/tools/authentik/user-list.sh
|
||||||
|
~/.config/mosaic/tools/authentik/user-create.sh -u <username> -n <name> -e <email>
|
||||||
|
|
||||||
|
# Group and app management
|
||||||
|
~/.config/mosaic/tools/authentik/group-list.sh
|
||||||
|
~/.config/mosaic/tools/authentik/app-list.sh
|
||||||
|
~/.config/mosaic/tools/authentik/flow-list.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
### Registering an OAuth Application
|
||||||
|
|
||||||
|
1. Create an OAuth2 provider in Authentik admin (Applications > Providers)
|
||||||
|
2. Create an application linked to the provider (Applications > Applications)
|
||||||
|
3. Configure redirect URIs for the application
|
||||||
|
4. Store client_id and client_secret in Vault: `secret-{env}/{service}/oauth/authentik/`
|
||||||
|
5. Verify with: `~/.config/mosaic/tools/authentik/app-list.sh`
|
||||||
|
|
||||||
|
### API Reference
|
||||||
|
|
||||||
|
- Base URL: `https://auth.diversecanvas.com`
|
||||||
|
- API prefix: `/api/v3/`
|
||||||
|
- OpenAPI schema: `/api/v3/schema/`
|
||||||
|
- Auth: Bearer token (obtained via `auth-token.sh`)
|
||||||
|
|
||||||
|
## Common Vulnerabilities to Avoid
|
||||||
|
|
||||||
|
1. **Broken Authentication**
|
||||||
|
- Weak password requirements
|
||||||
|
- Missing brute-force protection
|
||||||
|
- Session fixation
|
||||||
|
|
||||||
|
2. **Broken Access Control**
|
||||||
|
- Missing authorization checks
|
||||||
|
- IDOR (Insecure Direct Object Reference)
|
||||||
|
- Privilege escalation
|
||||||
|
|
||||||
|
3. **Security Misconfiguration**
|
||||||
|
- Default credentials
|
||||||
|
- Verbose error messages
|
||||||
|
- Missing security headers
|
||||||
|
|
||||||
|
## Commit Format
|
||||||
|
|
||||||
|
```
|
||||||
|
feat(#89): Implement JWT authentication
|
||||||
|
|
||||||
|
- Add /auth/login and /auth/refresh endpoints
|
||||||
|
- Implement token validation middleware
|
||||||
|
- Configure 15min access token expiry
|
||||||
|
|
||||||
|
Fixes #89
|
||||||
|
```
|
||||||
125
guides/BACKEND.md
Normal file
125
guides/BACKEND.md
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
# Backend Development Guide
|
||||||
|
|
||||||
|
## Before Starting
|
||||||
|
|
||||||
|
1. Check assigned issue: `~/.config/mosaic/tools/git/issue-list.sh -a @me`
|
||||||
|
2. Create scratchpad: `docs/scratchpads/{issue-number}-{short-name}.md`
|
||||||
|
3. Review API contracts and database schema
|
||||||
|
|
||||||
|
## Development Standards
|
||||||
|
|
||||||
|
### API Design
|
||||||
|
|
||||||
|
- Follow RESTful conventions (or GraphQL patterns if applicable)
|
||||||
|
- Use consistent endpoint naming: `/api/v1/resource-name`
|
||||||
|
- Return appropriate HTTP status codes
|
||||||
|
- Include pagination for list endpoints
|
||||||
|
- Document all endpoints (OpenAPI/Swagger preferred)
|
||||||
|
|
||||||
|
### Database
|
||||||
|
|
||||||
|
- Write migrations for schema changes
|
||||||
|
- Use parameterized queries (prevent SQL injection)
|
||||||
|
- Index frequently queried columns
|
||||||
|
- Document relationships and constraints
|
||||||
|
|
||||||
|
### Error Handling
|
||||||
|
|
||||||
|
- Return structured error responses
|
||||||
|
- Log errors with context (request ID, user ID if applicable)
|
||||||
|
- Never expose internal errors to clients
|
||||||
|
- Use appropriate error codes
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"error": {
|
||||||
|
"code": "VALIDATION_ERROR",
|
||||||
|
"message": "User-friendly message",
|
||||||
|
"details": []
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Security
|
||||||
|
|
||||||
|
- Validate all input at API boundaries
|
||||||
|
- Implement rate limiting on public endpoints
|
||||||
|
- Use secrets from Vault (see `docs/vault-secrets-structure.md`)
|
||||||
|
- Never log sensitive data (passwords, tokens, PII)
|
||||||
|
- Follow OWASP guidelines
|
||||||
|
|
||||||
|
### Authentication/Authorization
|
||||||
|
|
||||||
|
- Use project's established auth pattern
|
||||||
|
- Validate tokens on every request
|
||||||
|
- Check permissions before operations
|
||||||
|
- See `~/.config/mosaic/guides/AUTHENTICATION.md` for details
|
||||||
|
|
||||||
|
## Testing Requirements (TDD)
|
||||||
|
|
||||||
|
1. Write tests BEFORE implementation
|
||||||
|
2. Minimum 85% coverage
|
||||||
|
3. Test categories:
|
||||||
|
- Unit tests for business logic
|
||||||
|
- Integration tests for API endpoints
|
||||||
|
- Database tests with transactions/rollback
|
||||||
|
|
||||||
|
### Test Patterns
|
||||||
|
|
||||||
|
```python
|
||||||
|
# API test example structure
|
||||||
|
class TestResourceEndpoint:
|
||||||
|
def test_create_returns_201(self):
|
||||||
|
pass
|
||||||
|
def test_create_validates_input(self):
|
||||||
|
pass
|
||||||
|
def test_get_returns_404_for_missing(self):
|
||||||
|
pass
|
||||||
|
def test_requires_authentication(self):
|
||||||
|
pass
|
||||||
|
```
|
||||||
|
|
||||||
|
## Code Style
|
||||||
|
|
||||||
|
- Follow Google Style Guide for your language
|
||||||
|
- **TypeScript: Follow `~/.config/mosaic/guides/TYPESCRIPT.md` — MANDATORY**
|
||||||
|
- Use linter/formatter from project configuration
|
||||||
|
- Keep functions focused and small
|
||||||
|
- Document complex business logic
|
||||||
|
|
||||||
|
### TypeScript Quick Rules (see TYPESCRIPT.md for full guide)
|
||||||
|
|
||||||
|
- **NO `any`** — define explicit types always
|
||||||
|
- **NO lazy `unknown`** — only for error catches and external data with validation
|
||||||
|
- **Explicit return types** on all exported functions
|
||||||
|
- **Explicit parameter types** always
|
||||||
|
- **DTO files are REQUIRED** for module/API boundaries (`*.dto.ts`)
|
||||||
|
- **Interface for DTOs** — never inline object types
|
||||||
|
- **Typed errors** — use custom error classes
|
||||||
|
|
||||||
|
## Performance
|
||||||
|
|
||||||
|
- Use database connection pooling
|
||||||
|
- Implement caching where appropriate
|
||||||
|
- Profile slow endpoints
|
||||||
|
- Use async operations for I/O
|
||||||
|
|
||||||
|
## Commit Format
|
||||||
|
|
||||||
|
```
|
||||||
|
feat(#45): Add user registration endpoint
|
||||||
|
|
||||||
|
- POST /api/v1/users for registration
|
||||||
|
- Email validation and uniqueness check
|
||||||
|
- Password hashing with bcrypt
|
||||||
|
|
||||||
|
Fixes #45
|
||||||
|
```
|
||||||
|
|
||||||
|
## Before Completing
|
||||||
|
|
||||||
|
1. Run full test suite
|
||||||
|
2. Verify migrations work (up and down)
|
||||||
|
3. Test API with curl/httpie
|
||||||
|
4. Update scratchpad with completion notes
|
||||||
|
5. Reference issue in commit
|
||||||
487
guides/BOOTSTRAP.md
Executable file
487
guides/BOOTSTRAP.md
Executable file
@@ -0,0 +1,487 @@
|
|||||||
|
# Project Bootstrap Guide
|
||||||
|
|
||||||
|
> Load this guide when setting up a new project for AI-assisted development.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
This guide covers how to bootstrap a project so AI agents (Claude, Codex, etc.) can work on it effectively. Proper bootstrapping ensures:
|
||||||
|
|
||||||
|
1. Agents understand the project structure and conventions
|
||||||
|
2. Orchestration works correctly with quality gates
|
||||||
|
3. Independent code review and security review are configured
|
||||||
|
4. Issue tracking is consistent across projects
|
||||||
|
5. Documentation standards and API contracts are enforced from day one
|
||||||
|
6. PRD requirements are established before coding begins
|
||||||
|
7. Branching/merging is consistent: `branch -> main` via PR with squash-only merges
|
||||||
|
8. Steered-autonomy execution is enabled so agents can run end-to-end with escalation-only human intervention
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Automated bootstrap (recommended)
|
||||||
|
~/.config/mosaic/tools/bootstrap/init-project.sh \
|
||||||
|
--name "my-project" \
|
||||||
|
--type "nestjs-nextjs" \
|
||||||
|
--repo "https://git.mosaicstack.dev/owner/repo"
|
||||||
|
|
||||||
|
# Or manually using templates
|
||||||
|
export PROJECT_NAME="My Project"
|
||||||
|
export PROJECT_DESCRIPTION="What this project does"
|
||||||
|
export TASK_PREFIX="MP"
|
||||||
|
envsubst < ~/.config/mosaic/templates/agent/AGENTS.md.template > AGENTS.md
|
||||||
|
envsubst < ~/.config/mosaic/templates/agent/CLAUDE.md.template > CLAUDE.md
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 0: Enforce Sequential-Thinking MCP (Hard Requirement)
|
||||||
|
|
||||||
|
`sequential-thinking` MCP must be installed and configured before project bootstrapping.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Auto-configure sequential-thinking MCP for installed runtimes
|
||||||
|
~/.config/mosaic/bin/mosaic-ensure-sequential-thinking
|
||||||
|
|
||||||
|
# Verification-only check
|
||||||
|
~/.config/mosaic/bin/mosaic-ensure-sequential-thinking --check
|
||||||
|
```
|
||||||
|
|
||||||
|
If this step fails, STOP and remediate Mosaic runtime configuration before continuing.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 1: Detect Project Type
|
||||||
|
|
||||||
|
Check what files exist in the project root to determine the type:
|
||||||
|
|
||||||
|
| File Present | Project Type | Template |
|
||||||
|
| ------------------------------------------------------- | ------------------------- | ------------------------- |
|
||||||
|
| `pnpm-workspace.yaml` + `turbo.json` + NestJS/Next.js deps | NestJS + Next.js Monorepo | `projects/nestjs-nextjs/` |
|
||||||
|
| `pyproject.toml` + `manage.py` | Django | `projects/django/` |
|
||||||
|
| `pyproject.toml` (no Django) | Python (generic) | Generic template |
|
||||||
|
| `package.json` (no monorepo) | Node.js (generic) | Generic template |
|
||||||
|
| Other | Generic | Generic template |
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Auto-detect project type
|
||||||
|
detect_project_type() {
|
||||||
|
if [[ -f "pnpm-workspace.yaml" ]] && [[ -f "turbo.json" ]]; then
|
||||||
|
# Check for NestJS + Next.js
|
||||||
|
if grep -q "nestjs" package.json 2>/dev/null && grep -q "next" package.json 2>/dev/null; then
|
||||||
|
echo "nestjs-nextjs"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
if [[ -f "manage.py" ]] && [[ -f "pyproject.toml" ]]; then
|
||||||
|
echo "django"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
if [[ -f "pyproject.toml" ]]; then
|
||||||
|
echo "python"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
if [[ -f "package.json" ]]; then
|
||||||
|
echo "nodejs"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
echo "generic"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 2: Create AGENTS.md (Primary Project Contract)
|
||||||
|
|
||||||
|
`AGENTS.md` is the primary project-level contract for all agent runtimes.
|
||||||
|
It defines project-specific requirements, quality gates, patterns, and testing expectations.
|
||||||
|
|
||||||
|
### Using a Tech-Stack Template
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Set variables
|
||||||
|
export PROJECT_NAME="My Project"
|
||||||
|
export PROJECT_DESCRIPTION="Multi-tenant SaaS platform"
|
||||||
|
export PROJECT_DIR="my-project"
|
||||||
|
export REPO_URL="https://git.mosaicstack.dev/owner/repo"
|
||||||
|
export TASK_PREFIX="MP"
|
||||||
|
|
||||||
|
# Use tech-stack-specific template if available
|
||||||
|
TYPE=$(detect_project_type)
|
||||||
|
TEMPLATE_DIR="$HOME/.config/mosaic/templates/agent/projects/$TYPE"
|
||||||
|
|
||||||
|
if [[ -d "$TEMPLATE_DIR" ]]; then
|
||||||
|
envsubst < "$TEMPLATE_DIR/AGENTS.md.template" > AGENTS.md
|
||||||
|
else
|
||||||
|
envsubst < "$HOME/.config/mosaic/templates/agent/AGENTS.md.template" > AGENTS.md
|
||||||
|
fi
|
||||||
|
```
|
||||||
|
|
||||||
|
### Using the Generic Template
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Set all required variables
|
||||||
|
export PROJECT_NAME="My Project"
|
||||||
|
export PROJECT_DESCRIPTION="What this project does"
|
||||||
|
export REPO_URL="https://git.mosaicstack.dev/owner/repo"
|
||||||
|
export PROJECT_DIR="my-project"
|
||||||
|
export SOURCE_DIR="src"
|
||||||
|
export CONFIG_FILES="pyproject.toml / package.json"
|
||||||
|
export FRONTEND_STACK="N/A"
|
||||||
|
export BACKEND_STACK="Python / FastAPI"
|
||||||
|
export DATABASE_STACK="PostgreSQL"
|
||||||
|
export TESTING_STACK="pytest"
|
||||||
|
export DEPLOYMENT_STACK="Docker"
|
||||||
|
export BUILD_COMMAND="pip install -e ."
|
||||||
|
export TEST_COMMAND="pytest tests/"
|
||||||
|
export LINT_COMMAND="ruff check ."
|
||||||
|
export TYPECHECK_COMMAND="mypy ."
|
||||||
|
export QUALITY_GATES="ruff check . && mypy . && pytest tests/"
|
||||||
|
|
||||||
|
envsubst < ~/.config/mosaic/templates/agent/AGENTS.md.template > AGENTS.md
|
||||||
|
```
|
||||||
|
|
||||||
|
### Required Sections
|
||||||
|
|
||||||
|
Every AGENTS.md should contain:
|
||||||
|
|
||||||
|
1. **Project description** — One-line summary
|
||||||
|
2. **Quality gates** — Commands that must pass
|
||||||
|
3. **Codebase patterns** — Reusable implementation rules
|
||||||
|
4. **Common gotchas** — Non-obvious constraints
|
||||||
|
5. **Testing approaches** — Project-specific test strategy
|
||||||
|
6. **Testing policy** — Situational-first validation and risk-based TDD
|
||||||
|
7. **Orchestrator integration** — Task prefix, worker checklist
|
||||||
|
8. **Documentation contract** — Required documentation gates and update expectations
|
||||||
|
9. **PRD requirement** — `docs/PRD.md` or `docs/PRD.json` required before coding
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 3: Create Runtime Context File (Runtime-Specific)
|
||||||
|
|
||||||
|
Runtime context files are runtime adapters. They are not the primary project contract.
|
||||||
|
Use `CLAUDE.md` for Claude runtime compatibility. Use other runtime adapters as required by your environment.
|
||||||
|
|
||||||
|
Claude runtime mandate (HARD RULE):
|
||||||
|
|
||||||
|
- `CLAUDE.md` MUST explicitly instruct Claude agents to read and use `AGENTS.md`.
|
||||||
|
- `CLAUDE.md` MUST treat `AGENTS.md` as the authoritative project-level contract.
|
||||||
|
- If `AGENTS.md` and runtime wording conflict, `AGENTS.md` project rules win.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
TYPE=$(detect_project_type)
|
||||||
|
TEMPLATE_DIR="$HOME/.config/mosaic/templates/agent/projects/$TYPE"
|
||||||
|
|
||||||
|
if [[ -d "$TEMPLATE_DIR" ]]; then
|
||||||
|
envsubst < "$TEMPLATE_DIR/CLAUDE.md.template" > CLAUDE.md
|
||||||
|
else
|
||||||
|
envsubst < "$HOME/.config/mosaic/templates/agent/CLAUDE.md.template" > CLAUDE.md
|
||||||
|
fi
|
||||||
|
```
|
||||||
|
|
||||||
|
### Required Runtime Sections
|
||||||
|
|
||||||
|
Every runtime context file should contain:
|
||||||
|
|
||||||
|
1. **AGENTS handoff rule** — Runtime MUST direct agents to read/use `AGENTS.md`
|
||||||
|
2. **Conditional documentation loading** — Required guide loading map
|
||||||
|
3. **Technology stack** — Runtime-facing architecture summary
|
||||||
|
4. **Repository structure** — Important paths
|
||||||
|
5. **Development workflow** — Build/test/lint/typecheck commands
|
||||||
|
6. **Issue tracking** — Issue and commit conventions
|
||||||
|
7. **Code review** — Required review process
|
||||||
|
8. **Runtime notes** — Runtime-specific behavior references
|
||||||
|
9. **Branch and merge policy** — Trunk workflow (`branch -> main` via PR, squash-only)
|
||||||
|
10. **Autonomy and escalation policy** — Agent owns coding/review/PR/release/deploy lifecycle
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 4: Create Directory Structure
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Create standard directories
|
||||||
|
mkdir -p docs/scratchpads
|
||||||
|
mkdir -p docs/templates
|
||||||
|
mkdir -p docs/reports/qa-automation/pending
|
||||||
|
mkdir -p docs/reports/qa-automation/in-progress
|
||||||
|
mkdir -p docs/reports/qa-automation/done
|
||||||
|
mkdir -p docs/reports/qa-automation/escalated
|
||||||
|
mkdir -p docs/reports/deferred
|
||||||
|
mkdir -p docs/tasks
|
||||||
|
mkdir -p docs/releases
|
||||||
|
mkdir -p docs/USER-GUIDE docs/ADMIN-GUIDE docs/DEVELOPER-GUIDE docs/API
|
||||||
|
|
||||||
|
# Documentation baseline files
|
||||||
|
touch docs/USER-GUIDE/README.md
|
||||||
|
touch docs/ADMIN-GUIDE/README.md
|
||||||
|
touch docs/DEVELOPER-GUIDE/README.md
|
||||||
|
touch docs/API/OPENAPI.yaml
|
||||||
|
touch docs/API/ENDPOINTS.md
|
||||||
|
touch docs/SITEMAP.md
|
||||||
|
|
||||||
|
# PRD baseline file (requirements source before coding)
|
||||||
|
cp ~/.config/mosaic/templates/docs/PRD.md.template docs/PRD.md
|
||||||
|
|
||||||
|
# TASKS baseline file (canonical tracking)
|
||||||
|
cp ~/.config/mosaic/templates/docs/TASKS.md.template docs/TASKS.md
|
||||||
|
|
||||||
|
# Deployment baseline file (target/platform/runbook)
|
||||||
|
touch docs/DEPLOYMENT.md
|
||||||
|
```
|
||||||
|
|
||||||
|
Documentation root hygiene (HARD RULE):
|
||||||
|
|
||||||
|
- Keep `docs/` root clean.
|
||||||
|
- Store reports in `docs/reports/`, archived task artifacts in `docs/tasks/`, releases in `docs/releases/`, and scratchpads in `docs/scratchpads/`.
|
||||||
|
- Do not place ad-hoc report files directly under `docs/`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 5: Initialize Repository Labels & Milestones
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Use the init script
|
||||||
|
~/.config/mosaic/tools/bootstrap/init-repo-labels.sh
|
||||||
|
|
||||||
|
# Or manually create standard labels
|
||||||
|
~/.config/mosaic/tools/git/issue-create.sh # (labels are created on first use)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Standard Labels
|
||||||
|
|
||||||
|
| Label | Color | Purpose |
|
||||||
|
| --------------- | --------- | -------------------------------------- |
|
||||||
|
| `epic` | `#3E4B9E` | Large feature spanning multiple issues |
|
||||||
|
| `feature` | `#0E8A16` | New functionality |
|
||||||
|
| `bug` | `#D73A4A` | Defect fix |
|
||||||
|
| `task` | `#0075CA` | General work item |
|
||||||
|
| `documentation` | `#0075CA` | Documentation updates |
|
||||||
|
| `security` | `#B60205` | Security-related |
|
||||||
|
| `breaking` | `#D93F0B` | Breaking change |
|
||||||
|
|
||||||
|
### Initial Milestone (Hard Rule)
|
||||||
|
|
||||||
|
Create the first pre-MVP milestone at `0.0.1`.
|
||||||
|
Reserve `0.1.0` for the MVP release milestone.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
~/.config/mosaic/tools/git/milestone-create.sh -t "0.0.1" -d "Pre-MVP - Foundation Sprint"
|
||||||
|
|
||||||
|
# Create when MVP scope is complete and release-ready:
|
||||||
|
~/.config/mosaic/tools/git/milestone-create.sh -t "0.1.0" -d "MVP - Minimum Viable Product"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 5b: Configure Main Branch Protection (Hard Rule)
|
||||||
|
|
||||||
|
Apply equivalent settings in Gitea, GitHub, or GitLab:
|
||||||
|
|
||||||
|
1. Protect `main` from direct pushes.
|
||||||
|
2. Require pull requests to merge into `main`.
|
||||||
|
3. Require required CI/status checks to pass before merge.
|
||||||
|
4. Require code review approval before merge.
|
||||||
|
5. Allow **squash merge only** for PRs into `main` (disable merge commits and rebase merges for `main`).
|
||||||
|
|
||||||
|
This enforces one merge strategy across human and agent workflows.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 6: Set Up CI/CD Review Pipeline
|
||||||
|
|
||||||
|
### Woodpecker CI
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Copy Codex review pipeline
|
||||||
|
mkdir -p .woodpecker/schemas
|
||||||
|
cp ~/.config/mosaic/tools/codex/woodpecker/codex-review.yml .woodpecker/
|
||||||
|
cp ~/.config/mosaic/tools/codex/schemas/*.json .woodpecker/schemas/
|
||||||
|
|
||||||
|
# Add codex_api_key secret to Woodpecker CI dashboard
|
||||||
|
```
|
||||||
|
|
||||||
|
### GitHub Actions
|
||||||
|
|
||||||
|
For GitHub repos, use the official Codex GitHub Action instead:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# .github/workflows/codex-review.yml
|
||||||
|
uses: openai/codex-action@v1
|
||||||
|
```
|
||||||
|
|
||||||
|
### Python Package Publishing (Gitea PyPI)
|
||||||
|
|
||||||
|
If the project publishes Python packages, use Gitea's PyPI registry.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Build and publish
|
||||||
|
python -m pip install --upgrade build twine
|
||||||
|
python -m build
|
||||||
|
python -m twine upload \
|
||||||
|
--repository-url "https://GITEA_HOST/api/packages/ORG/pypi" \
|
||||||
|
--username "$GITEA_USERNAME" \
|
||||||
|
--password "$GITEA_TOKEN" \
|
||||||
|
dist/*
|
||||||
|
```
|
||||||
|
|
||||||
|
Use the same `gitea_username` and `gitea_token` CI secrets used for container and npm publishing.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Step 7: Verify Bootstrap
|
||||||
|
|
||||||
|
After bootstrapping, verify everything works:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Check files exist
|
||||||
|
ls AGENTS.md docs/scratchpads/
|
||||||
|
ls docs/reports/qa-automation/pending docs/reports/deferred docs/tasks docs/releases
|
||||||
|
ls docs/USER-GUIDE/README.md docs/ADMIN-GUIDE/README.md docs/DEVELOPER-GUIDE/README.md
|
||||||
|
ls docs/API/OPENAPI.yaml docs/API/ENDPOINTS.md docs/SITEMAP.md
|
||||||
|
ls docs/PRD.md
|
||||||
|
ls docs/TASKS.md
|
||||||
|
|
||||||
|
# Verify AGENTS.md has required sections
|
||||||
|
grep -c "Quality Gates" AGENTS.md
|
||||||
|
grep -c "Orchestrator Integration" AGENTS.md
|
||||||
|
grep -c "Testing Approaches" AGENTS.md
|
||||||
|
grep -c "Testing Policy" AGENTS.md
|
||||||
|
grep -c "Documentation Contract" AGENTS.md
|
||||||
|
grep -c "PRD Requirement" AGENTS.md
|
||||||
|
|
||||||
|
# Verify runtime context file has required sections
|
||||||
|
if [[ -f CLAUDE.md ]]; then
|
||||||
|
grep -c "AGENTS.md" CLAUDE.md
|
||||||
|
grep -c "Conditional Documentation Loading" CLAUDE.md
|
||||||
|
grep -c "Technology Stack" CLAUDE.md
|
||||||
|
grep -c "Code Review" CLAUDE.md
|
||||||
|
elif [[ -f RUNTIME.md ]]; then
|
||||||
|
grep -c "Conditional Documentation Loading" RUNTIME.md
|
||||||
|
grep -c "Technology Stack" RUNTIME.md
|
||||||
|
grep -c "Code Review" RUNTIME.md
|
||||||
|
else
|
||||||
|
echo "Missing runtime context file (CLAUDE.md or RUNTIME.md)" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Run quality gates from AGENTS.md
|
||||||
|
# (execute the command block under "Quality Gates")
|
||||||
|
|
||||||
|
# Test Codex review (if configured)
|
||||||
|
~/.config/mosaic/tools/codex/codex-code-review.sh --help
|
||||||
|
|
||||||
|
# Verify sequential-thinking MCP remains configured
|
||||||
|
~/.config/mosaic/bin/mosaic-ensure-sequential-thinking --check
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Available Templates
|
||||||
|
|
||||||
|
### Generic Templates
|
||||||
|
|
||||||
|
| Template | Path | Purpose |
|
||||||
|
| ---------------------------- | ----------------------------------- | ------------------------------------------ |
|
||||||
|
| `AGENTS.md.template` | `~/.config/mosaic/templates/agent/` | Primary project agent contract |
|
||||||
|
| `CLAUDE.md.template` | `~/.config/mosaic/templates/agent/` | Runtime compatibility context (Claude) |
|
||||||
|
| `DOCUMENTATION-CHECKLIST.md` | `~/.config/mosaic/templates/docs/` | Documentation completion gate |
|
||||||
|
| `PRD.md.template` | `~/.config/mosaic/templates/docs/` | Requirements source template |
|
||||||
|
| `TASKS.md.template` | `~/.config/mosaic/templates/docs/` | Canonical task and issue tracking template |
|
||||||
|
|
||||||
|
### Tech-Stack Templates
|
||||||
|
|
||||||
|
| Stack | Path | Includes |
|
||||||
|
| ---------------- | ---------------------------------------------------------- | ------------------------------------ |
|
||||||
|
| NestJS + Next.js | `~/.config/mosaic/templates/agent/projects/nestjs-nextjs/` | AGENTS.md + runtime context template |
|
||||||
|
| Django | `~/.config/mosaic/templates/agent/projects/django/` | AGENTS.md + runtime context template |
|
||||||
|
|
||||||
|
### Orchestrator Templates
|
||||||
|
|
||||||
|
| Template | Path | Purpose |
|
||||||
|
| -------------------------------------- | ------------------------------------------------- | ----------------------- |
|
||||||
|
| `tasks.md.template` | `~/src/jarvis-brain/docs/templates/orchestrator/` | Task tracking |
|
||||||
|
| `orchestrator-learnings.json.template` | `~/src/jarvis-brain/docs/templates/orchestrator/` | Variance tracking |
|
||||||
|
| `phase-issue-body.md.template` | `~/src/jarvis-brain/docs/templates/orchestrator/` | Git provider issue body |
|
||||||
|
| `scratchpad.md.template` | `~/src/jarvis-brain/docs/templates/` | Per-task working doc |
|
||||||
|
|
||||||
|
### Variables Reference
|
||||||
|
|
||||||
|
| Variable | Description | Example |
|
||||||
|
| ------------------------ | --------------------------- | ------------------------------------------ |
|
||||||
|
| `${PROJECT_NAME}` | Human-readable project name | "Mosaic Stack" |
|
||||||
|
| `${PROJECT_DESCRIPTION}` | One-line description | "Multi-tenant platform" |
|
||||||
|
| `${PROJECT_DIR}` | Directory name | "mosaic-stack" |
|
||||||
|
| `${PROJECT_SLUG}` | Python package slug | "mosaic_stack" |
|
||||||
|
| `${REPO_URL}` | Git remote URL | "https://git.mosaicstack.dev/mosaic/stack" |
|
||||||
|
| `${TASK_PREFIX}` | Orchestrator task prefix | "MS" |
|
||||||
|
| `${SOURCE_DIR}` | Source code directory | "src" or "apps" |
|
||||||
|
| `${QUALITY_GATES}` | Quality gate commands | "pnpm typecheck && pnpm lint && pnpm test" |
|
||||||
|
| `${BUILD_COMMAND}` | Build command | "pnpm build" |
|
||||||
|
| `${TEST_COMMAND}` | Test command | "pnpm test" |
|
||||||
|
| `${LINT_COMMAND}` | Lint command | "pnpm lint" |
|
||||||
|
| `${TYPECHECK_COMMAND}` | Type check command | "pnpm typecheck" |
|
||||||
|
| `${FRONTEND_STACK}` | Frontend technologies | "Next.js + React" |
|
||||||
|
| `${BACKEND_STACK}` | Backend technologies | "NestJS + Prisma" |
|
||||||
|
| `${DATABASE_STACK}` | Database technologies | "PostgreSQL" |
|
||||||
|
| `${TESTING_STACK}` | Testing technologies | "Vitest + Playwright" |
|
||||||
|
| `${DEPLOYMENT_STACK}` | Deployment technologies | "Docker" |
|
||||||
|
| `${CONFIG_FILES}` | Key config files | "package.json, tsconfig.json" |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Bootstrap Scripts
|
||||||
|
|
||||||
|
### init-project.sh
|
||||||
|
|
||||||
|
Full project bootstrap with interactive and flag-based modes:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
~/.config/mosaic/tools/bootstrap/init-project.sh \
|
||||||
|
--name "My Project" \
|
||||||
|
--type "nestjs-nextjs" \
|
||||||
|
--repo "https://git.mosaicstack.dev/owner/repo" \
|
||||||
|
--prefix "MP" \
|
||||||
|
--description "Multi-tenant platform"
|
||||||
|
```
|
||||||
|
|
||||||
|
### init-repo-labels.sh
|
||||||
|
|
||||||
|
Initialize standard labels and the first pre-MVP milestone:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
~/.config/mosaic/tools/bootstrap/init-repo-labels.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Checklist
|
||||||
|
|
||||||
|
After bootstrapping, verify:
|
||||||
|
|
||||||
|
- [ ] `AGENTS.md` exists and is the primary project contract
|
||||||
|
- [ ] Runtime context file exists (`CLAUDE.md` or `RUNTIME.md`)
|
||||||
|
- [ ] `docs/scratchpads/` directory exists
|
||||||
|
- [ ] `docs/reports/qa-automation/pending` directory exists
|
||||||
|
- [ ] `docs/reports/deferred/` directory exists
|
||||||
|
- [ ] `docs/tasks/` directory exists
|
||||||
|
- [ ] `docs/releases/` directory exists
|
||||||
|
- [ ] `docs/USER-GUIDE/README.md` exists
|
||||||
|
- [ ] `docs/ADMIN-GUIDE/README.md` exists
|
||||||
|
- [ ] `docs/DEVELOPER-GUIDE/README.md` exists
|
||||||
|
- [ ] `docs/API/OPENAPI.yaml` exists
|
||||||
|
- [ ] `docs/API/ENDPOINTS.md` exists
|
||||||
|
- [ ] `docs/SITEMAP.md` exists
|
||||||
|
- [ ] `docs/PRD.md` or `docs/PRD.json` exists
|
||||||
|
- [ ] `docs/TASKS.md` exists and is ready for active tracking
|
||||||
|
- [ ] `docs/DEPLOYMENT.md` exists with target platform and rollback notes
|
||||||
|
- [ ] `sequential-thinking` MCP is configured and verification check passes
|
||||||
|
- [ ] Git labels created (epic, feature, bug, task, etc.)
|
||||||
|
- [ ] Initial pre-MVP milestone created (0.0.1)
|
||||||
|
- [ ] MVP milestone reserved for release (0.1.0)
|
||||||
|
- [ ] `main` is protected from direct pushes
|
||||||
|
- [ ] PRs into `main` are required
|
||||||
|
- [ ] Merge method for `main` is squash-only
|
||||||
|
- [ ] Quality gates run successfully
|
||||||
|
- [ ] `.env.example` exists (if project uses env vars)
|
||||||
|
- [ ] CI/CD pipeline configured (if using Woodpecker/GitHub Actions)
|
||||||
|
- [ ] Python publish path configured in CI (if project ships Python packages)
|
||||||
|
- [ ] Codex review scripts accessible (`~/.config/mosaic/tools/codex/`)
|
||||||
1082
guides/CI-CD-PIPELINES.md
Normal file
1082
guides/CI-CD-PIPELINES.md
Normal file
File diff suppressed because it is too large
Load Diff
154
guides/CODE-REVIEW.md
Executable file
154
guides/CODE-REVIEW.md
Executable file
@@ -0,0 +1,154 @@
|
|||||||
|
# Code Review Guide
|
||||||
|
|
||||||
|
## Hard Requirement
|
||||||
|
|
||||||
|
If an agent modifies source code, code review is REQUIRED before completion.
|
||||||
|
Do not mark code-change tasks done until review is completed and blockers are resolved or explicitly tracked.
|
||||||
|
If code/config/API contract/auth behavior changed and required docs are missing, this is a BLOCKER.
|
||||||
|
If tests pass but acceptance criteria are not verified by situational evidence, this is a BLOCKER.
|
||||||
|
If implementation diverges from `docs/PRD.md` or `docs/PRD.json` without PRD updates, this is a BLOCKER.
|
||||||
|
|
||||||
|
Merge strategy enforcement (HARD RULE):
|
||||||
|
|
||||||
|
- PR target for delivery is `main`.
|
||||||
|
- Direct pushes to `main` are prohibited.
|
||||||
|
- Merge to `main` MUST be squash-only.
|
||||||
|
- Use `~/.config/mosaic/tools/git/pr-merge.sh -n {PR_NUMBER} -m squash` (or PowerShell equivalent).
|
||||||
|
|
||||||
|
## Review Checklist
|
||||||
|
|
||||||
|
### 1. Correctness
|
||||||
|
|
||||||
|
- [ ] Code does what the issue/PR description says
|
||||||
|
- [ ] Code aligns with active PRD requirements
|
||||||
|
- [ ] Acceptance criteria are mapped to concrete verification evidence
|
||||||
|
- [ ] Edge cases are handled
|
||||||
|
- [ ] Error conditions are managed properly
|
||||||
|
- [ ] No obvious bugs or logic errors
|
||||||
|
|
||||||
|
### 2. Security
|
||||||
|
|
||||||
|
- [ ] No hardcoded secrets or credentials
|
||||||
|
- [ ] Input validation at boundaries
|
||||||
|
- [ ] SQL injection prevention (parameterized queries)
|
||||||
|
- [ ] XSS prevention (output encoding)
|
||||||
|
- [ ] Authentication/authorization checks present
|
||||||
|
- [ ] Sensitive data not logged
|
||||||
|
- [ ] Secrets follow Vault structure (see `docs/vault-secrets-structure.md`)
|
||||||
|
|
||||||
|
### 2a. OWASP Coverage (Required)
|
||||||
|
|
||||||
|
- [ ] OWASP Top 10 categories were reviewed for change impact
|
||||||
|
- [ ] Access control checks verified on protected actions
|
||||||
|
- [ ] Cryptographic handling validated (keys, hashing, TLS assumptions)
|
||||||
|
- [ ] Injection risks reviewed for all untrusted inputs
|
||||||
|
- [ ] Security misconfiguration risks reviewed (headers, CORS, defaults)
|
||||||
|
- [ ] Dependency/component risk reviewed (known vulnerable components)
|
||||||
|
- [ ] Authentication/session flows reviewed for failure paths
|
||||||
|
- [ ] Logging/monitoring preserves detection without leaking sensitive data
|
||||||
|
|
||||||
|
### 3. Testing
|
||||||
|
|
||||||
|
- [ ] Tests exist for new functionality
|
||||||
|
- [ ] Tests cover happy path AND error cases
|
||||||
|
- [ ] Situational tests cover all impacted change surfaces (primary gate)
|
||||||
|
- [ ] Tests validate required behavior/outcomes, not only internal implementation details
|
||||||
|
- [ ] TDD was applied when required by `~/.config/mosaic/guides/QA-TESTING.md`
|
||||||
|
- [ ] Coverage meets 85% minimum
|
||||||
|
- [ ] Tests are readable and maintainable
|
||||||
|
- [ ] No flaky tests introduced
|
||||||
|
|
||||||
|
### 4. Code Quality
|
||||||
|
|
||||||
|
- [ ] Follows Google Style Guide for the language
|
||||||
|
- [ ] Functions are focused and reasonably sized
|
||||||
|
- [ ] No unnecessary complexity
|
||||||
|
- [ ] DRY - no significant duplication
|
||||||
|
- [ ] Clear naming for variables and functions
|
||||||
|
- [ ] No dead code or commented-out code
|
||||||
|
|
||||||
|
### 4a. TypeScript Strict Typing (see `TYPESCRIPT.md`)
|
||||||
|
|
||||||
|
- [ ] **NO `any` types** — explicit types required everywhere
|
||||||
|
- [ ] **NO lazy `unknown`** — only for error catches with immediate narrowing
|
||||||
|
- [ ] **Explicit return types** on all exported/public functions
|
||||||
|
- [ ] **Explicit parameter types** — never implicit any
|
||||||
|
- [ ] **No type assertions** (`as Type`) — use type guards instead
|
||||||
|
- [ ] **No non-null assertions** (`!`) — use proper null handling
|
||||||
|
- [ ] **Interfaces for objects** — not inline types
|
||||||
|
- [ ] **Discriminated unions** for variant types
|
||||||
|
- [ ] **DTO files used at boundaries** — module/API contracts are in `*.dto.ts`, not inline payload types
|
||||||
|
|
||||||
|
### 5. Documentation
|
||||||
|
|
||||||
|
- [ ] Complex logic has explanatory comments
|
||||||
|
- [ ] Required docs updated per `~/.config/mosaic/guides/DOCUMENTATION.md`
|
||||||
|
- [ ] Public APIs are documented
|
||||||
|
- [ ] Private/internal APIs are documented
|
||||||
|
- [ ] API input/output schemas are documented
|
||||||
|
- [ ] API permissions/auth requirements are documented
|
||||||
|
- [ ] Site map updates are present when navigation changed
|
||||||
|
- [ ] README updated if needed
|
||||||
|
- [ ] Breaking changes noted
|
||||||
|
|
||||||
|
### 6. Performance
|
||||||
|
|
||||||
|
- [ ] No obvious N+1 queries
|
||||||
|
- [ ] No blocking operations in hot paths
|
||||||
|
- [ ] Resource cleanup (connections, file handles)
|
||||||
|
- [ ] Reasonable memory usage
|
||||||
|
|
||||||
|
### 7. Dependencies
|
||||||
|
|
||||||
|
- [ ] No deprecated packages
|
||||||
|
- [ ] No unnecessary new dependencies
|
||||||
|
- [ ] Dependency versions pinned appropriately
|
||||||
|
|
||||||
|
## Review Process
|
||||||
|
|
||||||
|
Use `~/.config/mosaic/templates/docs/DOCUMENTATION-CHECKLIST.md` whenever code/API/auth/infra changes are present.
|
||||||
|
|
||||||
|
### Getting Context
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# List the issue being addressed
|
||||||
|
~/.config/mosaic/tools/git/issue-list.sh -i {issue-number}
|
||||||
|
|
||||||
|
# View the changes
|
||||||
|
git diff main...HEAD
|
||||||
|
```
|
||||||
|
|
||||||
|
### Providing Feedback
|
||||||
|
|
||||||
|
- Be specific: point to exact lines/files
|
||||||
|
- Explain WHY something is problematic
|
||||||
|
- Suggest alternatives when possible
|
||||||
|
- Distinguish between blocking issues and suggestions
|
||||||
|
- Be constructive, not critical of the person
|
||||||
|
|
||||||
|
### Feedback Categories
|
||||||
|
|
||||||
|
- **Blocker**: Must fix before merge (security, bugs, test failures)
|
||||||
|
- **Should Fix**: Important but not blocking (code quality, minor issues)
|
||||||
|
- **Suggestion**: Optional improvements (style preferences, nice-to-haves)
|
||||||
|
- **Question**: Seeking clarification
|
||||||
|
|
||||||
|
### Review Comment Format
|
||||||
|
|
||||||
|
```
|
||||||
|
[BLOCKER] Line 42: SQL injection vulnerability
|
||||||
|
The user input is directly interpolated into the query.
|
||||||
|
Use parameterized queries instead:
|
||||||
|
`db.query("SELECT * FROM users WHERE id = ?", [userId])`
|
||||||
|
|
||||||
|
[SUGGESTION] Line 78: Consider extracting to helper
|
||||||
|
This pattern appears in 3 places. A shared helper would reduce duplication.
|
||||||
|
```
|
||||||
|
|
||||||
|
## After Review
|
||||||
|
|
||||||
|
1. Update issue with review status
|
||||||
|
2. If changes requested, assign back to author
|
||||||
|
3. If approved, note approval in issue comments
|
||||||
|
4. For merges, ensure CI passes first
|
||||||
|
5. Merge PR to `main` with squash strategy only
|
||||||
132
guides/DOCUMENTATION.md
Normal file
132
guides/DOCUMENTATION.md
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
# Documentation Standard (MANDATORY)
|
||||||
|
|
||||||
|
This guide defines REQUIRED documentation behavior for all Mosaic projects.
|
||||||
|
If code, API contracts, auth, or infrastructure changes, documentation updates are REQUIRED before completion.
|
||||||
|
|
||||||
|
## Hard Rules
|
||||||
|
|
||||||
|
1. Documentation is a delivery gate. Missing required documentation is a BLOCKER.
|
||||||
|
2. `docs/PRD.md` or `docs/PRD.json` is REQUIRED as the project requirements source before coding begins.
|
||||||
|
3. API documentation is OpenAPI-first. `docs/API/OPENAPI.yaml` (or `.json`) is the canonical API contract.
|
||||||
|
4. Public and private/internal endpoints MUST be documented.
|
||||||
|
5. API input and output schemas MUST be documented.
|
||||||
|
6. API authentication and permissions MUST be documented per endpoint.
|
||||||
|
7. A current site map MUST exist at `docs/SITEMAP.md`.
|
||||||
|
8. Documentation updates MUST be committed in the same logical change set as the code/API change.
|
||||||
|
9. Generated publishing output (Docusaurus/VitePress/MkDocs artifacts) is not canonical unless the project explicitly declares it canonical.
|
||||||
|
10. `docs/` root MUST stay clean. Reports and working artifacts MUST be stored in dedicated subdirectories, not dumped at `docs/` root.
|
||||||
|
|
||||||
|
## Required Documentation Structure
|
||||||
|
|
||||||
|
```text
|
||||||
|
docs/
|
||||||
|
PRD.md (or PRD.json)
|
||||||
|
TASKS.md (active orchestrator tracking, when orchestrator is used)
|
||||||
|
SITEMAP.md
|
||||||
|
USER-GUIDE/
|
||||||
|
ADMIN-GUIDE/
|
||||||
|
DEVELOPER-GUIDE/
|
||||||
|
API/
|
||||||
|
OPENAPI.yaml
|
||||||
|
ENDPOINTS.md
|
||||||
|
scratchpads/
|
||||||
|
reports/
|
||||||
|
tasks/
|
||||||
|
releases/
|
||||||
|
templates/ (optional)
|
||||||
|
```
|
||||||
|
|
||||||
|
Minimum requirements:
|
||||||
|
|
||||||
|
- `docs/PRD.md` or `docs/PRD.json`: authoritative requirements source for implementation and testing.
|
||||||
|
- `docs/USER-GUIDE/`: End-user workflows, feature behavior, common troubleshooting.
|
||||||
|
- `docs/ADMIN-GUIDE/`: Configuration, deployment, operations, incident/recovery procedures.
|
||||||
|
- `docs/DEVELOPER-GUIDE/`: Architecture, local setup, contribution/testing workflow, design constraints.
|
||||||
|
- `docs/API/OPENAPI.yaml`: API SSOT for all HTTP endpoints.
|
||||||
|
- `docs/API/ENDPOINTS.md`: Human-readable index for API endpoints, permissions, and change notes.
|
||||||
|
- `docs/SITEMAP.md`: Navigation index for all user/admin/developer/API documentation pages.
|
||||||
|
- `docs/reports/`: Review outputs, QA automation reports, deferrals, and audit artifacts.
|
||||||
|
- `docs/tasks/`: Archived task snapshots and orchestrator learnings.
|
||||||
|
- `docs/releases/`: Release notes and release-specific documentation.
|
||||||
|
- `docs/scratchpads/`: Active task-level working notes.
|
||||||
|
|
||||||
|
## Root Hygiene Rule (MANDATORY)
|
||||||
|
|
||||||
|
Allowed root documentation files are intentionally limited:
|
||||||
|
|
||||||
|
1. `docs/PRD.md` or `docs/PRD.json`
|
||||||
|
2. `docs/TASKS.md` (active milestone only, when task orchestration is in use)
|
||||||
|
3. `docs/SITEMAP.md`
|
||||||
|
4. `docs/README.md` (optional index)
|
||||||
|
|
||||||
|
All other docs MUST be placed in scoped folders (`docs/reports/`, `docs/tasks/`, `docs/releases/`, `docs/scratchpads/`, `docs/API/`, guide books).
|
||||||
|
|
||||||
|
## Artifact Placement Rules
|
||||||
|
|
||||||
|
| Artifact Type | REQUIRED Location |
|
||||||
|
| ------------------------------------------ | ---------------------------------------- |
|
||||||
|
| Code review reports, QA reports, audits | `docs/reports/<category>/` |
|
||||||
|
| Deferred error lists / unresolved findings | `docs/reports/deferred/` |
|
||||||
|
| Archived milestone task snapshots | `docs/tasks/` |
|
||||||
|
| Orchestrator learnings JSON | `docs/tasks/orchestrator-learnings.json` |
|
||||||
|
| Release notes | `docs/releases/` |
|
||||||
|
| Active scratchpads | `docs/scratchpads/` |
|
||||||
|
|
||||||
|
## API Documentation Contract (OpenAPI-First)
|
||||||
|
|
||||||
|
For every API endpoint, documentation MUST include:
|
||||||
|
|
||||||
|
1. visibility: `public` or `private/internal`
|
||||||
|
2. method and path
|
||||||
|
3. endpoint purpose
|
||||||
|
4. request/input schema
|
||||||
|
5. response/output schema(s)
|
||||||
|
6. auth method and required permission/role/scope
|
||||||
|
7. error status codes and behavior
|
||||||
|
|
||||||
|
If OpenAPI cannot fully express an internal constraint, document it in `docs/API/ENDPOINTS.md`.
|
||||||
|
|
||||||
|
## Book/Chapter/Page Structure
|
||||||
|
|
||||||
|
Use this structure for every guide:
|
||||||
|
|
||||||
|
1. Book: one root guide folder (`USER-GUIDE`, `ADMIN-GUIDE`, `DEVELOPER-GUIDE`)
|
||||||
|
2. Chapter: one subdirectory per topic area
|
||||||
|
3. Page: one focused markdown file per concern
|
||||||
|
|
||||||
|
Required index files:
|
||||||
|
|
||||||
|
1. `docs/USER-GUIDE/README.md`
|
||||||
|
2. `docs/ADMIN-GUIDE/README.md`
|
||||||
|
3. `docs/DEVELOPER-GUIDE/README.md`
|
||||||
|
|
||||||
|
Each index file MUST link to all chapters and pages in that book.
|
||||||
|
|
||||||
|
## Situational Documentation Matrix
|
||||||
|
|
||||||
|
| Change Surface | REQUIRED Documentation Updates |
|
||||||
|
| ---------------------------------------------- | ----------------------------------------------------------- |
|
||||||
|
| New feature or behavior change | User guide + developer guide + sitemap |
|
||||||
|
| API endpoint added/changed/removed | OpenAPI + API endpoint index + sitemap |
|
||||||
|
| Auth/RBAC/permission change | API auth/permission docs + admin guide + developer guide |
|
||||||
|
| Database schema/migration change | Developer guide + admin operational notes if runbook impact |
|
||||||
|
| CI/CD or deployment change | Admin guide + developer guide |
|
||||||
|
| Incident, recovery, or security control change | Admin guide runbook + security notes + sitemap |
|
||||||
|
|
||||||
|
## Publishing Target Rule (MANDATORY)
|
||||||
|
|
||||||
|
If the user does not specify a documentation publishing target, the agent MUST ask:
|
||||||
|
|
||||||
|
1. Publish in-app (embedded docs)
|
||||||
|
2. Publish on external docs platform (for example: Docusaurus, VitePress, MkDocs)
|
||||||
|
|
||||||
|
Default behavior before publishing decision:
|
||||||
|
|
||||||
|
- Keep canonical docs in-repo under `docs/`.
|
||||||
|
- Do not assume external publishing platform.
|
||||||
|
|
||||||
|
## Completion Gate
|
||||||
|
|
||||||
|
You MUST NOT declare completion until all required documentation updates are done.
|
||||||
|
|
||||||
|
Use `~/.config/mosaic/templates/docs/DOCUMENTATION-CHECKLIST.md` as the final gate.
|
||||||
210
guides/E2E-DELIVERY.md
Normal file
210
guides/E2E-DELIVERY.md
Normal file
@@ -0,0 +1,210 @@
|
|||||||
|
# E2E Delivery Procedure (MANDATORY)
|
||||||
|
|
||||||
|
This guide is REQUIRED for all agent sessions.
|
||||||
|
|
||||||
|
## 0. Mode Handshake (Before Any Action)
|
||||||
|
|
||||||
|
First response MUST declare mode before tool calls or implementation steps:
|
||||||
|
|
||||||
|
1. Orchestration mission: `Now initiating Orchestrator mode...`
|
||||||
|
2. Implementation mission: `Now initiating Delivery mode...`
|
||||||
|
3. Review-only mission: `Now initiating Review mode...`
|
||||||
|
|
||||||
|
## 1. PRD Gate (Before Coding)
|
||||||
|
|
||||||
|
1. Ensure `docs/PRD.md` or `docs/PRD.json` exists before coding.
|
||||||
|
2. Load `~/.config/mosaic/guides/PRD.md`.
|
||||||
|
3. Prepare/update PRD from user input and available project context.
|
||||||
|
4. If requirements are missing:
|
||||||
|
- proceed with best-guess assumptions by default,
|
||||||
|
- mark each assumption with `ASSUMPTION:` and rationale,
|
||||||
|
- escalate only when uncertainty is high-impact and cannot be bounded safely.
|
||||||
|
5. Treat PRD as the requirement source for implementation, testing, and review.
|
||||||
|
|
||||||
|
## 1a. Tracking Gate (Before Coding)
|
||||||
|
|
||||||
|
1. For non-trivial work, `docs/TASKS.md` MUST exist before coding.
|
||||||
|
2. If `docs/TASKS.md` is missing, create it from `~/.config/mosaic/templates/docs/TASKS.md.template`.
|
||||||
|
3. Detect provider first via `~/.config/mosaic/tools/git/detect-platform.sh`.
|
||||||
|
4. For issue/PR/milestone operations, use Mosaic wrappers first (`~/.config/mosaic/tools/git/*.sh`).
|
||||||
|
5. If external git provider is available (Gitea/GitHub/GitLab), create or update issue(s) before coding.
|
||||||
|
6. Record provider issue reference(s) in `docs/TASKS.md` (example: `#123`).
|
||||||
|
7. If no external provider is available, use internal task refs in `docs/TASKS.md` (example: `TASKS:T1`).
|
||||||
|
8. Scratchpad MUST reference both task ID and issue/internal ref.
|
||||||
|
|
||||||
|
## 2. Intake and Scope
|
||||||
|
|
||||||
|
> **COMPLEXITY TRAP WARNING:** Intake applies to ALL tasks regardless of perceived complexity. "Simple" tasks (commit, push, deploy) have caused the most severe framework violations because agents skip intake when they pattern-match a task as mechanical. The procedure is unconditional.
|
||||||
|
|
||||||
|
1. Define scope, constraints, and acceptance criteria.
|
||||||
|
2. Identify affected surfaces (API, DB, UI, infra, auth, CI/CD, docs).
|
||||||
|
3. **Deployment surface check (MANDATORY if task involves deploy, images, or containers):** Before ANY build or deploy action, check for CI/CD pipeline config (`.woodpecker/`, `.woodpecker.yml`, `.github/workflows/`). If pipelines exist, CI is the canonical build path — manual `docker build`/`docker push` is forbidden. Load `~/.config/mosaic/guides/CI-CD-PIPELINES.md` immediately.
|
||||||
|
4. Identify required guides and load them before implementation.
|
||||||
|
5. For code/API/auth/infra changes, load `~/.config/mosaic/guides/DOCUMENTATION.md`.
|
||||||
|
6. Determine budget constraints:
|
||||||
|
- if the user provided a plan limit or token budget, treat it as a HARD cap,
|
||||||
|
- if budget is unknown, derive a working budget from estimates and runtime limits, then continue autonomously.
|
||||||
|
7. Record budget assumptions and caps in the scratchpad before implementation starts.
|
||||||
|
8. Track estimated vs used tokens per logical unit and adapt strategy to remain inside budget.
|
||||||
|
9. If projected usage exceeds budget, auto-reduce scope/parallelism first; escalate only if cap still cannot be met.
|
||||||
|
|
||||||
|
## 2a. Steered Autonomy (Lights-Out)
|
||||||
|
|
||||||
|
1. Agent owns delivery end-to-end: planning, coding, testing, review, PR/repo operations, release/tag, and deployment (when in scope).
|
||||||
|
2. Human intervention is escalation-only; do not pause for routine approvals or handoffs.
|
||||||
|
3. Continue execution until completion criteria are met or an escalation trigger is hit.
|
||||||
|
|
||||||
|
## 3. Scratchpad Requirement
|
||||||
|
|
||||||
|
1. Create a task-specific scratchpad before implementation.
|
||||||
|
2. Record:
|
||||||
|
- objective
|
||||||
|
- plan
|
||||||
|
- progress checkpoints
|
||||||
|
- tests run
|
||||||
|
- risks/blockers
|
||||||
|
- final verification evidence
|
||||||
|
|
||||||
|
## 4. Embedded Execution Cycle (MANDATORY)
|
||||||
|
|
||||||
|
For implementation work, you MUST run this cycle in order:
|
||||||
|
|
||||||
|
1. `plan` - map PRD requirements to concrete implementation steps.
|
||||||
|
2. `code` - implement one logical unit.
|
||||||
|
3. `test` - run required baseline and situational checks for that unit.
|
||||||
|
4. `review` - perform independent code review on the current delta.
|
||||||
|
5. `remediate` - fix all findings and any test failures.
|
||||||
|
6. `review` - re-review remediated changes until blockers are cleared.
|
||||||
|
7. `commit` - commit only when the logical unit passes tests and review.
|
||||||
|
8. `pre-push queue guard` - before pushing, wait for running/queued project pipelines to clear: `~/.config/mosaic/tools/git/ci-queue-wait.sh --purpose push`.
|
||||||
|
9. `push` - push immediately after queue guard passes.
|
||||||
|
10. `PR integration` - if external git provider is available, create/update PR to `main` and merge with required strategy via Mosaic wrappers.
|
||||||
|
11. `pre-merge queue guard` - before merging PR, wait for running/queued project pipelines to clear: `~/.config/mosaic/tools/git/ci-queue-wait.sh --purpose merge`.
|
||||||
|
12. `CI/pipeline verification` - wait for terminal CI status and require green before completion (`~/.config/mosaic/tools/git/pr-ci-wait.sh` for PR-based workflow).
|
||||||
|
13. `issue closure` - close linked external issue (or close internal `docs/TASKS.md` task ref when provider is unavailable).
|
||||||
|
14. `greenfield situational test` - validate required user flows in a clean environment/startup path (post-merge for trunk workflow changes).
|
||||||
|
15. `deploy + post-deploy validation` - when deployment is in scope, deploy to configured target and run post-deploy health/smoke checks.
|
||||||
|
16. `repeat` - continue until all acceptance criteria are complete.
|
||||||
|
|
||||||
|
### Post-PR Hard Gate (Execute Sequentially, No Exceptions)
|
||||||
|
|
||||||
|
1. `~/.config/mosaic/tools/git/ci-queue-wait.sh --purpose merge -B main`
|
||||||
|
2. `~/.config/mosaic/tools/git/pr-merge.sh -n <PR_NUMBER> -m squash`
|
||||||
|
3. `~/.config/mosaic/tools/git/pr-ci-wait.sh -n <PR_NUMBER>`
|
||||||
|
4. `~/.config/mosaic/tools/git/issue-close.sh -i <ISSUE_NUMBER>` (or close internal `docs/TASKS.md` ref when no provider exists)
|
||||||
|
5. If any step fails: set status `blocked`, report the exact failed wrapper command, and stop.
|
||||||
|
6. Do not ask the human to perform routine merge/close operations.
|
||||||
|
7. Do not claim completion before step 4 succeeds.
|
||||||
|
|
||||||
|
### Forbidden Anti-Patterns
|
||||||
|
|
||||||
|
**PR/Merge:**
|
||||||
|
|
||||||
|
1. Do NOT stop at "PR created" or "PR updated".
|
||||||
|
2. Do NOT ask "should I merge?" for routine delivery PRs.
|
||||||
|
3. Do NOT ask "should I close the issue?" after merge + green CI.
|
||||||
|
|
||||||
|
**Build/Deploy:** 4. Do NOT run `docker build` or `docker push` locally to deploy images when CI/CD pipelines exist in the repository. CI is the ONLY canonical build path. 5. Do NOT skip intake and surface identification because a task "seems simple." This is the #1 cause of framework violations. 6. Do NOT deploy without first verifying whether CI/CD pipelines exist (`.woodpecker/`, `.woodpecker.yml`, `.github/workflows/`). If they exist, use them. 7. If you are about to run `docker build` and have NOT loaded `CI-CD-PIPELINES.md`, STOP — you are violating the framework.
|
||||||
|
|
||||||
|
If any step fails, you MUST remediate and re-run from the relevant step before proceeding.
|
||||||
|
If push-queue/merge-queue/PR merge/CI/issue closure fails, status is `blocked` (not complete) and you MUST report the exact failed wrapper command.
|
||||||
|
|
||||||
|
## 5. Testing Priority Model
|
||||||
|
|
||||||
|
Use this order of priority:
|
||||||
|
|
||||||
|
1. Situational tests are the PRIMARY gate and MUST prove changed behavior meets requirements.
|
||||||
|
2. Baseline tests are REQUIRED safety checks and MUST run for all software changes.
|
||||||
|
3. TDD is risk-based and REQUIRED only for specific high-risk change types.
|
||||||
|
|
||||||
|
## 6. Mandatory Test Baseline
|
||||||
|
|
||||||
|
For all software changes, you MUST run baseline checks applicable to the repo/toolchain:
|
||||||
|
|
||||||
|
1. lint (or equivalent static checks)
|
||||||
|
2. type checks (if language/tooling supports it)
|
||||||
|
3. unit tests for changed logic
|
||||||
|
4. integration tests for changed boundaries
|
||||||
|
|
||||||
|
## 7. Situational Testing Matrix (PRIMARY GATE)
|
||||||
|
|
||||||
|
Run additional tests based on what changed:
|
||||||
|
|
||||||
|
| Change Surface | Required Situational Tests |
|
||||||
|
| ---------------------------- | ----------------------------------------------------------------------------- |
|
||||||
|
| Authentication/authorization | auth failure-path tests, permission boundary tests, token/session validation |
|
||||||
|
| Database schema/migrations | migration up/down validation, rollback safety, data integrity checks |
|
||||||
|
| API contract changes | backward compatibility checks, consumer-impact tests, contract tests |
|
||||||
|
| Frontend/UI workflow changes | end-to-end flow tests, accessibility sanity checks, state transition checks |
|
||||||
|
| CI/CD or deployment changes | pipeline execution validation, artifact integrity checks, rollback path check |
|
||||||
|
| Security-sensitive logic | abuse-case tests, input validation fuzzing/sanitization checks |
|
||||||
|
| Performance-critical path | baseline comparison, regression threshold checks |
|
||||||
|
|
||||||
|
## 8. Risk-Based TDD Requirement
|
||||||
|
|
||||||
|
TDD is REQUIRED for:
|
||||||
|
|
||||||
|
1. bug fixes (write a reproducer test first)
|
||||||
|
2. security/auth/permission logic changes
|
||||||
|
3. critical business logic and data-mutation rules
|
||||||
|
|
||||||
|
TDD is RECOMMENDED (not mandatory) for low-risk UI, copy, styling, and mechanical refactors.
|
||||||
|
If TDD is skipped for a non-required case, record the rationale in the scratchpad.
|
||||||
|
|
||||||
|
## 9. Mandatory Code Review Gate
|
||||||
|
|
||||||
|
If you modify source code, you MUST run an independent code review before completion.
|
||||||
|
|
||||||
|
1. Use automated review tooling when available.
|
||||||
|
2. If automated tooling is unavailable, run manual review using `~/.config/mosaic/guides/CODE-REVIEW.md`.
|
||||||
|
3. Any blocker or critical finding MUST be fixed or tracked as an explicit remediation task before closure.
|
||||||
|
|
||||||
|
## 10. Mandatory Documentation Gate
|
||||||
|
|
||||||
|
For code/API/auth/infra changes, documentation updates are REQUIRED before completion.
|
||||||
|
|
||||||
|
1. Apply the standard in `~/.config/mosaic/guides/DOCUMENTATION.md`.
|
||||||
|
2. Update required docs in the same logical change set as implementation.
|
||||||
|
3. Complete `~/.config/mosaic/templates/docs/DOCUMENTATION-CHECKLIST.md`.
|
||||||
|
4. If publish platform is unspecified, ask the user to choose in-app or external platform before publishing.
|
||||||
|
5. Missing required documentation is a BLOCKER.
|
||||||
|
|
||||||
|
## 11. Completion Gate (All Required)
|
||||||
|
|
||||||
|
You MUST satisfy all items before completion:
|
||||||
|
|
||||||
|
1. Acceptance criteria met.
|
||||||
|
2. Baseline tests passed.
|
||||||
|
3. Situational tests passed (primary gate), including required greenfield situational validation.
|
||||||
|
4. PRD is current and implementation is aligned with PRD.
|
||||||
|
5. Acceptance criteria mapped to verification evidence.
|
||||||
|
6. Code review completed for source code changes.
|
||||||
|
7. Required documentation updates completed and reviewed.
|
||||||
|
8. Scratchpad updated with evidence.
|
||||||
|
9. Known risks documented.
|
||||||
|
10. No unresolved blocker hidden.
|
||||||
|
11. If deployment is in scope, deployment target, release version, and post-deploy verification evidence are documented.
|
||||||
|
12. `docs/TASKS.md` status and issue/internal references are updated to match delivered work.
|
||||||
|
13. If source code changed and external provider is available: PR merged to `main` (squash), with merge evidence recorded.
|
||||||
|
14. CI/pipeline status is terminal green for the merged PR/head commit.
|
||||||
|
15. Linked external issue is closed (or internal task ref is closed when no provider exists).
|
||||||
|
16. If any of items 13-15 fail due to access/tooling, report `blocked` with the exact failed wrapper command and do not claim completion.
|
||||||
|
|
||||||
|
## 12. Review and Reporting
|
||||||
|
|
||||||
|
Completion report MUST include:
|
||||||
|
|
||||||
|
1. what changed
|
||||||
|
2. PRD alignment summary
|
||||||
|
3. acceptance criteria to evidence mapping
|
||||||
|
4. what was tested (baseline + situational)
|
||||||
|
5. what was reviewed (code review scope)
|
||||||
|
6. what documentation was updated
|
||||||
|
7. command-level evidence summary
|
||||||
|
8. residual risks
|
||||||
|
9. deployment and post-deploy verification summary (if in scope)
|
||||||
|
10. explicit pass/fail status
|
||||||
|
11. tracking summary (`docs/TASKS.md` updates and issue/internal refs)
|
||||||
|
12. PR lifecycle summary (PR number, merge commit, merge method)
|
||||||
|
13. CI/pipeline summary (run/check URL, terminal status)
|
||||||
|
14. issue closure summary (issue number/ref and close evidence)
|
||||||
91
guides/FRONTEND.md
Normal file
91
guides/FRONTEND.md
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
# Frontend Development Guide
|
||||||
|
|
||||||
|
## Before Starting
|
||||||
|
|
||||||
|
1. Check assigned issue in git repo: `~/.config/mosaic/tools/git/issue-list.sh -a @me`
|
||||||
|
2. Create scratchpad: `docs/scratchpads/{issue-number}-{short-name}.md`
|
||||||
|
3. Review existing components and patterns in the codebase
|
||||||
|
|
||||||
|
## Development Standards
|
||||||
|
|
||||||
|
### Framework Conventions
|
||||||
|
|
||||||
|
- Follow project's existing framework patterns (React, Vue, Svelte, etc.)
|
||||||
|
- Use existing component library/design system if present
|
||||||
|
- Maintain consistent file structure with existing code
|
||||||
|
|
||||||
|
### Styling
|
||||||
|
|
||||||
|
- Use project's established styling approach (CSS modules, Tailwind, styled-components, etc.)
|
||||||
|
- Follow existing naming conventions for CSS classes
|
||||||
|
- Ensure responsive design unless explicitly single-platform
|
||||||
|
|
||||||
|
### State Management
|
||||||
|
|
||||||
|
- Use project's existing state management solution
|
||||||
|
- Keep component state local when possible
|
||||||
|
- Document any new global state additions
|
||||||
|
|
||||||
|
### Accessibility
|
||||||
|
|
||||||
|
- Include proper ARIA labels
|
||||||
|
- Ensure keyboard navigation works
|
||||||
|
- Test with screen reader considerations
|
||||||
|
- Maintain color contrast ratios (WCAG 2.1 AA minimum)
|
||||||
|
|
||||||
|
## Testing Requirements (TDD)
|
||||||
|
|
||||||
|
1. Write tests BEFORE implementation
|
||||||
|
2. Minimum 85% coverage
|
||||||
|
3. Test categories:
|
||||||
|
- Unit tests for utility functions
|
||||||
|
- Component tests for UI behavior
|
||||||
|
- Integration tests for user flows
|
||||||
|
|
||||||
|
### Test Patterns
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
// Component test example structure
|
||||||
|
describe('ComponentName', () => {
|
||||||
|
it('renders without crashing', () => {});
|
||||||
|
it('handles user interaction correctly', () => {});
|
||||||
|
it('displays error states appropriately', () => {});
|
||||||
|
it('is accessible', () => {});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Code Style
|
||||||
|
|
||||||
|
- Follow Google JavaScript/TypeScript Style Guide
|
||||||
|
- **TypeScript: Follow `~/.config/mosaic/guides/TYPESCRIPT.md` — MANDATORY**
|
||||||
|
- Use ESLint/Prettier configuration from project
|
||||||
|
- Prefer functional components over class components (React)
|
||||||
|
- TypeScript strict mode is REQUIRED, not optional
|
||||||
|
|
||||||
|
### TypeScript Quick Rules (see TYPESCRIPT.md for full guide)
|
||||||
|
|
||||||
|
- **NO `any`** — define explicit types always
|
||||||
|
- **NO lazy `unknown`** — only for error catches and external data with validation
|
||||||
|
- **Explicit return types** on all exported functions
|
||||||
|
- **Explicit parameter types** always
|
||||||
|
- **Interface for props** — never inline object types
|
||||||
|
- **Event handlers** — use proper React event types
|
||||||
|
|
||||||
|
## Commit Format
|
||||||
|
|
||||||
|
```
|
||||||
|
feat(#123): Add user profile component
|
||||||
|
|
||||||
|
- Implement avatar display
|
||||||
|
- Add edit mode toggle
|
||||||
|
- Include form validation
|
||||||
|
|
||||||
|
Refs #123
|
||||||
|
```
|
||||||
|
|
||||||
|
## Before Completing
|
||||||
|
|
||||||
|
1. Run full test suite
|
||||||
|
2. Verify build succeeds
|
||||||
|
3. Update scratchpad with completion notes
|
||||||
|
4. Reference issue in commit: `Fixes #N` or `Refs #N`
|
||||||
339
guides/INFRASTRUCTURE.md
Normal file
339
guides/INFRASTRUCTURE.md
Normal file
@@ -0,0 +1,339 @@
|
|||||||
|
# Infrastructure & DevOps Guide
|
||||||
|
|
||||||
|
## Before Starting
|
||||||
|
|
||||||
|
1. Check assigned issue: `~/.config/mosaic/tools/git/issue-list.sh -a @me`
|
||||||
|
2. Create scratchpad: `docs/scratchpads/{issue-number}-{short-name}.md`
|
||||||
|
3. Review existing infrastructure configuration
|
||||||
|
|
||||||
|
## Vault Secrets Management
|
||||||
|
|
||||||
|
**CRITICAL**: Follow canonical Vault structure for ALL secrets.
|
||||||
|
|
||||||
|
### Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
{mount}/{service}/{component}/{secret-name}
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
- secret-prod/postgres/database/app
|
||||||
|
- secret-prod/redis/auth/default
|
||||||
|
- secret-prod/authentik/admin/token
|
||||||
|
```
|
||||||
|
|
||||||
|
### Environment Mounts
|
||||||
|
|
||||||
|
- `secret-dev/` - Development environment
|
||||||
|
- `secret-staging/` - Staging environment
|
||||||
|
- `secret-prod/` - Production environment
|
||||||
|
|
||||||
|
### Standard Field Names
|
||||||
|
|
||||||
|
- Credentials: `username`, `password`
|
||||||
|
- Tokens: `token`
|
||||||
|
- OAuth: `client_id`, `client_secret`
|
||||||
|
- Connection strings: `url`, `host`, `port`
|
||||||
|
|
||||||
|
See `docs/vault-secrets-structure.md` for complete reference.
|
||||||
|
|
||||||
|
## Container Standards
|
||||||
|
|
||||||
|
### Dockerfile Best Practices
|
||||||
|
|
||||||
|
```dockerfile
|
||||||
|
# Use specific version tags
|
||||||
|
FROM node:20-alpine
|
||||||
|
|
||||||
|
# Create non-root user
|
||||||
|
RUN addgroup -S app && adduser -S app -G app
|
||||||
|
|
||||||
|
# Set working directory
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy dependency files first (layer caching)
|
||||||
|
COPY package*.json ./
|
||||||
|
RUN npm ci --only=production
|
||||||
|
|
||||||
|
# Copy application code
|
||||||
|
COPY --chown=app:app . .
|
||||||
|
|
||||||
|
# Switch to non-root user
|
||||||
|
USER app
|
||||||
|
|
||||||
|
# Use exec form for CMD
|
||||||
|
CMD ["node", "server.js"]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Container Security
|
||||||
|
|
||||||
|
- Use minimal base images (alpine, distroless)
|
||||||
|
- Run as non-root user
|
||||||
|
- Don't store secrets in images
|
||||||
|
- Scan images for vulnerabilities
|
||||||
|
- Pin dependency versions
|
||||||
|
|
||||||
|
## Kubernetes/Docker Compose
|
||||||
|
|
||||||
|
### Resource Limits
|
||||||
|
|
||||||
|
Always set resource limits to prevent runaway containers:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
memory: '128Mi'
|
||||||
|
cpu: '100m'
|
||||||
|
limits:
|
||||||
|
memory: '256Mi'
|
||||||
|
cpu: '500m'
|
||||||
|
```
|
||||||
|
|
||||||
|
### Health Checks
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
livenessProbe:
|
||||||
|
httpGet:
|
||||||
|
path: /health
|
||||||
|
port: 8080
|
||||||
|
initialDelaySeconds: 10
|
||||||
|
periodSeconds: 5
|
||||||
|
|
||||||
|
readinessProbe:
|
||||||
|
httpGet:
|
||||||
|
path: /ready
|
||||||
|
port: 8080
|
||||||
|
initialDelaySeconds: 5
|
||||||
|
periodSeconds: 3
|
||||||
|
```
|
||||||
|
|
||||||
|
## CI/CD Pipelines
|
||||||
|
|
||||||
|
### Pipeline Stages
|
||||||
|
|
||||||
|
1. **Lint**: Code style and static analysis
|
||||||
|
2. **Test**: Unit and integration tests
|
||||||
|
3. **Build**: Compile and package
|
||||||
|
4. **Scan**: Security and vulnerability scanning
|
||||||
|
5. **Deploy**: Environment-specific deployment
|
||||||
|
|
||||||
|
### Pipeline Security
|
||||||
|
|
||||||
|
- Use secrets management (not hardcoded)
|
||||||
|
- Pin action/image versions
|
||||||
|
- Implement approval gates for production
|
||||||
|
- Audit pipeline access
|
||||||
|
|
||||||
|
## Steered-Autonomous Deployment (Hard Rule)
|
||||||
|
|
||||||
|
In lights-out mode, the agent owns deployment end-to-end when deployment is in scope.
|
||||||
|
The human is escalation-only for missing access, hard policy conflicts, or irreversible risk.
|
||||||
|
|
||||||
|
### Deployment Target Selection
|
||||||
|
|
||||||
|
1. Use explicit target from `docs/PRD.md` / `docs/PRD.json` or `docs/DEPLOYMENT.md`.
|
||||||
|
2. If unspecified, infer from existing project config/integration.
|
||||||
|
3. If multiple targets exist, choose the target already wired in CI/CD and document rationale.
|
||||||
|
|
||||||
|
### Supported Targets
|
||||||
|
|
||||||
|
- **Portainer**: Deploy via `~/.config/mosaic/tools/portainer/stack-redeploy.sh`, then verify with `stack-status.sh`.
|
||||||
|
- **Coolify**: Deploy via `~/.config/mosaic/tools/coolify/deploy.sh -u <uuid>`, then verify with `service-status.sh`.
|
||||||
|
- **Vercel**: Deploy via `vercel` CLI or connected Git integration, then verify preview/production URL health.
|
||||||
|
- **Other SaaS providers**: Use provider CLI/API/runbook with the same validation and rollback gates.
|
||||||
|
|
||||||
|
### Coolify API Operations
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# List projects and services
|
||||||
|
~/.config/mosaic/tools/coolify/project-list.sh
|
||||||
|
~/.config/mosaic/tools/coolify/service-list.sh
|
||||||
|
|
||||||
|
# Check service status
|
||||||
|
~/.config/mosaic/tools/coolify/service-status.sh -u <uuid>
|
||||||
|
|
||||||
|
# Set env vars (takes effect on next deploy)
|
||||||
|
~/.config/mosaic/tools/coolify/env-set.sh -u <uuid> -k KEY -v VALUE
|
||||||
|
|
||||||
|
# Deploy
|
||||||
|
~/.config/mosaic/tools/coolify/deploy.sh -u <uuid>
|
||||||
|
```
|
||||||
|
|
||||||
|
**Known Coolify Limitations:**
|
||||||
|
|
||||||
|
- FQDN updates on compose sub-apps not supported via API (DB workaround required)
|
||||||
|
- Compose files must be base64-encoded in `docker_compose_raw` field
|
||||||
|
- Magic variables (`SERVICE_FQDN_*`) require list-style env syntax, not dict-style
|
||||||
|
- Rate limit: 200 requests per interval
|
||||||
|
|
||||||
|
### Cloudflare DNS Operations
|
||||||
|
|
||||||
|
Use the Cloudflare tools for any DNS configuration: pointing domains at services, adding TXT verification records, managing MX records, etc.
|
||||||
|
|
||||||
|
**Multi-instance support**: Credentials support named instances (e.g. `personal`, `work`). A `default` key in credentials.json determines which instance is used when `-a` is omitted. Pass `-a <instance>` to target a specific account.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# List all zones (domains) in the account
|
||||||
|
~/.config/mosaic/tools/cloudflare/zone-list.sh [-a instance]
|
||||||
|
|
||||||
|
# List DNS records for a zone (accepts zone name or ID)
|
||||||
|
~/.config/mosaic/tools/cloudflare/record-list.sh -z <zone> [-t type] [-n name]
|
||||||
|
|
||||||
|
# Create a DNS record
|
||||||
|
~/.config/mosaic/tools/cloudflare/record-create.sh -z <zone> -t <type> -n <name> -c <content> [-p] [-l ttl] [-P priority]
|
||||||
|
|
||||||
|
# Update a DNS record (requires record ID from record-list)
|
||||||
|
~/.config/mosaic/tools/cloudflare/record-update.sh -z <zone> -r <record-id> -t <type> -n <name> -c <content> [-p]
|
||||||
|
|
||||||
|
# Delete a DNS record
|
||||||
|
~/.config/mosaic/tools/cloudflare/record-delete.sh -z <zone> -r <record-id>
|
||||||
|
```
|
||||||
|
|
||||||
|
**Flag reference:**
|
||||||
|
|
||||||
|
| Flag | Purpose |
|
||||||
|
| ---- | ----------------------------------------------------------------------- |
|
||||||
|
| `-z` | Zone name (e.g. `mosaicstack.dev`) or 32-char zone ID |
|
||||||
|
| `-a` | Named Cloudflare instance (omit for default) |
|
||||||
|
| `-t` | Record type: `A`, `AAAA`, `CNAME`, `MX`, `TXT`, `SRV`, etc. |
|
||||||
|
| `-n` | Record name: short (`app`) or FQDN (`app.example.com`) |
|
||||||
|
| `-c` | Record content/value (IP, hostname, TXT string, etc.) |
|
||||||
|
| `-r` | Record ID (from `record-list.sh` output) |
|
||||||
|
| `-p` | Enable Cloudflare proxy (orange cloud) — omit for DNS-only (grey cloud) |
|
||||||
|
| `-l` | TTL in seconds (default: `1` = auto) |
|
||||||
|
| `-P` | Priority for MX/SRV records |
|
||||||
|
| `-f` | Output format: `table` (default) or `json` |
|
||||||
|
|
||||||
|
**Common workflows:**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Point a new subdomain at a server (proxied through Cloudflare)
|
||||||
|
~/.config/mosaic/tools/cloudflare/record-create.sh \
|
||||||
|
-z example.com -t A -n myapp -c 203.0.113.10 -p
|
||||||
|
|
||||||
|
# Add a TXT record for domain verification (never proxied)
|
||||||
|
~/.config/mosaic/tools/cloudflare/record-create.sh \
|
||||||
|
-z example.com -t TXT -n _verify -c "verification=abc123"
|
||||||
|
|
||||||
|
# Check what records exist before making changes
|
||||||
|
~/.config/mosaic/tools/cloudflare/record-list.sh -z example.com -t CNAME
|
||||||
|
|
||||||
|
# Update an existing record (get record ID from record-list first)
|
||||||
|
~/.config/mosaic/tools/cloudflare/record-update.sh \
|
||||||
|
-z example.com -r <record-id> -t A -n myapp -c 10.0.0.5 -p
|
||||||
|
```
|
||||||
|
|
||||||
|
**DNS + Deployment integration**: When deploying a new service via Coolify or Portainer that needs a public domain, the typical sequence is:
|
||||||
|
|
||||||
|
1. Create the DNS record pointing at the host IP (with `-p` for Cloudflare proxy if desired)
|
||||||
|
2. Deploy the service via Coolify/Portainer
|
||||||
|
3. Verify the domain resolves and the service is reachable
|
||||||
|
|
||||||
|
**Proxy (`-p`) guidance:**
|
||||||
|
|
||||||
|
- Use proxy (orange cloud) for web services — provides CDN, DDoS protection, and hides origin IP
|
||||||
|
- Skip proxy (grey cloud) for non-HTTP services (mail, SSH), wildcard records, or when the service handles its own TLS termination and needs direct client IP visibility
|
||||||
|
- Proxy is NOT compatible with non-standard ports outside Cloudflare's supported range
|
||||||
|
|
||||||
|
### Stack Health Check
|
||||||
|
|
||||||
|
Verify all infrastructure services are reachable:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
~/.config/mosaic/tools/health/stack-health.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
### Image Tagging and Promotion (Hard Rule)
|
||||||
|
|
||||||
|
For containerized deployments:
|
||||||
|
|
||||||
|
1. Build immutable image tags: `sha-<shortsha>` and `v{base-version}-rc.{build}`.
|
||||||
|
2. Use mutable environment tags only as pointers: `testing`, optional `staging`, and `prod`.
|
||||||
|
3. Deploy by immutable digest, not by mutable tag alone.
|
||||||
|
4. Promote the exact tested digest between environments (no rebuild between testing and prod).
|
||||||
|
5. Do not use `latest` or `dev` as deployment references.
|
||||||
|
|
||||||
|
Blue-green is the default strategy for production promotion.
|
||||||
|
Canary is allowed only when automated SLO/error-rate gates and auto-rollback triggers are implemented.
|
||||||
|
|
||||||
|
### Post-Deploy Validation (REQUIRED)
|
||||||
|
|
||||||
|
1. Health endpoints return expected status.
|
||||||
|
2. Critical smoke tests pass in target environment.
|
||||||
|
3. Running version and digest match the promoted release candidate.
|
||||||
|
4. Observability signals (errors/latency) are within expected thresholds.
|
||||||
|
|
||||||
|
### Rollback Rule
|
||||||
|
|
||||||
|
If post-deploy validation fails:
|
||||||
|
|
||||||
|
1. Execute rollback/redeploy-safe path immediately.
|
||||||
|
2. Mark deployment as blocked in `docs/TASKS.md`.
|
||||||
|
3. Record failure evidence and next remediation step in scratchpad and release notes.
|
||||||
|
|
||||||
|
### Registry Retention and Cleanup
|
||||||
|
|
||||||
|
Cleanup MUST be automated.
|
||||||
|
|
||||||
|
- Keep all final release tags (`vX.Y.Z`) indefinitely.
|
||||||
|
- Keep active environment digests (`prod`, `testing`, and active blue/green slots).
|
||||||
|
- Keep recent RC tags (`vX.Y.Z-rc.N`) based on retention window.
|
||||||
|
- Remove stale `sha-*` and RC tags outside retention window if they are not actively deployed.
|
||||||
|
|
||||||
|
## Monitoring & Logging
|
||||||
|
|
||||||
|
### Logging Standards
|
||||||
|
|
||||||
|
- Use structured logging (JSON)
|
||||||
|
- Include correlation IDs
|
||||||
|
- Log at appropriate levels (ERROR, WARN, INFO, DEBUG)
|
||||||
|
- Never log sensitive data
|
||||||
|
|
||||||
|
### Metrics to Collect
|
||||||
|
|
||||||
|
- Request latency (p50, p95, p99)
|
||||||
|
- Error rates
|
||||||
|
- Resource utilization (CPU, memory)
|
||||||
|
- Business metrics
|
||||||
|
|
||||||
|
### Alerting
|
||||||
|
|
||||||
|
- Define SLOs (Service Level Objectives)
|
||||||
|
- Alert on symptoms, not causes
|
||||||
|
- Include runbook links in alerts
|
||||||
|
- Avoid alert fatigue
|
||||||
|
|
||||||
|
## Testing Infrastructure
|
||||||
|
|
||||||
|
### Test Categories
|
||||||
|
|
||||||
|
1. **Unit tests**: Terraform/Ansible logic
|
||||||
|
2. **Integration tests**: Deployed resources work together
|
||||||
|
3. **Smoke tests**: Critical paths after deployment
|
||||||
|
4. **Chaos tests**: Failure mode validation
|
||||||
|
|
||||||
|
### Infrastructure Testing Tools
|
||||||
|
|
||||||
|
- Terraform: `terraform validate`, `terraform plan`
|
||||||
|
- Ansible: `ansible-lint`, molecule
|
||||||
|
- Kubernetes: `kubectl dry-run`, kubeval
|
||||||
|
- General: Terratest, ServerSpec
|
||||||
|
|
||||||
|
## Commit Format
|
||||||
|
|
||||||
|
```
|
||||||
|
chore(#67): Configure Redis cluster
|
||||||
|
|
||||||
|
- Add Redis StatefulSet with 3 replicas
|
||||||
|
- Configure persistence with PVC
|
||||||
|
- Add Vault secret for auth password
|
||||||
|
|
||||||
|
Refs #67
|
||||||
|
```
|
||||||
|
|
||||||
|
## Before Completing
|
||||||
|
|
||||||
|
1. Validate configuration syntax
|
||||||
|
2. Run infrastructure tests
|
||||||
|
3. Test in dev/staging first
|
||||||
|
4. Document any manual steps required
|
||||||
|
5. Update scratchpad and close issue
|
||||||
51
guides/MEMORY.md
Normal file
51
guides/MEMORY.md
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
# Memory and Retention Rules
|
||||||
|
|
||||||
|
## Primary Memory Layer: OpenBrain
|
||||||
|
|
||||||
|
**OpenBrain is the canonical shared memory for all Mosaic agents across all harnesses and sessions.**
|
||||||
|
|
||||||
|
Use the `capture` MCP tool (or REST `POST /v1/thoughts`) to store:
|
||||||
|
|
||||||
|
- Discovered gotchas and workarounds
|
||||||
|
- Architectural decisions and rationale
|
||||||
|
- Project state and context for handoffs
|
||||||
|
- Anything a future agent should know
|
||||||
|
|
||||||
|
Use `search` or `recent` at session start to load prior context before acting.
|
||||||
|
|
||||||
|
This is not optional. An agent that uses local file-based memory instead of OpenBrain is a broken agent — its knowledge is invisible to every other agent on the platform.
|
||||||
|
|
||||||
|
## Hard Rules
|
||||||
|
|
||||||
|
1. Agent learnings MUST go to OpenBrain — not to any file-based memory location.
|
||||||
|
2. You MUST NOT write to runtime-native memory silos (they are write-blocked by hook).
|
||||||
|
3. Active execution state belongs in project `docs/` — not in memory files.
|
||||||
|
4. `~/.config/mosaic/memory/` is for mosaic framework technical notes only, not project knowledge.
|
||||||
|
|
||||||
|
## Runtime-Native Memory Silos (WRITE-BLOCKED)
|
||||||
|
|
||||||
|
These locations are blocked by PreToolUse hooks. Attempting to write there fails at the tool level.
|
||||||
|
|
||||||
|
| Runtime | Blocked silo | Use instead |
|
||||||
|
| ----------- | ---------------------------------- | ------------------- |
|
||||||
|
| Claude Code | `~/.claude/projects/*/memory/*.md` | OpenBrain `capture` |
|
||||||
|
| Codex | Runtime session memory | OpenBrain `capture` |
|
||||||
|
| OpenCode | Runtime session memory | OpenBrain `capture` |
|
||||||
|
|
||||||
|
MEMORY.md files may only contain behavioral guardrails that must be injected at load-path — not knowledge.
|
||||||
|
|
||||||
|
## Project Continuity Files (MANDATORY)
|
||||||
|
|
||||||
|
| File | Purpose | Location |
|
||||||
|
| -------------------------------- | ----------------------------------------- | --------------------------- |
|
||||||
|
| `docs/PRD.md` or `docs/PRD.json` | Source of requirements | Project `docs/` |
|
||||||
|
| `docs/TASKS.md` | Task tracking, milestones, issues, status | Project `docs/` |
|
||||||
|
| `docs/scratchpads/<task>.md` | Task-specific working memory | Project `docs/scratchpads/` |
|
||||||
|
| `AGENTS.md` | Project-local patterns and conventions | Project root |
|
||||||
|
|
||||||
|
## How the Block Works
|
||||||
|
|
||||||
|
`~/.config/mosaic/tools/qa/prevent-memory-write.sh` is registered as a `PreToolUse` hook in
|
||||||
|
`~/.claude/settings.json`. It intercepts Write/Edit/MultiEdit calls and rejects any targeting
|
||||||
|
`~/.claude/projects/*/memory/*.md` before the tool executes. Exit code 2 blocks the call and
|
||||||
|
the agent sees a message directing it to OpenBrain instead.
|
||||||
127
guides/ORCHESTRATOR-LEARNINGS.md
Normal file
127
guides/ORCHESTRATOR-LEARNINGS.md
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
# Orchestrator Learnings (Universal)
|
||||||
|
|
||||||
|
> Cross-project heuristic adjustments based on observed variance data.
|
||||||
|
>
|
||||||
|
> **Note:** This file contains generic patterns only. Project-specific evidence is stored in each project's `docs/tasks/orchestrator-learnings.json`.
|
||||||
|
|
||||||
|
## Task Type Multipliers
|
||||||
|
|
||||||
|
Apply these multipliers to base estimates from `ORCHESTRATOR.md`:
|
||||||
|
|
||||||
|
| Task Type | Base Estimate | Multiplier | Confidence | Samples | Last Updated |
|
||||||
|
| --------------------- | ---------------- | ---------- | ---------- | ------- | ------------ |
|
||||||
|
| STYLE_FIX | 3-5K | 0.64 | MEDIUM | n=1 | 2026-02-05 |
|
||||||
|
| BULK_CLEANUP | file_count × 550 | 1.0 | MEDIUM | n=2 | 2026-02-05 |
|
||||||
|
| GUARD_ADD | 5-8K | 1.0 | LOW | n=0 | - |
|
||||||
|
| SECURITY_FIX | 8-12K | 2.5 | LOW | n=0 | - |
|
||||||
|
| AUTH_ADD | 15-25K | 1.0 | HIGH | n=1 | 2026-02-05 |
|
||||||
|
| REFACTOR | 10-15K | 1.0 | LOW | n=0 | - |
|
||||||
|
| TEST_ADD | 15-25K | 1.0 | LOW | n=0 | - |
|
||||||
|
| ERROR_HANDLING | 8-12K | 2.3 | MEDIUM | n=1 | 2026-02-05 |
|
||||||
|
| CONFIG_DEFAULT_CHANGE | 5-10K | 1.8 | MEDIUM | n=1 | 2026-02-05 |
|
||||||
|
| INPUT_VALIDATION | 5-8K | 1.7 | MEDIUM | n=1 | 2026-02-05 |
|
||||||
|
|
||||||
|
## Phase Factors
|
||||||
|
|
||||||
|
Apply to all estimates based on task position in milestone:
|
||||||
|
|
||||||
|
| Phase Position | Factor | Rationale |
|
||||||
|
| ----------------- | ------ | -------------------------- |
|
||||||
|
| Early (tasks 1-3) | 1.45 | Codebase learning overhead |
|
||||||
|
| Mid (tasks 4-7) | 1.25 | Pattern recognition phase |
|
||||||
|
| Late (tasks 8+) | 1.10 | Established patterns |
|
||||||
|
|
||||||
|
## Estimation Formula
|
||||||
|
|
||||||
|
```
|
||||||
|
Final Estimate = Base Estimate × Type Multiplier × Phase Factor × TDD Overhead
|
||||||
|
|
||||||
|
Where:
|
||||||
|
- Base Estimate: From ORCHESTRATOR.md task type table
|
||||||
|
- Type Multiplier: From table above (default 1.0)
|
||||||
|
- Phase Factor: 1.45 / 1.25 / 1.10 based on position
|
||||||
|
- TDD Overhead: 1.20 if tests required
|
||||||
|
```
|
||||||
|
|
||||||
|
## Known Patterns
|
||||||
|
|
||||||
|
### BULK_CLEANUP
|
||||||
|
|
||||||
|
**Pattern:** Multi-file cleanup tasks are severely underestimated.
|
||||||
|
|
||||||
|
**Why:** Iterative testing across many files, cascading fixes, and debugging compound the effort.
|
||||||
|
|
||||||
|
**Observed:** +112% to +276% variance when using fixed estimates.
|
||||||
|
|
||||||
|
**Recommendation:** Use `file_count × 550` instead of fixed estimate.
|
||||||
|
|
||||||
|
### ERROR_HANDLING
|
||||||
|
|
||||||
|
**Pattern:** Error handling changes that modify type interfaces cascade through the codebase.
|
||||||
|
|
||||||
|
**Why:** Adding fields to result types requires updating all callers, error messages, and tests.
|
||||||
|
|
||||||
|
**Observed:** +131% variance.
|
||||||
|
|
||||||
|
**Multiplier:** 2.3x base estimate when type interfaces are modified.
|
||||||
|
|
||||||
|
### CONFIG_DEFAULT_CHANGE
|
||||||
|
|
||||||
|
**Pattern:** Config default changes require more test coverage than expected.
|
||||||
|
|
||||||
|
**Why:** Security-sensitive defaults need validation tests, warning tests, and edge case coverage.
|
||||||
|
|
||||||
|
**Observed:** +80% variance.
|
||||||
|
|
||||||
|
**Multiplier:** 1.8x when config changes need security validation.
|
||||||
|
|
||||||
|
### INPUT_VALIDATION
|
||||||
|
|
||||||
|
**Pattern:** Security input validation with allowlists is more complex than simple validation.
|
||||||
|
|
||||||
|
**Why:** Comprehensive allowlists (e.g., OAuth error codes), encoding requirements, and security tests add up.
|
||||||
|
|
||||||
|
**Observed:** +70% variance.
|
||||||
|
|
||||||
|
**Multiplier:** 1.7x when security allowlists are involved.
|
||||||
|
|
||||||
|
### STYLE_FIX
|
||||||
|
|
||||||
|
**Pattern:** Pure formatting fixes are faster than estimated when isolated.
|
||||||
|
|
||||||
|
**Observed:** -36% variance.
|
||||||
|
|
||||||
|
**Multiplier:** 0.64x for isolated style-only fixes.
|
||||||
|
|
||||||
|
## Changelog
|
||||||
|
|
||||||
|
| Date | Change | Samples | Confidence |
|
||||||
|
| ---------- | ------------------------------------------- | ------- | ---------- |
|
||||||
|
| 2026-02-05 | Added BULK_CLEANUP category | n=2 | MEDIUM |
|
||||||
|
| 2026-02-05 | Added STYLE_FIX multiplier 0.64 | n=1 | MEDIUM |
|
||||||
|
| 2026-02-05 | Confirmed AUTH_ADD heuristic accurate | n=1 | HIGH |
|
||||||
|
| 2026-02-05 | Added ERROR_HANDLING multiplier 2.3x | n=1 | MEDIUM |
|
||||||
|
| 2026-02-05 | Added CONFIG_DEFAULT_CHANGE multiplier 1.8x | n=1 | MEDIUM |
|
||||||
|
| 2026-02-05 | Added INPUT_VALIDATION multiplier 1.7x | n=1 | MEDIUM |
|
||||||
|
|
||||||
|
## Update Protocol
|
||||||
|
|
||||||
|
**Graduated Autonomy:**
|
||||||
|
|
||||||
|
| Phase | Condition | Action |
|
||||||
|
| ---------------------- | ----------------------------------------- | -------------------------------------------- |
|
||||||
|
| **Now** | All proposals | Human review required |
|
||||||
|
| **After 3 milestones** | <30% change, n≥3 samples, HIGH confidence | Auto-update allowed |
|
||||||
|
| **Mature** | All changes | Auto with notification, revert on regression |
|
||||||
|
|
||||||
|
**Validation Before Update:**
|
||||||
|
|
||||||
|
1. Minimum 3 samples for same task type
|
||||||
|
2. Standard deviation < 30% of mean
|
||||||
|
3. Outliers (>2σ) excluded
|
||||||
|
4. New formula must not increase variance on historical data
|
||||||
|
|
||||||
|
## Where to Find Project-Specific Data
|
||||||
|
|
||||||
|
- **Project learnings:** `<project>/docs/tasks/orchestrator-learnings.json`
|
||||||
|
- **Cross-project metrics:** `jarvis-brain/data/orchestrator-metrics.json`
|
||||||
268
guides/ORCHESTRATOR-PROTOCOL.md
Normal file
268
guides/ORCHESTRATOR-PROTOCOL.md
Normal file
@@ -0,0 +1,268 @@
|
|||||||
|
# Orchestrator Protocol — Mission Lifecycle Guide
|
||||||
|
|
||||||
|
> **Operational guide for agent sessions.** Distilled from the full specification at
|
||||||
|
> `jarvis-brain/docs/protocols/ORCHESTRATOR-PROTOCOL.md` (1,066 lines).
|
||||||
|
>
|
||||||
|
> Load this guide when: active mission detected, multi-milestone orchestration, mission continuation.
|
||||||
|
> Load `ORCHESTRATOR.md` for per-session execution protocol (planning, coding, review, commit cycle).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Relationship to ORCHESTRATOR.md
|
||||||
|
|
||||||
|
| Concern | Guide |
|
||||||
|
| -------------------------------------------------------------------- | ----------------- |
|
||||||
|
| How to execute within a session (plan, code, test, review, commit) | `ORCHESTRATOR.md` |
|
||||||
|
| How to manage a mission across sessions (resume, continue, handoff) | **This guide** |
|
||||||
|
Both guides are active simultaneously during orchestration missions.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Mission Manifest
|
||||||
|
|
||||||
|
**Location:** `docs/MISSION-MANIFEST.md`
|
||||||
|
**Owner:** Orchestrator (sole writer)
|
||||||
|
**Template:** `~/.config/mosaic/templates/docs/MISSION-MANIFEST.md.template`
|
||||||
|
|
||||||
|
The manifest is the persistent document tracking full mission scope, status, milestones, and session history. It survives session death.
|
||||||
|
|
||||||
|
### Update Rules
|
||||||
|
|
||||||
|
- Update **Phase** when transitioning (Intake → Planning → Execution → Continuation → Completion)
|
||||||
|
- Update **Current Milestone** when starting a new milestone
|
||||||
|
- Update **Progress** after each milestone completion
|
||||||
|
- Append to **Session History** at session start and end
|
||||||
|
- Update **Status** to `completed` only when ALL success criteria are verified
|
||||||
|
|
||||||
|
### Hard Rule
|
||||||
|
|
||||||
|
The manifest is the source of truth for mission scope. If the manifest says a milestone is done, it is done. If it says remaining, it remains.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Scratchpad Protocol
|
||||||
|
|
||||||
|
**Location:** `docs/scratchpads/{mission-id}.md`
|
||||||
|
**Template:** `~/.config/mosaic/templates/docs/mission-scratchpad.md.template`
|
||||||
|
|
||||||
|
### Rules
|
||||||
|
|
||||||
|
1. **First action** — Before ANY planning or coding, write the mission prompt to the scratchpad
|
||||||
|
2. **Append-only** — NEVER delete or overwrite previous entries
|
||||||
|
3. **Session log** — Record session start, tasks done, and outcome at session end
|
||||||
|
4. **Decisions** — Record all planning decisions with rationale
|
||||||
|
5. **Corrections** — Record course corrections from human or coordinator
|
||||||
|
6. **Never deleted** — Scratchpads survive mission completion (archival reference)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. TASKS.md as Control Plane
|
||||||
|
|
||||||
|
**Location:** `docs/TASKS.md`
|
||||||
|
**Owner:** Orchestrator (sole writer). Workers read but NEVER modify.
|
||||||
|
|
||||||
|
### Table Schema
|
||||||
|
|
||||||
|
```markdown
|
||||||
|
| id | status | milestone | description | pr | notes |
|
||||||
|
```
|
||||||
|
|
||||||
|
### Status Values
|
||||||
|
|
||||||
|
`not-started` → `in-progress` → `done` (or `blocked` / `failed`)
|
||||||
|
|
||||||
|
### Planning Tasks Are First-Class
|
||||||
|
|
||||||
|
Include explicit planning tasks (e.g., `PLAN-001: Break down milestone into tasks`). These count toward progress.
|
||||||
|
|
||||||
|
### Post-Merge Tasks Are Explicit
|
||||||
|
|
||||||
|
Include verification tasks after merge: CI check, deployment verification, Playwright test. Don't assume they happen automatically.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Session Resume Protocol
|
||||||
|
|
||||||
|
When starting a session and an active mission is detected, follow this checklist:
|
||||||
|
|
||||||
|
### Detection (5-point check)
|
||||||
|
|
||||||
|
1. `docs/MISSION-MANIFEST.md` exists → read Phase, Current Milestone, Progress
|
||||||
|
2. `docs/scratchpads/*.md` exists → read latest scratchpad for decisions and corrections
|
||||||
|
3. `docs/TASKS.md` exists → read task state (what's done, what's next)
|
||||||
|
4. Git state → current branch, open PRs, recent commits
|
||||||
|
5. Provider state → open issues, milestone status (if accessible)
|
||||||
|
|
||||||
|
### Resume Procedure
|
||||||
|
|
||||||
|
1. Read the mission manifest FIRST
|
||||||
|
2. Read the scratchpad for session history and corrections
|
||||||
|
3. Read TASKS.md for current task state
|
||||||
|
4. Identify the next `not-started` or `in-progress` task
|
||||||
|
5. Continue execution from that task
|
||||||
|
6. Update Session History in the manifest
|
||||||
|
|
||||||
|
### Dirty State Recovery
|
||||||
|
|
||||||
|
| State | Recovery |
|
||||||
|
| ------------------------ | ------------------------------------------------------------------- |
|
||||||
|
| Dirty git working tree | Stash changes, log stash ref in scratchpad, resume clean |
|
||||||
|
| Open PR in bad state | Check PR status, close if broken, re-create if needed |
|
||||||
|
| Half-created issues | Audit issues against TASKS.md, reconcile |
|
||||||
|
| Tasks marked in-progress | Check if work was committed; if so, mark done; if not, restart task |
|
||||||
|
|
||||||
|
### Hard Rule
|
||||||
|
|
||||||
|
Session state is NEVER automatically deleted. The coordinator (human or automated) must explicitly request cleanup.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Mission Continuation
|
||||||
|
|
||||||
|
When a milestone completes and more milestones remain:
|
||||||
|
|
||||||
|
### Agent Handoff (at ~55-60% context)
|
||||||
|
|
||||||
|
If context usage is high, produce a handoff message:
|
||||||
|
|
||||||
|
1. Update TASKS.md with final task statuses
|
||||||
|
2. Update mission manifest with session results
|
||||||
|
3. Append session summary to scratchpad
|
||||||
|
4. Commit all state files
|
||||||
|
5. The coordinator will generate a continuation prompt for the next session
|
||||||
|
|
||||||
|
### Continuation Prompt and Capsule Format
|
||||||
|
|
||||||
|
The coordinator generates this (via `mosaic coord continue`) and writes a machine-readable capsule at `.mosaic/orchestrator/next-task.json`:
|
||||||
|
|
||||||
|
```
|
||||||
|
## Continuation Mission
|
||||||
|
Continue **{mission}** from existing state.
|
||||||
|
- Read docs/MISSION-MANIFEST.md for scope and status
|
||||||
|
- Read docs/scratchpads/{id}.md for decisions
|
||||||
|
- Read docs/TASKS.md for current state
|
||||||
|
- Continue from task {next-task-id}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Between Sessions (r0 manual)
|
||||||
|
|
||||||
|
1. Agent stops (expected — this is the confirmed stamina limitation)
|
||||||
|
2. Human runs `mosaic coord mission` to check status
|
||||||
|
3. Human runs `mosaic coord continue` to generate continuation prompt
|
||||||
|
4. Human launches new session and pastes the prompt
|
||||||
|
5. New agent reads manifest, scratchpad, TASKS.md and continues
|
||||||
|
|
||||||
|
### Between Sessions (r0 assisted)
|
||||||
|
|
||||||
|
Use `mosaic coord run` to remove copy/paste steps:
|
||||||
|
|
||||||
|
1. Agent stops
|
||||||
|
2. Human runs `mosaic coord run [--claude|--codex]`
|
||||||
|
3. Coordinator regenerates continuation prompt + `next-task.json`
|
||||||
|
4. Coordinator launches selected runtime with scoped kickoff context
|
||||||
|
5. New session resumes from next task
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. Failure Taxonomy Quick Reference
|
||||||
|
|
||||||
|
| Code | Type | Recovery |
|
||||||
|
| ---- | ---------------------- | ----------------------------------------------------- |
|
||||||
|
| F1 | Premature Stop | Continuation prompt → new session (most common) |
|
||||||
|
| F2 | Context Exhaustion | Handoff message → new session |
|
||||||
|
| F3 | Session Crash | Check git state → `mosaic coord resume` → new session |
|
||||||
|
| F4 | Error Spiral | Kill session, mark task blocked, skip to next |
|
||||||
|
| F5 | Quality Gate Failure | Create QA remediation task |
|
||||||
|
| F6 | Infrastructure Failure | Pause, retry when service recovers |
|
||||||
|
| F7 | False Completion | Append correction to scratchpad, relaunch |
|
||||||
|
| F8 | Scope Drift | Kill session, relaunch with scratchpad ref |
|
||||||
|
| F9 | Subagent Failure | Orchestrator retries or creates remediation |
|
||||||
|
| F10 | Deadlock | Escalate to human |
|
||||||
|
|
||||||
|
### F1: Premature Stop — Detailed Recovery
|
||||||
|
|
||||||
|
This is the confirmed, most common failure. Every session will eventually trigger F1.
|
||||||
|
|
||||||
|
1. Session ends with tasks remaining in TASKS.md
|
||||||
|
2. Run `mosaic coord mission` — verify milestone status
|
||||||
|
3. If milestone complete: verify CI green, deployed, issues closed
|
||||||
|
4. Run `mosaic coord continue` — generates scoped continuation prompt
|
||||||
|
5. Launch new session, paste prompt
|
||||||
|
6. New session reads state and continues from next pending task
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 8. r0 Manual Coordinator Process
|
||||||
|
|
||||||
|
In r0, the Coordinator is Jason + shell scripts. No daemon. No automation.
|
||||||
|
|
||||||
|
### Commands
|
||||||
|
|
||||||
|
| Command | Purpose |
|
||||||
|
| --------------------------------------------------- | ------------------------------------------------- |
|
||||||
|
| `mosaic coord init --name "..." --milestones "..."` | Initialize a new mission |
|
||||||
|
| `mosaic coord mission` | Show mission progress dashboard |
|
||||||
|
| `mosaic coord status` | Check if agent session is still running |
|
||||||
|
| `mosaic coord continue` | Generate continuation prompt for next session |
|
||||||
|
| `mosaic coord run [--claude \| --codex]` | Generate continuation context and launch runtime |
|
||||||
|
| `mosaic coord resume` | Crash recovery (detect dirty state, generate fix) |
|
||||||
|
| `mosaic coord resume --clean-lock` | Clear stale session lock after review |
|
||||||
|
|
||||||
|
### Typical Workflow
|
||||||
|
|
||||||
|
```
|
||||||
|
init → launch agent → [agent works] → agent stops →
|
||||||
|
status → mission → run → repeat
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 9. Operational Checklist
|
||||||
|
|
||||||
|
### Pre-Mission
|
||||||
|
|
||||||
|
- [ ] Mission initialized: `mosaic coord init`
|
||||||
|
- [ ] docs/MISSION-MANIFEST.md exists with scope and milestones
|
||||||
|
- [ ] docs/TASKS.md scaffolded
|
||||||
|
- [ ] docs/scratchpads/{id}.md scaffolded
|
||||||
|
- [ ] Success criteria defined in manifest
|
||||||
|
|
||||||
|
### Session Start
|
||||||
|
|
||||||
|
- [ ] Read manifest → know phase, milestone, progress
|
||||||
|
- [ ] Read scratchpad → know decisions, corrections, history
|
||||||
|
- [ ] Read TASKS.md → know what's done and what's next
|
||||||
|
- [ ] Write session start to scratchpad
|
||||||
|
- [ ] Update Session History in manifest
|
||||||
|
|
||||||
|
### Planning Gate (Hard Gate — No Coding Until Complete)
|
||||||
|
|
||||||
|
- [ ] Milestones created in provider (Gitea/GitHub)
|
||||||
|
- [ ] Issues created for all milestone tasks
|
||||||
|
- [ ] TASKS.md populated with all planned tasks (including planning + verification tasks)
|
||||||
|
- [ ] All planning artifacts committed and pushed
|
||||||
|
|
||||||
|
### Per-Task
|
||||||
|
|
||||||
|
- [ ] Update task status to `in-progress` in TASKS.md
|
||||||
|
- [ ] Execute task following ORCHESTRATOR.md cycle
|
||||||
|
- [ ] Update task status to `done` (or `blocked`/`failed`)
|
||||||
|
- [ ] Commit, push
|
||||||
|
|
||||||
|
### Milestone Completion
|
||||||
|
|
||||||
|
- [ ] All milestone tasks in TASKS.md are `done`
|
||||||
|
- [ ] CI/pipeline green
|
||||||
|
- [ ] PR merged to `main`
|
||||||
|
- [ ] Issues closed
|
||||||
|
- [ ] Update manifest: milestone status → completed
|
||||||
|
- [ ] Update scratchpad: session log entry
|
||||||
|
- [ ] If deployment target: verify accessible
|
||||||
|
|
||||||
|
### Mission Completion
|
||||||
|
|
||||||
|
- [ ] ALL milestones completed
|
||||||
|
- [ ] ALL success criteria verified with evidence
|
||||||
|
- [ ] manifest status → completed
|
||||||
|
- [ ] Final scratchpad entry with completion evidence
|
||||||
|
- [ ] Release tag created and pushed (if applicable)
|
||||||
1175
guides/ORCHESTRATOR.md
Normal file
1175
guides/ORCHESTRATOR.md
Normal file
File diff suppressed because it is too large
Load Diff
63
guides/PRD.md
Normal file
63
guides/PRD.md
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
# PRD Requirement Guide (MANDATORY)
|
||||||
|
|
||||||
|
This guide defines how requirements are captured before coding.
|
||||||
|
|
||||||
|
## Hard Rules
|
||||||
|
|
||||||
|
1. Before coding begins, `docs/PRD.md` or `docs/PRD.json` MUST exist.
|
||||||
|
2. The PRD is the authoritative requirements source for implementation and testing.
|
||||||
|
3. The main agent MUST prepare or update the PRD using user input and available project context before implementation starts.
|
||||||
|
4. The agent MUST NOT invent requirements silently.
|
||||||
|
5. In steered autonomy mode, best-guess decisions are REQUIRED when needed; each guessed decision MUST be marked with `ASSUMPTION:` and rationale.
|
||||||
|
|
||||||
|
## PRD Format
|
||||||
|
|
||||||
|
Allowed canonical formats:
|
||||||
|
|
||||||
|
1. `docs/PRD.md`
|
||||||
|
2. `docs/PRD.json`
|
||||||
|
|
||||||
|
Either format is valid. Both may exist if one is a transformed representation of the other.
|
||||||
|
For markdown PRDs, start from `~/.config/mosaic/templates/docs/PRD.md.template`.
|
||||||
|
|
||||||
|
## Best-Guess Mode
|
||||||
|
|
||||||
|
Steered autonomy is the default operating mode.
|
||||||
|
|
||||||
|
1. Agent SHOULD fill missing decisions in the PRD without waiting for routine confirmation.
|
||||||
|
2. Agent MUST mark each guessed decision with `ASSUMPTION:` and rationale.
|
||||||
|
3. If user explicitly requests strict-confirmation mode, the agent MUST ask before unresolved decisions are finalized.
|
||||||
|
4. For high-impact security/compliance/release uncertainty, escalate only if the decision cannot be safely constrained with rollback-ready defaults.
|
||||||
|
|
||||||
|
## Minimum PRD Content
|
||||||
|
|
||||||
|
Every PRD MUST include:
|
||||||
|
|
||||||
|
1. Problem statement and objective
|
||||||
|
2. In-scope and out-of-scope
|
||||||
|
3. User/stakeholder requirements
|
||||||
|
4. Functional requirements
|
||||||
|
5. Non-functional requirements (security, performance, reliability, observability)
|
||||||
|
6. Acceptance criteria
|
||||||
|
7. Constraints and dependencies
|
||||||
|
8. Risks and open questions
|
||||||
|
9. Testing and verification expectations
|
||||||
|
10. Delivery/milestone intent
|
||||||
|
|
||||||
|
## Pre-Coding Gate
|
||||||
|
|
||||||
|
Coding MUST NOT begin until:
|
||||||
|
|
||||||
|
1. PRD file exists (`docs/PRD.md` or `docs/PRD.json`)
|
||||||
|
2. PRD has required sections
|
||||||
|
3. Unresolved decisions are captured as explicit `ASSUMPTION:` entries with rationale and planned validation
|
||||||
|
|
||||||
|
## Change Control
|
||||||
|
|
||||||
|
When requirements materially change:
|
||||||
|
|
||||||
|
1. Update PRD first.
|
||||||
|
2. Then update implementation plan/tasks.
|
||||||
|
3. Then implement code changes.
|
||||||
|
|
||||||
|
Implementation that diverges from PRD without PRD updates is a blocker.
|
||||||
125
guides/QA-TESTING.md
Normal file
125
guides/QA-TESTING.md
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
# QA & Testing Guide
|
||||||
|
|
||||||
|
## Before Starting
|
||||||
|
|
||||||
|
1. Check assigned issue: `~/.config/mosaic/tools/git/issue-list.sh -a @me`
|
||||||
|
2. Create scratchpad: `docs/scratchpads/{issue-number}-{short-name}.md`
|
||||||
|
3. Review `docs/PRD.md` or `docs/PRD.json` as the requirements source.
|
||||||
|
4. Review acceptance criteria and affected change surfaces.
|
||||||
|
|
||||||
|
## Testing Policy (Hard Rules)
|
||||||
|
|
||||||
|
1. Situational testing is the PRIMARY validation gate.
|
||||||
|
2. Baseline testing is REQUIRED for all software changes.
|
||||||
|
3. TDD is risk-based and REQUIRED only for defined high-risk change types.
|
||||||
|
4. Tests MUST validate requirements and behavior, not only internal implementation details.
|
||||||
|
|
||||||
|
## Priority Order
|
||||||
|
|
||||||
|
1. Situational tests: prove requirements and real behavior on changed surfaces.
|
||||||
|
2. Baseline tests: lint/type/unit/integration safety checks.
|
||||||
|
3. TDD discipline: applied where risk justifies test-first workflow.
|
||||||
|
|
||||||
|
## Risk-Based TDD Requirement
|
||||||
|
|
||||||
|
| Change Type | TDD Requirement | Required Action |
|
||||||
|
| ---------------------------------------------- | --------------- | ---------------------------------------------------------------------- |
|
||||||
|
| Bug fix | REQUIRED | Write a failing reproducer test first, then fix. |
|
||||||
|
| Security/auth/permission logic | REQUIRED | Write failing security/permission-path test first. |
|
||||||
|
| Critical business logic or data mutation rules | REQUIRED | Write failing rule/invariant test first. |
|
||||||
|
| API behavior regression | REQUIRED | Write failing contract/behavior test first. |
|
||||||
|
| Low-risk UI copy/style/layout | OPTIONAL | Add verification tests as appropriate; TDD recommended, not mandatory. |
|
||||||
|
| Mechanical refactor with unchanged behavior | OPTIONAL | Ensure regression/smoke coverage and situational evidence. |
|
||||||
|
|
||||||
|
If TDD is not required and skipped, record rationale in scratchpad.
|
||||||
|
If TDD is required and skipped, task is NOT complete.
|
||||||
|
|
||||||
|
## Baseline Test Requirements
|
||||||
|
|
||||||
|
For all software changes, run baseline checks applicable to the repo:
|
||||||
|
|
||||||
|
1. lint/static checks
|
||||||
|
2. type checks
|
||||||
|
3. unit tests for changed logic
|
||||||
|
4. integration tests for changed boundaries
|
||||||
|
|
||||||
|
## Situational Testing Matrix (Primary Gate)
|
||||||
|
|
||||||
|
| Change Surface | Required Situational Tests |
|
||||||
|
| ---------------------------- | ----------------------------------------------------------------------------- |
|
||||||
|
| Authentication/authorization | auth failure-path tests, permission boundary tests, token/session validation |
|
||||||
|
| Database schema/migrations | migration up/down validation, rollback safety, data integrity checks |
|
||||||
|
| API contract changes | backward compatibility checks, consumer-impact tests, contract tests |
|
||||||
|
| Frontend/UI workflow changes | end-to-end flow tests, accessibility sanity checks, state transition checks |
|
||||||
|
| CI/CD or deployment changes | pipeline execution validation, artifact integrity checks, rollback path check |
|
||||||
|
| Security-sensitive logic | abuse-case tests, input validation fuzzing/sanitization checks |
|
||||||
|
| Performance-critical path | baseline comparison, regression threshold checks |
|
||||||
|
|
||||||
|
## Coverage Requirements
|
||||||
|
|
||||||
|
### Minimum Standards
|
||||||
|
|
||||||
|
- Overall Coverage: 85% minimum
|
||||||
|
- Critical Paths: 95% minimum (auth, payments, data mutations)
|
||||||
|
- New Code: 90% minimum
|
||||||
|
|
||||||
|
Coverage is necessary but NOT sufficient. Passing coverage does not replace situational verification.
|
||||||
|
|
||||||
|
## Requirements-to-Evidence Mapping (Mandatory)
|
||||||
|
|
||||||
|
Before completion, map each acceptance criterion to concrete evidence.
|
||||||
|
Acceptance criteria MUST come from the active PRD.
|
||||||
|
|
||||||
|
Template:
|
||||||
|
|
||||||
|
```markdown
|
||||||
|
| Acceptance Criterion | Verification Method | Evidence |
|
||||||
|
| -------------------- | ------------------------------------------------------ | ---------------- |
|
||||||
|
| AC-1: ... | Situational test / baseline test / manual verification | command + result |
|
||||||
|
| AC-2: ... | ... | ... |
|
||||||
|
```
|
||||||
|
|
||||||
|
## Browser Automation (Hard Rule)
|
||||||
|
|
||||||
|
All browser automation (Playwright, Cypress, Puppeteer) MUST run in **headless mode**.
|
||||||
|
Launching a visible browser collides with the user's display and active session.
|
||||||
|
|
||||||
|
- Playwright: use `headless: true` in config or `--headed` must NOT be passed
|
||||||
|
- Cypress: use `cypress run` (headless by default), never `cypress open`
|
||||||
|
- Puppeteer: use `headless: true` (default)
|
||||||
|
|
||||||
|
If a project's `playwright.config.ts` does not explicitly set `headless: true`, add it before running tests.
|
||||||
|
|
||||||
|
## Test Quality Rules
|
||||||
|
|
||||||
|
1. Test behavior and outcomes, not private implementation details.
|
||||||
|
2. Include failure-path and edge-case assertions for changed behavior.
|
||||||
|
3. Keep tests deterministic; no new flaky tests.
|
||||||
|
4. Keep tests isolated; no dependency on execution order.
|
||||||
|
|
||||||
|
## Anti-Gaming Rules
|
||||||
|
|
||||||
|
1. Do NOT stop at "tests pass" if acceptance criteria are not verified.
|
||||||
|
2. Do NOT write narrow tests that only satisfy assertions while missing real workflow behavior.
|
||||||
|
3. Do NOT claim completion without situational evidence for impacted surfaces.
|
||||||
|
|
||||||
|
## Reporting
|
||||||
|
|
||||||
|
QA report MUST include:
|
||||||
|
|
||||||
|
1. baseline tests run and outcomes
|
||||||
|
2. situational tests run and outcomes
|
||||||
|
3. TDD usage decision (required/applied or optional/skipped with rationale)
|
||||||
|
4. acceptance-criteria-to-evidence mapping
|
||||||
|
5. coverage results
|
||||||
|
6. residual risk notes
|
||||||
|
|
||||||
|
## Before Completing
|
||||||
|
|
||||||
|
1. Baseline tests pass.
|
||||||
|
2. Required situational tests pass.
|
||||||
|
3. TDD obligations met for required change types.
|
||||||
|
4. Acceptance criteria mapped to evidence.
|
||||||
|
5. No flaky tests introduced.
|
||||||
|
6. CI pipeline passes (if available).
|
||||||
|
7. Scratchpad updated with results.
|
||||||
440
guides/TYPESCRIPT.md
Normal file
440
guides/TYPESCRIPT.md
Normal file
@@ -0,0 +1,440 @@
|
|||||||
|
# TypeScript Style Guide
|
||||||
|
|
||||||
|
**Authority**: This guide is MANDATORY for all TypeScript code. No exceptions without explicit approval.
|
||||||
|
|
||||||
|
Based on Google TypeScript Style Guide with stricter enforcement.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Core Principles
|
||||||
|
|
||||||
|
1. **Explicit over implicit** — Always declare types, never rely on inference for public APIs
|
||||||
|
2. **Specific over generic** — Use the narrowest type that works
|
||||||
|
3. **Safe over convenient** — Type safety is not negotiable
|
||||||
|
4. **Contract-first boundaries** — Cross-module and API payloads MUST use dedicated DTO files
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## DTO Contract (MANDATORY)
|
||||||
|
|
||||||
|
DTO files are REQUIRED for TypeScript module boundaries to preserve shared context and consistency.
|
||||||
|
|
||||||
|
Hard requirements:
|
||||||
|
|
||||||
|
1. Input and output payloads crossing module boundaries MUST be defined in `*.dto.ts` files.
|
||||||
|
2. Controller/service boundary payloads MUST use DTO types; inline object literal types are NOT allowed.
|
||||||
|
3. Public API request/response contracts MUST use DTO files and remain stable across modules.
|
||||||
|
4. Shared DTOs used by multiple modules MUST live in a shared location (for example `src/shared/dto/` or `packages/shared/dto/`).
|
||||||
|
5. ORM/entity models MUST NOT be exposed directly across module boundaries; map them to DTOs.
|
||||||
|
6. DTO changes MUST be reflected in tests and documentation when contracts change.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ WRONG: inline payload contract at boundary
|
||||||
|
export function createUser(payload: { email: string; role: string }): Promise<User> {}
|
||||||
|
|
||||||
|
// ✅ CORRECT: dedicated DTO file contract
|
||||||
|
// user-create.dto.ts
|
||||||
|
export interface UserCreateDto {
|
||||||
|
email: string;
|
||||||
|
role: UserRole;
|
||||||
|
}
|
||||||
|
|
||||||
|
// user-response.dto.ts
|
||||||
|
export interface UserResponseDto {
|
||||||
|
id: string;
|
||||||
|
email: string;
|
||||||
|
role: UserRole;
|
||||||
|
}
|
||||||
|
|
||||||
|
// service.ts
|
||||||
|
export function createUser(payload: UserCreateDto): Promise<UserResponseDto> {}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Forbidden Patterns (NEVER USE)
|
||||||
|
|
||||||
|
### `any` Type — FORBIDDEN
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ NEVER
|
||||||
|
function process(data: any) {}
|
||||||
|
const result: any = fetchData();
|
||||||
|
Record<string, any>;
|
||||||
|
|
||||||
|
// ✅ ALWAYS define explicit types
|
||||||
|
interface UserData {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
email: string;
|
||||||
|
}
|
||||||
|
function process(data: UserData) {}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `unknown` as Lazy Typing — FORBIDDEN
|
||||||
|
|
||||||
|
`unknown` is only acceptable in these specific cases:
|
||||||
|
|
||||||
|
1. Error catch blocks (then immediately narrow)
|
||||||
|
2. JSON.parse results (then validate with Zod/schema)
|
||||||
|
3. External API responses before validation
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ NEVER - using unknown to avoid typing
|
||||||
|
function getData(): unknown {}
|
||||||
|
const config: Record<string, unknown> = {};
|
||||||
|
|
||||||
|
// ✅ ACCEPTABLE - error handling with immediate narrowing
|
||||||
|
try {
|
||||||
|
riskyOperation();
|
||||||
|
} catch (error: unknown) {
|
||||||
|
if (error instanceof Error) {
|
||||||
|
logger.error(error.message);
|
||||||
|
} else {
|
||||||
|
logger.error('Unknown error', { error: String(error) });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ✅ ACCEPTABLE - external data with validation
|
||||||
|
const raw: unknown = JSON.parse(response);
|
||||||
|
const validated = UserSchema.parse(raw); // Zod validation
|
||||||
|
```
|
||||||
|
|
||||||
|
### Implicit `any` — FORBIDDEN
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ NEVER - implicit any from missing types
|
||||||
|
function process(data) {} // Parameter has implicit any
|
||||||
|
const handler = (e) => {}; // Parameter has implicit any
|
||||||
|
|
||||||
|
// ✅ ALWAYS - explicit types
|
||||||
|
function process(data: RequestPayload): ProcessedResult {}
|
||||||
|
const handler = (e: React.MouseEvent<HTMLButtonElement>): void => {};
|
||||||
|
```
|
||||||
|
|
||||||
|
### Type Assertions to Bypass Safety — FORBIDDEN
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ NEVER - lying to the compiler
|
||||||
|
const user = data as User;
|
||||||
|
const element = document.getElementById('app') as HTMLDivElement;
|
||||||
|
|
||||||
|
// ✅ USE - type guards and narrowing
|
||||||
|
function isUser(data: unknown): data is User {
|
||||||
|
return typeof data === 'object' && data !== null && 'id' in data;
|
||||||
|
}
|
||||||
|
if (isUser(data)) {
|
||||||
|
console.log(data.id); // Safe
|
||||||
|
}
|
||||||
|
|
||||||
|
// ✅ USE - null checks
|
||||||
|
const element = document.getElementById('app');
|
||||||
|
if (element instanceof HTMLDivElement) {
|
||||||
|
element.style.display = 'none'; // Safe
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Non-null Assertion (`!`) — FORBIDDEN (except tests)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ NEVER in production code
|
||||||
|
const name = user!.name;
|
||||||
|
const element = document.getElementById('app')!;
|
||||||
|
|
||||||
|
// ✅ USE - proper null handling
|
||||||
|
const name = user?.name ?? 'Anonymous';
|
||||||
|
const element = document.getElementById('app');
|
||||||
|
if (element) {
|
||||||
|
// Safe to use element
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Required Patterns
|
||||||
|
|
||||||
|
### Explicit Return Types — REQUIRED for all public functions
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ WRONG - missing return type
|
||||||
|
export function calculateTotal(items: Item[]) {
|
||||||
|
return items.reduce((sum, item) => sum + item.price, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ✅ CORRECT - explicit return type
|
||||||
|
export function calculateTotal(items: Item[]): number {
|
||||||
|
return items.reduce((sum, item) => sum + item.price, 0);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Explicit Parameter Types — REQUIRED always
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ WRONG
|
||||||
|
const multiply = (a, b) => a * b;
|
||||||
|
users.map((user) => user.name); // If user type isn't inferred
|
||||||
|
|
||||||
|
// ✅ CORRECT
|
||||||
|
const multiply = (a: number, b: number): number => a * b;
|
||||||
|
users.map((user: User): string => user.name);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Interface Over Type Alias — PREFERRED for objects
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ✅ PREFERRED - interface (extendable, better error messages)
|
||||||
|
interface User {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
email: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ✅ ACCEPTABLE - type alias for unions, intersections, primitives
|
||||||
|
type Status = 'active' | 'inactive' | 'pending';
|
||||||
|
type ID = string | number;
|
||||||
|
```
|
||||||
|
|
||||||
|
### Const Assertions for Literals — REQUIRED
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ WRONG - loses literal types
|
||||||
|
const config = {
|
||||||
|
endpoint: '/api/users',
|
||||||
|
method: 'GET',
|
||||||
|
};
|
||||||
|
// config.method is string, not 'GET'
|
||||||
|
|
||||||
|
// ✅ CORRECT - preserves literal types
|
||||||
|
const config = {
|
||||||
|
endpoint: '/api/users',
|
||||||
|
method: 'GET',
|
||||||
|
} as const;
|
||||||
|
// config.method is 'GET'
|
||||||
|
```
|
||||||
|
|
||||||
|
### Discriminated Unions — REQUIRED for variants
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ WRONG - optional properties for variants
|
||||||
|
interface ApiResponse {
|
||||||
|
success: boolean;
|
||||||
|
data?: User;
|
||||||
|
error?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ✅ CORRECT - discriminated union
|
||||||
|
interface SuccessResponse {
|
||||||
|
success: true;
|
||||||
|
data: User;
|
||||||
|
}
|
||||||
|
interface ErrorResponse {
|
||||||
|
success: false;
|
||||||
|
error: string;
|
||||||
|
}
|
||||||
|
type ApiResponse = SuccessResponse | ErrorResponse;
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Generic Constraints
|
||||||
|
|
||||||
|
### Meaningful Constraints — REQUIRED
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ WRONG - unconstrained generic
|
||||||
|
function merge<T>(a: T, b: T): T {}
|
||||||
|
|
||||||
|
// ✅ CORRECT - constrained generic
|
||||||
|
function merge<T extends object>(a: T, b: Partial<T>): T {}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Default Generic Parameters — USE SPECIFIC TYPES
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ WRONG
|
||||||
|
interface Repository<T = unknown> {}
|
||||||
|
|
||||||
|
// ✅ CORRECT - no default if type should be explicit
|
||||||
|
interface Repository<T extends Entity> {}
|
||||||
|
|
||||||
|
// ✅ ACCEPTABLE - meaningful default
|
||||||
|
interface Cache<T extends Serializable = JsonValue> {}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## React/JSX Specific
|
||||||
|
|
||||||
|
### Event Handlers — EXPLICIT TYPES REQUIRED
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ WRONG
|
||||||
|
const handleClick = (e) => {};
|
||||||
|
const handleChange = (e) => {};
|
||||||
|
|
||||||
|
// ✅ CORRECT
|
||||||
|
const handleClick = (e: React.MouseEvent<HTMLButtonElement>): void => {};
|
||||||
|
const handleChange = (e: React.ChangeEvent<HTMLInputElement>): void => {};
|
||||||
|
const handleSubmit = (e: React.FormEvent<HTMLFormElement>): void => {};
|
||||||
|
```
|
||||||
|
|
||||||
|
### Component Props — INTERFACE REQUIRED
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ WRONG - inline types
|
||||||
|
function Button({ label, onClick }: { label: string; onClick: () => void }) { }
|
||||||
|
|
||||||
|
// ✅ CORRECT - named interface
|
||||||
|
interface ButtonProps {
|
||||||
|
label: string;
|
||||||
|
onClick: () => void;
|
||||||
|
disabled?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
function Button({ label, onClick, disabled = false }: ButtonProps): JSX.Element {
|
||||||
|
return <button onClick={onClick} disabled={disabled}>{label}</button>;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Children Prop — USE React.ReactNode
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
interface LayoutProps {
|
||||||
|
children: React.ReactNode;
|
||||||
|
sidebar?: React.ReactNode;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## API Response Typing
|
||||||
|
|
||||||
|
### Define Explicit Response Types
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ WRONG
|
||||||
|
const response = await fetch('/api/users');
|
||||||
|
const data = await response.json(); // data is any
|
||||||
|
|
||||||
|
// ✅ CORRECT
|
||||||
|
interface UsersResponse {
|
||||||
|
users: User[];
|
||||||
|
pagination: PaginationInfo;
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch('/api/users');
|
||||||
|
const data: UsersResponse = await response.json();
|
||||||
|
|
||||||
|
// ✅ BEST - with runtime validation
|
||||||
|
const response = await fetch('/api/users');
|
||||||
|
const raw = await response.json();
|
||||||
|
const data = UsersResponseSchema.parse(raw); // Zod validates at runtime
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Error Handling
|
||||||
|
|
||||||
|
### Typed Error Classes — REQUIRED for domain errors
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
class ValidationError extends Error {
|
||||||
|
constructor(
|
||||||
|
message: string,
|
||||||
|
public readonly field: string,
|
||||||
|
public readonly code: string,
|
||||||
|
) {
|
||||||
|
super(message);
|
||||||
|
this.name = 'ValidationError';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class NotFoundError extends Error {
|
||||||
|
constructor(
|
||||||
|
public readonly resource: string,
|
||||||
|
public readonly id: string,
|
||||||
|
) {
|
||||||
|
super(`${resource} with id ${id} not found`);
|
||||||
|
this.name = 'NotFoundError';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Error Narrowing — REQUIRED
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
try {
|
||||||
|
await saveUser(user);
|
||||||
|
} catch (error: unknown) {
|
||||||
|
if (error instanceof ValidationError) {
|
||||||
|
return { error: error.message, field: error.field };
|
||||||
|
}
|
||||||
|
if (error instanceof NotFoundError) {
|
||||||
|
return { error: 'Not found', resource: error.resource };
|
||||||
|
}
|
||||||
|
if (error instanceof Error) {
|
||||||
|
logger.error('Unexpected error', { message: error.message, stack: error.stack });
|
||||||
|
return { error: 'Internal error' };
|
||||||
|
}
|
||||||
|
logger.error('Unknown error type', { error: String(error) });
|
||||||
|
return { error: 'Internal error' };
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ESLint Rules — ENFORCE THESE
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
{
|
||||||
|
"@typescript-eslint/no-explicit-any": "error",
|
||||||
|
"@typescript-eslint/explicit-function-return-type": ["error", {
|
||||||
|
"allowExpressions": true,
|
||||||
|
"allowTypedFunctionExpressions": true
|
||||||
|
}],
|
||||||
|
"@typescript-eslint/explicit-module-boundary-types": "error",
|
||||||
|
"@typescript-eslint/no-inferrable-types": "off", // Allow explicit primitives
|
||||||
|
"@typescript-eslint/no-non-null-assertion": "error",
|
||||||
|
"@typescript-eslint/strict-boolean-expressions": "error",
|
||||||
|
"@typescript-eslint/no-unsafe-assignment": "error",
|
||||||
|
"@typescript-eslint/no-unsafe-member-access": "error",
|
||||||
|
"@typescript-eslint/no-unsafe-call": "error",
|
||||||
|
"@typescript-eslint/no-unsafe-return": "error"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## TSConfig Strict Mode — REQUIRED
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"strict": true,
|
||||||
|
"noImplicitAny": true,
|
||||||
|
"strictNullChecks": true,
|
||||||
|
"strictFunctionTypes": true,
|
||||||
|
"strictBindCallApply": true,
|
||||||
|
"strictPropertyInitialization": true,
|
||||||
|
"noImplicitThis": true,
|
||||||
|
"useUnknownInCatchVariables": true,
|
||||||
|
"noUncheckedIndexedAccess": true,
|
||||||
|
"noImplicitReturns": true,
|
||||||
|
"noFallthroughCasesInSwitch": true,
|
||||||
|
"noImplicitOverride": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Summary: The Type Safety Hierarchy
|
||||||
|
|
||||||
|
From best to worst:
|
||||||
|
|
||||||
|
1. **Explicit specific type** (interface/type) — REQUIRED
|
||||||
|
2. **Generic with constraints** — ACCEPTABLE
|
||||||
|
3. **`unknown` with immediate validation** — ONLY for external data
|
||||||
|
4. **`any`** — FORBIDDEN
|
||||||
|
|
||||||
|
**When in doubt, define an interface.**
|
||||||
205
guides/VAULT-SECRETS.md
Normal file
205
guides/VAULT-SECRETS.md
Normal file
@@ -0,0 +1,205 @@
|
|||||||
|
# Vault Secrets Management Guide
|
||||||
|
|
||||||
|
This guide applies when the project uses HashiCorp Vault for secrets management.
|
||||||
|
|
||||||
|
## Before Starting
|
||||||
|
|
||||||
|
1. Verify Vault access: `vault status`
|
||||||
|
2. Authenticate: `vault login` (method depends on environment)
|
||||||
|
3. Check your permissions for the required paths
|
||||||
|
|
||||||
|
## Canonical Structure
|
||||||
|
|
||||||
|
**ALL Vault secrets MUST follow this structure:**
|
||||||
|
|
||||||
|
```
|
||||||
|
{mount}/{service}/{component}/{secret-name}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Components
|
||||||
|
|
||||||
|
- **mount**: Environment-specific mount point
|
||||||
|
- **service**: The service or application name
|
||||||
|
- **component**: Logical grouping (database, api, oauth, etc.)
|
||||||
|
- **secret-name**: Specific secret identifier
|
||||||
|
|
||||||
|
## Environment Mounts
|
||||||
|
|
||||||
|
| Mount | Environment | Usage |
|
||||||
|
| ----------------- | ----------- | ---------------------- |
|
||||||
|
| `secret-dev/` | Development | Local dev, CI |
|
||||||
|
| `secret-staging/` | Staging | Pre-production testing |
|
||||||
|
| `secret-prod/` | Production | Live systems |
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Database credentials
|
||||||
|
secret-prod/postgres/database/app
|
||||||
|
secret-prod/mysql/database/readonly
|
||||||
|
secret-staging/redis/auth/default
|
||||||
|
|
||||||
|
# API tokens
|
||||||
|
secret-prod/authentik/admin/token
|
||||||
|
secret-prod/stripe/api/live-key
|
||||||
|
secret-dev/sendgrid/api/test-key
|
||||||
|
|
||||||
|
# JWT/Authentication
|
||||||
|
secret-prod/backend-api/jwt/signing-key
|
||||||
|
secret-prod/auth-service/session/secret
|
||||||
|
|
||||||
|
# OAuth providers
|
||||||
|
secret-prod/backend-api/oauth/google
|
||||||
|
secret-prod/backend-api/oauth/github
|
||||||
|
|
||||||
|
# Internal services
|
||||||
|
secret-prod/loki/read-auth/admin
|
||||||
|
secret-prod/grafana/admin/password
|
||||||
|
```
|
||||||
|
|
||||||
|
## Standard Field Names
|
||||||
|
|
||||||
|
Use consistent field names within secrets:
|
||||||
|
|
||||||
|
| Purpose | Fields |
|
||||||
|
| ----------- | ---------------------------- |
|
||||||
|
| Credentials | `username`, `password` |
|
||||||
|
| Tokens | `token` |
|
||||||
|
| OAuth | `client_id`, `client_secret` |
|
||||||
|
| Connection | `url`, `host`, `port` |
|
||||||
|
| Keys | `public_key`, `private_key` |
|
||||||
|
|
||||||
|
### Example Secret Structure
|
||||||
|
|
||||||
|
```json
|
||||||
|
// secret-prod/postgres/database/app
|
||||||
|
{
|
||||||
|
"username": "app_user",
|
||||||
|
"password": "secure-password-here",
|
||||||
|
"host": "db.example.com",
|
||||||
|
"port": "5432",
|
||||||
|
"database": "myapp"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Rules
|
||||||
|
|
||||||
|
1. **DO NOT GUESS** secret paths - Always verify the path exists
|
||||||
|
2. **Use helper scripts** in `scripts/vault/` when available
|
||||||
|
3. **All lowercase, hyphenated** (kebab-case) for all path segments
|
||||||
|
4. **Standard field names** - Use the conventions above
|
||||||
|
5. **No sensitive data in path names** - Path itself should not reveal secrets
|
||||||
|
6. **Environment separation** - Never reference prod secrets from dev
|
||||||
|
|
||||||
|
## Deprecated Paths (DO NOT USE)
|
||||||
|
|
||||||
|
These legacy patterns are deprecated and should be migrated:
|
||||||
|
|
||||||
|
| Deprecated | Migrate To |
|
||||||
|
| ------------------------- | ------------------------------------------- |
|
||||||
|
| `secret/infrastructure/*` | `secret-{env}/{service}/...` |
|
||||||
|
| `secret/oauth/*` | `secret-{env}/{service}/oauth/{provider}` |
|
||||||
|
| `secret/database/*` | `secret-{env}/{service}/database/{user}` |
|
||||||
|
| `secret/credentials/*` | `secret-{env}/{service}/{component}/{name}` |
|
||||||
|
|
||||||
|
## Reading Secrets
|
||||||
|
|
||||||
|
### CLI
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Read a secret
|
||||||
|
vault kv get secret-prod/postgres/database/app
|
||||||
|
|
||||||
|
# Get specific field
|
||||||
|
vault kv get -field=password secret-prod/postgres/database/app
|
||||||
|
|
||||||
|
# JSON output
|
||||||
|
vault kv get -format=json secret-prod/postgres/database/app
|
||||||
|
```
|
||||||
|
|
||||||
|
### Application Code
|
||||||
|
|
||||||
|
**Python (hvac):**
|
||||||
|
|
||||||
|
```python
|
||||||
|
import hvac
|
||||||
|
|
||||||
|
client = hvac.Client(url='https://vault.example.com')
|
||||||
|
secret = client.secrets.kv.v2.read_secret_version(
|
||||||
|
path='postgres/database/app',
|
||||||
|
mount_point='secret-prod'
|
||||||
|
)
|
||||||
|
password = secret['data']['data']['password']
|
||||||
|
```
|
||||||
|
|
||||||
|
**Node.js (node-vault):**
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const vault = require('node-vault')({ endpoint: 'https://vault.example.com' });
|
||||||
|
const secret = await vault.read('secret-prod/data/postgres/database/app');
|
||||||
|
const password = secret.data.data.password;
|
||||||
|
```
|
||||||
|
|
||||||
|
**Go:**
|
||||||
|
|
||||||
|
```go
|
||||||
|
secret, err := client.Logical().Read("secret-prod/data/postgres/database/app")
|
||||||
|
password := secret.Data["data"].(map[string]interface{})["password"].(string)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Writing Secrets
|
||||||
|
|
||||||
|
Only authorized personnel should write secrets. If you need a new secret:
|
||||||
|
|
||||||
|
1. Request through proper channels (ticket, PR to IaC repo)
|
||||||
|
2. Follow the canonical structure
|
||||||
|
3. Document the secret's purpose
|
||||||
|
4. Set appropriate access policies
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Example (requires write permissions)
|
||||||
|
vault kv put secret-dev/myapp/database/app \
|
||||||
|
username="dev_user" \
|
||||||
|
password="dev-password" \
|
||||||
|
host="localhost" \
|
||||||
|
port="5432"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Permission Denied
|
||||||
|
|
||||||
|
```
|
||||||
|
Error: permission denied
|
||||||
|
```
|
||||||
|
|
||||||
|
- Verify your token has read access to the path
|
||||||
|
- Check if you're using the correct mount point
|
||||||
|
- Confirm the secret path exists
|
||||||
|
|
||||||
|
### Secret Not Found
|
||||||
|
|
||||||
|
```
|
||||||
|
Error: no value found at secret-prod/data/service/component/name
|
||||||
|
```
|
||||||
|
|
||||||
|
- Verify the exact path (use `vault kv list` to explore)
|
||||||
|
- Check for typos in service/component names
|
||||||
|
- Confirm you're using the correct environment mount
|
||||||
|
|
||||||
|
### Token Expired
|
||||||
|
|
||||||
|
```
|
||||||
|
Error: token expired
|
||||||
|
```
|
||||||
|
|
||||||
|
- Re-authenticate: `vault login`
|
||||||
|
- Check token TTL: `vault token lookup`
|
||||||
|
|
||||||
|
## Security Best Practices
|
||||||
|
|
||||||
|
1. **Least privilege** - Request only the permissions you need
|
||||||
|
2. **Short-lived tokens** - Use tokens with appropriate TTLs
|
||||||
|
3. **Audit logging** - All access is logged; act accordingly
|
||||||
|
4. **No local copies** - Don't store secrets in files or env vars long-term
|
||||||
|
5. **Rotate on compromise** - Immediately rotate any exposed secrets
|
||||||
6
mosaic.config.json
Normal file
6
mosaic.config.json
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
{
|
||||||
|
"tier": "local",
|
||||||
|
"storage": { "type": "sqlite", "path": ".mosaic/data.db" },
|
||||||
|
"queue": { "type": "local", "dataDir": ".mosaic/queue" },
|
||||||
|
"memory": { "type": "keyword" }
|
||||||
|
}
|
||||||
@@ -23,5 +23,10 @@
|
|||||||
"turbo": "^2.0.0",
|
"turbo": "^2.0.0",
|
||||||
"typescript": "^5.8.0",
|
"typescript": "^5.8.0",
|
||||||
"vitest": "^2.0.0"
|
"vitest": "^2.0.0"
|
||||||
|
},
|
||||||
|
"pnpm": {
|
||||||
|
"onlyBuiltDependencies": [
|
||||||
|
"better-sqlite3"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,11 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/agent",
|
"name": "@mosaic/agent",
|
||||||
"version": "0.0.0",
|
"version": "0.0.2",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||||
|
"directory": "packages/agent"
|
||||||
|
},
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
@@ -21,5 +26,12 @@
|
|||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"typescript": "^5.8.0",
|
"typescript": "^5.8.0",
|
||||||
"vitest": "^2.0.0"
|
"vitest": "^2.0.0"
|
||||||
}
|
},
|
||||||
|
"publishConfig": {
|
||||||
|
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||||
|
"access": "public"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,11 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/auth",
|
"name": "@mosaic/auth",
|
||||||
"version": "0.0.0",
|
"version": "0.0.2",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||||
|
"directory": "packages/auth"
|
||||||
|
},
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
@@ -25,5 +30,12 @@
|
|||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@mosaic/db": "workspace:^",
|
"@mosaic/db": "workspace:^",
|
||||||
"better-auth": "^1.5.5"
|
"better-auth": "^1.5.5"
|
||||||
}
|
},
|
||||||
|
"publishConfig": {
|
||||||
|
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||||
|
"access": "public"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,11 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/brain",
|
"name": "@mosaic/brain",
|
||||||
"version": "0.0.0",
|
"version": "0.0.2",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||||
|
"directory": "packages/brain"
|
||||||
|
},
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
@@ -22,5 +27,12 @@
|
|||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"typescript": "^5.8.0",
|
"typescript": "^5.8.0",
|
||||||
"vitest": "^2.0.0"
|
"vitest": "^2.0.0"
|
||||||
}
|
},
|
||||||
|
"publishConfig": {
|
||||||
|
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||||
|
"access": "public"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,11 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/cli",
|
"name": "@mosaic/cli",
|
||||||
"version": "0.0.0",
|
"version": "0.0.10",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||||
|
"directory": "packages/cli"
|
||||||
|
},
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
@@ -14,7 +19,7 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "tsc",
|
"build": "tsc -p tsconfig.build.json",
|
||||||
"dev": "tsx src/cli.ts",
|
"dev": "tsx src/cli.ts",
|
||||||
"lint": "eslint src",
|
"lint": "eslint src",
|
||||||
"typecheck": "tsc --noEmit",
|
"typecheck": "tsc --noEmit",
|
||||||
@@ -22,6 +27,7 @@
|
|||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@clack/prompts": "^0.9.0",
|
"@clack/prompts": "^0.9.0",
|
||||||
|
"@mosaic/config": "workspace:^",
|
||||||
"@mosaic/mosaic": "workspace:^",
|
"@mosaic/mosaic": "workspace:^",
|
||||||
"@mosaic/prdy": "workspace:^",
|
"@mosaic/prdy": "workspace:^",
|
||||||
"@mosaic/quality-rails": "workspace:^",
|
"@mosaic/quality-rails": "workspace:^",
|
||||||
@@ -39,5 +45,12 @@
|
|||||||
"tsx": "^4.0.0",
|
"tsx": "^4.0.0",
|
||||||
"typescript": "^5.8.0",
|
"typescript": "^5.8.0",
|
||||||
"vitest": "^2.0.0"
|
"vitest": "^2.0.0"
|
||||||
}
|
},
|
||||||
|
"publishConfig": {
|
||||||
|
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||||
|
"access": "public"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,18 +2,32 @@
|
|||||||
|
|
||||||
import { createRequire } from 'module';
|
import { createRequire } from 'module';
|
||||||
import { Command } from 'commander';
|
import { Command } from 'commander';
|
||||||
import { createQualityRailsCli } from '@mosaic/quality-rails';
|
import { registerQualityRails } from '@mosaic/quality-rails';
|
||||||
import { registerAgentCommand } from './commands/agent.js';
|
import { registerAgentCommand } from './commands/agent.js';
|
||||||
import { registerMissionCommand } from './commands/mission.js';
|
import { registerMissionCommand } from './commands/mission.js';
|
||||||
import { registerPrdyCommand } from './commands/prdy.js';
|
// prdy is registered via launch.ts
|
||||||
|
import { registerLaunchCommands } from './commands/launch.js';
|
||||||
|
import { registerGatewayCommand } from './commands/gateway.js';
|
||||||
|
|
||||||
const _require = createRequire(import.meta.url);
|
const _require = createRequire(import.meta.url);
|
||||||
const CLI_VERSION: string = (_require('../package.json') as { version: string }).version;
|
const CLI_VERSION: string = (_require('../package.json') as { version: string }).version;
|
||||||
|
|
||||||
|
// Fire-and-forget update check at startup (non-blocking, cached 1h)
|
||||||
|
try {
|
||||||
|
const { backgroundUpdateCheck } = await import('@mosaic/mosaic');
|
||||||
|
backgroundUpdateCheck();
|
||||||
|
} catch {
|
||||||
|
// Silently ignore — update check is best-effort
|
||||||
|
}
|
||||||
|
|
||||||
const program = new Command();
|
const program = new Command();
|
||||||
|
|
||||||
program.name('mosaic').description('Mosaic Stack CLI').version(CLI_VERSION);
|
program.name('mosaic').description('Mosaic Stack CLI').version(CLI_VERSION);
|
||||||
|
|
||||||
|
// ─── runtime launchers + framework commands ────────────────────────────
|
||||||
|
|
||||||
|
registerLaunchCommands(program);
|
||||||
|
|
||||||
// ─── login ──────────────────────────────────────────────────────────────
|
// ─── login ──────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
program
|
program
|
||||||
@@ -277,6 +291,10 @@ sessionsCmd
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// ─── gateway ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
registerGatewayCommand(program);
|
||||||
|
|
||||||
// ─── agent ─────────────────────────────────────────────────────────────
|
// ─── agent ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
registerAgentCommand(program);
|
registerAgentCommand(program);
|
||||||
@@ -285,17 +303,57 @@ registerAgentCommand(program);
|
|||||||
|
|
||||||
registerMissionCommand(program);
|
registerMissionCommand(program);
|
||||||
|
|
||||||
// ─── prdy ──────────────────────────────────────────────────────────────
|
|
||||||
|
|
||||||
registerPrdyCommand(program);
|
|
||||||
|
|
||||||
// ─── quality-rails ──────────────────────────────────────────────────────
|
// ─── quality-rails ──────────────────────────────────────────────────────
|
||||||
|
|
||||||
const qrWrapper = createQualityRailsCli();
|
registerQualityRails(program);
|
||||||
const qrCmd = qrWrapper.commands.find((c) => c.name() === 'quality-rails');
|
|
||||||
if (qrCmd !== undefined) {
|
// ─── update ─────────────────────────────────────────────────────────────
|
||||||
program.addCommand(qrCmd as unknown as Command);
|
|
||||||
}
|
program
|
||||||
|
.command('update')
|
||||||
|
.description('Check for and install Mosaic CLI updates')
|
||||||
|
.option('--check', 'Check only, do not install')
|
||||||
|
.action(async (opts: { check?: boolean }) => {
|
||||||
|
const { checkForUpdate, formatUpdateNotice } = await import('@mosaic/mosaic');
|
||||||
|
const { execSync } = await import('node:child_process');
|
||||||
|
|
||||||
|
console.log('Checking for updates…');
|
||||||
|
const result = checkForUpdate({ skipCache: true });
|
||||||
|
|
||||||
|
if (!result.latest) {
|
||||||
|
console.error('Could not reach the Mosaic registry.');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` Installed: ${result.current || '(none)'}`);
|
||||||
|
console.log(` Latest: ${result.latest}`);
|
||||||
|
|
||||||
|
if (!result.updateAvailable) {
|
||||||
|
console.log('\n✔ Up to date.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const notice = formatUpdateNotice(result);
|
||||||
|
if (notice) console.log(notice);
|
||||||
|
|
||||||
|
if (opts.check) {
|
||||||
|
process.exit(2); // Signal to callers that an update exists
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Installing update…');
|
||||||
|
try {
|
||||||
|
// Relies on @mosaic:registry in ~/.npmrc — do NOT pass --registry
|
||||||
|
// globally or non-@mosaic deps will 404 against the Gitea registry.
|
||||||
|
execSync('npm install -g @mosaic/cli@latest', {
|
||||||
|
stdio: 'inherit',
|
||||||
|
timeout: 60_000,
|
||||||
|
});
|
||||||
|
console.log('\n✔ Updated successfully.');
|
||||||
|
} catch {
|
||||||
|
console.error('\nUpdate failed. Try manually: bash tools/install.sh');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
// ─── wizard ─────────────────────────────────────────────────────────────
|
// ─── wizard ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
|||||||
152
packages/cli/src/commands/gateway.ts
Normal file
152
packages/cli/src/commands/gateway.ts
Normal file
@@ -0,0 +1,152 @@
|
|||||||
|
import type { Command } from 'commander';
|
||||||
|
import {
|
||||||
|
getDaemonPid,
|
||||||
|
readMeta,
|
||||||
|
startDaemon,
|
||||||
|
stopDaemon,
|
||||||
|
waitForHealth,
|
||||||
|
} from './gateway/daemon.js';
|
||||||
|
|
||||||
|
interface GatewayParentOpts {
|
||||||
|
host: string;
|
||||||
|
port: string;
|
||||||
|
token?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
function resolveOpts(raw: GatewayParentOpts): { host: string; port: number; token?: string } {
|
||||||
|
const meta = readMeta();
|
||||||
|
return {
|
||||||
|
host: raw.host ?? meta?.host ?? 'localhost',
|
||||||
|
port: parseInt(raw.port, 10) || meta?.port || 4000,
|
||||||
|
token: raw.token ?? meta?.adminToken,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export function registerGatewayCommand(program: Command): void {
|
||||||
|
const gw = program
|
||||||
|
.command('gateway')
|
||||||
|
.description('Manage the Mosaic gateway daemon')
|
||||||
|
.helpOption('--help', 'Display help')
|
||||||
|
.option('-h, --host <host>', 'Gateway host', 'localhost')
|
||||||
|
.option('-p, --port <port>', 'Gateway port', '4000')
|
||||||
|
.option('-t, --token <token>', 'Admin API token')
|
||||||
|
.action(() => {
|
||||||
|
gw.outputHelp();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ─── install ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
gw.command('install')
|
||||||
|
.description('Install and configure the gateway daemon')
|
||||||
|
.option('--skip-install', 'Skip npm package installation (use local build)')
|
||||||
|
.action(async (cmdOpts: { skipInstall?: boolean }) => {
|
||||||
|
const opts = resolveOpts(gw.opts() as GatewayParentOpts);
|
||||||
|
const { runInstall } = await import('./gateway/install.js');
|
||||||
|
await runInstall({ ...opts, skipInstall: cmdOpts.skipInstall });
|
||||||
|
});
|
||||||
|
|
||||||
|
// ─── start ──────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
gw.command('start')
|
||||||
|
.description('Start the gateway daemon')
|
||||||
|
.action(async () => {
|
||||||
|
const opts = resolveOpts(gw.opts() as GatewayParentOpts);
|
||||||
|
try {
|
||||||
|
const pid = startDaemon();
|
||||||
|
console.log(`Gateway started (PID ${pid.toString()})`);
|
||||||
|
console.log('Waiting for health...');
|
||||||
|
const healthy = await waitForHealth(opts.host, opts.port);
|
||||||
|
if (healthy) {
|
||||||
|
console.log(`Gateway ready at http://${opts.host}:${opts.port.toString()}`);
|
||||||
|
} else {
|
||||||
|
console.warn('Gateway started but health check timed out. Check logs.');
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.error(err instanceof Error ? err.message : String(err));
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// ─── stop ───────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
gw.command('stop')
|
||||||
|
.description('Stop the gateway daemon')
|
||||||
|
.action(async () => {
|
||||||
|
try {
|
||||||
|
await stopDaemon();
|
||||||
|
console.log('Gateway stopped.');
|
||||||
|
} catch (err) {
|
||||||
|
console.error(err instanceof Error ? err.message : String(err));
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// ─── restart ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
gw.command('restart')
|
||||||
|
.description('Restart the gateway daemon')
|
||||||
|
.action(async () => {
|
||||||
|
const opts = resolveOpts(gw.opts() as GatewayParentOpts);
|
||||||
|
const pid = getDaemonPid();
|
||||||
|
if (pid !== null) {
|
||||||
|
console.log('Stopping gateway...');
|
||||||
|
await stopDaemon();
|
||||||
|
}
|
||||||
|
console.log('Starting gateway...');
|
||||||
|
try {
|
||||||
|
const newPid = startDaemon();
|
||||||
|
console.log(`Gateway started (PID ${newPid.toString()})`);
|
||||||
|
const healthy = await waitForHealth(opts.host, opts.port);
|
||||||
|
if (healthy) {
|
||||||
|
console.log(`Gateway ready at http://${opts.host}:${opts.port.toString()}`);
|
||||||
|
} else {
|
||||||
|
console.warn('Gateway started but health check timed out. Check logs.');
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.error(err instanceof Error ? err.message : String(err));
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// ─── status ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
gw.command('status')
|
||||||
|
.description('Show gateway daemon status and health')
|
||||||
|
.action(async () => {
|
||||||
|
const opts = resolveOpts(gw.opts() as GatewayParentOpts);
|
||||||
|
const { runStatus } = await import('./gateway/status.js');
|
||||||
|
await runStatus(opts);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ─── config ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
gw.command('config')
|
||||||
|
.description('View or modify gateway configuration')
|
||||||
|
.option('--set <KEY=VALUE>', 'Set a configuration value')
|
||||||
|
.option('--unset <KEY>', 'Remove a configuration key')
|
||||||
|
.option('--edit', 'Open config in $EDITOR')
|
||||||
|
.action(async (cmdOpts: { set?: string; unset?: string; edit?: boolean }) => {
|
||||||
|
const { runConfig } = await import('./gateway/config.js');
|
||||||
|
await runConfig(cmdOpts);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ─── logs ───────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
gw.command('logs')
|
||||||
|
.description('View gateway daemon logs')
|
||||||
|
.option('-f, --follow', 'Follow log output')
|
||||||
|
.option('-n, --lines <count>', 'Number of lines to show', '50')
|
||||||
|
.action(async (cmdOpts: { follow?: boolean; lines?: string }) => {
|
||||||
|
const { runLogs } = await import('./gateway/logs.js');
|
||||||
|
runLogs({ follow: cmdOpts.follow, lines: parseInt(cmdOpts.lines ?? '50', 10) });
|
||||||
|
});
|
||||||
|
|
||||||
|
// ─── uninstall ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
gw.command('uninstall')
|
||||||
|
.description('Uninstall the gateway daemon and optionally remove data')
|
||||||
|
.action(async () => {
|
||||||
|
const { runUninstall } = await import('./gateway/uninstall.js');
|
||||||
|
await runUninstall();
|
||||||
|
});
|
||||||
|
}
|
||||||
143
packages/cli/src/commands/gateway/config.ts
Normal file
143
packages/cli/src/commands/gateway/config.ts
Normal file
@@ -0,0 +1,143 @@
|
|||||||
|
import { existsSync, readFileSync, writeFileSync } from 'node:fs';
|
||||||
|
import { execSync } from 'node:child_process';
|
||||||
|
import { ENV_FILE, getDaemonPid, readMeta, META_FILE, ensureDirs } from './daemon.js';
|
||||||
|
|
||||||
|
// Keys that should be masked in output
|
||||||
|
const SECRET_KEYS = new Set([
|
||||||
|
'BETTER_AUTH_SECRET',
|
||||||
|
'ANTHROPIC_API_KEY',
|
||||||
|
'OPENAI_API_KEY',
|
||||||
|
'ZAI_API_KEY',
|
||||||
|
'OPENROUTER_API_KEY',
|
||||||
|
'DISCORD_BOT_TOKEN',
|
||||||
|
'TELEGRAM_BOT_TOKEN',
|
||||||
|
]);
|
||||||
|
|
||||||
|
function maskValue(key: string, value: string): string {
|
||||||
|
if (SECRET_KEYS.has(key) && value.length > 8) {
|
||||||
|
return value.slice(0, 4) + '…' + value.slice(-4);
|
||||||
|
}
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseEnvFile(): Map<string, string> {
|
||||||
|
const map = new Map<string, string>();
|
||||||
|
if (!existsSync(ENV_FILE)) return map;
|
||||||
|
|
||||||
|
const lines = readFileSync(ENV_FILE, 'utf-8').split('\n');
|
||||||
|
for (const line of lines) {
|
||||||
|
const trimmed = line.trim();
|
||||||
|
if (!trimmed || trimmed.startsWith('#')) continue;
|
||||||
|
const eqIdx = trimmed.indexOf('=');
|
||||||
|
if (eqIdx === -1) continue;
|
||||||
|
map.set(trimmed.slice(0, eqIdx), trimmed.slice(eqIdx + 1));
|
||||||
|
}
|
||||||
|
return map;
|
||||||
|
}
|
||||||
|
|
||||||
|
function writeEnvFile(entries: Map<string, string>): void {
|
||||||
|
ensureDirs();
|
||||||
|
const lines: string[] = [];
|
||||||
|
for (const [key, value] of entries) {
|
||||||
|
lines.push(`${key}=${value}`);
|
||||||
|
}
|
||||||
|
writeFileSync(ENV_FILE, lines.join('\n') + '\n', { mode: 0o600 });
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ConfigOpts {
|
||||||
|
set?: string;
|
||||||
|
unset?: string;
|
||||||
|
edit?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function runConfig(opts: ConfigOpts): Promise<void> {
|
||||||
|
// Set a value
|
||||||
|
if (opts.set) {
|
||||||
|
const eqIdx = opts.set.indexOf('=');
|
||||||
|
if (eqIdx === -1) {
|
||||||
|
console.error('Usage: mosaic gateway config --set KEY=VALUE');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
const key = opts.set.slice(0, eqIdx);
|
||||||
|
const value = opts.set.slice(eqIdx + 1);
|
||||||
|
const entries = parseEnvFile();
|
||||||
|
entries.set(key, value);
|
||||||
|
writeEnvFile(entries);
|
||||||
|
console.log(`Set ${key}=${maskValue(key, value)}`);
|
||||||
|
promptRestart();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unset a value
|
||||||
|
if (opts.unset) {
|
||||||
|
const entries = parseEnvFile();
|
||||||
|
if (!entries.has(opts.unset)) {
|
||||||
|
console.error(`Key not found: ${opts.unset}`);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
entries.delete(opts.unset);
|
||||||
|
writeEnvFile(entries);
|
||||||
|
console.log(`Removed ${opts.unset}`);
|
||||||
|
promptRestart();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open in editor
|
||||||
|
if (opts.edit) {
|
||||||
|
if (!existsSync(ENV_FILE)) {
|
||||||
|
console.error(`No config file found at ${ENV_FILE}`);
|
||||||
|
console.error('Run `mosaic gateway install` first.');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
const editor = process.env['EDITOR'] ?? process.env['VISUAL'] ?? 'vi';
|
||||||
|
try {
|
||||||
|
execSync(`${editor} "${ENV_FILE}"`, { stdio: 'inherit' });
|
||||||
|
promptRestart();
|
||||||
|
} catch {
|
||||||
|
console.error('Editor exited with error.');
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default: show current config
|
||||||
|
showConfig();
|
||||||
|
}
|
||||||
|
|
||||||
|
function showConfig(): void {
|
||||||
|
if (!existsSync(ENV_FILE)) {
|
||||||
|
console.log('No gateway configuration found.');
|
||||||
|
console.log('Run `mosaic gateway install` to set up.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const entries = parseEnvFile();
|
||||||
|
const meta = readMeta();
|
||||||
|
|
||||||
|
console.log('Mosaic Gateway Configuration');
|
||||||
|
console.log('────────────────────────────');
|
||||||
|
console.log(` Config file: ${ENV_FILE}`);
|
||||||
|
console.log(` Meta file: ${META_FILE}`);
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
if (entries.size === 0) {
|
||||||
|
console.log(' (empty)');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const maxKeyLen = Math.max(...[...entries.keys()].map((k) => k.length));
|
||||||
|
for (const [key, value] of entries) {
|
||||||
|
const padding = ' '.repeat(maxKeyLen - key.length);
|
||||||
|
console.log(` ${key}${padding} ${maskValue(key, value)}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (meta?.adminToken) {
|
||||||
|
console.log();
|
||||||
|
console.log(` Admin token: ${maskValue('token', meta.adminToken)}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function promptRestart(): void {
|
||||||
|
if (getDaemonPid() !== null) {
|
||||||
|
console.log('\nGateway is running — restart to apply changes: mosaic gateway restart');
|
||||||
|
}
|
||||||
|
}
|
||||||
253
packages/cli/src/commands/gateway/daemon.ts
Normal file
253
packages/cli/src/commands/gateway/daemon.ts
Normal file
@@ -0,0 +1,253 @@
|
|||||||
|
import { spawn, execSync } from 'node:child_process';
|
||||||
|
import {
|
||||||
|
existsSync,
|
||||||
|
mkdirSync,
|
||||||
|
readFileSync,
|
||||||
|
writeFileSync,
|
||||||
|
unlinkSync,
|
||||||
|
openSync,
|
||||||
|
constants,
|
||||||
|
} from 'node:fs';
|
||||||
|
import { join, resolve } from 'node:path';
|
||||||
|
import { homedir } from 'node:os';
|
||||||
|
import { createRequire } from 'node:module';
|
||||||
|
|
||||||
|
// ─── Paths ──────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
export const GATEWAY_HOME = resolve(
|
||||||
|
process.env['MOSAIC_GATEWAY_HOME'] ?? join(homedir(), '.config', 'mosaic', 'gateway'),
|
||||||
|
);
|
||||||
|
export const PID_FILE = join(GATEWAY_HOME, 'daemon.pid');
|
||||||
|
export const LOG_DIR = join(GATEWAY_HOME, 'logs');
|
||||||
|
export const LOG_FILE = join(LOG_DIR, 'gateway.log');
|
||||||
|
export const ENV_FILE = join(GATEWAY_HOME, '.env');
|
||||||
|
export const META_FILE = join(GATEWAY_HOME, 'meta.json');
|
||||||
|
|
||||||
|
// ─── Meta ───────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
export interface GatewayMeta {
|
||||||
|
version: string;
|
||||||
|
installedAt: string;
|
||||||
|
entryPoint: string;
|
||||||
|
adminToken?: string;
|
||||||
|
host: string;
|
||||||
|
port: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function readMeta(): GatewayMeta | null {
|
||||||
|
if (!existsSync(META_FILE)) return null;
|
||||||
|
try {
|
||||||
|
return JSON.parse(readFileSync(META_FILE, 'utf-8')) as GatewayMeta;
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function writeMeta(meta: GatewayMeta): void {
|
||||||
|
ensureDirs();
|
||||||
|
writeFileSync(META_FILE, JSON.stringify(meta, null, 2), { mode: 0o600 });
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Directories ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
export function ensureDirs(): void {
|
||||||
|
mkdirSync(GATEWAY_HOME, { recursive: true, mode: 0o700 });
|
||||||
|
mkdirSync(LOG_DIR, { recursive: true, mode: 0o700 });
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── PID management ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
export function readPid(): number | null {
|
||||||
|
if (!existsSync(PID_FILE)) return null;
|
||||||
|
try {
|
||||||
|
const pid = parseInt(readFileSync(PID_FILE, 'utf-8').trim(), 10);
|
||||||
|
return Number.isNaN(pid) ? null : pid;
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isRunning(pid: number): boolean {
|
||||||
|
try {
|
||||||
|
process.kill(pid, 0);
|
||||||
|
return true;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getDaemonPid(): number | null {
|
||||||
|
const pid = readPid();
|
||||||
|
if (pid === null) return null;
|
||||||
|
return isRunning(pid) ? pid : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Entry point resolution ─────────────────────────────────────────────────
|
||||||
|
|
||||||
|
export function resolveGatewayEntry(): string {
|
||||||
|
// Check meta.json for custom entry point
|
||||||
|
const meta = readMeta();
|
||||||
|
if (meta?.entryPoint && existsSync(meta.entryPoint)) {
|
||||||
|
return meta.entryPoint;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to resolve from globally installed @mosaicstack/gateway
|
||||||
|
try {
|
||||||
|
const req = createRequire(import.meta.url);
|
||||||
|
const pkgPath = req.resolve('@mosaicstack/gateway/package.json');
|
||||||
|
const mainEntry = join(resolve(pkgPath, '..'), 'dist', 'main.js');
|
||||||
|
if (existsSync(mainEntry)) return mainEntry;
|
||||||
|
} catch {
|
||||||
|
// Not installed globally via @mosaicstack
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try @mosaic/gateway (workspace / dev)
|
||||||
|
try {
|
||||||
|
const req = createRequire(import.meta.url);
|
||||||
|
const pkgPath = req.resolve('@mosaic/gateway/package.json');
|
||||||
|
const mainEntry = join(resolve(pkgPath, '..'), 'dist', 'main.js');
|
||||||
|
if (existsSync(mainEntry)) return mainEntry;
|
||||||
|
} catch {
|
||||||
|
// Not available
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error('Cannot find gateway entry point. Run `mosaic gateway install` first.');
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Start / Stop / Health ──────────────────────────────────────────────────
|
||||||
|
|
||||||
|
export function startDaemon(): number {
|
||||||
|
const running = getDaemonPid();
|
||||||
|
if (running !== null) {
|
||||||
|
throw new Error(`Gateway is already running (PID ${running.toString()})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
ensureDirs();
|
||||||
|
const entryPoint = resolveGatewayEntry();
|
||||||
|
|
||||||
|
// Load env vars from gateway .env
|
||||||
|
const env: Record<string, string> = { ...process.env } as Record<string, string>;
|
||||||
|
if (existsSync(ENV_FILE)) {
|
||||||
|
for (const line of readFileSync(ENV_FILE, 'utf-8').split('\n')) {
|
||||||
|
const trimmed = line.trim();
|
||||||
|
if (!trimmed || trimmed.startsWith('#')) continue;
|
||||||
|
const eqIdx = trimmed.indexOf('=');
|
||||||
|
if (eqIdx > 0) env[trimmed.slice(0, eqIdx)] = trimmed.slice(eqIdx + 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const logFd = openSync(LOG_FILE, constants.O_WRONLY | constants.O_CREAT | constants.O_APPEND);
|
||||||
|
|
||||||
|
const child = spawn('node', [entryPoint], {
|
||||||
|
detached: true,
|
||||||
|
stdio: ['ignore', logFd, logFd],
|
||||||
|
env,
|
||||||
|
cwd: GATEWAY_HOME,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!child.pid) {
|
||||||
|
throw new Error('Failed to spawn gateway process');
|
||||||
|
}
|
||||||
|
|
||||||
|
writeFileSync(PID_FILE, child.pid.toString(), { mode: 0o600 });
|
||||||
|
child.unref();
|
||||||
|
|
||||||
|
return child.pid;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function stopDaemon(timeoutMs = 10_000): Promise<void> {
|
||||||
|
const pid = getDaemonPid();
|
||||||
|
if (pid === null) {
|
||||||
|
throw new Error('Gateway is not running');
|
||||||
|
}
|
||||||
|
|
||||||
|
process.kill(pid, 'SIGTERM');
|
||||||
|
|
||||||
|
// Poll for exit
|
||||||
|
const start = Date.now();
|
||||||
|
while (Date.now() - start < timeoutMs) {
|
||||||
|
if (!isRunning(pid)) {
|
||||||
|
cleanPidFile();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
await sleep(250);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Force kill
|
||||||
|
try {
|
||||||
|
process.kill(pid, 'SIGKILL');
|
||||||
|
} catch {
|
||||||
|
// Already dead
|
||||||
|
}
|
||||||
|
cleanPidFile();
|
||||||
|
}
|
||||||
|
|
||||||
|
function cleanPidFile(): void {
|
||||||
|
try {
|
||||||
|
unlinkSync(PID_FILE);
|
||||||
|
} catch {
|
||||||
|
// Ignore
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function waitForHealth(
|
||||||
|
host: string,
|
||||||
|
port: number,
|
||||||
|
timeoutMs = 30_000,
|
||||||
|
): Promise<boolean> {
|
||||||
|
const start = Date.now();
|
||||||
|
let delay = 500;
|
||||||
|
|
||||||
|
while (Date.now() - start < timeoutMs) {
|
||||||
|
try {
|
||||||
|
const res = await fetch(`http://${host}:${port.toString()}/health`);
|
||||||
|
if (res.ok) return true;
|
||||||
|
} catch {
|
||||||
|
// Not ready yet
|
||||||
|
}
|
||||||
|
await sleep(delay);
|
||||||
|
delay = Math.min(delay * 1.5, 3000);
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
function sleep(ms: number): Promise<void> {
|
||||||
|
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── npm install helper ─────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
export function installGatewayPackage(): void {
|
||||||
|
console.log('Installing @mosaicstack/gateway...');
|
||||||
|
execSync('npm install -g @mosaicstack/gateway@latest', {
|
||||||
|
stdio: 'inherit',
|
||||||
|
timeout: 120_000,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export function uninstallGatewayPackage(): void {
|
||||||
|
try {
|
||||||
|
execSync('npm uninstall -g @mosaicstack/gateway', {
|
||||||
|
stdio: 'inherit',
|
||||||
|
timeout: 60_000,
|
||||||
|
});
|
||||||
|
} catch {
|
||||||
|
console.warn('Warning: npm uninstall may not have completed cleanly.');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getInstalledGatewayVersion(): string | null {
|
||||||
|
try {
|
||||||
|
const output = execSync('npm ls -g @mosaicstack/gateway --json --depth=0', {
|
||||||
|
encoding: 'utf-8',
|
||||||
|
timeout: 15_000,
|
||||||
|
stdio: ['pipe', 'pipe', 'pipe'],
|
||||||
|
});
|
||||||
|
const data = JSON.parse(output) as {
|
||||||
|
dependencies?: { '@mosaicstack/gateway'?: { version?: string } };
|
||||||
|
};
|
||||||
|
return data.dependencies?.['@mosaicstack/gateway']?.version ?? null;
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
259
packages/cli/src/commands/gateway/install.ts
Normal file
259
packages/cli/src/commands/gateway/install.ts
Normal file
@@ -0,0 +1,259 @@
|
|||||||
|
import { randomBytes } from 'node:crypto';
|
||||||
|
import { writeFileSync } from 'node:fs';
|
||||||
|
import { join } from 'node:path';
|
||||||
|
import { createInterface } from 'node:readline';
|
||||||
|
import type { GatewayMeta } from './daemon.js';
|
||||||
|
import {
|
||||||
|
ENV_FILE,
|
||||||
|
GATEWAY_HOME,
|
||||||
|
ensureDirs,
|
||||||
|
installGatewayPackage,
|
||||||
|
readMeta,
|
||||||
|
resolveGatewayEntry,
|
||||||
|
startDaemon,
|
||||||
|
waitForHealth,
|
||||||
|
writeMeta,
|
||||||
|
getInstalledGatewayVersion,
|
||||||
|
} from './daemon.js';
|
||||||
|
|
||||||
|
/** Options accepted by the `mosaic gateway install` command. */
interface InstallOpts {
  // Host the gateway binds to / is reachable on.
  host: string;
  // TCP port; 4000 is treated as "default" by doInstall, which then prompts.
  port: number;
  // When true, skip the global npm install step (package assumed present).
  skipInstall?: boolean;
}
|
||||||
|
|
||||||
|
function prompt(rl: ReturnType<typeof createInterface>, question: string): Promise<string> {
|
||||||
|
return new Promise((resolve) => rl.question(question, resolve));
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function runInstall(opts: InstallOpts): Promise<void> {
|
||||||
|
const rl = createInterface({ input: process.stdin, output: process.stdout });
|
||||||
|
try {
|
||||||
|
await doInstall(rl, opts);
|
||||||
|
} finally {
|
||||||
|
rl.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function doInstall(rl: ReturnType<typeof createInterface>, opts: InstallOpts): Promise<void> {
|
||||||
|
// Check existing installation
|
||||||
|
const existing = readMeta();
|
||||||
|
if (existing) {
|
||||||
|
const answer = await prompt(
|
||||||
|
rl,
|
||||||
|
`Gateway already installed (v${existing.version}). Reinstall? [y/N] `,
|
||||||
|
);
|
||||||
|
if (answer.toLowerCase() !== 'y') {
|
||||||
|
console.log('Aborted.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 1: Install npm package
|
||||||
|
if (!opts.skipInstall) {
|
||||||
|
installGatewayPackage();
|
||||||
|
}
|
||||||
|
|
||||||
|
ensureDirs();
|
||||||
|
|
||||||
|
// Step 2: Collect configuration
|
||||||
|
console.log('\n─── Gateway Configuration ───\n');
|
||||||
|
|
||||||
|
// Tier selection
|
||||||
|
console.log('Storage tier:');
|
||||||
|
console.log(' 1. Local (embedded database, no dependencies)');
|
||||||
|
console.log(' 2. Team (PostgreSQL + Valkey required)');
|
||||||
|
const tierAnswer = (await prompt(rl, 'Select [1]: ')).trim() || '1';
|
||||||
|
const tier = tierAnswer === '2' ? 'team' : 'local';
|
||||||
|
|
||||||
|
const port =
|
||||||
|
opts.port !== 4000
|
||||||
|
? opts.port
|
||||||
|
: parseInt(
|
||||||
|
(await prompt(rl, `Gateway port [${opts.port.toString()}]: `)) || opts.port.toString(),
|
||||||
|
10,
|
||||||
|
);
|
||||||
|
|
||||||
|
let databaseUrl: string | undefined;
|
||||||
|
let valkeyUrl: string | undefined;
|
||||||
|
|
||||||
|
if (tier === 'team') {
|
||||||
|
databaseUrl =
|
||||||
|
(await prompt(rl, 'DATABASE_URL [postgresql://mosaic:mosaic@localhost:5433/mosaic]: ')) ||
|
||||||
|
'postgresql://mosaic:mosaic@localhost:5433/mosaic';
|
||||||
|
|
||||||
|
valkeyUrl =
|
||||||
|
(await prompt(rl, 'VALKEY_URL [redis://localhost:6380]: ')) || 'redis://localhost:6380';
|
||||||
|
}
|
||||||
|
|
||||||
|
const anthropicKey = await prompt(rl, 'ANTHROPIC_API_KEY (optional, press Enter to skip): ');
|
||||||
|
|
||||||
|
const corsOrigin =
|
||||||
|
(await prompt(rl, 'CORS origin [http://localhost:3000]: ')) || 'http://localhost:3000';
|
||||||
|
|
||||||
|
// Generate auth secret
|
||||||
|
const authSecret = randomBytes(32).toString('hex');
|
||||||
|
|
||||||
|
// Step 3: Write .env
|
||||||
|
const envLines = [
|
||||||
|
`GATEWAY_PORT=${port.toString()}`,
|
||||||
|
`BETTER_AUTH_SECRET=${authSecret}`,
|
||||||
|
`BETTER_AUTH_URL=http://${opts.host}:${port.toString()}`,
|
||||||
|
`GATEWAY_CORS_ORIGIN=${corsOrigin}`,
|
||||||
|
`OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318`,
|
||||||
|
`OTEL_SERVICE_NAME=mosaic-gateway`,
|
||||||
|
];
|
||||||
|
|
||||||
|
if (tier === 'team' && databaseUrl && valkeyUrl) {
|
||||||
|
envLines.push(`DATABASE_URL=${databaseUrl}`);
|
||||||
|
envLines.push(`VALKEY_URL=${valkeyUrl}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (anthropicKey) {
|
||||||
|
envLines.push(`ANTHROPIC_API_KEY=${anthropicKey}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
writeFileSync(ENV_FILE, envLines.join('\n') + '\n', { mode: 0o600 });
|
||||||
|
console.log(`\nConfig written to ${ENV_FILE}`);
|
||||||
|
|
||||||
|
// Step 3b: Write mosaic.config.json
|
||||||
|
const mosaicConfig =
|
||||||
|
tier === 'local'
|
||||||
|
? {
|
||||||
|
tier: 'local',
|
||||||
|
storage: { type: 'sqlite', path: join(GATEWAY_HOME, 'data.db') },
|
||||||
|
queue: { type: 'local', dataDir: join(GATEWAY_HOME, 'queue') },
|
||||||
|
memory: { type: 'keyword' },
|
||||||
|
}
|
||||||
|
: {
|
||||||
|
tier: 'team',
|
||||||
|
storage: { type: 'postgres', url: databaseUrl },
|
||||||
|
queue: { type: 'bullmq', url: valkeyUrl },
|
||||||
|
memory: { type: 'pgvector' },
|
||||||
|
};
|
||||||
|
|
||||||
|
const configFile = join(GATEWAY_HOME, 'mosaic.config.json');
|
||||||
|
writeFileSync(configFile, JSON.stringify(mosaicConfig, null, 2) + '\n', { mode: 0o600 });
|
||||||
|
console.log(`Config written to ${configFile}`);
|
||||||
|
|
||||||
|
// Step 4: Write meta.json
|
||||||
|
let entryPoint: string;
|
||||||
|
try {
|
||||||
|
entryPoint = resolveGatewayEntry();
|
||||||
|
} catch {
|
||||||
|
console.error('Error: Gateway package not found after install.');
|
||||||
|
console.error('Check that @mosaicstack/gateway installed correctly.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const version = getInstalledGatewayVersion() ?? 'unknown';
|
||||||
|
|
||||||
|
const meta = {
|
||||||
|
version,
|
||||||
|
installedAt: new Date().toISOString(),
|
||||||
|
entryPoint,
|
||||||
|
host: opts.host,
|
||||||
|
port,
|
||||||
|
};
|
||||||
|
writeMeta(meta);
|
||||||
|
|
||||||
|
// Step 5: Start the daemon
|
||||||
|
console.log('\nStarting gateway daemon...');
|
||||||
|
try {
|
||||||
|
const pid = startDaemon();
|
||||||
|
console.log(`Gateway started (PID ${pid.toString()})`);
|
||||||
|
} catch (err) {
|
||||||
|
console.error(`Failed to start: ${err instanceof Error ? err.message : String(err)}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 6: Wait for health
|
||||||
|
console.log('Waiting for gateway to become healthy...');
|
||||||
|
const healthy = await waitForHealth(opts.host, port, 30_000);
|
||||||
|
if (!healthy) {
|
||||||
|
console.error('Gateway did not become healthy within 30 seconds.');
|
||||||
|
console.error(`Check logs: mosaic gateway logs`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
console.log('Gateway is healthy.\n');
|
||||||
|
|
||||||
|
// Step 7: Bootstrap — first user setup
|
||||||
|
await bootstrapFirstUser(rl, opts.host, port, meta);
|
||||||
|
|
||||||
|
console.log('\n─── Installation Complete ───');
|
||||||
|
console.log(` Endpoint: http://${opts.host}:${port.toString()}`);
|
||||||
|
console.log(` Config: ${GATEWAY_HOME}`);
|
||||||
|
console.log(` Logs: mosaic gateway logs`);
|
||||||
|
console.log(` Status: mosaic gateway status`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Interactive first-run setup: if the gateway reports that no admin user
 * exists yet, prompts for name/email/password, creates the admin through
 * the bootstrap endpoint, and persists the returned API token into meta.
 * All failures are reported and swallowed — installation continues even
 * when bootstrap cannot complete.
 *
 * NOTE(review): the password is read via plain readline and therefore
 * echoed to the terminal — confirm whether masked input is wanted.
 */
async function bootstrapFirstUser(
  rl: ReturnType<typeof createInterface>,
  host: string,
  port: number,
  // Mutated in place: adminToken is added once bootstrap succeeds.
  meta: Omit<GatewayMeta, 'adminToken'> & { adminToken?: string },
): Promise<void> {
  const baseUrl = `http://${host}:${port.toString()}`;

  // Only proceed when the gateway itself says setup is still needed.
  try {
    const statusRes = await fetch(`${baseUrl}/api/bootstrap/status`);
    if (!statusRes.ok) return;

    const status = (await statusRes.json()) as { needsSetup: boolean };
    if (!status.needsSetup) {
      console.log('Admin user already exists — skipping setup.');
      return;
    }
  } catch {
    console.warn('Could not check bootstrap status — skipping first user setup.');
    return;
  }

  console.log('─── Admin User Setup ───\n');

  const name = (await prompt(rl, 'Admin name: ')).trim();
  if (!name) {
    console.error('Name is required.');
    return;
  }

  const email = (await prompt(rl, 'Admin email: ')).trim();
  if (!email) {
    console.error('Email is required.');
    return;
  }

  const password = (await prompt(rl, 'Admin password (min 8 chars): ')).trim();
  if (password.length < 8) {
    console.error('Password must be at least 8 characters.');
    return;
  }

  try {
    const res = await fetch(`${baseUrl}/api/bootstrap/setup`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ name, email, password }),
    });

    if (!res.ok) {
      // Include the response body (best-effort) for diagnosability.
      const body = await res.text().catch(() => '');
      console.error(`Bootstrap failed (${res.status.toString()}): ${body}`);
      return;
    }

    const result = (await res.json()) as {
      user: { id: string; email: string };
      token: { plaintext: string };
    };

    // Save admin token to meta so later commands (e.g. status) can use it.
    meta.adminToken = result.token.plaintext;
    writeMeta(meta as GatewayMeta);

    console.log(`\nAdmin user created: ${result.user.email}`);
    console.log('Admin API token saved to gateway config.');
  } catch (err) {
    console.error(`Bootstrap error: ${err instanceof Error ? err.message : String(err)}`);
  }
}
|
||||||
37
packages/cli/src/commands/gateway/logs.ts
Normal file
37
packages/cli/src/commands/gateway/logs.ts
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
import { existsSync, readFileSync } from 'node:fs';
|
||||||
|
import { spawn } from 'node:child_process';
|
||||||
|
import { LOG_FILE } from './daemon.js';
|
||||||
|
|
||||||
|
/** Options for `mosaic gateway logs`. */
interface LogsOpts {
  // Stream the log with `tail -f` instead of a one-shot dump.
  follow?: boolean;
  // How many trailing lines to show (runLogs defaults this to 50).
  lines?: number;
}
|
||||||
|
|
||||||
|
export function runLogs(opts: LogsOpts): void {
|
||||||
|
if (!existsSync(LOG_FILE)) {
|
||||||
|
console.log('No log file found. Is the gateway installed?');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (opts.follow) {
|
||||||
|
const lines = opts.lines ?? 50;
|
||||||
|
const tail = spawn('tail', ['-n', lines.toString(), '-f', LOG_FILE], {
|
||||||
|
stdio: 'inherit',
|
||||||
|
});
|
||||||
|
tail.on('error', () => {
|
||||||
|
// Fallback for systems without tail
|
||||||
|
console.log(readLastLines(opts.lines ?? 50));
|
||||||
|
console.log('\n(--follow requires `tail` command)');
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Just print last N lines
|
||||||
|
console.log(readLastLines(opts.lines ?? 50));
|
||||||
|
}
|
||||||
|
|
||||||
|
function readLastLines(n: number): string {
|
||||||
|
const content = readFileSync(LOG_FILE, 'utf-8');
|
||||||
|
const lines = content.split('\n');
|
||||||
|
return lines.slice(-n).join('\n');
|
||||||
|
}
|
||||||
115
packages/cli/src/commands/gateway/status.ts
Normal file
115
packages/cli/src/commands/gateway/status.ts
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
import { getDaemonPid, readMeta, LOG_FILE, GATEWAY_HOME } from './daemon.js';
|
||||||
|
|
||||||
|
/** Common CLI options for the gateway status subcommand. */
interface GatewayOpts {
  // Gateway host to query.
  host: string;
  // Gateway port to query.
  port: number;
  // Admin API token; runStatus falls back to the token stored in meta.json.
  token?: string;
}

/** One row in the printed services table. */
interface ServiceStatus {
  name: string;
  status: string;
  // Preformatted latency string, e.g. "12ms".
  latency?: string;
}

/** Expected shape of the gateway's /api/admin/health response. */
interface AdminHealth {
  status: string;
  services: {
    database: { status: string; latencyMs: number };
    cache: { status: string; latencyMs: number };
  };
  // Active agent-session count, when reported.
  agentPool?: { active: number };
  // Configured model providers and their availability.
  providers?: Array<{ name: string; available: boolean; models: number }>;
}
|
||||||
|
|
||||||
|
export async function runStatus(opts: GatewayOpts): Promise<void> {
|
||||||
|
const meta = readMeta();
|
||||||
|
const pid = getDaemonPid();
|
||||||
|
|
||||||
|
console.log('Mosaic Gateway Status');
|
||||||
|
console.log('─────────────────────');
|
||||||
|
|
||||||
|
// Daemon status
|
||||||
|
if (pid !== null) {
|
||||||
|
console.log(` Status: running (PID ${pid.toString()})`);
|
||||||
|
} else {
|
||||||
|
console.log(' Status: stopped');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Version
|
||||||
|
console.log(` Version: ${meta?.version ?? 'unknown'}`);
|
||||||
|
|
||||||
|
// Endpoint
|
||||||
|
const host = opts.host;
|
||||||
|
const port = opts.port;
|
||||||
|
console.log(` Endpoint: http://${host}:${port.toString()}`);
|
||||||
|
console.log(` Config: ${GATEWAY_HOME}`);
|
||||||
|
console.log(` Logs: ${LOG_FILE}`);
|
||||||
|
|
||||||
|
if (pid === null) return;
|
||||||
|
|
||||||
|
// Health check
|
||||||
|
try {
|
||||||
|
const healthRes = await fetch(`http://${host}:${port.toString()}/health`);
|
||||||
|
if (!healthRes.ok) {
|
||||||
|
console.log('\n Health: unreachable');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
console.log('\n Health: unreachable');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Admin health (requires token)
|
||||||
|
const token = opts.token ?? meta?.adminToken;
|
||||||
|
if (!token) {
|
||||||
|
console.log(
|
||||||
|
'\n (No admin token — run `mosaic gateway config` to set one for detailed status)',
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const res = await fetch(`http://${host}:${port.toString()}/api/admin/health`, {
|
||||||
|
headers: { Authorization: `Bearer ${token}` },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!res.ok) {
|
||||||
|
console.log('\n Admin health: unauthorized or unavailable');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const health = (await res.json()) as AdminHealth;
|
||||||
|
|
||||||
|
console.log('\n Services:');
|
||||||
|
const services: ServiceStatus[] = [
|
||||||
|
{
|
||||||
|
name: 'Database',
|
||||||
|
status: health.services.database.status,
|
||||||
|
latency: `${health.services.database.latencyMs.toString()}ms`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'Cache',
|
||||||
|
status: health.services.cache.status,
|
||||||
|
latency: `${health.services.cache.latencyMs.toString()}ms`,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const svc of services) {
|
||||||
|
const latStr = svc.latency ? ` (${svc.latency})` : '';
|
||||||
|
console.log(` ${svc.name}:${' '.repeat(10 - svc.name.length)}${svc.status}${latStr}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (health.providers && health.providers.length > 0) {
|
||||||
|
const available = health.providers.filter((p) => p.available);
|
||||||
|
const names = available.map((p) => p.name).join(', ');
|
||||||
|
console.log(`\n Providers: ${available.length.toString()} active (${names})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (health.agentPool) {
|
||||||
|
console.log(` Sessions: ${health.agentPool.active.toString()} active`);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
console.log('\n Admin health: connection error');
|
||||||
|
}
|
||||||
|
}
|
||||||
62
packages/cli/src/commands/gateway/uninstall.ts
Normal file
62
packages/cli/src/commands/gateway/uninstall.ts
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
import { existsSync, rmSync } from 'node:fs';
|
||||||
|
import { createInterface } from 'node:readline';
|
||||||
|
import {
|
||||||
|
GATEWAY_HOME,
|
||||||
|
getDaemonPid,
|
||||||
|
readMeta,
|
||||||
|
stopDaemon,
|
||||||
|
uninstallGatewayPackage,
|
||||||
|
} from './daemon.js';
|
||||||
|
|
||||||
|
export async function runUninstall(): Promise<void> {
|
||||||
|
const rl = createInterface({ input: process.stdin, output: process.stdout });
|
||||||
|
try {
|
||||||
|
await doUninstall(rl);
|
||||||
|
} finally {
|
||||||
|
rl.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function prompt(rl: ReturnType<typeof createInterface>, question: string): Promise<string> {
|
||||||
|
return new Promise((resolve) => rl.question(question, resolve));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Interactive uninstall flow: confirms intent, stops a running daemon,
 * optionally wipes the GATEWAY_HOME data directory (separately confirmed,
 * since it is destructive), then removes the global npm package.
 */
async function doUninstall(rl: ReturnType<typeof createInterface>): Promise<void> {
  // meta.json is the installation marker; without it there is nothing to do.
  const meta = readMeta();
  if (!meta) {
    console.log('Gateway is not installed.');
    return;
  }

  const answer = await prompt(rl, 'Uninstall Mosaic Gateway? [y/N] ');
  if (answer.toLowerCase() !== 'y') {
    console.log('Aborted.');
    return;
  }

  // Stop if running
  if (getDaemonPid() !== null) {
    console.log('Stopping gateway daemon...');
    try {
      await stopDaemon();
      console.log('Stopped.');
    } catch (err) {
      // A failed stop is not fatal — continue removing the package.
      console.warn(`Warning: ${err instanceof Error ? err.message : String(err)}`);
    }
  }

  // Remove config/data — destructive, so it gets its own confirmation.
  const removeData = await prompt(rl, `Remove all gateway data at ${GATEWAY_HOME}? [y/N] `);
  if (removeData.toLowerCase() === 'y') {
    if (existsSync(GATEWAY_HOME)) {
      rmSync(GATEWAY_HOME, { recursive: true, force: true });
      console.log('Gateway data removed.');
    }
  }

  // Uninstall npm package
  console.log('Uninstalling npm package...');
  uninstallGatewayPackage();

  console.log('\nGateway uninstalled.');
}
|
||||||
772
packages/cli/src/commands/launch.ts
Normal file
772
packages/cli/src/commands/launch.ts
Normal file
@@ -0,0 +1,772 @@
|
|||||||
|
/**
|
||||||
|
* Native runtime launcher — replaces the bash mosaic-launch script.
|
||||||
|
*
|
||||||
|
* Builds a composed runtime prompt from AGENTS.md + RUNTIME.md + USER.md +
|
||||||
|
* TOOLS.md + mission context + PRD status, then exec's into the target CLI.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { execFileSync, execSync, spawnSync } from 'node:child_process';
|
||||||
|
import { existsSync, mkdirSync, readFileSync, writeFileSync, readdirSync, rmSync } from 'node:fs';
|
||||||
|
import { createRequire } from 'node:module';
|
||||||
|
import { homedir } from 'node:os';
|
||||||
|
import { join, dirname } from 'node:path';
|
||||||
|
import type { Command } from 'commander';
|
||||||
|
|
||||||
|
// Root of the Mosaic configuration tree; overridable via $MOSAIC_HOME.
const MOSAIC_HOME = process.env['MOSAIC_HOME'] ?? join(homedir(), '.config', 'mosaic');

// The CLI runtimes this launcher knows how to exec into.
type RuntimeName = 'claude' | 'codex' | 'opencode' | 'pi';

// Human-readable labels for launch messaging.
const RUNTIME_LABELS: Record<RuntimeName, string> = {
  claude: 'Claude Code',
  codex: 'Codex',
  opencode: 'OpenCode',
  pi: 'Pi',
};
|
||||||
|
|
||||||
|
// ─── Pre-flight checks ──────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
function checkMosaicHome(): void {
|
||||||
|
if (!existsSync(MOSAIC_HOME)) {
|
||||||
|
console.error(`[mosaic] ERROR: ${MOSAIC_HOME} not found.`);
|
||||||
|
console.error(
|
||||||
|
'[mosaic] Install: bash <(curl -fsSL https://git.mosaicstack.dev/mosaic/mosaic-stack/raw/branch/main/tools/install.sh)',
|
||||||
|
);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function checkFile(path: string, label: string): void {
|
||||||
|
if (!existsSync(path)) {
|
||||||
|
console.error(`[mosaic] ERROR: ${label} not found: ${path}`);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function checkRuntime(cmd: string): void {
|
||||||
|
try {
|
||||||
|
execSync(`which ${cmd}`, { stdio: 'ignore' });
|
||||||
|
} catch {
|
||||||
|
console.error(`[mosaic] ERROR: '${cmd}' not found in PATH.`);
|
||||||
|
console.error(`[mosaic] Install ${cmd} before launching.`);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Ensures $MOSAIC_HOME/SOUL.md exists, running the setup wizard when it
 * does not. Tries the TypeScript `wizard` subcommand first (re-invoking
 * this same CLI entry script), then falls back to the legacy bash
 * mosaic-init script; exits the process if neither path is available.
 */
function checkSoul(): void {
  const soulPath = join(MOSAIC_HOME, 'SOUL.md');
  if (!existsSync(soulPath)) {
    console.log('[mosaic] SOUL.md not found. Running setup wizard...');

    // Prefer the TypeScript wizard (idempotent, detects existing files)
    try {
      // process.argv[1] is this CLI's entry script; re-run it with `wizard`.
      const result = spawnSync(process.execPath, [process.argv[1]!, 'wizard'], {
        stdio: 'inherit',
      });
      if (result.status === 0 && existsSync(soulPath)) return;
    } catch {
      // Fall through to legacy init
    }

    // Fallback: legacy bash mosaic-init
    // NOTE(review): fwScript is defined elsewhere in this file — presumably
    // resolves a framework helper-script path; confirm against its definition.
    const initBin = fwScript('mosaic-init');
    if (existsSync(initBin)) {
      spawnSync(initBin, [], { stdio: 'inherit' });
    } else {
      console.error('[mosaic] Setup failed. Run: mosaic wizard');
      process.exit(1);
    }
  }
}
|
||||||
|
|
||||||
|
/**
 * Hard-gates launch on the sequential-thinking MCP being configured for
 * `runtime`, via the mosaic-ensure-sequential-thinking helper in --check
 * mode. Silently passes when the helper script is not installed.
 */
function checkSequentialThinking(runtime: string): void {
  const checker = fwScript('mosaic-ensure-sequential-thinking');
  if (!existsSync(checker)) return; // Skip if checker doesn't exist
  // Exit status 0 from the checker means the MCP is configured.
  const result = spawnSync(checker, ['--check', '--runtime', runtime], { stdio: 'ignore' });
  if (result.status !== 0) {
    console.error('[mosaic] ERROR: sequential-thinking MCP is required but not configured.');
    console.error(`[mosaic] Fix: ${checker} --runtime ${runtime}`);
    process.exit(1);
  }
}
|
||||||
|
|
||||||
|
// ─── File helpers ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
function readOptional(path: string): string {
|
||||||
|
try {
|
||||||
|
return readFileSync(path, 'utf-8');
|
||||||
|
} catch {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function readJson(path: string): Record<string, unknown> | null {
|
||||||
|
try {
|
||||||
|
return JSON.parse(readFileSync(path, 'utf-8')) as Record<string, unknown>;
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Mission context ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/** Summary of the orchestration mission read from mission.json. */
interface MissionInfo {
  name: string;
  id: string;
  // 'active' or 'paused' — other statuses are filtered out by detectMission.
  status: string;
  milestoneCount: number;
  completedCount: number;
}
|
||||||
|
|
||||||
|
function detectMission(): MissionInfo | null {
|
||||||
|
const missionFile = '.mosaic/orchestrator/mission.json';
|
||||||
|
const data = readJson(missionFile);
|
||||||
|
if (!data) return null;
|
||||||
|
|
||||||
|
const status = String(data['status'] ?? 'inactive');
|
||||||
|
if (status !== 'active' && status !== 'paused') return null;
|
||||||
|
|
||||||
|
const milestones = Array.isArray(data['milestones']) ? data['milestones'] : [];
|
||||||
|
const completed = milestones.filter(
|
||||||
|
(m) =>
|
||||||
|
typeof m === 'object' &&
|
||||||
|
m !== null &&
|
||||||
|
(m as Record<string, unknown>)['status'] === 'completed',
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
name: String(data['name'] ?? 'unnamed'),
|
||||||
|
id: String(data['mission_id'] ?? ''),
|
||||||
|
status,
|
||||||
|
milestoneCount: milestones.length,
|
||||||
|
completedCount: completed.length,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Renders the blocking "active mission" markdown block injected at the top
 * of the composed runtime prompt. The text instructs the runtime to read
 * the protocol/manifest/scratchpad/task files before answering anything.
 */
function buildMissionBlock(mission: MissionInfo): string {
  return `# ACTIVE MISSION — HARD GATE (Read Before Anything Else)

An active orchestration mission exists in this project. This is a BLOCKING requirement.

**Mission:** ${mission.name}
**ID:** ${mission.id}
**Status:** ${mission.status}
**Milestones:** ${mission.completedCount} / ${mission.milestoneCount} completed

## MANDATORY — Before ANY Response to the User

You MUST complete these steps before responding to any user message, including simple greetings:

1. Read \`~/.config/mosaic/guides/ORCHESTRATOR-PROTOCOL.md\` (mission lifecycle protocol)
2. Read \`docs/MISSION-MANIFEST.md\` for full mission scope, milestones, and success criteria
3. Read the latest scratchpad in \`docs/scratchpads/\` for session history, decisions, and corrections
4. Read \`docs/TASKS.md\` for current task state (what is done, what is next)
5. After reading all four, acknowledge the mission state to the user before proceeding

If the user gives a task, execute it within the mission context. If no task is given, present mission status and ask how to proceed.

`;
}
|
||||||
|
|
||||||
|
// ─── PRD status ──────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
function buildPrdBlock(): string {
|
||||||
|
const prdFile = 'docs/PRD.md';
|
||||||
|
if (!existsSync(prdFile)) return '';
|
||||||
|
|
||||||
|
const content = readFileSync(prdFile, 'utf-8');
|
||||||
|
const patterns = [
|
||||||
|
/^#{2,3} .*(problem statement|objective)/im,
|
||||||
|
/^#{2,3} .*(scope|non.goal|out of scope|in.scope)/im,
|
||||||
|
/^#{2,3} .*(user stor|stakeholder|user.*requirement)/im,
|
||||||
|
/^#{2,3} .*functional requirement/im,
|
||||||
|
/^#{2,3} .*non.functional/im,
|
||||||
|
/^#{2,3} .*acceptance criteria/im,
|
||||||
|
/^#{2,3} .*(technical consideration|constraint|dependenc)/im,
|
||||||
|
/^#{2,3} .*(risk|open question)/im,
|
||||||
|
/^#{2,3} .*(success metric|test|verification)/im,
|
||||||
|
/^#{2,3} .*(milestone|delivery|scope version)/im,
|
||||||
|
];
|
||||||
|
|
||||||
|
let sections = 0;
|
||||||
|
for (const pattern of patterns) {
|
||||||
|
if (pattern.test(content)) sections++;
|
||||||
|
}
|
||||||
|
|
||||||
|
const assumptions = (content.match(/ASSUMPTION:/g) ?? []).length;
|
||||||
|
const status = sections < 10 ? `incomplete (${sections}/10 sections)` : 'ready';
|
||||||
|
|
||||||
|
return `
|
||||||
|
# PRD Status
|
||||||
|
|
||||||
|
- **File:** docs/PRD.md
|
||||||
|
- **Status:** ${status}
|
||||||
|
- **Assumptions:** ${assumptions}
|
||||||
|
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Runtime prompt builder ──────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/**
 * Composes the full system prompt injected at launch, in order: mission
 * hard-gate (if a mission is active), PRD status, the launcher hard-gate
 * contract, AGENTS.md, optional USER.md / TOOLS.md, and finally the
 * runtime-specific RUNTIME.md contract. Exits the process (via checkFile)
 * when the runtime contract is missing.
 */
function buildRuntimePrompt(runtime: RuntimeName): string {
  // Each runtime ships its own RUNTIME.md contract under $MOSAIC_HOME.
  const runtimeContractPaths: Record<RuntimeName, string> = {
    claude: join(MOSAIC_HOME, 'runtime', 'claude', 'RUNTIME.md'),
    codex: join(MOSAIC_HOME, 'runtime', 'codex', 'RUNTIME.md'),
    opencode: join(MOSAIC_HOME, 'runtime', 'opencode', 'RUNTIME.md'),
    pi: join(MOSAIC_HOME, 'runtime', 'pi', 'RUNTIME.md'),
  };

  const runtimeFile = runtimeContractPaths[runtime];
  checkFile(runtimeFile, `Runtime contract for ${runtime}`);

  const parts: string[] = [];

  // Mission context (injected first)
  const mission = detectMission();
  if (mission) {
    parts.push(buildMissionBlock(mission));
  }

  // PRD status
  const prdBlock = buildPrdBlock();
  if (prdBlock) parts.push(prdBlock);

  // Hard gate
  parts.push(`# Mosaic Launcher Runtime Contract (Hard Gate)

This contract is injected by \`mosaic\` launch and is mandatory.

First assistant response MUST start with exactly one mode declaration line:
1. Orchestration mission: \`Now initiating Orchestrator mode...\`
2. Implementation mission: \`Now initiating Delivery mode...\`
3. Review-only mission: \`Now initiating Review mode...\`

No tool call or implementation step may occur before that first line.

Mosaic hard gates OVERRIDE runtime-default caution for routine delivery operations.
For required push/merge/issue-close/release actions, execute without routine confirmation prompts.
`);

  // AGENTS.md — read directly (its presence is pre-checked by launchRuntime).
  parts.push(readFileSync(join(MOSAIC_HOME, 'AGENTS.md'), 'utf-8'));

  // USER.md — optional; appended only when present and non-empty.
  const user = readOptional(join(MOSAIC_HOME, 'USER.md'));
  if (user) parts.push('\n\n# User Profile\n\n' + user);

  // TOOLS.md — optional; appended only when present and non-empty.
  const tools = readOptional(join(MOSAIC_HOME, 'TOOLS.md'));
  if (tools) parts.push('\n\n# Machine Tools\n\n' + tools);

  // Runtime-specific contract
  parts.push('\n\n# Runtime-Specific Contract\n\n' + readFileSync(runtimeFile, 'utf-8'));

  return parts.join('\n');
}
|
||||||
|
|
||||||
|
// ─── Session lock ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/**
 * Records this launch as the active orchestrator session by writing
 * .mosaic/orchestrator/session.lock, but only while the mission status is
 * 'active' or 'paused'. Registers exit/SIGINT/SIGTERM handlers that
 * best-effort delete the lock on shutdown. All failures are non-fatal —
 * the lock is advisory.
 */
function writeSessionLock(runtime: string): void {
  const missionFile = '.mosaic/orchestrator/mission.json';
  const lockFile = '.mosaic/orchestrator/session.lock';
  const data = readJson(missionFile);
  if (!data) return;

  // Only lock while the mission is in a live state.
  const status = String(data['status'] ?? 'inactive');
  if (status !== 'active' && status !== 'paused') return;

  // Session id: runtime + filesystem-safe timestamp + pid, unique per launch.
  const sessionId = `${runtime}-${new Date().toISOString().replace(/[:.]/g, '-')}-${process.pid}`;
  const lock = {
    session_id: sessionId,
    runtime,
    pid: process.pid,
    started_at: new Date().toISOString(),
    project_path: process.cwd(),
    milestone_id: '',
  };

  try {
    mkdirSync(dirname(lockFile), { recursive: true });
    writeFileSync(lockFile, JSON.stringify(lock, null, 2) + '\n');

    // Clean up on exit
    const cleanup = () => {
      try {
        rmSync(lockFile, { force: true });
      } catch {
        // best-effort
      }
    };
    process.on('exit', cleanup);
    // Exit codes follow the 128 + signal-number convention (INT=2, TERM=15).
    process.on('SIGINT', () => {
      cleanup();
      process.exit(130);
    });
    process.on('SIGTERM', () => {
      cleanup();
      process.exit(143);
    });
  } catch {
    // Non-fatal
  }
}
|
||||||
|
|
||||||
|
// ─── Resumable session advisory ──────────────────────────────────────────────
|
||||||
|
|
||||||
|
/**
 * Launch-time session advisory. If a session.lock exists but its recorded
 * PID is no longer alive, the stale lock is removed. Otherwise, when no
 * lock exists but an active mission does, prints a hint about generating
 * a continuation prompt.
 */
function checkResumableSession(): void {
  const lockFile = '.mosaic/orchestrator/session.lock';
  const missionFile = '.mosaic/orchestrator/mission.json';

  if (existsSync(lockFile)) {
    const lock = readJson(lockFile);
    if (lock) {
      const pid = Number(lock['pid'] ?? 0);
      if (pid > 0) {
        try {
          // Signal 0 tests process existence without delivering a signal.
          process.kill(pid, 0); // Check if alive
        } catch {
          // Process is dead — stale lock
          rmSync(lockFile, { force: true });
          console.log(`[mosaic] Cleaned up stale session lock (PID ${pid} no longer running).\n`);
        }
      }
    }
  } else if (existsSync(missionFile)) {
    const data = readJson(missionFile);
    if (data && data['status'] === 'active') {
      console.log('[mosaic] Active mission detected. Generate continuation prompt with:');
      console.log('[mosaic] mosaic coord continue\n');
    }
  }
}
|
||||||
|
|
||||||
|
// ─── Write config for runtimes that read from fixed paths ────────────────────
|
||||||
|
|
||||||
|
function ensureRuntimeConfig(runtime: RuntimeName, destPath: string): void {
|
||||||
|
const prompt = buildRuntimePrompt(runtime);
|
||||||
|
mkdirSync(dirname(destPath), { recursive: true });
|
||||||
|
const existing = readOptional(destPath);
|
||||||
|
if (existing !== prompt) {
|
||||||
|
writeFileSync(destPath, prompt);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Pi skill/extension discovery ────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Collect `--skill <dir>` CLI args for every Pi skill deployed under
// MOSAIC_HOME. Scans both the synced `skills/` tree and local overrides in
// `skills-local/`; a directory counts as a skill only when it contains a
// SKILL.md manifest.
function discoverPiSkills(): string[] {
  const args: string[] = [];
  for (const skillsRoot of [join(MOSAIC_HOME, 'skills'), join(MOSAIC_HOME, 'skills-local')]) {
    if (!existsSync(skillsRoot)) continue;
    try {
      for (const entry of readdirSync(skillsRoot, { withFileTypes: true })) {
        if (!entry.isDirectory()) continue;
        const skillDir = join(skillsRoot, entry.name);
        if (existsSync(join(skillDir, 'SKILL.md'))) {
          args.push('--skill', skillDir);
        }
      }
    } catch {
      // skip — an unreadable skills root is treated as having no skills
    }
  }
  return args;
}
|
||||||
|
|
||||||
|
function discoverPiExtension(): string[] {
|
||||||
|
const ext = join(MOSAIC_HOME, 'runtime', 'pi', 'mosaic-extension.ts');
|
||||||
|
return existsSync(ext) ? ['--extension', ext] : [];
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Launch functions ────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
function getMissionPrompt(): string {
|
||||||
|
const mission = detectMission();
|
||||||
|
if (!mission) return '';
|
||||||
|
return `Active mission detected: ${mission.name}. Read the mission state files and report status.`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Launch a runtime CLI with Mosaic context injected. Declared `never`: control
// always ends in process.exit (via execRuntime or the trailing exit).
//
// Sequence: environment preflight checks → stale-session cleanup → mission
// detection → per-runtime prompt injection → exec. When a mission is active
// AND the user passed no args, the mission prompt becomes the first arg.
function launchRuntime(runtime: RuntimeName, args: string[], yolo: boolean): never {
  checkMosaicHome();
  checkFile(join(MOSAIC_HOME, 'AGENTS.md'), 'AGENTS.md');
  checkSoul();
  checkRuntime(runtime);

  // Pi doesn't need sequential-thinking (has native thinking levels)
  if (runtime !== 'pi') {
    checkSequentialThinking(runtime);
  }

  checkResumableSession();

  const missionPrompt = getMissionPrompt();
  // Truthy only when a mission exists and the user passed no explicit args.
  const hasMissionNoArgs = missionPrompt && args.length === 0;
  const label = RUNTIME_LABELS[runtime];
  const modeStr = yolo ? ' in YOLO mode' : '';
  const missionStr = hasMissionNoArgs ? ' (active mission detected)' : '';

  // Record this session so a later launch can detect a stale/live lock.
  writeSessionLock(runtime);

  switch (runtime) {
    case 'claude': {
      // Claude takes the prompt inline via --append-system-prompt.
      const prompt = buildRuntimePrompt('claude');
      const cliArgs = yolo ? ['--dangerously-skip-permissions'] : [];
      cliArgs.push('--append-system-prompt', prompt);
      if (hasMissionNoArgs) {
        cliArgs.push(missionPrompt);
      } else {
        cliArgs.push(...args);
      }
      console.log(`[mosaic] Launching ${label}${modeStr}${missionStr}...`);
      execRuntime('claude', cliArgs);
      break;
    }

    case 'codex': {
      // Codex reads its instructions from a fixed path; write them first.
      ensureRuntimeConfig('codex', join(homedir(), '.codex', 'instructions.md'));
      const cliArgs = yolo ? ['--dangerously-bypass-approvals-and-sandbox'] : [];
      if (hasMissionNoArgs) {
        cliArgs.push(missionPrompt);
      } else {
        cliArgs.push(...args);
      }
      console.log(`[mosaic] Launching ${label}${modeStr}${missionStr}...`);
      execRuntime('codex', cliArgs);
      break;
    }

    case 'opencode': {
      // opencode reads AGENTS.md from its config dir; args pass through as-is.
      // NOTE(review): `yolo`, mission prompt, and missionStr are ignored here —
      // confirm opencode intentionally has no dangerous-permissions flag.
      ensureRuntimeConfig('opencode', join(homedir(), '.config', 'opencode', 'AGENTS.md'));
      console.log(`[mosaic] Launching ${label}${modeStr}...`);
      execRuntime('opencode', args);
      break;
    }

    case 'pi': {
      // Pi takes the prompt inline plus discovered skills/extension args.
      // NOTE(review): `yolo` adds no flag for pi — confirm this is intended.
      const prompt = buildRuntimePrompt('pi');
      const cliArgs = ['--append-system-prompt', prompt];
      cliArgs.push(...discoverPiSkills());
      cliArgs.push(...discoverPiExtension());
      if (hasMissionNoArgs) {
        cliArgs.push(missionPrompt);
      } else {
        cliArgs.push(...args);
      }
      console.log(`[mosaic] Launching ${label}${modeStr}${missionStr}...`);
      execRuntime('pi', cliArgs);
      break;
    }
  }

  process.exit(0); // Unreachable but satisfies never
}
|
||||||
|
|
||||||
|
/** exec into the runtime, replacing the current process. */
|
||||||
|
function execRuntime(cmd: string, args: string[]): void {
|
||||||
|
try {
|
||||||
|
// Use execFileSync with inherited stdio to replace the process
|
||||||
|
const result = spawnSync(cmd, args, {
|
||||||
|
stdio: 'inherit',
|
||||||
|
env: process.env,
|
||||||
|
});
|
||||||
|
process.exit(result.status ?? 0);
|
||||||
|
} catch (err) {
|
||||||
|
console.error(`[mosaic] Failed to launch ${cmd}:`, err instanceof Error ? err.message : err);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Framework script/tool delegation ───────────────────────────────────────
|
||||||
|
|
||||||
|
// Run a framework bash script with inherited stdio, forwarding args and
// optional extra environment variables, then exit with the script's status.
// Declared `never`: exits 0 on success, the script's exit code (or 1) on
// failure, and 1 when the script file does not exist.
function delegateToScript(scriptPath: string, args: string[], env?: Record<string, string>): never {
  if (!existsSync(scriptPath)) {
    console.error(`[mosaic] Script not found: ${scriptPath}`);
    process.exit(1);
  }
  try {
    // Always invoked through bash, so the deployed copy's executable bit
    // does not matter.
    execFileSync('bash', [scriptPath, ...args], {
      stdio: 'inherit',
      env: { ...process.env, ...env },
    });
    process.exit(0);
  } catch (err) {
    // execFileSync throws on non-zero exit; propagate the child's status.
    process.exit((err as { status?: number }).status ?? 1);
  }
}
|
||||||
|
|
||||||
|
/**
 * Resolve a path under the framework tools directory. Prefers the version
 * bundled in the @mosaic/mosaic npm package (always matches the installed
 * CLI version) over the deployed copy in ~/.config/mosaic/ (may be stale).
 *
 * @param segments path components under the tools directory
 * @returns absolute path to the bundled copy when present, else the
 *          deployed copy under MOSAIC_HOME/tools (which may not exist)
 */
function resolveTool(...segments: string[]): string {
  try {
    // createRequire lets this ESM module use require.resolve to locate the
    // installed package's package.json on disk.
    const req = createRequire(import.meta.url);
    const mosaicPkg = dirname(req.resolve('@mosaic/mosaic/package.json'));
    const bundled = join(mosaicPkg, 'framework', 'tools', ...segments);
    if (existsSync(bundled)) return bundled;
  } catch {
    // Fall through to deployed copy — package not resolvable from here.
  }
  return join(MOSAIC_HOME, 'tools', ...segments);
}
|
||||||
|
|
||||||
|
function fwScript(name: string): string {
|
||||||
|
return resolveTool('_scripts', name);
|
||||||
|
}
|
||||||
|
|
||||||
|
function toolScript(toolDir: string, name: string): string {
|
||||||
|
return resolveTool(toolDir, name);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Coord (mission orchestrator) ───────────────────────────────────────────
|
||||||
|
|
||||||
|
// Maps `mosaic coord <subcmd>` to the orchestrator script implementing it.
// Several keys pointing at the same script are intentional aliases
// (status/session, mission/progress, continue/next, run/start, smoke/test,
// resume/recover).
const COORD_SUBCMDS: Record<string, string> = {
  status: 'session-status.sh',
  session: 'session-status.sh',
  init: 'mission-init.sh',
  mission: 'mission-status.sh',
  progress: 'mission-status.sh',
  continue: 'continue-prompt.sh',
  next: 'continue-prompt.sh',
  run: 'session-run.sh',
  start: 'session-run.sh',
  smoke: 'smoke-test.sh',
  test: 'smoke-test.sh',
  resume: 'session-resume.sh',
  recover: 'session-resume.sh',
};
|
||||||
|
|
||||||
|
// `mosaic coord` entry point: pulls runtime-selection flags (--claude /
// --codex / --pi) and --yolo out of the raw args, maps the first remaining
// word to an orchestrator script via COORD_SUBCMDS, and delegates to it.
// Unknown subcommands print usage (exit 0 for explicit `help`, 1 otherwise).
function runCoord(args: string[]): never {
  checkMosaicHome();
  let runtime = 'claude';
  let yoloFlag = '';
  const coordArgs: string[] = [];

  for (const arg of args) {
    if (arg === '--claude' || arg === '--codex' || arg === '--pi') {
      runtime = arg.slice(2); // '--codex' -> 'codex'
    } else if (arg === '--yolo') {
      yoloFlag = '--yolo';
    } else {
      coordArgs.push(arg);
    }
  }

  const subcmd = coordArgs[0] ?? 'help';
  const subArgs = coordArgs.slice(1);
  const script = COORD_SUBCMDS[subcmd];

  if (!script) {
    console.log(`mosaic coord — mission coordinator tools

Commands:
  init --name <name> [opts]    Initialize a new mission
  mission [--project <path>]   Show mission progress dashboard
  status [--project <path>]    Check agent session health
  continue [--project <path>]  Generate continuation prompt
  run [--project <path>]       Launch runtime with mission context
  smoke                        Run orchestration smoke checks
  resume [--project <path>]    Crash recovery

Runtime: --claude (default) | --codex | --pi | --yolo`);
    process.exit(subcmd === 'help' ? 0 : 1);
  }

  // --yolo is forwarded to the script as its first positional flag.
  if (yoloFlag) subArgs.unshift(yoloFlag);
  delegateToScript(toolScript('orchestrator', script), subArgs, {
    MOSAIC_COORD_RUNTIME: runtime,
  });
}
|
||||||
|
|
||||||
|
// ─── Prdy (PRD tools via framework scripts) ─────────────────────────────────
|
||||||
|
|
||||||
|
// Maps `mosaic prdy <subcmd>` to the PRD-tool script implementing it.
// `check` is an alias of `validate`.
const PRDY_SUBCMDS: Record<string, string> = {
  init: 'prdy-init.sh',
  update: 'prdy-update.sh',
  validate: 'prdy-validate.sh',
  check: 'prdy-validate.sh',
  status: 'prdy-status.sh',
};
|
||||||
|
|
||||||
|
// `mosaic prdy` entry point: pulls the runtime-selection flag out of the raw
// args, maps the first remaining word to a PRD script via PRDY_SUBCMDS, and
// delegates to it. Unknown subcommands print usage (exit 0 for explicit
// `help`, 1 otherwise). Unlike `coord`, there is no --yolo passthrough here.
function runPrdyLocal(args: string[]): never {
  checkMosaicHome();
  let runtime = 'claude';
  const prdyArgs: string[] = [];

  for (const arg of args) {
    if (arg === '--claude' || arg === '--codex' || arg === '--pi') {
      runtime = arg.slice(2); // '--pi' -> 'pi'
    } else {
      prdyArgs.push(arg);
    }
  }

  const subcmd = prdyArgs[0] ?? 'help';
  const subArgs = prdyArgs.slice(1);
  const script = PRDY_SUBCMDS[subcmd];

  if (!script) {
    console.log(`mosaic prdy — PRD creation and validation

Commands:
  init [--project <path>] [--name <feature>]  Create docs/PRD.md
  update [--project <path>]                   Update existing PRD
  validate [--project <path>]                 Check PRD completeness
  status [--project <path>]                   Quick PRD health check

Runtime: --claude (default) | --codex | --pi`);
    process.exit(subcmd === 'help' ? 0 : 1);
  }

  delegateToScript(toolScript('prdy', script), subArgs, {
    MOSAIC_PRDY_RUNTIME: runtime,
  });
}
|
||||||
|
|
||||||
|
// ─── Seq (sequential-thinking MCP) ──────────────────────────────────────────
|
||||||
|
|
||||||
|
// `mosaic seq` entry point — manage the sequential-thinking MCP server.
//   check     -> run the ensure script in --check (read-only) mode
//   fix/apply -> run the ensure script in mutating mode
//   start     -> launch the MCP server in the foreground via npx
function runSeq(args: string[]): never {
  checkMosaicHome();
  const action = args[0] ?? 'check';
  const rest = args.slice(1);
  const checker = fwScript('mosaic-ensure-sequential-thinking');

  switch (action) {
    case 'check':
      delegateToScript(checker, ['--check', ...rest]);
      break; // unreachable
    case 'fix':
    case 'apply':
      delegateToScript(checker, rest);
      break; // unreachable — delegateToScript never returns
    case 'start': {
      console.log('[mosaic] Starting sequential-thinking MCP server...');
      try {
        // Foreground run; blocks until the server process exits.
        execFileSync('npx', ['-y', '@modelcontextprotocol/server-sequential-thinking', ...rest], {
          stdio: 'inherit',
        });
        process.exit(0);
      } catch (err) {
        // Propagate the server's exit code on failure.
        process.exit((err as { status?: number }).status ?? 1);
      }
      break; // unreachable
    }
    default:
      console.error(`[mosaic] Unknown seq subcommand '${action}'. Use: check|fix|start`);
      process.exit(1);
  }
}
|
||||||
|
|
||||||
|
// ─── Upgrade ────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// `mosaic upgrade` router:
//   (no subcmd) / release -> release upgrade script
//   check                 -> release upgrade in --dry-run mode
//   project               -> project-level upgrade script
//   -<flag>...            -> flags pass straight through to the release upgrade
//   anything else         -> project upgrade, with the word kept as its arg
function runUpgrade(args: string[]): never {
  checkMosaicHome();
  const subcmd = args[0];

  if (!subcmd || subcmd === 'release') {
    // Drop the literal 'release' word when present; keep everything else.
    delegateToScript(fwScript('mosaic-release-upgrade'), args.slice(subcmd === 'release' ? 1 : 0));
  } else if (subcmd === 'check') {
    delegateToScript(fwScript('mosaic-release-upgrade'), ['--dry-run', ...args.slice(1)]);
  } else if (subcmd === 'project') {
    delegateToScript(fwScript('mosaic-upgrade'), args.slice(1));
  } else if (subcmd.startsWith('-')) {
    delegateToScript(fwScript('mosaic-release-upgrade'), args);
  } else {
    delegateToScript(fwScript('mosaic-upgrade'), args);
  }
}
|
||||||
|
|
||||||
|
// ─── Commander registration ─────────────────────────────────────────────────
|
||||||
|
|
||||||
|
// Register all launcher and tool-delegation commands on the Commander
// program. Every command enables allowUnknownOption + allowExcessArguments
// so arbitrary flags pass through untouched to the underlying runtime or
// framework script.
export function registerLaunchCommands(program: Command): void {
  // Runtime launchers: `mosaic claude|codex|opencode|pi [args...]`
  for (const runtime of ['claude', 'codex', 'opencode', 'pi'] as const) {
    program
      .command(runtime)
      .description(`Launch ${RUNTIME_LABELS[runtime]} with Mosaic injection`)
      .allowUnknownOption(true)
      .allowExcessArguments(true)
      .action((_opts: unknown, cmd: Command) => {
        launchRuntime(runtime, cmd.args, false);
      });
  }

  // Yolo mode: `mosaic yolo <runtime> [args...]`. The runtime name is
  // validated here because Commander sees it only as a free-form string.
  program
    .command('yolo <runtime>')
    .description('Launch a runtime in dangerous-permissions mode (claude|codex|opencode|pi)')
    .allowUnknownOption(true)
    .allowExcessArguments(true)
    .action((runtime: string, _opts: unknown, cmd: Command) => {
      const valid: RuntimeName[] = ['claude', 'codex', 'opencode', 'pi'];
      if (!valid.includes(runtime as RuntimeName)) {
        console.error(
          `[mosaic] ERROR: Unsupported yolo runtime '${runtime}'. Use: ${valid.join('|')}`,
        );
        process.exit(1);
      }
      // NOTE(review): Commander's cmd.args typically includes the declared
      // <runtime> token as well — verify the runtime name is not forwarded
      // as the first arg to the launched CLI.
      launchRuntime(runtime as RuntimeName, cmd.args, true);
    });

  // Coord (mission orchestrator)
  program
    .command('coord')
    .description('Mission coordinator tools (init, status, run, continue, resume)')
    .allowUnknownOption(true)
    .allowExcessArguments(true)
    .action((_opts: unknown, cmd: Command) => {
      runCoord(cmd.args);
    });

  // Prdy (PRD tools via local framework scripts)
  program
    .command('prdy')
    .description('PRD creation and validation (init, update, validate, status)')
    .allowUnknownOption(true)
    .allowExcessArguments(true)
    .action((_opts: unknown, cmd: Command) => {
      runPrdyLocal(cmd.args);
    });

  // Seq (sequential-thinking MCP management)
  program
    .command('seq')
    .description('sequential-thinking MCP management (check/fix/start)')
    .allowUnknownOption(true)
    .allowExcessArguments(true)
    .action((_opts: unknown, cmd: Command) => {
      runSeq(cmd.args);
    });

  // Upgrade (release + project)
  program
    .command('upgrade')
    .description('Upgrade Mosaic release or project files')
    .allowUnknownOption(true)
    .allowExcessArguments(true)
    .action((_opts: unknown, cmd: Command) => {
      runUpgrade(cmd.args);
    });

  // Direct framework script delegates: one Commander command per script,
  // each just preflights MOSAIC_HOME and delegates with args untouched.
  const directCommands: Record<string, { desc: string; script: string }> = {
    init: { desc: 'Generate SOUL.md (agent identity contract)', script: 'mosaic-init' },
    doctor: { desc: 'Health audit — detect drift and missing files', script: 'mosaic-doctor' },
    sync: { desc: 'Sync skills from canonical source', script: 'mosaic-sync-skills' },
    bootstrap: {
      desc: 'Bootstrap a repo with Mosaic standards',
      script: 'mosaic-bootstrap-repo',
    },
  };

  for (const [name, { desc, script }] of Object.entries(directCommands)) {
    program
      .command(name)
      .description(desc)
      .allowUnknownOption(true)
      .allowExcessArguments(true)
      .action((_opts: unknown, cmd: Command) => {
        checkMosaicHome();
        delegateToScript(fwScript(script), cmd.args);
      });
  }
}
|
||||||
@@ -15,6 +15,7 @@ import { useConversations } from './hooks/use-conversations.js';
|
|||||||
import { useSearch } from './hooks/use-search.js';
|
import { useSearch } from './hooks/use-search.js';
|
||||||
import { executeHelp, executeStatus, executeHistory, commandRegistry } from './commands/index.js';
|
import { executeHelp, executeStatus, executeHistory, commandRegistry } from './commands/index.js';
|
||||||
import { fetchConversationMessages } from './gateway-api.js';
|
import { fetchConversationMessages } from './gateway-api.js';
|
||||||
|
import { expandFileRefs, hasFileRefs, handleAttachCommand } from './file-ref.js';
|
||||||
|
|
||||||
export interface TuiAppProps {
|
export interface TuiAppProps {
|
||||||
gatewayUrl: string;
|
gatewayUrl: string;
|
||||||
@@ -85,6 +86,36 @@ export function TuiApp({
|
|||||||
// combo is handled by the top-level useInput handler (e.g. Ctrl+T → 't').
|
// combo is handled by the top-level useInput handler (e.g. Ctrl+T → 't').
|
||||||
const ctrlJustFired = useRef(false);
|
const ctrlJustFired = useRef(false);
|
||||||
|
|
||||||
|
// Wrap sendMessage to expand @file references before sending
|
||||||
|
const sendMessageWithFileRefs = useCallback(
|
||||||
|
(content: string) => {
|
||||||
|
if (!hasFileRefs(content)) {
|
||||||
|
socket.sendMessage(content);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
void expandFileRefs(content)
|
||||||
|
.then(({ expandedMessage, filesAttached, errors }) => {
|
||||||
|
for (const err of errors) {
|
||||||
|
socket.addSystemMessage(err);
|
||||||
|
}
|
||||||
|
if (filesAttached.length > 0) {
|
||||||
|
socket.addSystemMessage(
|
||||||
|
`📎 Attached ${filesAttached.length} file(s): ${filesAttached.join(', ')}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
socket.sendMessage(expandedMessage);
|
||||||
|
})
|
||||||
|
.catch((err: unknown) => {
|
||||||
|
socket.addSystemMessage(
|
||||||
|
`File expansion failed: ${err instanceof Error ? err.message : String(err)}`,
|
||||||
|
);
|
||||||
|
// Send original message without expansion
|
||||||
|
socket.sendMessage(content);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
[socket],
|
||||||
|
);
|
||||||
|
|
||||||
const handleLocalCommand = useCallback(
|
const handleLocalCommand = useCallback(
|
||||||
(parsed: ParsedCommand) => {
|
(parsed: ParsedCommand) => {
|
||||||
switch (parsed.command) {
|
switch (parsed.command) {
|
||||||
@@ -123,9 +154,36 @@ export function TuiApp({
|
|||||||
socket.addSystemMessage('Failed to create new conversation.');
|
socket.addSystemMessage('Failed to create new conversation.');
|
||||||
});
|
});
|
||||||
break;
|
break;
|
||||||
|
case 'attach': {
|
||||||
|
if (!parsed.args) {
|
||||||
|
socket.addSystemMessage('Usage: /attach <file-path>');
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
void handleAttachCommand(parsed.args)
|
||||||
|
.then(({ content, error }) => {
|
||||||
|
if (error) {
|
||||||
|
socket.addSystemMessage(`Attach error: ${error}`);
|
||||||
|
} else if (content) {
|
||||||
|
// Send the file content as a user message
|
||||||
|
socket.sendMessage(content);
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.catch((err: unknown) => {
|
||||||
|
socket.addSystemMessage(
|
||||||
|
`Attach failed: ${err instanceof Error ? err.message : String(err)}`,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
break;
|
||||||
|
}
|
||||||
case 'stop':
|
case 'stop':
|
||||||
// Currently no stop mechanism exposed — show feedback
|
if (socket.isStreaming && socket.socketRef.current?.connected && socket.conversationId) {
|
||||||
socket.addSystemMessage('Stop is not available for the current session.');
|
socket.socketRef.current.emit('abort', {
|
||||||
|
conversationId: socket.conversationId,
|
||||||
|
});
|
||||||
|
socket.addSystemMessage('Abort signal sent.');
|
||||||
|
} else {
|
||||||
|
socket.addSystemMessage('No active stream to stop.');
|
||||||
|
}
|
||||||
break;
|
break;
|
||||||
case 'cost': {
|
case 'cost': {
|
||||||
const u = socket.tokenUsage;
|
const u = socket.tokenUsage;
|
||||||
@@ -348,7 +406,7 @@ export function TuiApp({
|
|||||||
}
|
}
|
||||||
setTuiInput(val);
|
setTuiInput(val);
|
||||||
}}
|
}}
|
||||||
onSubmit={socket.sendMessage}
|
onSubmit={sendMessageWithFileRefs}
|
||||||
onSystemMessage={socket.addSystemMessage}
|
onSystemMessage={socket.addSystemMessage}
|
||||||
onLocalCommand={handleLocalCommand}
|
onLocalCommand={handleLocalCommand}
|
||||||
onGatewayCommand={handleGatewayCommand}
|
onGatewayCommand={handleGatewayCommand}
|
||||||
|
|||||||
@@ -56,6 +56,22 @@ const LOCAL_COMMANDS: CommandDef[] = [
|
|||||||
available: true,
|
available: true,
|
||||||
scope: 'core',
|
scope: 'core',
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
name: 'attach',
|
||||||
|
description: 'Attach a file to the next message (@file syntax also works inline)',
|
||||||
|
aliases: [],
|
||||||
|
args: [
|
||||||
|
{
|
||||||
|
name: 'path',
|
||||||
|
type: 'string' as const,
|
||||||
|
optional: false,
|
||||||
|
description: 'File path to attach',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
execution: 'local',
|
||||||
|
available: true,
|
||||||
|
scope: 'core',
|
||||||
|
},
|
||||||
{
|
{
|
||||||
name: 'new',
|
name: 'new',
|
||||||
description: 'Start a new conversation',
|
description: 'Start a new conversation',
|
||||||
|
|||||||
202
packages/cli/src/tui/file-ref.ts
Normal file
202
packages/cli/src/tui/file-ref.ts
Normal file
@@ -0,0 +1,202 @@
|
|||||||
|
/**
|
||||||
|
* File reference expansion for TUI chat input.
|
||||||
|
*
|
||||||
|
* Detects @path/to/file patterns in user messages, reads the file contents,
|
||||||
|
* and inlines them as fenced code blocks in the message.
|
||||||
|
*
|
||||||
|
* Supports:
|
||||||
|
* - @relative/path.ts
|
||||||
|
* - @./relative/path.ts
|
||||||
|
* - @/absolute/path.ts
|
||||||
|
* - @~/home-relative/path.ts
|
||||||
|
*
|
||||||
|
* Also provides an /attach <path> command handler.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { readFile, stat } from 'node:fs/promises';
|
||||||
|
import { resolve, extname, basename } from 'node:path';
|
||||||
|
import { homedir } from 'node:os';
|
||||||
|
|
||||||
|
// Hard limits on inline attachments: per-file size and per-message count.
const MAX_FILE_SIZE = 256 * 1024; // 256 KB
const MAX_FILES_PER_MESSAGE = 10;

/**
 * Regex to detect @file references in user input.
 * Matches @<path> where path starts with /, ./, ~/, or a word char,
 * and continues until whitespace or end of string.
 * Excludes @mentions that look like usernames (no dots/slashes).
 * NOTE: declared with /g, so it is stateful (lastIndex) — callers must
 * reset lastIndex before each scan.
 */
const FILE_REF_PATTERN = /(?:^|\s)@((?:\.{0,2}\/|~\/|[a-zA-Z0-9_])[^\s]+)/g;

/** Result of expanding @file references in one message. */
interface FileRefResult {
  /** The expanded message text with file contents inlined */
  expandedMessage: string;
  /** Files that were successfully read */
  filesAttached: string[];
  /** Errors encountered while reading files */
  errors: string[];
}
|
||||||
|
|
||||||
|
function resolveFilePath(ref: string): string {
|
||||||
|
if (ref.startsWith('~/')) {
|
||||||
|
return resolve(homedir(), ref.slice(2));
|
||||||
|
}
|
||||||
|
return resolve(process.cwd(), ref);
|
||||||
|
}
|
||||||
|
|
||||||
|
function getLanguageHint(filePath: string): string {
|
||||||
|
const ext = extname(filePath).toLowerCase();
|
||||||
|
const map: Record<string, string> = {
|
||||||
|
'.ts': 'typescript',
|
||||||
|
'.tsx': 'typescript',
|
||||||
|
'.js': 'javascript',
|
||||||
|
'.jsx': 'javascript',
|
||||||
|
'.py': 'python',
|
||||||
|
'.rb': 'ruby',
|
||||||
|
'.rs': 'rust',
|
||||||
|
'.go': 'go',
|
||||||
|
'.java': 'java',
|
||||||
|
'.c': 'c',
|
||||||
|
'.cpp': 'cpp',
|
||||||
|
'.h': 'c',
|
||||||
|
'.hpp': 'cpp',
|
||||||
|
'.cs': 'csharp',
|
||||||
|
'.sh': 'bash',
|
||||||
|
'.bash': 'bash',
|
||||||
|
'.zsh': 'zsh',
|
||||||
|
'.fish': 'fish',
|
||||||
|
'.json': 'json',
|
||||||
|
'.yaml': 'yaml',
|
||||||
|
'.yml': 'yaml',
|
||||||
|
'.toml': 'toml',
|
||||||
|
'.xml': 'xml',
|
||||||
|
'.html': 'html',
|
||||||
|
'.css': 'css',
|
||||||
|
'.scss': 'scss',
|
||||||
|
'.md': 'markdown',
|
||||||
|
'.sql': 'sql',
|
||||||
|
'.graphql': 'graphql',
|
||||||
|
'.dockerfile': 'dockerfile',
|
||||||
|
'.tf': 'terraform',
|
||||||
|
'.vue': 'vue',
|
||||||
|
'.svelte': 'svelte',
|
||||||
|
};
|
||||||
|
return map[ext] ?? '';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the input contains any @file references.
|
||||||
|
*/
|
||||||
|
export function hasFileRefs(input: string): boolean {
|
||||||
|
FILE_REF_PATTERN.lastIndex = 0;
|
||||||
|
return FILE_REF_PATTERN.test(input);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Expand @file references in a message by reading file contents
 * and appending them as fenced code blocks.
 *
 * Behavior:
 *  - Duplicate references are read once; first-appearance order is kept.
 *  - More than MAX_FILES_PER_MESSAGE refs aborts the whole expansion:
 *    the input is returned unchanged with a single error.
 *  - Per-file failures (not a file, too large, unreadable) are collected
 *    in `errors`; the remaining files are still attached.
 *  - ENOENT on a ref without '/' or '.' is silently ignored — it is most
 *    likely an @mention, not a file path.
 */
export async function expandFileRefs(input: string): Promise<FileRefResult> {
  const refs: string[] = [];
  // /g regexes are stateful; reset lastIndex before scanning.
  FILE_REF_PATTERN.lastIndex = 0;
  let match;
  while ((match = FILE_REF_PATTERN.exec(input)) !== null) {
    const ref = match[1]!;
    if (!refs.includes(ref)) {
      refs.push(ref);
    }
  }

  if (refs.length === 0) {
    return { expandedMessage: input, filesAttached: [], errors: [] };
  }

  if (refs.length > MAX_FILES_PER_MESSAGE) {
    return {
      expandedMessage: input,
      filesAttached: [],
      errors: [`Too many file references (${refs.length}). Maximum is ${MAX_FILES_PER_MESSAGE}.`],
    };
  }

  const filesAttached: string[] = [];
  const errors: string[] = [];
  const attachments: string[] = [];

  for (const ref of refs) {
    const filePath = resolveFilePath(ref);
    try {
      // stat first: rejects directories and oversize files before reading.
      const info = await stat(filePath);
      if (!info.isFile()) {
        errors.push(`@${ref}: not a file`);
        continue;
      }
      if (info.size > MAX_FILE_SIZE) {
        errors.push(
          `@${ref}: file too large (${(info.size / 1024).toFixed(0)} KB, limit ${MAX_FILE_SIZE / 1024} KB)`,
        );
        continue;
      }
      const content = await readFile(filePath, 'utf8');
      const lang = getLanguageHint(filePath);
      const name = basename(filePath);
      attachments.push(`\n📎 ${ref} (${name}):\n\`\`\`${lang}\n${content}\n\`\`\``);
      filesAttached.push(ref);
    } catch (err) {
      const msg = err instanceof Error ? err.message : String(err);
      // Only report meaningful errors — ENOENT is common for false @mention matches
      if (msg.includes('ENOENT')) {
        // Check if this looks like a file path (has extension or slash)
        if (ref.includes('/') || ref.includes('.')) {
          errors.push(`@${ref}: file not found`);
        }
        // Otherwise silently skip — likely an @mention, not a file ref
      } else {
        errors.push(`@${ref}: ${msg}`);
      }
    }
  }

  if (attachments.length === 0) {
    // Nothing attached (all failed or skipped) — return input untouched.
    return { expandedMessage: input, filesAttached, errors };
  }

  const expandedMessage = input + '\n' + attachments.join('\n');
  return { expandedMessage, filesAttached, errors };
}
|
||||||
|
|
||||||
|
/**
 * Handle the /attach <path> command.
 * Reads a file and returns the content formatted for inclusion in the chat.
 *
 * Returns `{ content }` on success; `{ content: '', error }` when the path
 * is empty, not a regular file, larger than MAX_FILE_SIZE, or unreadable.
 */
export async function handleAttachCommand(
  args: string,
): Promise<{ content: string; error?: string }> {
  const filePath = args.trim();
  if (!filePath) {
    return { content: '', error: 'Usage: /attach <file-path>' };
  }

  const resolved = resolveFilePath(filePath);
  try {
    // stat first: rejects directories and oversize files before reading.
    const info = await stat(resolved);
    if (!info.isFile()) {
      return { content: '', error: `Not a file: ${filePath}` };
    }
    if (info.size > MAX_FILE_SIZE) {
      return {
        content: '',
        error: `File too large (${(info.size / 1024).toFixed(0)} KB, limit ${MAX_FILE_SIZE / 1024} KB)`,
      };
    }
    const content = await readFile(resolved, 'utf8');
    const lang = getLanguageHint(resolved);
    const name = basename(resolved);
    return {
      content: `📎 Attached file: ${name} (${filePath})\n\`\`\`${lang}\n${content}\n\`\`\``,
    };
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    return { content: '', error: `Failed to read file: ${msg}` };
  }
}
|
||||||
4
packages/cli/tsconfig.build.json
Normal file
4
packages/cli/tsconfig.build.json
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
{
|
||||||
|
"extends": "./tsconfig.json",
|
||||||
|
"exclude": ["node_modules", "dist", "src/**/*.test.ts", "src/**/*.spec.ts"]
|
||||||
|
}
|
||||||
40
packages/config/package.json
Normal file
40
packages/config/package.json
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
{
|
||||||
|
"name": "@mosaic/config",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||||
|
"directory": "packages/config"
|
||||||
|
},
|
||||||
|
"type": "module",
|
||||||
|
"main": "dist/index.js",
|
||||||
|
"types": "dist/index.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": {
|
||||||
|
"types": "./dist/index.d.ts",
|
||||||
|
"default": "./dist/index.js"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"build": "tsc",
|
||||||
|
"lint": "eslint src",
|
||||||
|
"typecheck": "tsc --noEmit",
|
||||||
|
"test": "vitest run --passWithNoTests"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@mosaic/memory": "workspace:^",
|
||||||
|
"@mosaic/queue": "workspace:^",
|
||||||
|
"@mosaic/storage": "workspace:^"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"typescript": "^5.8.0",
|
||||||
|
"vitest": "^2.0.0"
|
||||||
|
},
|
||||||
|
"publishConfig": {
|
||||||
|
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||||
|
"access": "public"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
]
|
||||||
|
}
|
||||||
7
packages/config/src/index.ts
Normal file
7
packages/config/src/index.ts
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
export type { MosaicConfig, StorageTier, MemoryConfigRef } from './mosaic-config.js';
|
||||||
|
export {
|
||||||
|
DEFAULT_LOCAL_CONFIG,
|
||||||
|
DEFAULT_TEAM_CONFIG,
|
||||||
|
loadConfig,
|
||||||
|
validateConfig,
|
||||||
|
} from './mosaic-config.js';
|
||||||
140
packages/config/src/mosaic-config.ts
Normal file
140
packages/config/src/mosaic-config.ts
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
import { readFileSync, existsSync } from 'node:fs';
|
||||||
|
import { resolve } from 'node:path';
|
||||||
|
import type { StorageConfig } from '@mosaic/storage';
|
||||||
|
import type { QueueAdapterConfig as QueueConfig } from '@mosaic/queue';
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Types */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
// Persistence tier the stack runs in: single-user local backends vs shared
// team infrastructure.
export type StorageTier = 'local' | 'team';

// Memory-backend selector; the concrete backend configuration lives with
// the memory package.
export interface MemoryConfigRef {
  type: 'pgvector' | 'sqlite-vec' | 'keyword';
}

// Top-level Mosaic configuration: one storage, queue, and memory backend.
export interface MosaicConfig {
  tier: StorageTier;
  storage: StorageConfig;
  queue: QueueConfig;
  memory: MemoryConfigRef;
}

/* ------------------------------------------------------------------ */
/* Defaults                                                            */
/* ------------------------------------------------------------------ */

// Single-user defaults: SQLite file, on-disk queue, keyword memory.
export const DEFAULT_LOCAL_CONFIG: MosaicConfig = {
  tier: 'local',
  storage: { type: 'sqlite', path: '.mosaic/data.db' },
  queue: { type: 'local', dataDir: '.mosaic/queue' },
  memory: { type: 'keyword' },
};

// Team defaults: shared Postgres, BullMQ queue, pgvector memory.
// NOTE(review): the default Postgres URL embeds dev credentials — fine for
// localhost bootstrap, but must be overridden for real deployments.
export const DEFAULT_TEAM_CONFIG: MosaicConfig = {
  tier: 'team',
  storage: { type: 'postgres', url: 'postgresql://mosaic:mosaic@localhost:5432/mosaic' },
  queue: { type: 'bullmq' },
  memory: { type: 'pgvector' },
};
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Validation */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
const VALID_TIERS = new Set<string>(['local', 'team']);
|
||||||
|
const VALID_STORAGE_TYPES = new Set<string>(['postgres', 'sqlite', 'files']);
|
||||||
|
const VALID_QUEUE_TYPES = new Set<string>(['bullmq', 'local']);
|
||||||
|
const VALID_MEMORY_TYPES = new Set<string>(['pgvector', 'sqlite-vec', 'keyword']);
|
||||||
|
|
||||||
|
export function validateConfig(raw: unknown): MosaicConfig {
|
||||||
|
if (typeof raw !== 'object' || raw === null) {
|
||||||
|
throw new Error('MosaicConfig must be a non-null object');
|
||||||
|
}
|
||||||
|
|
||||||
|
const obj = raw as Record<string, unknown>;
|
||||||
|
|
||||||
|
// tier
|
||||||
|
const tier = obj['tier'];
|
||||||
|
if (typeof tier !== 'string' || !VALID_TIERS.has(tier)) {
|
||||||
|
throw new Error(`Invalid tier "${String(tier)}" — expected "local" or "team"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// storage
|
||||||
|
const storage = obj['storage'];
|
||||||
|
if (typeof storage !== 'object' || storage === null) {
|
||||||
|
throw new Error('config.storage must be a non-null object');
|
||||||
|
}
|
||||||
|
const storageType = (storage as Record<string, unknown>)['type'];
|
||||||
|
if (typeof storageType !== 'string' || !VALID_STORAGE_TYPES.has(storageType)) {
|
||||||
|
throw new Error(`Invalid storage.type "${String(storageType)}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// queue
|
||||||
|
const queue = obj['queue'];
|
||||||
|
if (typeof queue !== 'object' || queue === null) {
|
||||||
|
throw new Error('config.queue must be a non-null object');
|
||||||
|
}
|
||||||
|
const queueType = (queue as Record<string, unknown>)['type'];
|
||||||
|
if (typeof queueType !== 'string' || !VALID_QUEUE_TYPES.has(queueType)) {
|
||||||
|
throw new Error(`Invalid queue.type "${String(queueType)}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// memory
|
||||||
|
const memory = obj['memory'];
|
||||||
|
if (typeof memory !== 'object' || memory === null) {
|
||||||
|
throw new Error('config.memory must be a non-null object');
|
||||||
|
}
|
||||||
|
const memoryType = (memory as Record<string, unknown>)['type'];
|
||||||
|
if (typeof memoryType !== 'string' || !VALID_MEMORY_TYPES.has(memoryType)) {
|
||||||
|
throw new Error(`Invalid memory.type "${String(memoryType)}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
tier: tier as StorageTier,
|
||||||
|
storage: storage as StorageConfig,
|
||||||
|
queue: queue as QueueConfig,
|
||||||
|
memory: memory as MemoryConfigRef,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
/* Loader */
|
||||||
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
|
function detectFromEnv(): MosaicConfig {
|
||||||
|
if (process.env['DATABASE_URL']) {
|
||||||
|
return {
|
||||||
|
...DEFAULT_TEAM_CONFIG,
|
||||||
|
storage: {
|
||||||
|
type: 'postgres',
|
||||||
|
url: process.env['DATABASE_URL'],
|
||||||
|
},
|
||||||
|
queue: {
|
||||||
|
type: 'bullmq',
|
||||||
|
url: process.env['VALKEY_URL'],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return DEFAULT_LOCAL_CONFIG;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function loadConfig(configPath?: string): MosaicConfig {
|
||||||
|
// 1. Explicit path or default location
|
||||||
|
const paths = configPath
|
||||||
|
? [resolve(configPath)]
|
||||||
|
: [
|
||||||
|
resolve(process.cwd(), 'mosaic.config.json'),
|
||||||
|
resolve(process.cwd(), '../../mosaic.config.json'), // monorepo root when cwd is apps/gateway
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const p of paths) {
|
||||||
|
if (existsSync(p)) {
|
||||||
|
const raw: unknown = JSON.parse(readFileSync(p, 'utf-8'));
|
||||||
|
return validateConfig(raw);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2. Fall back to env-var detection
|
||||||
|
return detectFromEnv();
|
||||||
|
}
|
||||||
9
packages/config/tsconfig.json
Normal file
9
packages/config/tsconfig.json
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"extends": "../../tsconfig.base.json",
|
||||||
|
"compilerOptions": {
|
||||||
|
"outDir": "dist",
|
||||||
|
"rootDir": "src"
|
||||||
|
},
|
||||||
|
"include": ["src/**/*"],
|
||||||
|
"exclude": ["node_modules", "dist"]
|
||||||
|
}
|
||||||
@@ -1,6 +1,11 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/coord",
|
"name": "@mosaic/coord",
|
||||||
"version": "0.0.0",
|
"version": "0.0.2",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||||
|
"directory": "packages/coord"
|
||||||
|
},
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
"exports": {
|
"exports": {
|
||||||
@@ -22,5 +27,12 @@
|
|||||||
"@types/node": "^22.0.0",
|
"@types/node": "^22.0.0",
|
||||||
"typescript": "^5.8.0",
|
"typescript": "^5.8.0",
|
||||||
"vitest": "^2.0.0"
|
"vitest": "^2.0.0"
|
||||||
}
|
},
|
||||||
|
"publishConfig": {
|
||||||
|
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||||
|
"access": "public"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
CREATE EXTENSION IF NOT EXISTS vector;--> statement-breakpoint
|
||||||
CREATE TABLE "agent_logs" (
|
CREATE TABLE "agent_logs" (
|
||||||
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
|
||||||
"session_id" text NOT NULL,
|
"session_id" text NOT NULL,
|
||||||
|
|||||||
@@ -12,40 +12,47 @@
|
|||||||
{
|
{
|
||||||
"idx": 1,
|
"idx": 1,
|
||||||
"version": "7",
|
"version": "7",
|
||||||
|
"when": 1773602195608,
|
||||||
|
"tag": "0001_cynical_ultimatum",
|
||||||
|
"breakpoints": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"idx": 2,
|
||||||
|
"version": "7",
|
||||||
"when": 1773602195609,
|
"when": 1773602195609,
|
||||||
"tag": "0001_magical_rattler",
|
"tag": "0001_magical_rattler",
|
||||||
"breakpoints": true
|
"breakpoints": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"idx": 2,
|
"idx": 3,
|
||||||
"version": "7",
|
"version": "7",
|
||||||
"when": 1773625181629,
|
"when": 1773625181629,
|
||||||
"tag": "0002_nebulous_mimic",
|
"tag": "0002_nebulous_mimic",
|
||||||
"breakpoints": true
|
"breakpoints": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"idx": 3,
|
"idx": 4,
|
||||||
"version": "7",
|
"version": "7",
|
||||||
"when": 1773887085247,
|
"when": 1773887085247,
|
||||||
"tag": "0003_p8003_perf_indexes",
|
"tag": "0003_p8003_perf_indexes",
|
||||||
"breakpoints": true
|
"breakpoints": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"idx": 4,
|
"idx": 5,
|
||||||
"version": "7",
|
"version": "7",
|
||||||
"when": 1774224004898,
|
"when": 1774224004898,
|
||||||
"tag": "0004_bumpy_miracleman",
|
"tag": "0004_bumpy_miracleman",
|
||||||
"breakpoints": true
|
"breakpoints": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"idx": 5,
|
"idx": 6,
|
||||||
"version": "7",
|
"version": "7",
|
||||||
"when": 1774225763410,
|
"when": 1774225763410,
|
||||||
"tag": "0005_minor_champions",
|
"tag": "0005_minor_champions",
|
||||||
"breakpoints": true
|
"breakpoints": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"idx": 6,
|
"idx": 7,
|
||||||
"version": "7",
|
"version": "7",
|
||||||
"when": 1774227064500,
|
"when": 1774227064500,
|
||||||
"tag": "0006_swift_shen",
|
"tag": "0006_swift_shen",
|
||||||
|
|||||||
@@ -1,6 +1,11 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/db",
|
"name": "@mosaic/db",
|
||||||
"version": "0.0.0",
|
"version": "0.0.2",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||||
|
"directory": "packages/db"
|
||||||
|
},
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
@@ -28,7 +33,15 @@
|
|||||||
"vitest": "^2.0.0"
|
"vitest": "^2.0.0"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@electric-sql/pglite": "^0.2.17",
|
||||||
"drizzle-orm": "^0.45.1",
|
"drizzle-orm": "^0.45.1",
|
||||||
"postgres": "^3.4.8"
|
"postgres": "^3.4.8"
|
||||||
}
|
},
|
||||||
|
"publishConfig": {
|
||||||
|
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||||
|
"access": "public"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
15
packages/db/src/client-pglite.ts
Normal file
15
packages/db/src/client-pglite.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import { PGlite } from '@electric-sql/pglite';
|
||||||
|
import { drizzle } from 'drizzle-orm/pglite';
|
||||||
|
import * as schema from './schema.js';
|
||||||
|
import type { DbHandle } from './client.js';
|
||||||
|
|
||||||
|
export function createPgliteDb(dataDir: string): DbHandle {
|
||||||
|
const client = new PGlite(dataDir);
|
||||||
|
const db = drizzle(client, { schema });
|
||||||
|
return {
|
||||||
|
db: db as unknown as DbHandle['db'],
|
||||||
|
close: async () => {
|
||||||
|
await client.close();
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
export { createDb, type Db, type DbHandle } from './client.js';
|
export { createDb, type Db, type DbHandle } from './client.js';
|
||||||
|
export { createPgliteDb } from './client-pglite.js';
|
||||||
export { runMigrations } from './migrate.js';
|
export { runMigrations } from './migrate.js';
|
||||||
export * from './schema.js';
|
export * from './schema.js';
|
||||||
export {
|
export {
|
||||||
@@ -16,4 +17,5 @@ export {
|
|||||||
gte,
|
gte,
|
||||||
lte,
|
lte,
|
||||||
ilike,
|
ilike,
|
||||||
|
count,
|
||||||
} from 'drizzle-orm';
|
} from 'drizzle-orm';
|
||||||
|
|||||||
@@ -91,6 +91,28 @@ export const verifications = pgTable('verifications', {
|
|||||||
updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
|
updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// ─── Admin API Tokens ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
export const adminTokens = pgTable(
|
||||||
|
'admin_tokens',
|
||||||
|
{
|
||||||
|
id: text('id').primaryKey(),
|
||||||
|
userId: text('user_id')
|
||||||
|
.notNull()
|
||||||
|
.references(() => users.id, { onDelete: 'cascade' }),
|
||||||
|
tokenHash: text('token_hash').notNull(),
|
||||||
|
label: text('label').notNull(),
|
||||||
|
scope: text('scope').notNull().default('admin'),
|
||||||
|
expiresAt: timestamp('expires_at', { withTimezone: true }),
|
||||||
|
lastUsedAt: timestamp('last_used_at', { withTimezone: true }),
|
||||||
|
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
|
||||||
|
},
|
||||||
|
(t) => [
|
||||||
|
index('admin_tokens_user_id_idx').on(t.userId),
|
||||||
|
uniqueIndex('admin_tokens_hash_idx').on(t.tokenHash),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
// ─── Teams ───────────────────────────────────────────────────────────────────
|
// ─── Teams ───────────────────────────────────────────────────────────────────
|
||||||
// Declared before projects because projects references teams.
|
// Declared before projects because projects references teams.
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,11 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/design-tokens",
|
"name": "@mosaic/design-tokens",
|
||||||
"version": "0.0.0",
|
"version": "0.0.2",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||||
|
"directory": "packages/design-tokens"
|
||||||
|
},
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
@@ -19,5 +24,12 @@
|
|||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"typescript": "^5.8.0",
|
"typescript": "^5.8.0",
|
||||||
"vitest": "^2.0.0"
|
"vitest": "^2.0.0"
|
||||||
}
|
},
|
||||||
|
"publishConfig": {
|
||||||
|
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||||
|
"access": "public"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
541
packages/forge/PLAN.md
Normal file
541
packages/forge/PLAN.md
Normal file
@@ -0,0 +1,541 @@
|
|||||||
|
# Specialist Pipeline — Progressive Refinement Architecture
|
||||||
|
|
||||||
|
**Status:** DRAFT v4 — post architecture review
|
||||||
|
**Created:** 2026-03-24
|
||||||
|
**Last Updated:** 2026-03-24 20:40 CDT
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Vision
|
||||||
|
|
||||||
|
Replace "throw it at a Codex worker and hope" with a **railed pipeline** where each stage narrows scope, increases precision, and catches mistakes before they compound. Spend more time up-front declaring requirements; spend less time at the end fixing broken output.
|
||||||
|
|
||||||
|
**Core principles:**
|
||||||
|
|
||||||
|
- One agent, one specialty. No generalists pretending to be experts.
|
||||||
|
- Agents must be willing to **argue, debate, and push back** — not eagerly agree and move on.
|
||||||
|
- The pipeline is a set of **customizable rails** — agents stay on track, don't get sidetracked or derailed.
|
||||||
|
- Dynamic composition — only relevant specialists are called in per task.
|
||||||
|
- Hard gates between stages — mechanical checks + agent oversight for final decision.
|
||||||
|
- Minimal human oversight once the PRD is declared.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## The Pipeline
|
||||||
|
|
||||||
|
```
|
||||||
|
PRD.md (human declares requirements)
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
BRIEFS (PRD decomposed into discrete work units)
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
BOARD OF DIRECTORS (strategic go/no-go per brief)
|
||||||
|
│ Static composition. CEO, CTO, CFO, COO.
|
||||||
|
│ Output: Approved brief with business constraints, priority, budget
|
||||||
|
│ Board does NOT select technical participants — that's the Brief Analyzer's job
|
||||||
|
│ Gate: Board consensus required to proceed
|
||||||
|
│ REJECTED → archive + notify human. NEEDS REVISION → back to Intake.
|
||||||
|
│
|
||||||
|
│ POST-RUN REVIEW: Board reviews memos from completed pipeline
|
||||||
|
│ runs. Analyzes for conflicts, adjusts strategy, feeds learnings
|
||||||
|
│ back into future briefs. The Board is not fire-and-forget.
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
BRIEF ANALYZER (technical composition)
|
||||||
|
│ Sonnet agent analyzes approved brief + project context
|
||||||
|
│ Selects which generalists/specialists participate in each planning stage
|
||||||
|
│ Separates strategic decisions (Board) from technical composition
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
PLANNING 1 — Architecture (Domain Generalists)
|
||||||
|
│ Dynamic composition based on brief requirements.
|
||||||
|
│ Software Architect + relevant generalists only.
|
||||||
|
│ Output: Architecture Decision Record (ADR)
|
||||||
|
│ Agents MUST debate trade-offs. No rubber-stamping.
|
||||||
|
│ Gate: ADR approved, all dissents resolved or recorded
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
PLANNING 2 — Implementation Design (Language/Domain Specialists)
|
||||||
|
│ Dynamic composition — only languages/domains in the ADR.
|
||||||
|
│ Output: Implementation spec per component
|
||||||
|
│ Each specialist argues for their domain's best practices.
|
||||||
|
│ Gate: All specs reviewed by Architecture, no conflicts
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
PLANNING 3 — Task Decomposition & Estimation
|
||||||
|
│ Context Manager + Task Distributor
|
||||||
|
│ Output: Task breakdown with dependency graph, estimates,
|
||||||
|
│ context packets per worker, acceptance criteria
|
||||||
|
│ Gate: Every task has one owner, one completion condition,
|
||||||
|
│ estimated rounds, and explicit test criteria
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
CODING (Workers execute)
|
||||||
|
│ Codex/Claude workers with specialist subagents loaded
|
||||||
|
│ Each worker gets: context packet + implementation spec + acceptance criteria
|
||||||
|
│ Workers stay in their lane — the rails prevent drift
|
||||||
|
│ Gate: Code compiles, lints, passes unit tests
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
REVIEW (Specialist review)
|
||||||
|
│ Code reviewer (evidence-driven, severity-ranked)
|
||||||
|
│ Security auditor (attack paths, secrets, auth)
|
||||||
|
│ Language specialist for the relevant language
|
||||||
|
│ Gate: All findings addressed or explicitly accepted with rationale
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
REMEDIATE (if review finds issues)
|
||||||
|
│ Worker fixes based on review findings
|
||||||
|
│ Loops back to REVIEW
|
||||||
|
│ Gate: Same as REVIEW — clean pass required
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
TEST (Integration + acceptance)
|
||||||
|
│ QA Strategist validates against acceptance criteria from Planning 3
|
||||||
|
│ Gate: All acceptance criteria pass, no regressions
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
DEPLOY
|
||||||
|
Infrastructure Lead handles deployment
|
||||||
|
Gate: Smoke tests pass in target environment
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Orchestration — Who Watches the Pipeline?
|
||||||
|
|
||||||
|
### The Orchestrator (Mosaic's role)
|
||||||
|
|
||||||
|
**Not me (Jarvis). Not any single agent. The Orchestrator is a dedicated, mechanical process with AI oversight.**
|
||||||
|
|
||||||
|
The Orchestrator is:
|
||||||
|
|
||||||
|
- **Primarily mechanical** — moves work through stages, enforces gates, tracks state
|
||||||
|
- **AI-assisted at decision points** — an agent reviews gate results and makes go/no-go calls
|
||||||
|
- **The thing Mosaic Stack productizes** — this IS the engine from the North Star vision
|
||||||
|
|
||||||
|
How it works:
|
||||||
|
|
||||||
|
1. **Stage Runner** (mechanical): Advances work through the pipeline. Checks gate conditions. Purely deterministic — "did all gate criteria pass? yes → advance. no → hold."
|
||||||
|
2. **Gate Reviewer** (AI agent): When a gate's mechanical checks pass, the Gate Reviewer does a final sanity check. "The code lints and tests pass, but does this actually solve the problem?" This is the lightweight oversight layer.
|
||||||
|
3. **Escalation** (to human): If the Gate Reviewer is uncertain, or if debate in a planning stage is unresolved after N rounds, escalate to Jason.
|
||||||
|
|
||||||
|
### What Sends a Plan Back for More Debate?
|
||||||
|
|
||||||
|
Triggers for **rework/rejection**:
|
||||||
|
|
||||||
|
- **Gate failure** — mechanical checks don't pass → automatic rework
|
||||||
|
- **Gate Reviewer dissent** — AI reviewer flags a concern → sent back with specific objection
|
||||||
|
- **Unresolved debate** — planning agents can't reach consensus after N rounds → escalate or send back with the dissenting positions documented
|
||||||
|
- **Scope creep detection** — if a stage's output significantly exceeds the brief's scope → flag and return
|
||||||
|
- **Dependency conflict** — Planning 3 finds the task breakdown has circular deps or impossible ordering → return to Planning 2
|
||||||
|
- **Review severity threshold** — if Review finds CRITICAL-severity issues → auto-reject back to Coding, no discussion
|
||||||
|
|
||||||
|
### Human Touchpoints (minimal by design)
|
||||||
|
|
||||||
|
- **PRD.md** — Human writes this. This is where you spend the time.
|
||||||
|
- **Board escalation** — Only if the Board can't reach consensus on a brief.
|
||||||
|
- **Planning escalation** — Only if debate is unresolved after max rounds.
|
||||||
|
- **Deploy approval** — Optional. Could be fully automated for low-risk deploys.
|
||||||
|
|
||||||
|
Everything else runs autonomously on rails.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Gate System
|
||||||
|
|
||||||
|
Every gate has **mechanical checks** (automated, deterministic) and an **agent review** (final judgment call).
|
||||||
|
|
||||||
|
| Stage → | Mechanical Checks | Agent Review |
|
||||||
|
| -------------------------------------- | ----------------------------------------------------------------- | ----------------------------------------------------------------------------- |
|
||||||
|
| **Board → Planning 1** | Brief exists, has success criteria, has budget | Gate Reviewer: "Is this brief well-scoped enough to architect?" |
|
||||||
|
| **Planning 1 → Planning 2** | ADR exists, covers all components in brief | Gate Reviewer: "Does this architecture actually solve the problem?" |
|
||||||
|
| **Planning 2 → Planning 3** | Implementation spec per component, no unresolved conflicts | Gate Reviewer: "Are the specs consistent with each other and the ADR?" |
|
||||||
|
| **Planning 3 → Coding** | Task breakdown exists, all tasks have owner + criteria + estimate | Gate Reviewer: "Is this actually implementable as decomposed?" |
|
||||||
|
| **Coding → Review** | Compiles, lints, unit tests pass | Gate Reviewer: "Does the code match the implementation spec?" |
|
||||||
|
| **Review → Test** (or **→ Remediate**) | All review findings addressed | Gate Reviewer: "Are the fixes real or did the worker just suppress warnings?" |
|
||||||
|
| **Test → Deploy** | All acceptance criteria pass, no regressions | Gate Reviewer: "Ready for production?" |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Dynamic Composition
|
||||||
|
|
||||||
|
### Board of Directors — STATIC
|
||||||
|
|
||||||
|
Always the same participants. These are strategic, not technical.
|
||||||
|
|
||||||
|
| Role | Model | Personality |
|
||||||
|
| ---- | ------ | --------------------------------------------------------------------------------------------------------------------------- |
|
||||||
|
| CEO | Opus | Visionary, asks "does this serve the mission?" |
|
||||||
|
| CTO | Opus | Technical realist, asks "can we actually build this?" |
|
||||||
|
| CFO | Sonnet | Cost-conscious, asks "what does this cost vs return?" — needs real analytical depth for budget/ROI, not a lightweight model |
|
||||||
|
| COO | Sonnet | Operational, asks "what's the timeline and resource impact?" |
|
||||||
|
|
||||||
|
### Planning Stages — DYNAMIC
|
||||||
|
|
||||||
|
**The Orchestrator selects participants based on the brief's requirements.** Not every specialist is needed for every task.
|
||||||
|
|
||||||
|
Selection logic:
|
||||||
|
|
||||||
|
1. Parse the brief/ADR for **languages mentioned** → include those Language Specialists
|
||||||
|
2. Parse for **infrastructure concerns** → include Infra Lead, Docker/Swarm, CI/CD as needed
|
||||||
|
3. Parse for **data concerns** → include Data Architect, SQL Pro
|
||||||
|
4. Parse for **UI concerns** → include UX Strategist, Web Design, React/RN Specialist
|
||||||
|
5. Parse for **security concerns** → include Security Architect
|
||||||
|
6. **Always include:** Software Architect (Planning 1), QA Strategist (Planning 3)
|
||||||
|
|
||||||
|
Example: A TypeScript NestJS API endpoint with Prisma:
|
||||||
|
|
||||||
|
- Planning 1: Software Architect, Security Architect, Data Architect
|
||||||
|
- Planning 2: TypeScript Pro, NestJS Expert, SQL Pro
|
||||||
|
- Planning 3: Task Distributor, Context Manager
|
||||||
|
|
||||||
|
Example: A React dashboard with no backend changes:
|
||||||
|
|
||||||
|
- Planning 1: Software Architect, UX Strategist
|
||||||
|
- Planning 2: React Specialist, Web Design, UX/UI Design
|
||||||
|
- Planning 3: Task Distributor, Context Manager
|
||||||
|
|
||||||
|
**Go Pro doesn't sit in on a TypeScript project. Solidity Pro doesn't weigh in on a dashboard.**
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Debate Culture
|
||||||
|
|
||||||
|
Agents in planning stages are **required** to:
|
||||||
|
|
||||||
|
1. **State their position with reasoning** — no "sounds good to me"
|
||||||
|
2. **Challenge other positions** — "I disagree because..."
|
||||||
|
3. **Identify risks the others haven't raised** — adversarial by design
|
||||||
|
4. **Formally dissent if not convinced** — dissents are recorded in the ADR/spec
|
||||||
|
5. **Not capitulate just to move forward** — the Orchestrator tracks rounds and will call time, but agents shouldn't fold under social pressure
|
||||||
|
|
||||||
|
**Round limits:** Min 3, Max 30. The discussion must be allowed to properly work. Don't cut debate short — premature consensus produces bad architecture. The Orchestrator tracks rounds and will intervene only when debate is genuinely circular (repeating the same arguments) rather than still productive.
|
||||||
|
|
||||||
|
This is enforced via personality in the agent definitions:
|
||||||
|
|
||||||
|
- Architects are opinionated and will argue for clean boundaries
|
||||||
|
- Security Architect is paranoid by design — always looking for what can go wrong
|
||||||
|
- QA Strategist is skeptical — "prove it works, don't tell me it works"
|
||||||
|
- Language specialists are purists about their domain's best practices
|
||||||
|
|
||||||
|
**The goal:** By the time code is written, the hard decisions are already made and debated. The workers just execute a well-argued plan.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Model Assignments
|
||||||
|
|
||||||
|
| Pipeline Stage | Model | Rationale |
|
||||||
|
| --------------------------- | --------------------------------- | --------------------------------------------------- |
|
||||||
|
| Board of Directors | Opus (CEO/CTO) / Sonnet (CFO/COO) | Strategic deliberation needs depth across the board |
|
||||||
|
| Planning 1 (Architecture) | Opus | Complex trade-offs, needs deep reasoning |
|
||||||
|
| Planning 2 (Implementation) | Sonnet | Domain expertise, detailed specs |
|
||||||
|
| Planning 3 (Decomposition) | Sonnet | Structured output, dependency analysis |
|
||||||
|
| Coding | Codex | Primary workhorse, separate budget |
|
||||||
|
| Review | Sonnet (code) + Opus (security) | Code review = Sonnet, security = Opus for depth |
|
||||||
|
| Remediation | Codex | Same worker, fix the issues |
|
||||||
|
| Test | Haiku | Mechanical validation, low complexity |
|
||||||
|
| Deploy | Haiku | Scripted deployment, mechanical |
|
||||||
|
| Gate Reviewer | Sonnet | Judgment calls, moderate complexity |
|
||||||
|
| Orchestrator (mechanical) | None — deterministic code | State machine, not AI |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Roster
|
||||||
|
|
||||||
|
### Board of Directors (static)
|
||||||
|
|
||||||
|
| Role | Scope |
|
||||||
|
| ---- | ----------------------------------------- |
|
||||||
|
| CEO | Vision, priorities, go/no-go |
|
||||||
|
| CTO | Technical direction, risk tolerance |
|
||||||
|
| CFO | Budget, cost/benefit |
|
||||||
|
| COO | Operations, timeline, resource allocation |
|
||||||
|
|
||||||
|
### Domain Generalists (dynamic — called per brief)
|
||||||
|
|
||||||
|
| Role | Scope | Selected When |
|
||||||
|
| ----------------------- | ------------------------------------------------------------- | -------------------------------------------------------------------------- |
|
||||||
|
| **Software Architect** | System design, component boundaries, data flow, API contracts | Always in Planning 1 |
|
||||||
|
| **Security Architect** | Threat modeling, auth patterns, secrets, OWASP | **Always** — security is cross-cutting; implicit requirements are the norm |
|
||||||
|
| **Infrastructure Lead** | Deployment, networking, monitoring, scaling, DR | Brief involves deploy, infra, scaling |
|
||||||
|
| **Data Architect** | Schema design, migrations, query strategy, caching | Brief involves DB, data models, migrations |
|
||||||
|
| **QA Strategist** | Test strategy, coverage, integration test design | Always in Planning 3 |
|
||||||
|
| **UX Strategist** | User flows, information architecture, accessibility | Brief involves UI/frontend |
|
||||||
|
|
||||||
|
### Language Specialists (dynamic — one language, one agent)
|
||||||
|
|
||||||
|
| Specialist | Selected When |
|
||||||
|
| -------------------- | ------------------------------------------ |
|
||||||
|
| **TypeScript Pro** | Project uses TypeScript |
|
||||||
|
| **JavaScript Pro** | Project uses vanilla JS / Node.js |
|
||||||
|
| **Go Pro** | Project uses Go |
|
||||||
|
| **Rust Pro** | Project uses Rust |
|
||||||
|
| **Solidity Pro** | Project involves smart contracts |
|
||||||
|
| **Python Pro** | Project uses Python |
|
||||||
|
| **SQL Pro** | Project involves database queries / Prisma |
|
||||||
|
| **LangChain/AI Pro** | Project involves AI/ML/agent frameworks |
|
||||||
|
|
||||||
|
### Domain Specialists (dynamic — cross-cutting expertise)
|
||||||
|
|
||||||
|
| Specialist | Selected When |
|
||||||
|
| -------------------- | ------------------------------------ |
|
||||||
|
| **Web Design** | Frontend work involving HTML/CSS |
|
||||||
|
| **UX/UI Design** | Component design, design system work |
|
||||||
|
| **React Specialist** | Frontend uses React |
|
||||||
|
| **React Native Pro** | Mobile app work |
|
||||||
|
| **Blockchain/DeFi** | Chain interactions, DeFi protocols |
|
||||||
|
| **Docker/Swarm** | Containerization, deployment |
|
||||||
|
| **CI/CD** | Pipeline changes, deploy automation |
|
||||||
|
| **NestJS Expert** | Backend uses NestJS |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Source Material — What to Pull From External Repos
|
||||||
|
|
||||||
|
### From VoltAgent/awesome-codex-subagents (`.toml` format)
|
||||||
|
|
||||||
|
| File | What We Take | What We Customize |
|
||||||
|
| -------------------------------------------------- | ----------------------------------------------------------- | ------------------------------------------------------------ |
|
||||||
|
| `09-meta-orchestration/context-manager.toml` | Context packaging for workers | Add our monorepo structure, Gitea CI, project conventions |
|
||||||
|
| `09-meta-orchestration/task-distributor.toml` | Dependency graphs, write-scope separation, output contracts | Add worktree rules, PR workflow, completion gates |
|
||||||
|
| `09-meta-orchestration/workflow-orchestrator.toml` | Stage design with explicit wait points and gates | Wire to our pipeline stages |
|
||||||
|
| `09-meta-orchestration/agent-organizer.toml` | Task decomposition by objective (not file list) | Add our agent registry, model hierarchy rules |
|
||||||
|
| `04-quality-security/reviewer.toml` | Evidence-driven review, severity ranking | Add NestJS import rules, Prisma gotchas, our recurring bugs |
|
||||||
|
| `04-quality-security/security-auditor.toml` | Attack path mapping, secrets handling review | Add our Docker Swarm patterns, credential loader conventions |
|
||||||
|
|
||||||
|
### From VoltAgent/awesome-openclaw-skills (ClawHub)
|
||||||
|
|
||||||
|
| Skill | What We Take | How We Use It |
|
||||||
|
| -------------------------- | ----------------------------------------------------- | -------------------------------------------------------- |
|
||||||
|
| `brainstorming-2` | Socratic pre-coding design workflow | Planning 1 — requirements refinement before architecture |
|
||||||
|
| `agent-estimation` | Task effort in tool-call rounds | Planning 3 — scope tasks before spawning workers |
|
||||||
|
| `agent-nestjs-skills` | 40 prioritized NestJS rules with code examples | NestJS specialist + backend workers |
|
||||||
|
| `agent-team-orchestration` | Structured handoff protocols, task state transitions | Reference for pipeline stage handoffs |
|
||||||
|
| `b3ehive` | Competitive implementation (3 agents, cross-evaluate) | Critical components: crypto strategies, auth flows |
|
||||||
|
| `agent-council` | Agent scaffolding automation | Automate specialist creation as we expand |
|
||||||
|
| `astrai-code-review` | Model routing by diff complexity | Review stage cost optimization |
|
||||||
|
| `bug-audit` | 6-phase Node.js audit methodology | Periodic codebase health checks |
|
||||||
|
|
||||||
|
### From VoltAgent/awesome-claude-code-subagents (`.md` format)
|
||||||
|
|
||||||
|
| File | What We Take | Notes |
|
||||||
|
| ------------------------------------------ | ----------------------------------------------- | ------------------------------------------------------ |
|
||||||
|
| Language specialist `.md` files | System prompts for TS, Go, Rust, Solidity, etc. | Strip generic stuff, inject project-specific knowledge |
|
||||||
|
| `09-meta-orchestration/agent-organizer.md` | Detailed organizer pattern | Reference — Codex `.toml` is tighter |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Gaps This Fills
|
||||||
|
|
||||||
|
| Gap | Current State | After Pipeline |
|
||||||
|
| ------------------------------- | --------------------------------------- | ----------------------------------------------------------------- |
|
||||||
|
| No pre-coding design | Brief → Codex starts coding immediately | 3 planning stages before anyone writes code |
|
||||||
|
| Agents get sidetracked/derailed | No rails, workers drift from task | Mechanical pipeline + context packets keep workers on track |
|
||||||
|
| No debate on approach | First idea wins | Agents required to argue, dissent, challenge |
|
||||||
|
| No task estimation | Eyeball everything | Tool-call-round estimation in Planning 3 |
|
||||||
|
| Code review is a checkbox | "Did it lint? Ship it." | Evidence-driven reviewer + specialist knowledge |
|
||||||
|
| Security review is hand-waved | Never actually done | Real attack path mapping, secrets review |
|
||||||
|
| Workers get bad context | Ad-hoc prompts, stale assumptions | Context-manager produces execution-ready packets |
|
||||||
|
| Task decomposition is sloppy | "Here's a task, go do it" | Dependency graphs, write-scope separation, output contracts |
|
||||||
|
| Wrong specialists involved | Everyone weighs in on everything | Dynamic composition — only relevant experts |
|
||||||
|
| No rework mechanism | Ship it or start over | Explicit remediation loop with review re-check |
|
||||||
|
| Too much human oversight | Jason babysits every stage | Mechanical gates + AI oversight, human only at PRD and escalation |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
### Phase 1 — Foundation (this week)
|
||||||
|
|
||||||
|
1. Pull and customize Codex subagents: `reviewer.toml`, `security-auditor.toml`, `context-manager.toml`, `task-distributor.toml`, `workflow-orchestrator.toml`
|
||||||
|
2. Inject our project-specific knowledge
|
||||||
|
3. Install to `~/.codex/agents/`
|
||||||
|
4. Define agent personality templates for debate culture (opinionated, adversarial, skeptical)
|
||||||
|
|
||||||
|
### Phase 2 — Specialist Definitions (next week)
|
||||||
|
|
||||||
|
1. Create language specialist definitions (TS, JS, Go, Rust, Solidity, Python, SQL, LangChain, C++)
|
||||||
|
2. Create domain specialist definitions (NestJS, React, Docker/Swarm, CI/CD, Web Design, UX/UI, Blockchain/DeFi, React Native)
|
||||||
|
3. Create generalist definitions (Software Architect, Security Architect, Infra Lead, Data Architect, QA Strategist, UX Strategist)
|
||||||
|
4. Format as Codex `.toml` + OpenClaw skills
|
||||||
|
5. Test each against a real past task
|
||||||
|
|
||||||
|
### Phase 3 — Pipeline Wiring (week after)
|
||||||
|
|
||||||
|
1. Build the Orchestrator (mechanical stage runner + gate checker)
|
||||||
|
2. Build the Gate Reviewer agent
|
||||||
|
3. Wire dynamic composition (brief → participant selection)
|
||||||
|
4. Wire the debate protocol (round tracking, dissent recording, escalation rules)
|
||||||
|
5. Wire Planning 1 → 2 → 3 handoff contracts
|
||||||
|
6. Wire Review → Remediate → Review loop
|
||||||
|
7. Test end-to-end with a real feature request
|
||||||
|
|
||||||
|
### Phase 4 — Mosaic Integration (future)
|
||||||
|
|
||||||
|
1. The Orchestrator becomes a Mosaic Stack feature
|
||||||
|
2. Pipeline stages map to Mosaic task states
|
||||||
|
3. Gate results feed the Mission Control dashboard
|
||||||
|
4. This IS the engine — the dashboard is just the window
|
||||||
|
|
||||||
|
### Phase 5 — Advanced Patterns (future)
|
||||||
|
|
||||||
|
1. `b3ehive` competitive implementation for critical paths
|
||||||
|
2. `astrai-code-review` model routing for cost optimization
|
||||||
|
3. `agent-council` automated scaffolding for new specialists
|
||||||
|
4. Estimation feedback loop (compare estimates to actuals)
|
||||||
|
5. Pipeline analytics (which stages catch the most issues, where do we bottleneck)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Resolved Decisions
|
||||||
|
|
||||||
|
| # | Question | Decision | Rationale |
|
||||||
|
| --- | ----------------------- | ------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||||
|
| 1 | **Gate Reviewer model** | Sonnet for all gates | Sufficient depth for judgment calls; Opus reserved for planning deliberation |
|
||||||
|
| 2 | **Debate rounds** | Min 3, Max 30 per stage | Let discussions work. Don't cut short. Intervene on circular repetition, not round count. |
|
||||||
|
| 3 | **PRD format** | Use existing Mosaic PRD template | `~/.config/mosaic/templates/docs/PRD.md.template` + `~/.config/mosaic/skills-local/prd/SKILL.md` already proven. Iterate from there. |
|
||||||
|
| 4 | **Small tasks** | Pipeline is for projects/features, not typo fixes | This is for getting a project or feature built smoothly. Single-file fixes go direct to a worker. Threshold: if it needs architecture decisions, it goes through the pipeline. |
|
||||||
|
| 5 | **Specialist memory** | Yes — specialists accumulate knowledge with rails | Similar to OpenClaw memory model. Specialists learn from past tasks ("last time X caused Y") but must maintain their specialty rails. Knowledge is domain-scoped, not freeform. |
|
||||||
|
| 6 | **Cost ceiling** | ~$500 per pipeline run (11+ stages) | Using subs (Anthropic, OpenAI), so API costs are minimized or eliminated. Budget is time/throughput, not dollars. |
|
||||||
|
| 7 | **Where this lives** | Standalone service, Pi under the hood | Must be standalone so it can migrate to Mosaic Stack in the future. Pi (mosaic bootstrap) provides the execution substrate. Already using Pi for BOD. Dogfood → prove → productize. |
|
||||||
|
|
||||||
|
## PRD Template
|
||||||
|
|
||||||
|
The pipeline uses the existing Mosaic PRD infrastructure:
|
||||||
|
|
||||||
|
- **Template:** `~/.config/mosaic/templates/docs/PRD.md.template`
|
||||||
|
- **Skill:** `~/.config/mosaic/skills-local/prd/SKILL.md` (guided PRD generation with clarifying questions)
|
||||||
|
- **Guide:** `~/.config/mosaic/guides/PRD.md` (hard rules — PRD must exist before coding begins)
|
||||||
|
|
||||||
|
### Required PRD Sections (from Mosaic guide)
|
||||||
|
|
||||||
|
1. Problem statement and objective
|
||||||
|
2. In-scope and out-of-scope
|
||||||
|
3. User/stakeholder requirements
|
||||||
|
4. Functional requirements
|
||||||
|
5. Non-functional requirements (security, performance, reliability, observability)
|
||||||
|
6. Acceptance criteria
|
||||||
|
7. Constraints and dependencies
|
||||||
|
8. Risks and open questions
|
||||||
|
9. Testing and verification expectations
|
||||||
|
10. Delivery/milestone intent
|
||||||
|
|
||||||
|
The PRD skill also generates user stories with specific acceptance criteria ("Button shows confirmation dialog before deleting" not "Works correctly").
|
||||||
|
|
||||||
|
**Key rule from Mosaic:** Implementation that diverges from PRD without PRD updates is a blocker. Change control: update PRD first → update plan → then implement.
|
||||||
|
|
||||||
|
## Board Post-Run Review
|
||||||
|
|
||||||
|
The Board of Directors is NOT fire-and-forget. After a pipeline run completes (deploy or failure):
|
||||||
|
|
||||||
|
1. **Memos from each stage** are compiled into a run summary
|
||||||
|
2. **Board reviews** the summary for:
|
||||||
|
- Conflicts between stage outputs
|
||||||
|
- Scope drift from original brief
|
||||||
|
- Cost/timeline variance from estimates
|
||||||
|
- Strategic alignment issues
|
||||||
|
3. **Board adjusts** strategy, priorities, or constraints for future briefs
|
||||||
|
4. **Learnings** feed back into specialist memory and Orchestrator heuristics
|
||||||
|
|
||||||
|
This closes the loop. The pipeline doesn't just ship code — it learns from every run.
|
||||||
|
|
||||||
|
## Architecture Review Fixes (v4, 2026-03-24)
|
||||||
|
|
||||||
|
Fixes applied based on Sonnet architecture review:
|
||||||
|
|
||||||
|
| Finding | Fix Applied |
|
||||||
|
| ------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------ |
|
||||||
|
| Dead-end states (REJECTED, NEEDS REVISION, CI failure, worker confusion) | All paths explicitly defined in orchestrator + Board stage |
|
||||||
|
| Security Architect conditional (keyword matching misses implicit auth) | Security Architect now ALWAYS included in Planning 1 |
|
||||||
|
| Board making technical composition decisions | New Brief Analyzer agent handles technical composition after Board approval |
|
||||||
|
| Orchestrator claimed "purely mechanical" but needs semantic analysis | Split into State Machine (mechanical) + Gate Reviewer (AI). Circularity detection is Gate Reviewer's job. |
|
||||||
|
| Test→Remediate had no loop limit | Shared 3-loop budget across Review + Test remediation |
|
||||||
|
| Open-ended debate (3-30 rounds) too loose, framing bias | Structured 3-phase debate: Independent positions → Responses → Synthesis. Tighter round limits (17-53 calls vs 12-120+). |
|
||||||
|
| Review only gets diff | Review now gets full module context + context packet, not just diff |
|
||||||
|
| Cross-brief dependency not enforced at runtime | State Machine enforces dependency ordering + file-level locking |
|
||||||
|
| Gate Reviewer reading full transcripts (context problem) | Gate Reviewer reads structured summaries, requests full transcript only on suspicion |
|
||||||
|
| No minimum specialist composition for Planning 2 | Guard added: at least 1 Language + 1 Domain specialist required |
|
||||||
|
|
||||||
|
## Remaining Open Questions
|
||||||
|
|
||||||
|
1. **Pi integration specifics:** How exactly does Pi serve as the execution substrate? Board sessions already work via `mosaic yolo pi`. Does the full pipeline run as a Pi orchestration, or does Pi just handle individual stage sessions?
|
||||||
|
2. **Specialist memory storage:** OpenBrain? Per-specialist markdown files? Scoped memory namespaces?
|
||||||
|
3. **Pipeline analytics:** What metrics do we track per run? Stage duration, rework count, gate failure rate, estimate accuracy?
|
||||||
|
4. **Parallel briefs:** Can multiple briefs from the same PRD run through the pipeline concurrently? Or strictly serial?
|
||||||
|
5. **Escalation UX:** When the pipeline escalates to Jason, where does that notification go? Discord? TUI? Both?
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Connection to Mosaic North Star
|
||||||
|
|
||||||
|
This pipeline IS the Mosaic vision, just running on agent infrastructure instead of a proper platform:
|
||||||
|
|
||||||
|
- **PRD.md** → Mosaic's task queue API
|
||||||
|
- **Orchestrator** → Mosaic's agent lifecycle management
|
||||||
|
- **Gates** → Mosaic's review gates
|
||||||
|
- **Pipeline stages** → Mosaic's workflow engine
|
||||||
|
- **Dynamic composition** → Mosaic's agent selection
|
||||||
|
|
||||||
|
Everything we build here gets dogfooded, refined, and eventually productized as Mosaic Stack features. We're building the engine that Mosaic will sell.
|
||||||
|
|
||||||
|
### Standalone Architecture (decided)
|
||||||
|
|
||||||
|
The pipeline is built as a **standalone service** — not embedded in OpenClaw or tightly coupled to any single agent framework. This is deliberate:
|
||||||
|
|
||||||
|
1. **Pi (mosaic bootstrap) is the execution substrate** — already proven with BOD sessions
|
||||||
|
2. **The Orchestrator is a mechanical state machine** — it doesn't need an LLM, it needs a process manager
|
||||||
|
3. **Stage sessions are Pi/agent sessions** — each planning/review stage spawns a session with the right participants
|
||||||
|
4. **Migration path to Mosaic Stack is clean** — standalone service → Mosaic feature, not "rip out of OpenClaw"
|
||||||
|
|
||||||
|
The pattern: dogfood on our projects → track what works → extract into Mosaic Stack as a first-class feature.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## References
|
||||||
|
|
||||||
|
- VoltAgent/awesome-codex-subagents: https://github.com/VoltAgent/awesome-codex-subagents
|
||||||
|
- VoltAgent/awesome-claude-code-subagents: https://github.com/VoltAgent/awesome-claude-code-subagents
|
||||||
|
- VoltAgent/awesome-openclaw-skills: https://github.com/VoltAgent/awesome-openclaw-skills
|
||||||
|
- Board implementation: `mosaic/board` branch (commit ad4304b)
|
||||||
|
- Mosaic North Star: `~/.openclaw/workspace/memory/mosaic-north-star.md`
|
||||||
|
- Existing agent registry: `~/.openclaw/workspace/agents/REGISTRY.yaml`
|
||||||
|
- Mosaic Queue PRD: `~/src/jarvis-brain/docs/planning/MOSAIC-QUEUE-PRD.md`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Brief Classification System (skip-BOD support)
|
||||||
|
|
||||||
|
**Added:** 2026-03-26
|
||||||
|
|
||||||
|
Not every brief needs full Board of Directors review. The classification system lets briefs skip stages based on their nature.
|
||||||
|
|
||||||
|
### Classes
|
||||||
|
|
||||||
|
| Class | Pipeline | Use case |
|
||||||
|
| ----------- | ----------------------------- | -------------------------------------------------------------------- |
|
||||||
|
| `strategic` | BOD → BA → Planning 1 → 2 → 3 | New features, architecture, integrations, security, budget decisions |
|
||||||
|
| `technical` | BA → Planning 1 → 2 → 3 | Refactors, bugfixes, UI tweaks, style changes |
|
||||||
|
| `hotfix` | Planning 1 → 2 → 3 | Urgent patches — skip both BOD and BA |
|
||||||
|
|
||||||
|
### Classification priority (highest wins)
|
||||||
|
|
||||||
|
1. `--class` CLI flag on `forge run` or `forge resume`
|
||||||
|
2. YAML frontmatter `class:` field in the brief
|
||||||
|
3. Auto-classification via keyword analysis
|
||||||
|
|
||||||
|
### Auto-classification keywords
|
||||||
|
|
||||||
|
- **Strategic:** security, pricing, architecture, integration, budget, strategy, compliance, migration, partnership, launch
|
||||||
|
- **Technical:** bugfix, bug, refactor, ui, style, tweak, typo, lint, cleanup, rename, hotfix, patch, css, format
|
||||||
|
- **Default** (no keyword match): strategic (conservative — full pipeline)
|
||||||
|
|
||||||
|
### Overrides
|
||||||
|
|
||||||
|
- `--force-board` — forces BOD stage to run even for technical/hotfix briefs
|
||||||
|
- `--class` on `resume` — re-classifies a run mid-flight (stages already passed are not re-run)
|
||||||
|
|
||||||
|
### Backward compatibility
|
||||||
|
|
||||||
|
Existing briefs without a `class` field are auto-classified. The default (no matching keywords) is `strategic`, so all existing runs get the full pipeline unless keywords trigger `technical`.
|
||||||
199
packages/forge/__tests__/board-tasks.test.ts
Normal file
199
packages/forge/__tests__/board-tasks.test.ts
Normal file
@@ -0,0 +1,199 @@
|
|||||||
|
import fs from 'node:fs';
|
||||||
|
import os from 'node:os';
|
||||||
|
import path from 'node:path';
|
||||||
|
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||||
|
|
||||||
|
import {
|
||||||
|
buildPersonaBrief,
|
||||||
|
writePersonaBrief,
|
||||||
|
personaResultPath,
|
||||||
|
synthesisResultPath,
|
||||||
|
generateBoardTasks,
|
||||||
|
synthesizeReviews,
|
||||||
|
} from '../src/board-tasks.js';
|
||||||
|
import type { BoardPersona, PersonaReview } from '../src/types.js';
|
||||||
|
|
||||||
|
const testPersonas: BoardPersona[] = [
|
||||||
|
{ name: 'CEO', slug: 'ceo', description: 'The CEO sets direction.', path: 'agents/board/ceo.md' },
|
||||||
|
{
|
||||||
|
name: 'CTO',
|
||||||
|
slug: 'cto',
|
||||||
|
description: 'The CTO evaluates feasibility.',
|
||||||
|
path: 'agents/board/cto.md',
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
describe('buildPersonaBrief', () => {
|
||||||
|
it('includes persona name and description', () => {
|
||||||
|
const brief = buildPersonaBrief('Build feature X', testPersonas[0]!);
|
||||||
|
expect(brief).toContain('# Board Evaluation: CEO');
|
||||||
|
expect(brief).toContain('The CEO sets direction.');
|
||||||
|
expect(brief).toContain('Build feature X');
|
||||||
|
expect(brief).toContain('"persona": "CEO"');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('writePersonaBrief', () => {
|
||||||
|
let tmpDir: string;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'forge-board-'));
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('writes brief file to disk', () => {
|
||||||
|
const briefPath = writePersonaBrief(tmpDir, 'BOARD', testPersonas[0]!, 'Test brief');
|
||||||
|
expect(fs.existsSync(briefPath)).toBe(true);
|
||||||
|
const content = fs.readFileSync(briefPath, 'utf-8');
|
||||||
|
expect(content).toContain('Board Evaluation: CEO');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('personaResultPath', () => {
|
||||||
|
it('builds correct path', () => {
|
||||||
|
const p = personaResultPath('/run/abc', 'BOARD-ceo');
|
||||||
|
expect(p).toContain('01-board/results/BOARD-ceo.board.json');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('synthesisResultPath', () => {
|
||||||
|
it('builds correct path', () => {
|
||||||
|
const p = synthesisResultPath('/run/abc', 'BOARD-SYNTHESIS');
|
||||||
|
expect(p).toContain('01-board/results/BOARD-SYNTHESIS.board.json');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('generateBoardTasks', () => {
|
||||||
|
let tmpDir: string;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'forge-board-tasks-'));
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('generates one task per persona plus synthesis', () => {
|
||||||
|
const tasks = generateBoardTasks('Test brief', testPersonas, tmpDir);
|
||||||
|
expect(tasks).toHaveLength(3); // 2 personas + 1 synthesis
|
||||||
|
});
|
||||||
|
|
||||||
|
it('persona tasks have no dependsOn', () => {
|
||||||
|
const tasks = generateBoardTasks('Test brief', testPersonas, tmpDir);
|
||||||
|
expect(tasks[0]!.dependsOn).toBeUndefined();
|
||||||
|
expect(tasks[1]!.dependsOn).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('synthesis task depends on all persona tasks', () => {
|
||||||
|
const tasks = generateBoardTasks('Test brief', testPersonas, tmpDir);
|
||||||
|
const synthesis = tasks[tasks.length - 1]!;
|
||||||
|
expect(synthesis.id).toBe('BOARD-SYNTHESIS');
|
||||||
|
expect(synthesis.dependsOn).toEqual(['BOARD-ceo', 'BOARD-cto']);
|
||||||
|
expect(synthesis.dependsOnPolicy).toBe('all_terminal');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('persona tasks have correct metadata', () => {
|
||||||
|
const tasks = generateBoardTasks('Test brief', testPersonas, tmpDir);
|
||||||
|
expect(tasks[0]!.metadata['personaName']).toBe('CEO');
|
||||||
|
expect(tasks[0]!.metadata['personaSlug']).toBe('ceo');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('uses custom base task ID', () => {
|
||||||
|
const tasks = generateBoardTasks('Brief', testPersonas, tmpDir, 'CUSTOM');
|
||||||
|
expect(tasks[0]!.id).toBe('CUSTOM-ceo');
|
||||||
|
expect(tasks[tasks.length - 1]!.id).toBe('CUSTOM-SYNTHESIS');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('writes persona brief files to disk', () => {
|
||||||
|
generateBoardTasks('Test brief', testPersonas, tmpDir);
|
||||||
|
const briefDir = path.join(tmpDir, '01-board', 'briefs');
|
||||||
|
expect(fs.existsSync(briefDir)).toBe(true);
|
||||||
|
const files = fs.readdirSync(briefDir);
|
||||||
|
expect(files).toHaveLength(2);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('synthesizeReviews', () => {
|
||||||
|
const makeReview = (
|
||||||
|
persona: string,
|
||||||
|
verdict: PersonaReview['verdict'],
|
||||||
|
confidence: number,
|
||||||
|
): PersonaReview => ({
|
||||||
|
persona,
|
||||||
|
verdict,
|
||||||
|
confidence,
|
||||||
|
concerns: [`${persona} concern`],
|
||||||
|
recommendations: [`${persona} rec`],
|
||||||
|
keyRisks: [`${persona} risk`],
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns approve when all approve', () => {
|
||||||
|
const result = synthesizeReviews([
|
||||||
|
makeReview('CEO', 'approve', 0.8),
|
||||||
|
makeReview('CTO', 'approve', 0.9),
|
||||||
|
]);
|
||||||
|
expect(result.verdict).toBe('approve');
|
||||||
|
expect(result.confidence).toBe(0.85);
|
||||||
|
expect(result.persona).toBe('Board Synthesis');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns reject when any reject', () => {
|
||||||
|
const result = synthesizeReviews([
|
||||||
|
makeReview('CEO', 'approve', 0.8),
|
||||||
|
makeReview('CTO', 'reject', 0.7),
|
||||||
|
]);
|
||||||
|
expect(result.verdict).toBe('reject');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns conditional when any conditional (no reject)', () => {
|
||||||
|
const result = synthesizeReviews([
|
||||||
|
makeReview('CEO', 'approve', 0.8),
|
||||||
|
makeReview('CTO', 'conditional', 0.6),
|
||||||
|
]);
|
||||||
|
expect(result.verdict).toBe('conditional');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('merges and deduplicates concerns', () => {
|
||||||
|
const reviews = [makeReview('CEO', 'approve', 0.8), makeReview('CTO', 'approve', 0.9)];
|
||||||
|
const result = synthesizeReviews(reviews);
|
||||||
|
expect(result.concerns).toEqual(['CEO concern', 'CTO concern']);
|
||||||
|
expect(result.recommendations).toEqual(['CEO rec', 'CTO rec']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('deduplicates identical items', () => {
|
||||||
|
const r1: PersonaReview = {
|
||||||
|
persona: 'CEO',
|
||||||
|
verdict: 'approve',
|
||||||
|
confidence: 0.8,
|
||||||
|
concerns: ['shared concern'],
|
||||||
|
recommendations: [],
|
||||||
|
keyRisks: [],
|
||||||
|
};
|
||||||
|
const r2: PersonaReview = {
|
||||||
|
persona: 'CTO',
|
||||||
|
verdict: 'approve',
|
||||||
|
confidence: 0.8,
|
||||||
|
concerns: ['shared concern'],
|
||||||
|
recommendations: [],
|
||||||
|
keyRisks: [],
|
||||||
|
};
|
||||||
|
const result = synthesizeReviews([r1, r2]);
|
||||||
|
expect(result.concerns).toEqual(['shared concern']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('includes original reviews', () => {
|
||||||
|
const reviews = [makeReview('CEO', 'approve', 0.8)];
|
||||||
|
const result = synthesizeReviews(reviews);
|
||||||
|
expect(result.reviews).toEqual(reviews);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('handles empty reviews', () => {
|
||||||
|
const result = synthesizeReviews([]);
|
||||||
|
expect(result.verdict).toBe('approve');
|
||||||
|
expect(result.confidence).toBe(0);
|
||||||
|
});
|
||||||
|
});
|
||||||
131
packages/forge/__tests__/brief-classifier.test.ts
Normal file
131
packages/forge/__tests__/brief-classifier.test.ts
Normal file
@@ -0,0 +1,131 @@
|
|||||||
|
import { describe, it, expect } from 'vitest';
|
||||||
|
|
||||||
|
import {
|
||||||
|
classifyBrief,
|
||||||
|
parseBriefFrontmatter,
|
||||||
|
determineBriefClass,
|
||||||
|
stagesForClass,
|
||||||
|
} from '../src/brief-classifier.js';
|
||||||
|
|
||||||
|
describe('classifyBrief', () => {
|
||||||
|
it('returns strategic when strategic keywords dominate', () => {
|
||||||
|
expect(classifyBrief('We need a new security architecture for compliance')).toBe('strategic');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns technical when technical keywords are present and dominate', () => {
|
||||||
|
expect(classifyBrief('Fix the bugfix for CSS lint cleanup')).toBe('technical');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns strategic when no keywords match (default)', () => {
|
||||||
|
expect(classifyBrief('Implement a new notification system')).toBe('strategic');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns strategic when strategic and technical are tied', () => {
|
||||||
|
// 1 strategic (security) + 1 technical (bug) = strategic wins on > check
|
||||||
|
expect(classifyBrief('security bug')).toBe('technical');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns strategic for empty text', () => {
|
||||||
|
expect(classifyBrief('')).toBe('strategic');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('is case-insensitive', () => {
|
||||||
|
expect(classifyBrief('MIGRATION and COMPLIANCE strategy')).toBe('strategic');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('parseBriefFrontmatter', () => {
|
||||||
|
it('parses simple key-value frontmatter', () => {
|
||||||
|
const text = '---\nclass: technical\ntitle: My Brief\n---\n\n# Body';
|
||||||
|
const fm = parseBriefFrontmatter(text);
|
||||||
|
expect(fm).toEqual({ class: 'technical', title: 'My Brief' });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('strips quotes from values', () => {
|
||||||
|
const text = '---\nclass: "hotfix"\ntitle: \'Test\'\n---\n\n# Body';
|
||||||
|
const fm = parseBriefFrontmatter(text);
|
||||||
|
expect(fm['class']).toBe('hotfix');
|
||||||
|
expect(fm['title']).toBe('Test');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty object when no frontmatter', () => {
|
||||||
|
expect(parseBriefFrontmatter('# Just a heading')).toEqual({});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty object for malformed frontmatter', () => {
|
||||||
|
expect(parseBriefFrontmatter('---\n---\n')).toEqual({});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('determineBriefClass', () => {
|
||||||
|
it('CLI flag takes priority', () => {
|
||||||
|
const result = determineBriefClass('security migration', 'hotfix');
|
||||||
|
expect(result).toEqual({ briefClass: 'hotfix', classSource: 'cli' });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('frontmatter takes priority over auto', () => {
|
||||||
|
const text = '---\nclass: technical\n---\n\nSecurity architecture compliance';
|
||||||
|
const result = determineBriefClass(text);
|
||||||
|
expect(result).toEqual({ briefClass: 'technical', classSource: 'frontmatter' });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('falls back to auto-classify', () => {
|
||||||
|
const result = determineBriefClass('We need a migration plan');
|
||||||
|
expect(result).toEqual({ briefClass: 'strategic', classSource: 'auto' });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('ignores invalid CLI class', () => {
|
||||||
|
const result = determineBriefClass('bugfix cleanup', 'invalid');
|
||||||
|
expect(result).toEqual({ briefClass: 'technical', classSource: 'auto' });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('ignores invalid frontmatter class', () => {
|
||||||
|
const text = '---\nclass: banana\n---\n\nbugfix';
|
||||||
|
const result = determineBriefClass(text);
|
||||||
|
expect(result).toEqual({ briefClass: 'technical', classSource: 'auto' });
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('stagesForClass', () => {
|
||||||
|
it('strategic includes all stages including board', () => {
|
||||||
|
const stages = stagesForClass('strategic');
|
||||||
|
expect(stages).toContain('01-board');
|
||||||
|
expect(stages).toContain('01b-brief-analyzer');
|
||||||
|
expect(stages).toContain('00-intake');
|
||||||
|
expect(stages).toContain('09-deploy');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('technical skips board', () => {
|
||||||
|
const stages = stagesForClass('technical');
|
||||||
|
expect(stages).not.toContain('01-board');
|
||||||
|
expect(stages).toContain('01b-brief-analyzer');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('hotfix skips board and brief analyzer', () => {
|
||||||
|
const stages = stagesForClass('hotfix');
|
||||||
|
expect(stages).not.toContain('01-board');
|
||||||
|
expect(stages).not.toContain('01b-brief-analyzer');
|
||||||
|
expect(stages).toContain('05-coding');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('forceBoard adds board back for technical', () => {
|
||||||
|
const stages = stagesForClass('technical', true);
|
||||||
|
expect(stages).toContain('01-board');
|
||||||
|
expect(stages).toContain('01b-brief-analyzer');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('forceBoard adds board back for hotfix', () => {
|
||||||
|
const stages = stagesForClass('hotfix', true);
|
||||||
|
expect(stages).toContain('01-board');
|
||||||
|
expect(stages).toContain('01b-brief-analyzer');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('stages are in canonical order', () => {
|
||||||
|
const stages = stagesForClass('strategic');
|
||||||
|
for (let i = 1; i < stages.length; i++) {
|
||||||
|
const prevIdx = stages.indexOf(stages[i - 1]!);
|
||||||
|
const currIdx = stages.indexOf(stages[i]!);
|
||||||
|
expect(prevIdx).toBeLessThan(currIdx);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
196
packages/forge/__tests__/persona-loader.test.ts
Normal file
196
packages/forge/__tests__/persona-loader.test.ts
Normal file
@@ -0,0 +1,196 @@
|
|||||||
|
import fs from 'node:fs';
|
||||||
|
import os from 'node:os';
|
||||||
|
import path from 'node:path';
|
||||||
|
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||||
|
|
||||||
|
import {
|
||||||
|
slugify,
|
||||||
|
personaNameFromMarkdown,
|
||||||
|
loadBoardPersonas,
|
||||||
|
loadPersonaOverrides,
|
||||||
|
loadForgeConfig,
|
||||||
|
getEffectivePersonas,
|
||||||
|
} from '../src/persona-loader.js';
|
||||||
|
|
||||||
|
describe('slugify', () => {
|
||||||
|
it('converts to lowercase and replaces non-alphanumeric with hyphens', () => {
|
||||||
|
expect(slugify('Chief Executive Officer')).toBe('chief-executive-officer');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('strips leading and trailing hyphens', () => {
|
||||||
|
expect(slugify('--hello--')).toBe('hello');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns "persona" for empty string', () => {
|
||||||
|
expect(slugify('')).toBe('persona');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('handles special characters', () => {
|
||||||
|
expect(slugify('CTO — Technical')).toBe('cto-technical');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('personaNameFromMarkdown', () => {
|
||||||
|
it('extracts name from heading', () => {
|
||||||
|
expect(personaNameFromMarkdown('# CEO — Chief Executive Officer', 'FALLBACK')).toBe('CEO');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('strips markdown heading markers', () => {
|
||||||
|
expect(personaNameFromMarkdown('## CTO - Technical Lead', 'FALLBACK')).toBe('CTO');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns fallback for empty content', () => {
|
||||||
|
expect(personaNameFromMarkdown('', 'FALLBACK')).toBe('FALLBACK');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns full heading if no separator', () => {
|
||||||
|
expect(personaNameFromMarkdown('# SimpleTitle', 'FALLBACK')).toBe('SimpleTitle');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('loadBoardPersonas', () => {
|
||||||
|
let tmpDir: string;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'forge-personas-'));
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty array for non-existent directory', () => {
|
||||||
|
expect(loadBoardPersonas('/nonexistent')).toEqual([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('loads personas from markdown files', () => {
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(tmpDir, 'ceo.md'),
|
||||||
|
'# CEO — Visionary Leader\n\nThe CEO sets direction.',
|
||||||
|
);
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(tmpDir, 'cto.md'),
|
||||||
|
'# CTO — Technical Realist\n\nThe CTO evaluates feasibility.',
|
||||||
|
);
|
||||||
|
|
||||||
|
const personas = loadBoardPersonas(tmpDir);
|
||||||
|
expect(personas).toHaveLength(2);
|
||||||
|
expect(personas[0]!.name).toBe('CEO');
|
||||||
|
expect(personas[0]!.slug).toBe('ceo');
|
||||||
|
expect(personas[1]!.name).toBe('CTO');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('sorts by filename', () => {
|
||||||
|
fs.writeFileSync(path.join(tmpDir, 'z-last.md'), '# Z Last');
|
||||||
|
fs.writeFileSync(path.join(tmpDir, 'a-first.md'), '# A First');
|
||||||
|
|
||||||
|
const personas = loadBoardPersonas(tmpDir);
|
||||||
|
expect(personas[0]!.slug).toBe('a-first');
|
||||||
|
expect(personas[1]!.slug).toBe('z-last');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('ignores non-markdown files', () => {
|
||||||
|
fs.writeFileSync(path.join(tmpDir, 'notes.txt'), 'not a persona');
|
||||||
|
fs.writeFileSync(path.join(tmpDir, 'ceo.md'), '# CEO');
|
||||||
|
|
||||||
|
const personas = loadBoardPersonas(tmpDir);
|
||||||
|
expect(personas).toHaveLength(1);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('loadPersonaOverrides', () => {
|
||||||
|
let tmpDir: string;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'forge-overrides-'));
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty object when .forge/personas/ does not exist', () => {
|
||||||
|
expect(loadPersonaOverrides(tmpDir)).toEqual({});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('loads override files', () => {
|
||||||
|
const overridesDir = path.join(tmpDir, '.forge', 'personas');
|
||||||
|
fs.mkdirSync(overridesDir, { recursive: true });
|
||||||
|
fs.writeFileSync(path.join(overridesDir, 'ceo.md'), 'Additional CEO context');
|
||||||
|
|
||||||
|
const overrides = loadPersonaOverrides(tmpDir);
|
||||||
|
expect(overrides['ceo']).toBe('Additional CEO context');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('loadForgeConfig', () => {
|
||||||
|
let tmpDir: string;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'forge-config-'));
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty config when file does not exist', () => {
|
||||||
|
expect(loadForgeConfig(tmpDir)).toEqual({});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('parses board skipMembers', () => {
|
||||||
|
const configDir = path.join(tmpDir, '.forge');
|
||||||
|
fs.mkdirSync(configDir, { recursive: true });
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(configDir, 'config.yaml'),
|
||||||
|
'board:\n skipMembers:\n - cfo\n - coo\n',
|
||||||
|
);
|
||||||
|
|
||||||
|
const config = loadForgeConfig(tmpDir);
|
||||||
|
expect(config.board?.skipMembers).toEqual(['cfo', 'coo']);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('getEffectivePersonas', () => {
|
||||||
|
let tmpDir: string;
|
||||||
|
let boardDir: string;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'forge-effective-'));
|
||||||
|
boardDir = path.join(tmpDir, 'board-agents');
|
||||||
|
fs.mkdirSync(boardDir, { recursive: true });
|
||||||
|
fs.writeFileSync(path.join(boardDir, 'ceo.md'), '# CEO — Visionary');
|
||||||
|
fs.writeFileSync(path.join(boardDir, 'cto.md'), '# CTO — Technical');
|
||||||
|
fs.writeFileSync(path.join(boardDir, 'cfo.md'), '# CFO — Financial');
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns all personas with no overrides or config', () => {
|
||||||
|
const personas = getEffectivePersonas(tmpDir, boardDir);
|
||||||
|
expect(personas).toHaveLength(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('appends project overrides to base description', () => {
|
||||||
|
const overridesDir = path.join(tmpDir, '.forge', 'personas');
|
||||||
|
fs.mkdirSync(overridesDir, { recursive: true });
|
||||||
|
fs.writeFileSync(path.join(overridesDir, 'ceo.md'), 'Focus on AI strategy');
|
||||||
|
|
||||||
|
const personas = getEffectivePersonas(tmpDir, boardDir);
|
||||||
|
const ceo = personas.find((p) => p.slug === 'ceo')!;
|
||||||
|
expect(ceo.description).toContain('# CEO — Visionary');
|
||||||
|
expect(ceo.description).toContain('Focus on AI strategy');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('removes skipped members via config', () => {
|
||||||
|
const configDir = path.join(tmpDir, '.forge');
|
||||||
|
fs.mkdirSync(configDir, { recursive: true });
|
||||||
|
fs.writeFileSync(path.join(configDir, 'config.yaml'), 'board:\n skipMembers:\n - cfo\n');
|
||||||
|
|
||||||
|
const personas = getEffectivePersonas(tmpDir, boardDir);
|
||||||
|
expect(personas).toHaveLength(2);
|
||||||
|
expect(personas.find((p) => p.slug === 'cfo')).toBeUndefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
331
packages/forge/__tests__/pipeline-runner.test.ts
Normal file
331
packages/forge/__tests__/pipeline-runner.test.ts
Normal file
@@ -0,0 +1,331 @@
|
|||||||
|
import fs from 'node:fs';
|
||||||
|
import os from 'node:os';
|
||||||
|
import path from 'node:path';
|
||||||
|
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||||
|
|
||||||
|
import {
|
||||||
|
generateRunId,
|
||||||
|
selectStages,
|
||||||
|
saveManifest,
|
||||||
|
loadManifest,
|
||||||
|
runPipeline,
|
||||||
|
resumePipeline,
|
||||||
|
getPipelineStatus,
|
||||||
|
} from '../src/pipeline-runner.js';
|
||||||
|
import type { ForgeTask, RunManifest, TaskExecutor } from '../src/types.js';
|
||||||
|
import type { TaskResult } from '@mosaic/macp';
|
||||||
|
|
||||||
|
/** Mock TaskExecutor that records submitted tasks and returns success. */
|
||||||
|
function createMockExecutor(options?: {
|
||||||
|
failStage?: string;
|
||||||
|
}): TaskExecutor & { submittedTasks: ForgeTask[] } {
|
||||||
|
const submittedTasks: ForgeTask[] = [];
|
||||||
|
return {
|
||||||
|
submittedTasks,
|
||||||
|
async submitTask(task: ForgeTask) {
|
||||||
|
submittedTasks.push(task);
|
||||||
|
},
|
||||||
|
async waitForCompletion(taskId: string): Promise<TaskResult> {
|
||||||
|
const failStage = options?.failStage;
|
||||||
|
const task = submittedTasks.find((t) => t.id === taskId);
|
||||||
|
const stageName = task?.metadata?.['stageName'] as string | undefined;
|
||||||
|
|
||||||
|
if (failStage && stageName === failStage) {
|
||||||
|
return {
|
||||||
|
task_id: taskId,
|
||||||
|
status: 'failed',
|
||||||
|
completed_at: new Date().toISOString(),
|
||||||
|
exit_code: 1,
|
||||||
|
gate_results: [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
task_id: taskId,
|
||||||
|
status: 'completed',
|
||||||
|
completed_at: new Date().toISOString(),
|
||||||
|
exit_code: 0,
|
||||||
|
gate_results: [],
|
||||||
|
};
|
||||||
|
},
|
||||||
|
async getTaskStatus() {
|
||||||
|
return 'completed' as const;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('generateRunId', () => {
|
||||||
|
it('returns a timestamp string', () => {
|
||||||
|
const id = generateRunId();
|
||||||
|
expect(id).toMatch(/^\d{8}-\d{6}$/);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns unique IDs', () => {
|
||||||
|
const ids = new Set(Array.from({ length: 10 }, generateRunId));
|
||||||
|
// Given they run in the same second, they should at least be consistent format
|
||||||
|
expect(ids.size).toBeGreaterThanOrEqual(1);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('selectStages', () => {
|
||||||
|
it('returns full sequence when no args', () => {
|
||||||
|
const stages = selectStages();
|
||||||
|
expect(stages.length).toBeGreaterThan(0);
|
||||||
|
expect(stages[0]).toBe('00-intake');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns provided stages', () => {
|
||||||
|
const stages = selectStages(['00-intake', '05-coding']);
|
||||||
|
expect(stages).toEqual(['00-intake', '05-coding']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('throws for unknown stages', () => {
|
||||||
|
expect(() => selectStages(['unknown'])).toThrow('Unknown Forge stages');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('skips to specified stage', () => {
|
||||||
|
const stages = selectStages(undefined, '05-coding');
|
||||||
|
expect(stages[0]).toBe('05-coding');
|
||||||
|
expect(stages).not.toContain('00-intake');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('throws if skipTo not in selected stages', () => {
|
||||||
|
expect(() => selectStages(['00-intake'], '05-coding')).toThrow(
|
||||||
|
"skip_to stage '05-coding' is not present",
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('manifest operations', () => {
|
||||||
|
let tmpDir: string;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'forge-manifest-'));
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('saveManifest and loadManifest roundtrip', () => {
|
||||||
|
const manifest: RunManifest = {
|
||||||
|
runId: 'test-123',
|
||||||
|
brief: '/path/to/brief.md',
|
||||||
|
codebase: '/project',
|
||||||
|
briefClass: 'strategic',
|
||||||
|
classSource: 'auto',
|
||||||
|
forceBoard: false,
|
||||||
|
createdAt: '2026-01-01T00:00:00Z',
|
||||||
|
updatedAt: '2026-01-01T00:00:00Z',
|
||||||
|
currentStage: '00-intake',
|
||||||
|
status: 'in_progress',
|
||||||
|
stages: {
|
||||||
|
'00-intake': { status: 'passed', startedAt: '2026-01-01T00:00:00Z' },
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
saveManifest(tmpDir, manifest);
|
||||||
|
const loaded = loadManifest(tmpDir);
|
||||||
|
expect(loaded.runId).toBe('test-123');
|
||||||
|
expect(loaded.briefClass).toBe('strategic');
|
||||||
|
expect(loaded.stages['00-intake']?.status).toBe('passed');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('loadManifest throws for missing file', () => {
|
||||||
|
expect(() => loadManifest('/nonexistent')).toThrow('manifest.json not found');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('runPipeline', () => {
|
||||||
|
let tmpDir: string;
|
||||||
|
let briefPath: string;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'forge-pipeline-'));
|
||||||
|
briefPath = path.join(tmpDir, 'test-brief.md');
|
||||||
|
fs.writeFileSync(
|
||||||
|
briefPath,
|
||||||
|
'---\nclass: hotfix\n---\n\n# Fix CSS bug\n\nFix the bugfix for lint cleanup.',
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('runs pipeline to completion with mock executor', async () => {
|
||||||
|
const executor = createMockExecutor();
|
||||||
|
const result = await runPipeline(briefPath, tmpDir, {
|
||||||
|
executor,
|
||||||
|
stages: ['00-intake', '00b-discovery'],
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.runId).toMatch(/^\d{8}-\d{6}$/);
|
||||||
|
expect(result.stages).toEqual(['00-intake', '00b-discovery']);
|
||||||
|
expect(result.manifest.status).toBe('completed');
|
||||||
|
expect(executor.submittedTasks).toHaveLength(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creates run directory under .forge/runs/', async () => {
|
||||||
|
const executor = createMockExecutor();
|
||||||
|
const result = await runPipeline(briefPath, tmpDir, {
|
||||||
|
executor,
|
||||||
|
stages: ['00-intake'],
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.runDir).toContain(path.join('.forge', 'runs'));
|
||||||
|
expect(fs.existsSync(result.runDir)).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('writes manifest with stage statuses', async () => {
|
||||||
|
const executor = createMockExecutor();
|
||||||
|
const result = await runPipeline(briefPath, tmpDir, {
|
||||||
|
executor,
|
||||||
|
stages: ['00-intake', '00b-discovery'],
|
||||||
|
});
|
||||||
|
|
||||||
|
const manifest = loadManifest(result.runDir);
|
||||||
|
expect(manifest.stages['00-intake']?.status).toBe('passed');
|
||||||
|
expect(manifest.stages['00b-discovery']?.status).toBe('passed');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('respects CLI class override', async () => {
|
||||||
|
const executor = createMockExecutor();
|
||||||
|
const result = await runPipeline(briefPath, tmpDir, {
|
||||||
|
executor,
|
||||||
|
briefClass: 'strategic',
|
||||||
|
stages: ['00-intake'],
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.manifest.briefClass).toBe('strategic');
|
||||||
|
expect(result.manifest.classSource).toBe('cli');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('uses frontmatter class', async () => {
|
||||||
|
const executor = createMockExecutor();
|
||||||
|
const result = await runPipeline(briefPath, tmpDir, {
|
||||||
|
executor,
|
||||||
|
stages: ['00-intake'],
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.manifest.briefClass).toBe('hotfix');
|
||||||
|
expect(result.manifest.classSource).toBe('frontmatter');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('builds dependency chain between tasks', async () => {
|
||||||
|
const executor = createMockExecutor();
|
||||||
|
await runPipeline(briefPath, tmpDir, {
|
||||||
|
executor,
|
||||||
|
stages: ['00-intake', '00b-discovery', '02-planning-1'],
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(executor.submittedTasks[0]!.dependsOn).toBeUndefined();
|
||||||
|
expect(executor.submittedTasks[1]!.dependsOn).toEqual([executor.submittedTasks[0]!.id]);
|
||||||
|
expect(executor.submittedTasks[2]!.dependsOn).toEqual([executor.submittedTasks[1]!.id]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('handles stage failure', async () => {
|
||||||
|
const executor = createMockExecutor({ failStage: '00b-discovery' });
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
runPipeline(briefPath, tmpDir, {
|
||||||
|
executor,
|
||||||
|
stages: ['00-intake', '00b-discovery'],
|
||||||
|
}),
|
||||||
|
).rejects.toThrow('Stage 00b-discovery failed');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('marks manifest as failed on stage failure', async () => {
|
||||||
|
const executor = createMockExecutor({ failStage: '00-intake' });
|
||||||
|
|
||||||
|
try {
|
||||||
|
await runPipeline(briefPath, tmpDir, {
|
||||||
|
executor,
|
||||||
|
stages: ['00-intake'],
|
||||||
|
});
|
||||||
|
} catch {
|
||||||
|
// expected
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find the run dir (we don't have it from the failed result)
|
||||||
|
const runsDir = path.join(tmpDir, '.forge', 'runs');
|
||||||
|
const runDirs = fs.readdirSync(runsDir);
|
||||||
|
expect(runDirs).toHaveLength(1);
|
||||||
|
const manifest = loadManifest(path.join(runsDir, runDirs[0]!));
|
||||||
|
expect(manifest.status).toBe('failed');
|
||||||
|
expect(manifest.stages['00-intake']?.status).toBe('failed');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('resumePipeline', () => {
|
||||||
|
let tmpDir: string;
|
||||||
|
let briefPath: string;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'forge-resume-'));
|
||||||
|
briefPath = path.join(tmpDir, 'brief.md');
|
||||||
|
fs.writeFileSync(briefPath, '---\nclass: hotfix\n---\n\n# Fix bug');
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('resumes from first incomplete stage', async () => {
|
||||||
|
// First run fails on discovery
|
||||||
|
const executor1 = createMockExecutor({ failStage: '00b-discovery' });
|
||||||
|
let runDir: string;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await runPipeline(briefPath, tmpDir, {
|
||||||
|
executor: executor1,
|
||||||
|
stages: ['00-intake', '00b-discovery', '02-planning-1'],
|
||||||
|
});
|
||||||
|
} catch {
|
||||||
|
// expected
|
||||||
|
}
|
||||||
|
|
||||||
|
const runsDir = path.join(tmpDir, '.forge', 'runs');
|
||||||
|
runDir = path.join(runsDir, fs.readdirSync(runsDir)[0]!);
|
||||||
|
|
||||||
|
// Resume should pick up from 00b-discovery
|
||||||
|
const executor2 = createMockExecutor();
|
||||||
|
const result = await resumePipeline(runDir, executor2);
|
||||||
|
|
||||||
|
expect(result.manifest.status).toBe('completed');
|
||||||
|
// Should have re-run from 00b-discovery onward
|
||||||
|
expect(result.stages[0]).toBe('00b-discovery');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('getPipelineStatus', () => {
|
||||||
|
let tmpDir: string;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'forge-status-'));
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns manifest', () => {
|
||||||
|
const manifest: RunManifest = {
|
||||||
|
runId: 'test',
|
||||||
|
brief: '/brief.md',
|
||||||
|
codebase: '',
|
||||||
|
briefClass: 'strategic',
|
||||||
|
classSource: 'auto',
|
||||||
|
forceBoard: false,
|
||||||
|
createdAt: '2026-01-01T00:00:00Z',
|
||||||
|
updatedAt: '2026-01-01T00:00:00Z',
|
||||||
|
currentStage: '00-intake',
|
||||||
|
status: 'in_progress',
|
||||||
|
stages: {},
|
||||||
|
};
|
||||||
|
saveManifest(tmpDir, manifest);
|
||||||
|
|
||||||
|
const status = getPipelineStatus(tmpDir);
|
||||||
|
expect(status.runId).toBe('test');
|
||||||
|
expect(status.status).toBe('in_progress');
|
||||||
|
});
|
||||||
|
});
|
||||||
172
packages/forge/__tests__/stage-adapter.test.ts
Normal file
172
packages/forge/__tests__/stage-adapter.test.ts
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
import fs from 'node:fs';
|
||||||
|
import os from 'node:os';
|
||||||
|
import path from 'node:path';
|
||||||
|
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||||
|
|
||||||
|
import {
|
||||||
|
stageTaskId,
|
||||||
|
stageDir,
|
||||||
|
stageBriefPath,
|
||||||
|
stageResultPath,
|
||||||
|
buildStageBrief,
|
||||||
|
mapStageToTask,
|
||||||
|
} from '../src/stage-adapter.js';
|
||||||
|
import { STAGE_SEQUENCE, STAGE_SPECS } from '../src/constants.js';
|
||||||
|
|
||||||
|
describe('stageTaskId', () => {
|
||||||
|
it('generates correct task ID', () => {
|
||||||
|
expect(stageTaskId('20260330-120000', '00-intake')).toBe('FORGE-20260330-120000-00');
|
||||||
|
expect(stageTaskId('20260330-120000', '05-coding')).toBe('FORGE-20260330-120000-05');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('throws for unknown stage', () => {
|
||||||
|
expect(() => stageTaskId('run1', 'unknown-stage')).toThrow('Unknown Forge stage');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('stageDir', () => {
|
||||||
|
it('returns correct directory path', () => {
|
||||||
|
expect(stageDir('/runs/abc', '00-intake')).toBe('/runs/abc/00-intake');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('stageBriefPath', () => {
|
||||||
|
it('returns brief.md inside stage directory', () => {
|
||||||
|
expect(stageBriefPath('/runs/abc', '00-intake')).toBe('/runs/abc/00-intake/brief.md');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('stageResultPath', () => {
|
||||||
|
it('returns result.json inside stage directory', () => {
|
||||||
|
expect(stageResultPath('/runs/abc', '05-coding')).toBe('/runs/abc/05-coding/result.json');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('buildStageBrief', () => {
|
||||||
|
it('includes all sections', () => {
|
||||||
|
const brief = buildStageBrief({
|
||||||
|
stageName: '00-intake',
|
||||||
|
stagePrompt: 'Parse the brief into structured data.',
|
||||||
|
briefContent: '# My Brief\n\nImplement feature X.',
|
||||||
|
projectRoot: '/project',
|
||||||
|
runId: 'abc',
|
||||||
|
runDir: '/runs/abc',
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(brief).toContain('# Forge Pipeline Stage: 00-intake');
|
||||||
|
expect(brief).toContain('Run ID: abc');
|
||||||
|
expect(brief).toContain('Project Root: /project');
|
||||||
|
expect(brief).toContain('# My Brief');
|
||||||
|
expect(brief).toContain('Implement feature X.');
|
||||||
|
expect(brief).toContain('Parse the brief into structured data.');
|
||||||
|
expect(brief).toContain('/runs/abc/');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('mapStageToTask', () => {
|
||||||
|
let tmpDir: string;
|
||||||
|
let runDir: string;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'forge-stage-adapter-'));
|
||||||
|
runDir = path.join(tmpDir, 'runs', 'test-run');
|
||||||
|
fs.mkdirSync(runDir, { recursive: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
fs.rmSync(tmpDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('maps intake stage correctly', () => {
|
||||||
|
const task = mapStageToTask({
|
||||||
|
stageName: '00-intake',
|
||||||
|
briefContent: '# Test Brief',
|
||||||
|
projectRoot: tmpDir,
|
||||||
|
runId: 'test-run',
|
||||||
|
runDir,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(task.id).toBe('FORGE-test-run-00');
|
||||||
|
expect(task.title).toBe('Forge Intake');
|
||||||
|
expect(task.status).toBe('pending');
|
||||||
|
expect(task.dispatch).toBe('exec');
|
||||||
|
expect(task.type).toBe('research');
|
||||||
|
expect(task.timeoutSeconds).toBe(120);
|
||||||
|
expect(task.qualityGates).toEqual([]);
|
||||||
|
expect(task.dependsOn).toBeUndefined(); // First stage has no deps
|
||||||
|
expect(task.worktree).toBe(path.resolve(tmpDir));
|
||||||
|
});
|
||||||
|
|
||||||
|
it('writes brief to disk', () => {
|
||||||
|
mapStageToTask({
|
||||||
|
stageName: '00-intake',
|
||||||
|
briefContent: '# Test Brief',
|
||||||
|
projectRoot: tmpDir,
|
||||||
|
runId: 'test-run',
|
||||||
|
runDir,
|
||||||
|
});
|
||||||
|
|
||||||
|
const briefPath = path.join(runDir, '00-intake', 'brief.md');
|
||||||
|
expect(fs.existsSync(briefPath)).toBe(true);
|
||||||
|
const content = fs.readFileSync(briefPath, 'utf-8');
|
||||||
|
expect(content).toContain('# Test Brief');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('sets depends_on for non-first stages', () => {
|
||||||
|
const task = mapStageToTask({
|
||||||
|
stageName: '00b-discovery',
|
||||||
|
briefContent: '# Test',
|
||||||
|
projectRoot: tmpDir,
|
||||||
|
runId: 'test-run',
|
||||||
|
runDir,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(task.dependsOn).toEqual(['FORGE-test-run-00']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('includes metadata with stage info', () => {
|
||||||
|
const task = mapStageToTask({
|
||||||
|
stageName: '05-coding',
|
||||||
|
briefContent: '# Test',
|
||||||
|
projectRoot: tmpDir,
|
||||||
|
runId: 'test-run',
|
||||||
|
runDir,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(task.metadata['stageName']).toBe('05-coding');
|
||||||
|
expect(task.metadata['stageNumber']).toBe('05');
|
||||||
|
expect(task.metadata['gate']).toBe('lint-build-test');
|
||||||
|
expect(task.metadata['runId']).toBe('test-run');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('yolo dispatch does not set worktree', () => {
|
||||||
|
const task = mapStageToTask({
|
||||||
|
stageName: '05-coding',
|
||||||
|
briefContent: '# Test',
|
||||||
|
projectRoot: tmpDir,
|
||||||
|
runId: 'test-run',
|
||||||
|
runDir,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(task.dispatch).toBe('yolo');
|
||||||
|
expect(task.worktree).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('throws for unknown stage', () => {
|
||||||
|
expect(() =>
|
||||||
|
mapStageToTask({
|
||||||
|
stageName: 'unknown',
|
||||||
|
briefContent: 'test',
|
||||||
|
projectRoot: tmpDir,
|
||||||
|
runId: 'r1',
|
||||||
|
runDir,
|
||||||
|
}),
|
||||||
|
).toThrow('Unknown Forge stage');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('all stages in STAGE_SEQUENCE have specs', () => {
|
||||||
|
for (const stage of STAGE_SEQUENCE) {
|
||||||
|
expect(STAGE_SPECS[stage]).toBeDefined();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
74
packages/forge/briefs/mordor-coffee-shop.md
Normal file
74
packages/forge/briefs/mordor-coffee-shop.md
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
# Brief: Mordor Coffee Shop — Full Business Launch
|
||||||
|
|
||||||
|
## Source
|
||||||
|
|
||||||
|
New business venture — Jason Woltje / Diverse Canvas LLC
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
Launch "Mordor Coffee Shop" as a complete business with web presence, branding, and operational infrastructure. This is a full-stack business formation covering:
|
||||||
|
|
||||||
|
### 1. Business Formation
|
||||||
|
|
||||||
|
- Business entity structure (under Diverse Canvas LLC or standalone?)
|
||||||
|
- Brand identity: name, tagline, logo concepts, color palette
|
||||||
|
- LOTR-themed coffee shop concept (dark roast specialty, volcanic imagery, "One does not simply walk past our coffee")
|
||||||
|
|
||||||
|
### 2. Website Design & Development
|
||||||
|
|
||||||
|
- Marketing site at mordor.woltje.com
|
||||||
|
- Tech stack decision (static site generator vs full app)
|
||||||
|
- Pages: Home, Menu, About, Contact, Online Ordering (future)
|
||||||
|
- Mobile-responsive design
|
||||||
|
- SEO fundamentals
|
||||||
|
- Dark/dramatic aesthetic fitting the Mordor theme
|
||||||
|
|
||||||
|
### 3. Deployment & Infrastructure
|
||||||
|
|
||||||
|
- Hosted on existing Portainer/Docker Swarm instance (w-docker0, 10.1.1.45)
|
||||||
|
- Traefik reverse proxy for TLS/routing
|
||||||
|
- CI/CD via Woodpecker (git.mosaicstack.dev)
|
||||||
|
- Domain: mordor.woltje.com (DNS via existing infrastructure)
|
||||||
|
|
||||||
|
### 4. Social Media Strategy
|
||||||
|
|
||||||
|
- Platform selection (Instagram, TikTok, X, Facebook — which ones and why)
|
||||||
|
- Content strategy and posting cadence
|
||||||
|
- Brand voice guide
|
||||||
|
- Launch campaign plan
|
||||||
|
|
||||||
|
### 5. Business Strategy
|
||||||
|
|
||||||
|
- Target market analysis
|
||||||
|
- Revenue model (physical location? online only? merch? subscription coffee?)
|
||||||
|
- Competitive positioning
|
||||||
|
- 6-month launch roadmap
|
||||||
|
- Exit strategy options
|
||||||
|
|
||||||
|
## Success Criteria
|
||||||
|
|
||||||
|
1. Business strategy document with clear go-to-market plan
|
||||||
|
2. Brand guide (colors, fonts, voice, logo direction)
|
||||||
|
3. Website live at mordor.woltje.com with at least Home + Menu + About pages
|
||||||
|
4. Social media accounts strategy document
|
||||||
|
5. Docker stack deployed via Portainer with health checks
|
||||||
|
6. CI/CD pipeline pushing from Gitea to production
|
||||||
|
7. Exit strategy documented
|
||||||
|
|
||||||
|
## Technical Constraints
|
||||||
|
|
||||||
|
- Must run on existing Docker Swarm infrastructure (w-docker0)
|
||||||
|
- Traefik handles TLS termination and routing
|
||||||
|
- Woodpecker CI for build/deploy pipeline
|
||||||
|
- Git repo on git.mosaicstack.dev
|
||||||
|
- Budget: minimal — use open source tools, no paid SaaS dependencies
|
||||||
|
|
||||||
|
## Estimated Complexity
|
||||||
|
|
||||||
|
High — crosses business strategy, design, development, DevOps, and marketing domains
|
||||||
|
|
||||||
|
## Dependencies
|
||||||
|
|
||||||
|
- DNS record for mordor.woltje.com (Jason to configure)
|
||||||
|
- Portainer access (existing credentials)
|
||||||
|
- Gitea repo creation
|
||||||
30
packages/forge/examples/sample-brief.md
Normal file
30
packages/forge/examples/sample-brief.md
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
---
|
||||||
|
class: technical
|
||||||
|
---
|
||||||
|
|
||||||
|
# Brief: Add User Preferences API Endpoint
|
||||||
|
|
||||||
|
## Source PRD
|
||||||
|
|
||||||
|
mosaic-stack PRD — Mission Control Dashboard
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
Add a REST endpoint for storing and retrieving user dashboard preferences (layout, theme, sidebar state). This enables the Mission Control dashboard to persist user customization.
|
||||||
|
|
||||||
|
## Success Criteria
|
||||||
|
|
||||||
|
1. GET /api/users/:id/preferences returns stored preferences (JSON)
|
||||||
|
2. PUT /api/users/:id/preferences stores/updates preferences
|
||||||
|
3. Preferences persist across sessions
|
||||||
|
4. Default preferences returned for users with no stored preferences
|
||||||
|
5. Only the authenticated user can read/write their own preferences
|
||||||
|
|
||||||
|
## Estimated Complexity
|
||||||
|
|
||||||
|
Medium — new endpoint, new DB table, auth integration
|
||||||
|
|
||||||
|
## Dependencies
|
||||||
|
|
||||||
|
- Requires existing auth system (JWT guards)
|
||||||
|
- Requires existing user entity in database
|
||||||
41
packages/forge/package.json
Normal file
41
packages/forge/package.json
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
{
|
||||||
|
"name": "@mosaic/forge",
|
||||||
|
"version": "0.0.2",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://git.mosaicstack.dev/mosaic/mosaic-stack.git",
|
||||||
|
"directory": "packages/forge"
|
||||||
|
},
|
||||||
|
"type": "module",
|
||||||
|
"main": "dist/index.js",
|
||||||
|
"types": "dist/index.d.ts",
|
||||||
|
"exports": {
|
||||||
|
".": {
|
||||||
|
"types": "./dist/index.d.ts",
|
||||||
|
"default": "./dist/index.js"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist",
|
||||||
|
"pipeline"
|
||||||
|
],
|
||||||
|
"scripts": {
|
||||||
|
"build": "tsc",
|
||||||
|
"lint": "eslint src",
|
||||||
|
"typecheck": "tsc --noEmit",
|
||||||
|
"test": "vitest run --passWithNoTests"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@mosaic/macp": "workspace:*"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^22.0.0",
|
||||||
|
"@vitest/coverage-v8": "^2.0.0",
|
||||||
|
"typescript": "^5.8.0",
|
||||||
|
"vitest": "^2.0.0"
|
||||||
|
},
|
||||||
|
"publishConfig": {
|
||||||
|
"registry": "https://git.mosaicstack.dev/api/packages/mosaic/npm/",
|
||||||
|
"access": "public"
|
||||||
|
}
|
||||||
|
}
|
||||||
52
packages/forge/pipeline/agents/board/ceo.md
Normal file
52
packages/forge/pipeline/agents/board/ceo.md
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
# CEO — Board of Directors
|
||||||
|
|
||||||
|
## Identity
|
||||||
|
|
||||||
|
You are the CEO of this organization. You think in terms of mission, vision, and strategic alignment.
|
||||||
|
|
||||||
|
## Model
|
||||||
|
|
||||||
|
Opus
|
||||||
|
|
||||||
|
## Personality
|
||||||
|
|
||||||
|
- Visionary but grounded
|
||||||
|
- Asks "does this serve the mission?" before anything else
|
||||||
|
- Willing to kill good ideas that don't align with priorities
|
||||||
|
- Respects the CFO's cost concerns but won't let penny-pinching kill strategic bets
|
||||||
|
- Pushes back on the CTO when technical elegance conflicts with business needs
|
||||||
|
|
||||||
|
## In Debates
|
||||||
|
|
||||||
|
- You speak to strategic value, not technical details
|
||||||
|
- You ask: "Who is this for? Why now? What happens if we don't do this?"
|
||||||
|
- You are the tiebreaker when CTO and COO disagree — but you explain your reasoning
|
||||||
|
- You call for synthesis when debate is converging, not before
|
||||||
|
|
||||||
|
## LANE BOUNDARY — CRITICAL
|
||||||
|
|
||||||
|
You are a STRATEGIC voice. You do not make technical decisions.
|
||||||
|
|
||||||
|
### You DO
|
||||||
|
|
||||||
|
- Assess strategic alignment with the mission
|
||||||
|
- Define scope boundaries (what's in, what's explicitly out)
|
||||||
|
- Set priority relative to other work
|
||||||
|
- Assess business risk (not technical risk — that's the CTO's lane)
|
||||||
|
- Make the final go/no-go call
|
||||||
|
|
||||||
|
### You DO NOT
|
||||||
|
|
||||||
|
- Specify technical approaches, schemas, or implementation details
|
||||||
|
- Override the CTO's technical risk assessment (you can weigh it against business value, but don't dismiss it)
|
||||||
|
- Make decisions that belong to the architects or specialists
|
||||||
|
|
||||||
|
## Output Format
|
||||||
|
|
||||||
|
```
|
||||||
|
POSITION: [your stance]
|
||||||
|
REASONING: [why, grounded in mission/strategy]
|
||||||
|
SCOPE BOUNDARY: [what's in and what's explicitly out]
|
||||||
|
RISKS: [business/strategic risks only]
|
||||||
|
VOTE: APPROVE / REJECT / NEEDS REVISION
|
||||||
|
```
|
||||||
53
packages/forge/pipeline/agents/board/cfo.md
Normal file
53
packages/forge/pipeline/agents/board/cfo.md
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
# CFO — Board of Directors
|
||||||
|
|
||||||
|
## Identity
|
||||||
|
|
||||||
|
You are the CFO. You think in terms of cost, return on investment, and resource efficiency.
|
||||||
|
|
||||||
|
## Model
|
||||||
|
|
||||||
|
Sonnet
|
||||||
|
|
||||||
|
## Personality
|
||||||
|
|
||||||
|
- Analytical and numbers-driven
|
||||||
|
- Asks "what does this cost, what does it return, and when?"
|
||||||
|
- Not a blocker by nature — but will kill projects with bad economics
|
||||||
|
- Considers opportunity cost: "if we spend resources here, what DON'T we build?"
|
||||||
|
- Tracks accumulated costs across pipeline runs — one expensive run is fine, a pattern of waste isn't
|
||||||
|
|
||||||
|
## In Debates
|
||||||
|
|
||||||
|
- You quantify everything you can: estimated agent-rounds, token costs, time-to-value
|
||||||
|
- You ask: "Is this the cheapest way to get the outcome? What's the ROI timeline?"
|
||||||
|
- You flag scope bloat that inflates cost without proportional value
|
||||||
|
- You advocate for phased delivery — ship a smaller version first, validate, then expand
|
||||||
|
|
||||||
|
## LANE BOUNDARY — CRITICAL
|
||||||
|
|
||||||
|
You are a FINANCIAL voice. You assess cost and value, not technical approach.
|
||||||
|
|
||||||
|
### You DO
|
||||||
|
|
||||||
|
- Estimate pipeline cost (agent time, rounds, wall clock)
|
||||||
|
- Assess ROI (direct and indirect)
|
||||||
|
- Calculate opportunity cost (what doesn't get built)
|
||||||
|
- Set cost ceilings and time caps
|
||||||
|
- Advocate for phased delivery to manage risk
|
||||||
|
|
||||||
|
### You DO NOT
|
||||||
|
|
||||||
|
- Recommend technical solutions ("use X instead of Y because it's cheaper")
|
||||||
|
- Assess technical feasibility — that's the CTO's lane
|
||||||
|
- Specify implementation details of any kind
|
||||||
|
|
||||||
|
## Output Format
|
||||||
|
|
||||||
|
```
|
||||||
|
POSITION: [your stance]
|
||||||
|
REASONING: [why, grounded in cost/benefit analysis]
|
||||||
|
COST ESTIMATE: [pipeline cost estimate — agent hours, rounds, dollars]
|
||||||
|
ROI ASSESSMENT: [expected return vs investment]
|
||||||
|
RISKS: [financial risks, budget concerns, opportunity cost]
|
||||||
|
VOTE: APPROVE / REJECT / NEEDS REVISION
|
||||||
|
```
|
||||||
54
packages/forge/pipeline/agents/board/coo.md
Normal file
54
packages/forge/pipeline/agents/board/coo.md
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
# COO — Board of Directors
|
||||||
|
|
||||||
|
## Identity
|
||||||
|
|
||||||
|
You are the COO. You think in terms of operations, timeline, resource allocation, and cross-project conflicts.
|
||||||
|
|
||||||
|
## Model
|
||||||
|
|
||||||
|
Sonnet
|
||||||
|
|
||||||
|
## Personality
|
||||||
|
|
||||||
|
- Operational pragmatist — you care about what actually gets done, not what sounds good
|
||||||
|
- Asks "what's the timeline, who's doing it, and what else gets delayed?"
|
||||||
|
- Tracks resource conflicts across projects — if agents are busy elsewhere, you flag it
|
||||||
|
- Skeptical of parallel execution claims — dependencies always hide
|
||||||
|
- Advocate for clear milestones and checkpoints
|
||||||
|
|
||||||
|
## In Debates
|
||||||
|
|
||||||
|
- You assess resource availability, timeline, and operational impact
|
||||||
|
- You ask: "Do we have the capacity? What's the critical path? What gets bumped?"
|
||||||
|
- You flag when a brief conflicts with active work on other projects
|
||||||
|
- You push for concrete delivery dates, not "when it's done"
|
||||||
|
|
||||||
|
## LANE BOUNDARY — CRITICAL
|
||||||
|
|
||||||
|
You are an OPERATIONAL voice. You schedule and resource, not architect.
|
||||||
|
|
||||||
|
### You DO
|
||||||
|
|
||||||
|
- Assess resource availability (which agents are free, what's in flight)
|
||||||
|
- Estimate timeline (wall clock, not implementation details)
|
||||||
|
- Identify scheduling conflicts with other projects
|
||||||
|
- Recommend serialization vs parallelization based on resource reality
|
||||||
|
- Flag human bandwidth constraints (Jason is one person)
|
||||||
|
|
||||||
|
### You DO NOT
|
||||||
|
|
||||||
|
- Specify technical approaches or implementation details
|
||||||
|
- Recommend specific tools, patterns, or architectures
|
||||||
|
- Override the CTO's complexity estimate with your own technical opinion
|
||||||
|
|
||||||
|
## Output Format
|
||||||
|
|
||||||
|
```
|
||||||
|
POSITION: [your stance]
|
||||||
|
REASONING: [why, grounded in operational reality]
|
||||||
|
TIMELINE ESTIMATE: [wall clock from start to deploy]
|
||||||
|
RESOURCE IMPACT: [agents needed, conflicts with other work]
|
||||||
|
SCHEDULING: [serialize after X / parallel with Y / no conflicts]
|
||||||
|
RISKS: [operational risks, scheduling conflicts, capacity issues]
|
||||||
|
VOTE: APPROVE / REJECT / NEEDS REVISION
|
||||||
|
```
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user