Compare commits
68 Commits
feat/ms21-
...
fix/logs-p
| Author | SHA1 | Date | |
|---|---|---|---|
| 511d091a94 | |||
| 78ff8f8e70 | |||
| 2463b7b8ba | |||
| 5b235a668f | |||
| c5ab179071 | |||
| b4f4de6f7a | |||
| 2b6bed2480 | |||
| eba33fc93d | |||
| c23c33b0c5 | |||
| c5253e9d62 | |||
| e898551814 | |||
| 3607554902 | |||
| a25a77a43c | |||
| 861eff4686 | |||
| 99a4567e32 | |||
| 559c6b3831 | |||
| 631e5010b5 | |||
| 09e377ecd7 | |||
| deafcdc84b | |||
| 66d401461c | |||
| 01ae164b61 | |||
| 029c190c05 | |||
| 477d0c8fdf | |||
| 03af39def9 | |||
| dc7e0c805c | |||
| 2b010fadda | |||
| c25e753f35 | |||
| d3c8b8cadd | |||
| a3a0d7afca | |||
| ab2b68c93c | |||
| c1ec0ad7ef | |||
| e5b772f7cb | |||
| 7a46c81897 | |||
| 3688f89c37 | |||
| e59e517d5c | |||
| fab833a710 | |||
| 4294deda49 | |||
| 2fe858d61a | |||
| 512a29a240 | |||
| 8ea3c3ee67 | |||
| c4a6be5b6b | |||
| f4c1c9d816 | |||
| ac67697fe4 | |||
| 6521f655a8 | |||
| 0e74b03d9c | |||
| a925f91062 | |||
| 7106512fa9 | |||
| 1df20f0e13 | |||
| 8dab20c022 | |||
| 7073057e8d | |||
| 5e7346adc7 | |||
| d07a840f25 | |||
| 4b2e48af9c | |||
| 7b390d8be2 | |||
| e8502577b8 | |||
| af68f84dcd | |||
| b57f549d39 | |||
| 2c8d0a8daf | |||
| c939a541a7 | |||
| 895ea7fd14 | |||
| e93e7ffaa9 | |||
| 307639eca0 | |||
| 31814f181a | |||
| 5cd6b8622d | |||
| 20c9e68e1b | |||
| 127bf61fe2 | |||
| f99107fbfc | |||
| 5b782bafc9 |
@@ -75,6 +75,16 @@
|
||||
"milestone_at_end": "",
|
||||
"tasks_completed": [],
|
||||
"last_task_id": ""
|
||||
},
|
||||
{
|
||||
"session_id": "sess-002",
|
||||
"runtime": "unknown",
|
||||
"started_at": "2026-02-28T20:30:13Z",
|
||||
"ended_at": "",
|
||||
"ended_reason": "",
|
||||
"milestone_at_end": "",
|
||||
"tasks_completed": [],
|
||||
"last_task_id": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
{
|
||||
"session_id": "sess-001",
|
||||
"session_id": "sess-002",
|
||||
"runtime": "unknown",
|
||||
"pid": 2396592,
|
||||
"started_at": "2026-02-28T17:48:51Z",
|
||||
"project_path": "/tmp/ms21-api-003",
|
||||
"pid": 3178395,
|
||||
"started_at": "2026-02-28T20:30:13Z",
|
||||
"project_path": "/tmp/ms21-ui-001",
|
||||
"milestone_id": ""
|
||||
}
|
||||
|
||||
@@ -34,3 +34,9 @@ CVE-2026-26996 # HIGH: minimatch DoS via specially crafted glob patterns (needs
|
||||
# OpenBao 2.5.0 compiled with Go 1.25.6, fix needs Go >= 1.25.7.
|
||||
# Cannot build OpenBao from source (large project). Waiting for upstream release.
|
||||
CVE-2025-68121 # CRITICAL: crypto/tls session resumption
|
||||
|
||||
# === multer CVEs (upstream via @nestjs/platform-express) ===
|
||||
# multer <2.1.0 — waiting on NestJS to update their dependency
|
||||
# These are DoS vulnerabilities in file upload handling
|
||||
GHSA-xf7r-hgr6-v32p # HIGH: DoS via incomplete cleanup
|
||||
GHSA-v52c-386h-88mc # HIGH: DoS via resource exhaustion
|
||||
|
||||
@@ -1,232 +0,0 @@
|
||||
# API Pipeline - Mosaic Stack
|
||||
# Quality gates, build, and Docker publish for @mosaic/api
|
||||
#
|
||||
# Triggers on: apps/api/**, packages/**, root configs
|
||||
# Security chain: source audit + Trivy container scan
|
||||
|
||||
when:
|
||||
- event: [push, pull_request, manual]
|
||||
path:
|
||||
include:
|
||||
- "apps/api/**"
|
||||
- "packages/**"
|
||||
- "pnpm-lock.yaml"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "turbo.json"
|
||||
- "package.json"
|
||||
- ".woodpecker/api.yml"
|
||||
- ".trivyignore"
|
||||
|
||||
variables:
|
||||
- &node_image "node:24-alpine"
|
||||
- &install_deps |
|
||||
corepack enable
|
||||
pnpm install --frozen-lockfile
|
||||
- &use_deps |
|
||||
corepack enable
|
||||
- &turbo_env
|
||||
TURBO_API:
|
||||
from_secret: turbo_api
|
||||
TURBO_TOKEN:
|
||||
from_secret: turbo_token
|
||||
TURBO_TEAM:
|
||||
from_secret: turbo_team
|
||||
- &kaniko_setup |
|
||||
mkdir -p /kaniko/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:17.7-alpine3.22
|
||||
environment:
|
||||
POSTGRES_DB: test_db
|
||||
POSTGRES_USER: test_user
|
||||
POSTGRES_PASSWORD: test_password
|
||||
|
||||
steps:
|
||||
# === Quality Gates ===
|
||||
|
||||
install:
|
||||
image: *node_image
|
||||
commands:
|
||||
- *install_deps
|
||||
|
||||
security-audit:
|
||||
image: *node_image
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm audit --audit-level=high
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
prisma-generate:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm --filter "@mosaic/api" prisma:generate
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
lint:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo lint --filter=@mosaic/api
|
||||
depends_on:
|
||||
- prisma-generate
|
||||
|
||||
typecheck:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo typecheck --filter=@mosaic/api
|
||||
depends_on:
|
||||
- prisma-generate
|
||||
|
||||
prisma-migrate:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm --filter "@mosaic/api" prisma migrate deploy
|
||||
depends_on:
|
||||
- prisma-generate
|
||||
|
||||
test:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
|
||||
ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm --filter "@mosaic/api" exec vitest run --exclude 'src/auth/auth-rls.integration.spec.ts' --exclude 'src/credentials/user-credential.model.spec.ts' --exclude 'src/job-events/job-events.performance.spec.ts' --exclude 'src/knowledge/services/fulltext-search.spec.ts' --exclude 'src/mosaic-telemetry/mosaic-telemetry.module.spec.ts'
|
||||
depends_on:
|
||||
- prisma-migrate
|
||||
|
||||
# === Build ===
|
||||
|
||||
build:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
NODE_ENV: "production"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo build --filter=@mosaic/api
|
||||
depends_on:
|
||||
- lint
|
||||
- typecheck
|
||||
- test
|
||||
- security-audit
|
||||
|
||||
# === Docker Build & Push ===
|
||||
|
||||
docker-build-api:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS=""
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:$CI_COMMIT_TAG"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:latest"
|
||||
fi
|
||||
/kaniko/executor --context . --dockerfile apps/api/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
# === Container Security Scan ===
|
||||
|
||||
security-trivy-api:
|
||||
image: aquasec/trivy:latest
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- |
|
||||
if [ -n "$$CI_COMMIT_TAG" ]; then
|
||||
SCAN_TAG="$$CI_COMMIT_TAG"
|
||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
SCAN_TAG="latest"
|
||||
else
|
||||
SCAN_TAG="latest"
|
||||
fi
|
||||
mkdir -p ~/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
|
||||
--ignorefile .trivyignore \
|
||||
git.mosaicstack.dev/mosaic/stack-api:$$SCAN_TAG
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- docker-build-api
|
||||
|
||||
# === Package Linking ===
|
||||
|
||||
link-packages:
|
||||
image: alpine:3
|
||||
environment:
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
commands:
|
||||
- apk add --no-cache curl
|
||||
- sleep 10
|
||||
- |
|
||||
set -e
|
||||
link_package() {
|
||||
PKG="$$1"
|
||||
echo "Linking $$PKG..."
|
||||
for attempt in 1 2 3; do
|
||||
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
||||
-H "Authorization: token $$GITEA_TOKEN" \
|
||||
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
||||
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
||||
echo " Linked $$PKG"
|
||||
return 0
|
||||
elif [ "$$STATUS" = "400" ]; then
|
||||
echo " $$PKG already linked"
|
||||
return 0
|
||||
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
||||
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
||||
sleep 5
|
||||
else
|
||||
echo " FAILED: $$PKG status $$STATUS"
|
||||
cat /tmp/link-response.txt
|
||||
return 1
|
||||
fi
|
||||
done
|
||||
}
|
||||
link_package "stack-api"
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- security-trivy-api
|
||||
337
.woodpecker/ci.yml
Normal file
337
.woodpecker/ci.yml
Normal file
@@ -0,0 +1,337 @@
|
||||
# Unified CI Pipeline - Mosaic Stack
|
||||
# Single install, parallel quality gates, sequential deploy
|
||||
#
|
||||
# Replaces: api.yml, orchestrator.yml, web.yml
|
||||
# Keeps: coordinator.yml (Python), infra.yml (separate concerns)
|
||||
#
|
||||
# Flow:
|
||||
# install → security-audit
|
||||
# → prisma-generate → lint + typecheck (parallel)
|
||||
# → prisma-migrate → test
|
||||
# → build (after all gates pass)
|
||||
# → docker builds (main only, parallel)
|
||||
# → trivy scans (main only, parallel)
|
||||
# → package linking (main only)
|
||||
|
||||
when:
|
||||
- event: [push, pull_request, manual]
|
||||
path:
|
||||
include:
|
||||
- "apps/api/**"
|
||||
- "apps/orchestrator/**"
|
||||
- "apps/web/**"
|
||||
- "packages/**"
|
||||
- "pnpm-lock.yaml"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "turbo.json"
|
||||
- "package.json"
|
||||
- ".woodpecker/ci.yml"
|
||||
- ".trivyignore"
|
||||
|
||||
variables:
|
||||
- &node_image "node:24-alpine"
|
||||
- &install_deps |
|
||||
corepack enable
|
||||
pnpm install --frozen-lockfile
|
||||
- &use_deps |
|
||||
corepack enable
|
||||
- &turbo_env
|
||||
TURBO_API:
|
||||
from_secret: turbo_api
|
||||
TURBO_TOKEN:
|
||||
from_secret: turbo_token
|
||||
TURBO_TEAM:
|
||||
from_secret: turbo_team
|
||||
- &kaniko_setup |
|
||||
mkdir -p /kaniko/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:17.7-alpine3.22
|
||||
environment:
|
||||
POSTGRES_DB: test_db
|
||||
POSTGRES_USER: test_user
|
||||
POSTGRES_PASSWORD: test_password
|
||||
|
||||
steps:
|
||||
# ─── Install (once) ─────────────────────────────────────────
|
||||
install:
|
||||
image: *node_image
|
||||
commands:
|
||||
- *install_deps
|
||||
|
||||
# ─── Security Audit (once) ──────────────────────────────────
|
||||
security-audit:
|
||||
image: *node_image
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm audit --audit-level=high
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
# ─── Prisma Generate ────────────────────────────────────────
|
||||
prisma-generate:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm --filter "@mosaic/api" prisma:generate
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
# ─── Lint (all packages) ────────────────────────────────────
|
||||
lint:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo lint
|
||||
depends_on:
|
||||
- prisma-generate
|
||||
|
||||
# ─── Typecheck (all packages, parallel with lint) ───────────
|
||||
typecheck:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo typecheck
|
||||
depends_on:
|
||||
- prisma-generate
|
||||
|
||||
# ─── Prisma Migrate (test DB) ──────────────────────────────
|
||||
prisma-migrate:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm --filter "@mosaic/api" prisma migrate deploy
|
||||
depends_on:
|
||||
- prisma-generate
|
||||
|
||||
# ─── Test (all packages) ───────────────────────────────────
|
||||
test:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
|
||||
ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm --filter "@mosaic/api" exec vitest run --exclude 'src/auth/auth-rls.integration.spec.ts' --exclude 'src/credentials/user-credential.model.spec.ts' --exclude 'src/job-events/job-events.performance.spec.ts' --exclude 'src/knowledge/services/fulltext-search.spec.ts' --exclude 'src/mosaic-telemetry/mosaic-telemetry.module.spec.ts'
|
||||
- pnpm turbo test --filter=@mosaic/orchestrator --filter=@mosaic/web
|
||||
depends_on:
|
||||
- prisma-migrate
|
||||
|
||||
# ─── Build (all packages) ──────────────────────────────────
|
||||
build:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
NODE_ENV: "production"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo build
|
||||
depends_on:
|
||||
- lint
|
||||
- typecheck
|
||||
- test
|
||||
- security-audit
|
||||
|
||||
# ─── Docker Builds (main only, parallel) ───────────────────
|
||||
|
||||
docker-build-api:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS=""
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:$CI_COMMIT_TAG"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:latest"
|
||||
fi
|
||||
/kaniko/executor --context . --dockerfile apps/api/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
docker-build-orchestrator:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS=""
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:$CI_COMMIT_TAG"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:latest"
|
||||
fi
|
||||
/kaniko/executor --context . --dockerfile apps/orchestrator/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
docker-build-web:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS=""
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:$CI_COMMIT_TAG"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:latest"
|
||||
fi
|
||||
/kaniko/executor --context . --dockerfile apps/web/Dockerfile --snapshot-mode=redo --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
# ─── Container Security Scans (main only) ──────────────────
|
||||
|
||||
security-trivy-api:
|
||||
image: aquasec/trivy:latest
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- |
|
||||
if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
|
||||
mkdir -p ~/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-api:$$SCAN_TAG
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- docker-build-api
|
||||
|
||||
security-trivy-orchestrator:
|
||||
image: aquasec/trivy:latest
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- |
|
||||
if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
|
||||
mkdir -p ~/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-orchestrator:$$SCAN_TAG
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- docker-build-orchestrator
|
||||
|
||||
security-trivy-web:
|
||||
image: aquasec/trivy:latest
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- |
|
||||
if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
|
||||
mkdir -p ~/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-web:$$SCAN_TAG
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- docker-build-web
|
||||
|
||||
# ─── Package Linking (main only, once) ─────────────────────
|
||||
|
||||
link-packages:
|
||||
image: alpine:3
|
||||
environment:
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
commands:
|
||||
- apk add --no-cache curl
|
||||
- sleep 10
|
||||
- |
|
||||
set -e
|
||||
link_package() {
|
||||
PKG="$$1"
|
||||
echo "Linking $$PKG..."
|
||||
for attempt in 1 2 3; do
|
||||
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
||||
-H "Authorization: token $$GITEA_TOKEN" \
|
||||
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
||||
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
||||
echo " Linked $$PKG"
|
||||
return 0
|
||||
elif [ "$$STATUS" = "400" ]; then
|
||||
echo " $$PKG already linked"
|
||||
return 0
|
||||
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
||||
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
||||
sleep 5
|
||||
else
|
||||
echo " FAILED: $$PKG status $$STATUS"
|
||||
cat /tmp/link-response.txt
|
||||
return 1
|
||||
fi
|
||||
done
|
||||
}
|
||||
link_package "stack-api"
|
||||
link_package "stack-orchestrator"
|
||||
link_package "stack-web"
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- security-trivy-api
|
||||
- security-trivy-orchestrator
|
||||
- security-trivy-web
|
||||
@@ -1,202 +0,0 @@
|
||||
# Orchestrator Pipeline - Mosaic Stack
|
||||
# Quality gates, build, and Docker publish for @mosaic/orchestrator
|
||||
#
|
||||
# Triggers on: apps/orchestrator/**, packages/**, root configs
|
||||
# Security chain: source audit + Trivy container scan
|
||||
|
||||
when:
|
||||
- event: [push, pull_request, manual]
|
||||
path:
|
||||
include:
|
||||
- "apps/orchestrator/**"
|
||||
- "packages/**"
|
||||
- "pnpm-lock.yaml"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "turbo.json"
|
||||
- "package.json"
|
||||
- ".woodpecker/orchestrator.yml"
|
||||
- ".trivyignore"
|
||||
|
||||
variables:
|
||||
- &node_image "node:24-alpine"
|
||||
- &install_deps |
|
||||
corepack enable
|
||||
pnpm install --frozen-lockfile
|
||||
- &use_deps |
|
||||
corepack enable
|
||||
- &turbo_env
|
||||
TURBO_API:
|
||||
from_secret: turbo_api
|
||||
TURBO_TOKEN:
|
||||
from_secret: turbo_token
|
||||
TURBO_TEAM:
|
||||
from_secret: turbo_team
|
||||
- &kaniko_setup |
|
||||
mkdir -p /kaniko/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
||||
|
||||
steps:
|
||||
# === Quality Gates ===
|
||||
|
||||
install:
|
||||
image: *node_image
|
||||
commands:
|
||||
- *install_deps
|
||||
|
||||
security-audit:
|
||||
image: *node_image
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm audit --audit-level=high
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
lint:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo lint --filter=@mosaic/orchestrator
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
typecheck:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo typecheck --filter=@mosaic/orchestrator
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
test:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo test --filter=@mosaic/orchestrator
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
# === Build ===
|
||||
|
||||
build:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
NODE_ENV: "production"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo build --filter=@mosaic/orchestrator
|
||||
depends_on:
|
||||
- lint
|
||||
- typecheck
|
||||
- test
|
||||
- security-audit
|
||||
|
||||
# === Docker Build & Push ===
|
||||
|
||||
docker-build-orchestrator:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS=""
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:$CI_COMMIT_TAG"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:latest"
|
||||
fi
|
||||
/kaniko/executor --context . --dockerfile apps/orchestrator/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
# === Container Security Scan ===
|
||||
|
||||
security-trivy-orchestrator:
|
||||
image: aquasec/trivy:latest
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- |
|
||||
if [ -n "$$CI_COMMIT_TAG" ]; then
|
||||
SCAN_TAG="$$CI_COMMIT_TAG"
|
||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
SCAN_TAG="latest"
|
||||
else
|
||||
SCAN_TAG="latest"
|
||||
fi
|
||||
mkdir -p ~/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
|
||||
--ignorefile .trivyignore \
|
||||
git.mosaicstack.dev/mosaic/stack-orchestrator:$$SCAN_TAG
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- docker-build-orchestrator
|
||||
|
||||
# === Package Linking ===
|
||||
|
||||
link-packages:
|
||||
image: alpine:3
|
||||
environment:
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
commands:
|
||||
- apk add --no-cache curl
|
||||
- sleep 10
|
||||
- |
|
||||
set -e
|
||||
link_package() {
|
||||
PKG="$$1"
|
||||
echo "Linking $$PKG..."
|
||||
for attempt in 1 2 3; do
|
||||
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
||||
-H "Authorization: token $$GITEA_TOKEN" \
|
||||
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
||||
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
||||
echo " Linked $$PKG"
|
||||
return 0
|
||||
elif [ "$$STATUS" = "400" ]; then
|
||||
echo " $$PKG already linked"
|
||||
return 0
|
||||
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
||||
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
||||
sleep 5
|
||||
else
|
||||
echo " FAILED: $$PKG status $$STATUS"
|
||||
cat /tmp/link-response.txt
|
||||
return 1
|
||||
fi
|
||||
done
|
||||
}
|
||||
link_package "stack-orchestrator"
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- security-trivy-orchestrator
|
||||
@@ -1,202 +0,0 @@
|
||||
# Web Pipeline - Mosaic Stack
|
||||
# Quality gates, build, and Docker publish for @mosaic/web
|
||||
#
|
||||
# Triggers on: apps/web/**, packages/**, root configs
|
||||
# Security chain: source audit + Trivy container scan
|
||||
|
||||
when:
|
||||
- event: [push, pull_request, manual]
|
||||
path:
|
||||
include:
|
||||
- "apps/web/**"
|
||||
- "packages/**"
|
||||
- "pnpm-lock.yaml"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "turbo.json"
|
||||
- "package.json"
|
||||
- ".woodpecker/web.yml"
|
||||
- ".trivyignore"
|
||||
|
||||
variables:
|
||||
- &node_image "node:24-alpine"
|
||||
- &install_deps |
|
||||
corepack enable
|
||||
pnpm install --frozen-lockfile
|
||||
- &use_deps |
|
||||
corepack enable
|
||||
- &turbo_env
|
||||
TURBO_API:
|
||||
from_secret: turbo_api
|
||||
TURBO_TOKEN:
|
||||
from_secret: turbo_token
|
||||
TURBO_TEAM:
|
||||
from_secret: turbo_team
|
||||
- &kaniko_setup |
|
||||
mkdir -p /kaniko/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
||||
|
||||
steps:
|
||||
# === Quality Gates ===
|
||||
|
||||
install:
|
||||
image: *node_image
|
||||
commands:
|
||||
- *install_deps
|
||||
|
||||
security-audit:
|
||||
image: *node_image
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm audit --audit-level=high
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
lint:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo lint --filter=@mosaic/web
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
typecheck:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo typecheck --filter=@mosaic/web
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
test:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo test --filter=@mosaic/web
|
||||
depends_on:
|
||||
- install
|
||||
|
||||
# === Build ===
|
||||
|
||||
build:
|
||||
image: *node_image
|
||||
environment:
|
||||
SKIP_ENV_VALIDATION: "true"
|
||||
NODE_ENV: "production"
|
||||
<<: *turbo_env
|
||||
commands:
|
||||
- *use_deps
|
||||
- pnpm turbo build --filter=@mosaic/web
|
||||
depends_on:
|
||||
- lint
|
||||
- typecheck
|
||||
- test
|
||||
- security-audit
|
||||
|
||||
# === Docker Build & Push ===
|
||||
|
||||
docker-build-web:
|
||||
image: gcr.io/kaniko-project/executor:debug
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- *kaniko_setup
|
||||
- |
|
||||
DESTINATIONS=""
|
||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:$CI_COMMIT_TAG"
|
||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:latest"
|
||||
fi
|
||||
/kaniko/executor --context . --dockerfile apps/web/Dockerfile --snapshot-mode=redo --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
# === Container Security Scan ===
|
||||
|
||||
security-trivy-web:
|
||||
image: aquasec/trivy:latest
|
||||
environment:
|
||||
GITEA_USER:
|
||||
from_secret: gitea_username
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||
commands:
|
||||
- |
|
||||
if [ -n "$$CI_COMMIT_TAG" ]; then
|
||||
SCAN_TAG="$$CI_COMMIT_TAG"
|
||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||
SCAN_TAG="latest"
|
||||
else
|
||||
SCAN_TAG="latest"
|
||||
fi
|
||||
mkdir -p ~/.docker
|
||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
|
||||
--ignorefile .trivyignore \
|
||||
git.mosaicstack.dev/mosaic/stack-web:$$SCAN_TAG
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- docker-build-web
|
||||
|
||||
# === Package Linking ===
|
||||
|
||||
link-packages:
|
||||
image: alpine:3
|
||||
environment:
|
||||
GITEA_TOKEN:
|
||||
from_secret: gitea_token
|
||||
commands:
|
||||
- apk add --no-cache curl
|
||||
- sleep 10
|
||||
- |
|
||||
set -e
|
||||
link_package() {
|
||||
PKG="$$1"
|
||||
echo "Linking $$PKG..."
|
||||
for attempt in 1 2 3; do
|
||||
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
||||
-H "Authorization: token $$GITEA_TOKEN" \
|
||||
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
||||
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
||||
echo " Linked $$PKG"
|
||||
return 0
|
||||
elif [ "$$STATUS" = "400" ]; then
|
||||
echo " $$PKG already linked"
|
||||
return 0
|
||||
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
||||
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
||||
sleep 5
|
||||
else
|
||||
echo " FAILED: $$PKG status $$STATUS"
|
||||
cat /tmp/link-response.txt
|
||||
return 1
|
||||
fi
|
||||
done
|
||||
}
|
||||
link_package "stack-web"
|
||||
when:
|
||||
- branch: [main]
|
||||
event: [push, manual, tag]
|
||||
depends_on:
|
||||
- security-trivy-web
|
||||
@@ -36,6 +36,7 @@
|
||||
"@nestjs/mapped-types": "^2.1.0",
|
||||
"@nestjs/platform-express": "^11.1.12",
|
||||
"@nestjs/platform-socket.io": "^11.1.12",
|
||||
"@nestjs/schedule": "^6.1.1",
|
||||
"@nestjs/throttler": "^6.5.0",
|
||||
"@nestjs/websockets": "^11.1.12",
|
||||
"@opentelemetry/api": "^1.9.0",
|
||||
@@ -59,6 +60,7 @@
|
||||
"class-validator": "^0.14.3",
|
||||
"cookie-parser": "^1.4.7",
|
||||
"discord.js": "^14.25.1",
|
||||
"dockerode": "^4.0.9",
|
||||
"gray-matter": "^4.0.3",
|
||||
"highlight.js": "^11.11.1",
|
||||
"ioredis": "^5.9.2",
|
||||
@@ -88,6 +90,7 @@
|
||||
"@types/archiver": "^7.0.0",
|
||||
"@types/bcryptjs": "^3.0.0",
|
||||
"@types/cookie-parser": "^1.4.10",
|
||||
"@types/dockerode": "^3.3.47",
|
||||
"@types/express": "^5.0.1",
|
||||
"@types/highlight.js": "^10.1.0",
|
||||
"@types/node": "^22.13.4",
|
||||
|
||||
@@ -0,0 +1,24 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "agent_memories" (
|
||||
"id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"agent_id" TEXT NOT NULL,
|
||||
"key" TEXT NOT NULL,
|
||||
"value" JSONB NOT NULL,
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||
|
||||
CONSTRAINT "agent_memories_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "agent_memories_workspace_id_agent_id_key_key" ON "agent_memories"("workspace_id", "agent_id", "key");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "agent_memories_workspace_id_idx" ON "agent_memories"("workspace_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "agent_memories_agent_id_idx" ON "agent_memories"("agent_id");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "agent_memories" ADD CONSTRAINT "agent_memories_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -0,0 +1,33 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "conversation_archives" (
|
||||
"id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"session_id" TEXT NOT NULL,
|
||||
"agent_id" TEXT NOT NULL,
|
||||
"messages" JSONB NOT NULL,
|
||||
"message_count" INTEGER NOT NULL,
|
||||
"summary" TEXT NOT NULL,
|
||||
"embedding" vector(1536),
|
||||
"started_at" TIMESTAMPTZ NOT NULL,
|
||||
"ended_at" TIMESTAMPTZ,
|
||||
"metadata" JSONB NOT NULL DEFAULT '{}',
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||
|
||||
CONSTRAINT "conversation_archives_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "conversation_archives_workspace_id_session_id_key" ON "conversation_archives"("workspace_id", "session_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "conversation_archives_workspace_id_idx" ON "conversation_archives"("workspace_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "conversation_archives_agent_id_idx" ON "conversation_archives"("agent_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "conversation_archives_started_at_idx" ON "conversation_archives"("started_at");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "conversation_archives" ADD CONSTRAINT "conversation_archives_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -0,0 +1,109 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "SystemConfig" (
|
||||
"id" TEXT NOT NULL,
|
||||
"key" TEXT NOT NULL,
|
||||
"value" TEXT NOT NULL,
|
||||
"encrypted" BOOLEAN NOT NULL DEFAULT false,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "SystemConfig_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "BreakglassUser" (
|
||||
"id" TEXT NOT NULL,
|
||||
"username" TEXT NOT NULL,
|
||||
"passwordHash" TEXT NOT NULL,
|
||||
"isActive" BOOLEAN NOT NULL DEFAULT true,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "BreakglassUser_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "LlmProvider" (
|
||||
"id" TEXT NOT NULL,
|
||||
"userId" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"displayName" TEXT NOT NULL,
|
||||
"type" TEXT NOT NULL,
|
||||
"baseUrl" TEXT,
|
||||
"apiKey" TEXT,
|
||||
"apiType" TEXT NOT NULL DEFAULT 'openai-completions',
|
||||
"models" JSONB NOT NULL DEFAULT '[]',
|
||||
"isActive" BOOLEAN NOT NULL DEFAULT true,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "LlmProvider_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "UserContainer" (
|
||||
"id" TEXT NOT NULL,
|
||||
"userId" TEXT NOT NULL,
|
||||
"containerId" TEXT,
|
||||
"containerName" TEXT NOT NULL,
|
||||
"gatewayPort" INTEGER,
|
||||
"gatewayToken" TEXT NOT NULL,
|
||||
"status" TEXT NOT NULL DEFAULT 'stopped',
|
||||
"lastActiveAt" TIMESTAMP(3),
|
||||
"idleTimeoutMin" INTEGER NOT NULL DEFAULT 30,
|
||||
"config" JSONB NOT NULL DEFAULT '{}',
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "UserContainer_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "SystemContainer" (
|
||||
"id" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"role" TEXT NOT NULL,
|
||||
"containerId" TEXT,
|
||||
"gatewayPort" INTEGER,
|
||||
"gatewayToken" TEXT NOT NULL,
|
||||
"status" TEXT NOT NULL DEFAULT 'stopped',
|
||||
"primaryModel" TEXT NOT NULL,
|
||||
"isActive" BOOLEAN NOT NULL DEFAULT true,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "SystemContainer_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "UserAgentConfig" (
|
||||
"id" TEXT NOT NULL,
|
||||
"userId" TEXT NOT NULL,
|
||||
"primaryModel" TEXT,
|
||||
"fallbackModels" JSONB NOT NULL DEFAULT '[]',
|
||||
"personality" TEXT,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "UserAgentConfig_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "SystemConfig_key_key" ON "SystemConfig"("key");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "BreakglassUser_username_key" ON "BreakglassUser"("username");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "LlmProvider_userId_idx" ON "LlmProvider"("userId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "LlmProvider_userId_name_key" ON "LlmProvider"("userId", "name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "UserContainer_userId_key" ON "UserContainer"("userId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "SystemContainer_name_key" ON "SystemContainer"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "UserAgentConfig_userId_key" ON "UserAgentConfig"("userId");
|
||||
@@ -0,0 +1,37 @@
|
||||
-- CreateTable
|
||||
CREATE TABLE "findings" (
|
||||
"id" UUID NOT NULL,
|
||||
"workspace_id" UUID NOT NULL,
|
||||
"task_id" UUID,
|
||||
"agent_id" TEXT NOT NULL,
|
||||
"type" TEXT NOT NULL,
|
||||
"title" TEXT NOT NULL,
|
||||
"data" JSONB NOT NULL,
|
||||
"summary" TEXT NOT NULL,
|
||||
"embedding" vector(1536),
|
||||
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||
|
||||
CONSTRAINT "findings_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "findings_id_workspace_id_key" ON "findings"("id", "workspace_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "findings_workspace_id_idx" ON "findings"("workspace_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "findings_agent_id_idx" ON "findings"("agent_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "findings_type_idx" ON "findings"("type");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "findings_task_id_idx" ON "findings"("task_id");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "findings" ADD CONSTRAINT "findings_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "findings" ADD CONSTRAINT "findings_task_id_fkey" FOREIGN KEY ("task_id") REFERENCES "agent_tasks"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "tasks" ADD COLUMN "assigned_agent" TEXT;
|
||||
@@ -298,6 +298,8 @@ model Workspace {
|
||||
agents Agent[]
|
||||
agentSessions AgentSession[]
|
||||
agentTasks AgentTask[]
|
||||
findings Finding[]
|
||||
agentMemories AgentMemory[]
|
||||
userLayouts UserLayout[]
|
||||
knowledgeEntries KnowledgeEntry[]
|
||||
knowledgeTags KnowledgeTag[]
|
||||
@@ -312,6 +314,7 @@ model Workspace {
|
||||
llmUsageLogs LlmUsageLog[]
|
||||
userCredentials UserCredential[]
|
||||
terminalSessions TerminalSession[]
|
||||
conversationArchives ConversationArchive[]
|
||||
|
||||
@@index([ownerId])
|
||||
@@map("workspaces")
|
||||
@@ -376,6 +379,7 @@ model Task {
|
||||
creatorId String @map("creator_id") @db.Uuid
|
||||
projectId String? @map("project_id") @db.Uuid
|
||||
parentId String? @map("parent_id") @db.Uuid
|
||||
assignedAgent String? @map("assigned_agent")
|
||||
domainId String? @map("domain_id") @db.Uuid
|
||||
sortOrder Int @default(0) @map("sort_order")
|
||||
metadata Json @default("{}")
|
||||
@@ -689,6 +693,7 @@ model AgentTask {
|
||||
createdBy User @relation("AgentTaskCreator", fields: [createdById], references: [id], onDelete: Cascade)
|
||||
createdById String @map("created_by_id") @db.Uuid
|
||||
runnerJobs RunnerJob[]
|
||||
findings Finding[]
|
||||
|
||||
@@unique([id, workspaceId])
|
||||
@@index([workspaceId])
|
||||
@@ -698,6 +703,33 @@ model AgentTask {
|
||||
@@map("agent_tasks")
|
||||
}
|
||||
|
||||
model Finding {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
workspaceId String @map("workspace_id") @db.Uuid
|
||||
taskId String? @map("task_id") @db.Uuid
|
||||
|
||||
agentId String @map("agent_id")
|
||||
type String
|
||||
title String
|
||||
data Json
|
||||
summary String @db.Text
|
||||
// Note: vector dimension (1536) must match EMBEDDING_DIMENSION constant in @mosaic/shared
|
||||
embedding Unsupported("vector(1536)")?
|
||||
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
task AgentTask? @relation(fields: [taskId], references: [id], onDelete: SetNull)
|
||||
|
||||
@@unique([id, workspaceId])
|
||||
@@index([workspaceId])
|
||||
@@index([agentId])
|
||||
@@index([type])
|
||||
@@index([taskId])
|
||||
@@map("findings")
|
||||
}
|
||||
|
||||
model AgentSession {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
workspaceId String @map("workspace_id") @db.Uuid
|
||||
@@ -735,6 +767,23 @@ model AgentSession {
|
||||
@@map("agent_sessions")
|
||||
}
|
||||
|
||||
model AgentMemory {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
workspaceId String @map("workspace_id") @db.Uuid
|
||||
agentId String @map("agent_id")
|
||||
key String
|
||||
value Json
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([workspaceId, agentId, key])
|
||||
@@index([workspaceId])
|
||||
@@index([agentId])
|
||||
@@map("agent_memories")
|
||||
}
|
||||
|
||||
model WidgetDefinition {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
|
||||
@@ -1546,3 +1595,111 @@ model TerminalSession {
|
||||
@@index([workspaceId, status])
|
||||
@@map("terminal_sessions")
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// CONVERSATION ARCHIVE MODULE
|
||||
// ============================================
|
||||
|
||||
model ConversationArchive {
|
||||
id String @id @default(uuid()) @db.Uuid
|
||||
workspaceId String @map("workspace_id") @db.Uuid
|
||||
sessionId String @map("session_id")
|
||||
agentId String @map("agent_id")
|
||||
messages Json
|
||||
messageCount Int @map("message_count")
|
||||
summary String @db.Text
|
||||
// Note: vector dimension (1536) must match EMBEDDING_DIMENSION constant in @mosaic/shared
|
||||
embedding Unsupported("vector(1536)")?
|
||||
startedAt DateTime @map("started_at") @db.Timestamptz
|
||||
endedAt DateTime? @map("ended_at") @db.Timestamptz
|
||||
metadata Json @default("{}")
|
||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||
|
||||
// Relations
|
||||
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([workspaceId, sessionId])
|
||||
@@index([workspaceId])
|
||||
@@index([agentId])
|
||||
@@index([startedAt])
|
||||
@@map("conversation_archives")
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// AGENT FLEET MODULE
|
||||
// ============================================
|
||||
|
||||
model SystemConfig {
|
||||
id String @id @default(cuid())
|
||||
key String @unique
|
||||
value String
|
||||
encrypted Boolean @default(false)
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model BreakglassUser {
|
||||
id String @id @default(cuid())
|
||||
username String @unique
|
||||
passwordHash String
|
||||
isActive Boolean @default(true)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model LlmProvider {
|
||||
id String @id @default(cuid())
|
||||
userId String
|
||||
name String
|
||||
displayName String
|
||||
type String
|
||||
baseUrl String?
|
||||
apiKey String?
|
||||
apiType String @default("openai-completions")
|
||||
models Json @default("[]")
|
||||
isActive Boolean @default(true)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@unique([userId, name])
|
||||
@@index([userId])
|
||||
}
|
||||
|
||||
model UserContainer {
|
||||
id String @id @default(cuid())
|
||||
userId String @unique
|
||||
containerId String?
|
||||
containerName String
|
||||
gatewayPort Int?
|
||||
gatewayToken String
|
||||
status String @default("stopped")
|
||||
lastActiveAt DateTime?
|
||||
idleTimeoutMin Int @default(30)
|
||||
config Json @default("{}")
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model SystemContainer {
|
||||
id String @id @default(cuid())
|
||||
name String @unique
|
||||
role String
|
||||
containerId String?
|
||||
gatewayPort Int?
|
||||
gatewayToken String
|
||||
status String @default("stopped")
|
||||
primaryModel String
|
||||
isActive Boolean @default(true)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
model UserAgentConfig {
|
||||
id String @id @default(cuid())
|
||||
userId String @unique
|
||||
primaryModel String?
|
||||
fallbackModels Json @default("[]")
|
||||
personality String?
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Controller, Get, Query, Param, UseGuards } from "@nestjs/common";
|
||||
import { ActivityService } from "./activity.service";
|
||||
import { EntityType } from "@prisma/client";
|
||||
import type { QueryActivityLogDto } from "./dto";
|
||||
import { QueryActivityLogDto } from "./dto";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||
import { Workspace, Permission, RequirePermission } from "../common/decorators";
|
||||
|
||||
@@ -117,12 +117,13 @@ export class ActivityService {
|
||||
/**
|
||||
* Get a single activity log by ID
|
||||
*/
|
||||
async findOne(id: string, workspaceId: string): Promise<ActivityLogResult | null> {
|
||||
async findOne(id: string, workspaceId?: string): Promise<ActivityLogResult | null> {
|
||||
const where: Prisma.ActivityLogWhereUniqueInput = { id };
|
||||
if (workspaceId) {
|
||||
where.workspaceId = workspaceId;
|
||||
}
|
||||
return await this.prisma.activityLog.findUnique({
|
||||
where: {
|
||||
id,
|
||||
workspaceId,
|
||||
},
|
||||
where,
|
||||
include: {
|
||||
user: {
|
||||
select: {
|
||||
|
||||
@@ -4,6 +4,7 @@ import { tap } from "rxjs/operators";
|
||||
import { ActivityService } from "../activity.service";
|
||||
import { ActivityAction, EntityType } from "@prisma/client";
|
||||
import type { Prisma } from "@prisma/client";
|
||||
import type { CreateActivityLogInput } from "../interfaces/activity.interface";
|
||||
import type { AuthenticatedRequest } from "../../common/types/user.types";
|
||||
|
||||
/**
|
||||
@@ -61,10 +62,13 @@ export class ActivityLoggingInterceptor implements NestInterceptor {
|
||||
// Extract entity information
|
||||
const resultObj = result as Record<string, unknown> | undefined;
|
||||
const entityId = params.id ?? (resultObj?.id as string | undefined);
|
||||
|
||||
// workspaceId is now optional - log events even when missing
|
||||
const workspaceId = user.workspaceId ?? (body.workspaceId as string | undefined);
|
||||
|
||||
if (!entityId || !workspaceId) {
|
||||
this.logger.warn("Cannot log activity: missing entityId or workspaceId");
|
||||
// Log with warning if entityId is missing, but still proceed with logging if workspaceId exists
|
||||
if (!entityId) {
|
||||
this.logger.warn("Cannot log activity: missing entityId");
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -92,9 +96,8 @@ export class ActivityLoggingInterceptor implements NestInterceptor {
|
||||
const userAgent =
|
||||
typeof userAgentHeader === "string" ? userAgentHeader : userAgentHeader?.[0];
|
||||
|
||||
// Log the activity
|
||||
await this.activityService.logActivity({
|
||||
workspaceId,
|
||||
// Log the activity — workspaceId is optional
|
||||
const activityInput: CreateActivityLogInput = {
|
||||
userId: user.id,
|
||||
action,
|
||||
entityType,
|
||||
@@ -102,7 +105,11 @@ export class ActivityLoggingInterceptor implements NestInterceptor {
|
||||
details,
|
||||
ipAddress: ip ?? undefined,
|
||||
userAgent: userAgent ?? undefined,
|
||||
});
|
||||
};
|
||||
if (workspaceId) {
|
||||
activityInput.workspaceId = workspaceId;
|
||||
}
|
||||
await this.activityService.logActivity(activityInput);
|
||||
} catch (error) {
|
||||
// Don't fail the request if activity logging fails
|
||||
this.logger.error(
|
||||
|
||||
@@ -2,9 +2,10 @@ import type { ActivityAction, EntityType, Prisma } from "@prisma/client";
|
||||
|
||||
/**
|
||||
* Interface for creating a new activity log entry
|
||||
* workspaceId is optional - allows logging events without workspace context
|
||||
*/
|
||||
export interface CreateActivityLogInput {
|
||||
workspaceId: string;
|
||||
workspaceId?: string | null;
|
||||
userId: string;
|
||||
action: ActivityAction;
|
||||
entityType: EntityType;
|
||||
|
||||
@@ -24,7 +24,15 @@ describe("AdminService", () => {
|
||||
workspaceMember: {
|
||||
create: vi.fn(),
|
||||
},
|
||||
$transaction: vi.fn(),
|
||||
session: {
|
||||
deleteMany: vi.fn(),
|
||||
},
|
||||
$transaction: vi.fn(async (ops) => {
|
||||
if (typeof ops === "function") {
|
||||
return ops(mockPrismaService);
|
||||
}
|
||||
return Promise.all(ops);
|
||||
}),
|
||||
};
|
||||
|
||||
const mockAdminId = "550e8400-e29b-41d4-a716-446655440001";
|
||||
@@ -82,10 +90,6 @@ describe("AdminService", () => {
|
||||
service = module.get<AdminService>(AdminService);
|
||||
|
||||
vi.clearAllMocks();
|
||||
|
||||
mockPrismaService.$transaction.mockImplementation(async (fn: (tx: unknown) => unknown) => {
|
||||
return fn(mockPrismaService);
|
||||
});
|
||||
});
|
||||
|
||||
it("should be defined", () => {
|
||||
@@ -325,12 +329,13 @@ describe("AdminService", () => {
|
||||
});
|
||||
|
||||
describe("deactivateUser", () => {
|
||||
it("should set deactivatedAt on the user", async () => {
|
||||
it("should set deactivatedAt and invalidate sessions", async () => {
|
||||
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||
mockPrismaService.user.update.mockResolvedValue({
|
||||
...mockUser,
|
||||
deactivatedAt: new Date(),
|
||||
});
|
||||
mockPrismaService.session.deleteMany.mockResolvedValue({ count: 3 });
|
||||
|
||||
const result = await service.deactivateUser(mockUserId);
|
||||
|
||||
@@ -341,6 +346,7 @@ describe("AdminService", () => {
|
||||
data: { deactivatedAt: expect.any(Date) },
|
||||
})
|
||||
);
|
||||
expect(mockPrismaService.session.deleteMany).toHaveBeenCalledWith({ where: { userId: mockUserId } });
|
||||
});
|
||||
|
||||
it("should throw NotFoundException if user does not exist", async () => {
|
||||
|
||||
@@ -192,19 +192,22 @@ export class AdminService {
|
||||
throw new BadRequestException(`User ${id} is already deactivated`);
|
||||
}
|
||||
|
||||
const user = await this.prisma.user.update({
|
||||
where: { id },
|
||||
data: { deactivatedAt: new Date() },
|
||||
include: {
|
||||
workspaceMemberships: {
|
||||
include: {
|
||||
workspace: { select: { id: true, name: true } },
|
||||
const [user] = await this.prisma.$transaction([
|
||||
this.prisma.user.update({
|
||||
where: { id },
|
||||
data: { deactivatedAt: new Date() },
|
||||
include: {
|
||||
workspaceMemberships: {
|
||||
include: {
|
||||
workspace: { select: { id: true, name: true } },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
}),
|
||||
this.prisma.session.deleteMany({ where: { userId: id } }),
|
||||
]);
|
||||
|
||||
this.logger.log(`User deactivated: ${id}`);
|
||||
this.logger.log(`User deactivated and sessions invalidated: ${id}`);
|
||||
|
||||
return {
|
||||
id: user.id,
|
||||
|
||||
40
apps/api/src/agent-config/agent-config.controller.ts
Normal file
40
apps/api/src/agent-config/agent-config.controller.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import {
|
||||
Controller,
|
||||
ForbiddenException,
|
||||
Get,
|
||||
Param,
|
||||
Req,
|
||||
UnauthorizedException,
|
||||
UseGuards,
|
||||
} from "@nestjs/common";
|
||||
import { AgentConfigService } from "./agent-config.service";
|
||||
import { AgentConfigGuard, type AgentConfigRequest } from "./agent-config.guard";
|
||||
|
||||
@Controller("internal")
|
||||
@UseGuards(AgentConfigGuard)
|
||||
export class AgentConfigController {
|
||||
constructor(private readonly agentConfigService: AgentConfigService) {}
|
||||
|
||||
// GET /api/internal/agent-config/:id
|
||||
// Auth: Bearer token (validated against UserContainer.gatewayToken or SystemContainer.gatewayToken)
|
||||
// Returns: assembled openclaw.json
|
||||
//
|
||||
// The :id param is the container record ID (cuid)
|
||||
// Token must match the container requesting its own config
|
||||
@Get("agent-config/:id")
|
||||
async getAgentConfig(
|
||||
@Param("id") id: string,
|
||||
@Req() request: AgentConfigRequest
|
||||
): Promise<object> {
|
||||
const containerAuth = request.containerAuth;
|
||||
if (!containerAuth) {
|
||||
throw new UnauthorizedException("Missing container authentication context");
|
||||
}
|
||||
|
||||
if (containerAuth.id !== id) {
|
||||
throw new ForbiddenException("Token is not authorized for the requested container");
|
||||
}
|
||||
|
||||
return this.agentConfigService.generateConfigForContainer(containerAuth.type, id);
|
||||
}
|
||||
}
|
||||
43
apps/api/src/agent-config/agent-config.guard.ts
Normal file
43
apps/api/src/agent-config/agent-config.guard.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { CanActivate, ExecutionContext, Injectable, UnauthorizedException } from "@nestjs/common";
|
||||
import type { Request } from "express";
|
||||
import { AgentConfigService, type ContainerTokenValidation } from "./agent-config.service";
|
||||
|
||||
export interface AgentConfigRequest extends Request {
|
||||
containerAuth?: ContainerTokenValidation;
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
export class AgentConfigGuard implements CanActivate {
|
||||
constructor(private readonly agentConfigService: AgentConfigService) {}
|
||||
|
||||
async canActivate(context: ExecutionContext): Promise<boolean> {
|
||||
const request = context.switchToHttp().getRequest<AgentConfigRequest>();
|
||||
const token = this.extractBearerToken(request.headers.authorization);
|
||||
|
||||
if (!token) {
|
||||
throw new UnauthorizedException("Missing Bearer token");
|
||||
}
|
||||
|
||||
const containerAuth = await this.agentConfigService.validateContainerToken(token);
|
||||
if (!containerAuth) {
|
||||
throw new UnauthorizedException("Invalid container token");
|
||||
}
|
||||
|
||||
request.containerAuth = containerAuth;
|
||||
return true;
|
||||
}
|
||||
|
||||
private extractBearerToken(headerValue: string | string[] | undefined): string | null {
|
||||
const normalizedHeader = Array.isArray(headerValue) ? headerValue[0] : headerValue;
|
||||
if (!normalizedHeader) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const [scheme, token] = normalizedHeader.split(" ");
|
||||
if (!scheme || !token || scheme.toLowerCase() !== "bearer") {
|
||||
return null;
|
||||
}
|
||||
|
||||
return token;
|
||||
}
|
||||
}
|
||||
14
apps/api/src/agent-config/agent-config.module.ts
Normal file
14
apps/api/src/agent-config/agent-config.module.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { PrismaModule } from "../prisma/prisma.module";
|
||||
import { CryptoModule } from "../crypto/crypto.module";
|
||||
import { AgentConfigController } from "./agent-config.controller";
|
||||
import { AgentConfigService } from "./agent-config.service";
|
||||
import { AgentConfigGuard } from "./agent-config.guard";
|
||||
|
||||
@Module({
|
||||
imports: [PrismaModule, CryptoModule],
|
||||
controllers: [AgentConfigController],
|
||||
providers: [AgentConfigService, AgentConfigGuard],
|
||||
exports: [AgentConfigService],
|
||||
})
|
||||
export class AgentConfigModule {}
|
||||
215
apps/api/src/agent-config/agent-config.service.spec.ts
Normal file
215
apps/api/src/agent-config/agent-config.service.spec.ts
Normal file
@@ -0,0 +1,215 @@
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { AgentConfigService } from "./agent-config.service";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { CryptoService } from "../crypto/crypto.service";
|
||||
|
||||
describe("AgentConfigService", () => {
|
||||
let service: AgentConfigService;
|
||||
|
||||
const mockPrismaService = {
|
||||
userAgentConfig: {
|
||||
findUnique: vi.fn(),
|
||||
},
|
||||
llmProvider: {
|
||||
findMany: vi.fn(),
|
||||
},
|
||||
userContainer: {
|
||||
findUnique: vi.fn(),
|
||||
findMany: vi.fn(),
|
||||
},
|
||||
systemContainer: {
|
||||
findUnique: vi.fn(),
|
||||
findMany: vi.fn(),
|
||||
},
|
||||
};
|
||||
|
||||
const mockCryptoService = {
|
||||
isEncrypted: vi.fn((value: string) => value.startsWith("enc:")),
|
||||
decrypt: vi.fn((value: string) => value.replace(/^enc:/, "")),
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
service = new AgentConfigService(
|
||||
mockPrismaService as unknown as PrismaService,
|
||||
mockCryptoService as unknown as CryptoService
|
||||
);
|
||||
});
|
||||
|
||||
it("generateUserConfig returns valid openclaw.json structure", async () => {
|
||||
mockPrismaService.userAgentConfig.findUnique.mockResolvedValue({
|
||||
id: "cfg-1",
|
||||
userId: "user-1",
|
||||
primaryModel: "my-zai/glm-5",
|
||||
});
|
||||
|
||||
mockPrismaService.userContainer.findUnique.mockResolvedValue({
|
||||
id: "container-1",
|
||||
userId: "user-1",
|
||||
gatewayPort: 19001,
|
||||
});
|
||||
|
||||
mockPrismaService.llmProvider.findMany.mockResolvedValue([
|
||||
{
|
||||
id: "provider-1",
|
||||
userId: "user-1",
|
||||
name: "my-zai",
|
||||
displayName: "Z.ai",
|
||||
type: "zai",
|
||||
baseUrl: "https://api.z.ai/v1",
|
||||
apiKey: "enc:secret-zai-key",
|
||||
apiType: "openai-completions",
|
||||
models: [{ id: "glm-5" }],
|
||||
isActive: true,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
]);
|
||||
|
||||
const result = await service.generateUserConfig("user-1");
|
||||
|
||||
expect(result).toEqual({
|
||||
gateway: {
|
||||
mode: "local",
|
||||
port: 19001,
|
||||
bind: "lan",
|
||||
auth: { mode: "token" },
|
||||
http: {
|
||||
endpoints: {
|
||||
chatCompletions: { enabled: true },
|
||||
},
|
||||
},
|
||||
},
|
||||
agents: {
|
||||
defaults: {
|
||||
model: {
|
||||
primary: "my-zai/glm-5",
|
||||
},
|
||||
},
|
||||
},
|
||||
models: {
|
||||
providers: {
|
||||
"my-zai": {
|
||||
apiKey: "secret-zai-key",
|
||||
baseUrl: "https://api.z.ai/v1",
|
||||
models: {
|
||||
"glm-5": {},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("generateUserConfig decrypts API keys correctly", async () => {
|
||||
mockPrismaService.userAgentConfig.findUnique.mockResolvedValue({
|
||||
id: "cfg-1",
|
||||
userId: "user-1",
|
||||
primaryModel: "openai-work/gpt-4.1",
|
||||
});
|
||||
|
||||
mockPrismaService.userContainer.findUnique.mockResolvedValue({
|
||||
id: "container-1",
|
||||
userId: "user-1",
|
||||
gatewayPort: 18789,
|
||||
});
|
||||
|
||||
mockPrismaService.llmProvider.findMany.mockResolvedValue([
|
||||
{
|
||||
id: "provider-1",
|
||||
userId: "user-1",
|
||||
name: "openai-work",
|
||||
displayName: "OpenAI Work",
|
||||
type: "openai",
|
||||
baseUrl: "https://api.openai.com/v1",
|
||||
apiKey: "enc:encrypted-openai-key",
|
||||
apiType: "openai-completions",
|
||||
models: [{ id: "gpt-4.1" }],
|
||||
isActive: true,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
]);
|
||||
|
||||
const result = await service.generateUserConfig("user-1");
|
||||
|
||||
expect(mockCryptoService.decrypt).toHaveBeenCalledWith("enc:encrypted-openai-key");
|
||||
expect(result.models.providers["openai-work"]?.apiKey).toBe("encrypted-openai-key");
|
||||
});
|
||||
|
||||
it("generateUserConfig handles user with no providers", async () => {
|
||||
mockPrismaService.userAgentConfig.findUnique.mockResolvedValue({
|
||||
id: "cfg-1",
|
||||
userId: "user-2",
|
||||
primaryModel: "openai/gpt-4o-mini",
|
||||
});
|
||||
|
||||
mockPrismaService.userContainer.findUnique.mockResolvedValue({
|
||||
id: "container-2",
|
||||
userId: "user-2",
|
||||
gatewayPort: null,
|
||||
});
|
||||
|
||||
mockPrismaService.llmProvider.findMany.mockResolvedValue([]);
|
||||
|
||||
const result = await service.generateUserConfig("user-2");
|
||||
|
||||
expect(result.models.providers).toEqual({});
|
||||
expect(result.gateway.port).toBe(18789);
|
||||
});
|
||||
|
||||
it("validateContainerToken returns correct type for user container", async () => {
|
||||
mockPrismaService.userContainer.findMany.mockResolvedValue([
|
||||
{
|
||||
id: "user-container-1",
|
||||
gatewayToken: "enc:user-token-1",
|
||||
},
|
||||
]);
|
||||
mockPrismaService.systemContainer.findMany.mockResolvedValue([]);
|
||||
|
||||
const result = await service.validateContainerToken("user-token-1");
|
||||
|
||||
expect(result).toEqual({
|
||||
type: "user",
|
||||
id: "user-container-1",
|
||||
});
|
||||
});
|
||||
|
||||
it("validateContainerToken returns correct type for system container", async () => {
|
||||
mockPrismaService.userContainer.findMany.mockResolvedValue([]);
|
||||
mockPrismaService.systemContainer.findMany.mockResolvedValue([
|
||||
{
|
||||
id: "system-container-1",
|
||||
gatewayToken: "enc:system-token-1",
|
||||
},
|
||||
]);
|
||||
|
||||
const result = await service.validateContainerToken("system-token-1");
|
||||
|
||||
expect(result).toEqual({
|
||||
type: "system",
|
||||
id: "system-container-1",
|
||||
});
|
||||
});
|
||||
|
||||
it("validateContainerToken returns null for invalid token", async () => {
|
||||
mockPrismaService.userContainer.findMany.mockResolvedValue([
|
||||
{
|
||||
id: "user-container-1",
|
||||
gatewayToken: "enc:user-token-1",
|
||||
},
|
||||
]);
|
||||
|
||||
mockPrismaService.systemContainer.findMany.mockResolvedValue([
|
||||
{
|
||||
id: "system-container-1",
|
||||
gatewayToken: "enc:system-token-1",
|
||||
},
|
||||
]);
|
||||
|
||||
const result = await service.validateContainerToken("no-match");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
285
apps/api/src/agent-config/agent-config.service.ts
Normal file
285
apps/api/src/agent-config/agent-config.service.ts
Normal file
@@ -0,0 +1,285 @@
|
||||
import { Injectable, NotFoundException } from "@nestjs/common";
|
||||
import type { LlmProvider } from "@prisma/client";
|
||||
import { createHash, timingSafeEqual } from "node:crypto";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { CryptoService } from "../crypto/crypto.service";
|
||||
|
||||
// Fallback gateway port used when a container row has no port assigned.
const DEFAULT_GATEWAY_PORT = 18789;
// Fallback primary model when neither the user's agent config nor any
// active provider supplies one.
const DEFAULT_PRIMARY_MODEL = "openai/gpt-4o-mini";

// Which kind of container a gateway bearer token belongs to.
type ContainerType = "user" | "system";

// Result of a successful container-token validation: the container kind
// plus its database id.
export interface ContainerTokenValidation {
  type: ContainerType;
  id: string;
}

// Model id -> empty options object, matching the openclaw.json model map shape.
type OpenClawModelMap = Record<string, Record<string, never>>;

// Per-provider section of the generated openclaw.json.
interface OpenClawProviderConfig {
  // Decrypted API key; omitted entirely when the provider stores none.
  apiKey?: string;
  // Custom endpoint; omitted when the provider uses its default base URL.
  baseUrl?: string;
  models: OpenClawModelMap;
}

// Full shape of the generated openclaw.json configuration document.
interface OpenClawConfig {
  gateway: {
    mode: "local";
    port: number;
    bind: "lan";
    auth: { mode: "token" };
    http: {
      endpoints: {
        chatCompletions: { enabled: true };
      };
    };
  };
  agents: {
    defaults: {
      model: {
        primary: string;
      };
    };
  };
  models: {
    providers: Record<string, OpenClawProviderConfig>;
  };
}
|
||||
|
||||
@Injectable()
export class AgentConfigService {
  constructor(
    private readonly prisma: PrismaService,
    private readonly crypto: CryptoService
  ) {}

  // Generate complete openclaw.json for a user container.
  // Loads the user's agent config, active providers, and container row in
  // parallel, then resolves the primary model with precedence:
  // explicit user config > first model of first active provider > default.
  async generateUserConfig(userId: string): Promise<OpenClawConfig> {
    const [userAgentConfig, providers, userContainer] = await Promise.all([
      this.prisma.userAgentConfig.findUnique({
        where: { userId },
      }),
      this.prisma.llmProvider.findMany({
        where: {
          userId,
          isActive: true,
        },
        orderBy: {
          // Oldest provider first, so primary-model fallback is deterministic.
          createdAt: "asc",
        },
      }),
      this.prisma.userContainer.findUnique({
        where: { userId },
      }),
    ]);

    if (!userContainer) {
      throw new NotFoundException(`User container not found for user ${userId}`);
    }

    const primaryModel =
      userAgentConfig?.primaryModel ??
      this.resolvePrimaryModelFromProviders(providers) ??
      DEFAULT_PRIMARY_MODEL;

    return this.buildOpenClawConfig(primaryModel, userContainer.gatewayPort, providers);
  }

  // Generate config for a system container.
  // System containers carry their own primaryModel and get no user providers.
  async generateSystemConfig(containerId: string): Promise<OpenClawConfig> {
    const systemContainer = await this.prisma.systemContainer.findUnique({
      where: { id: containerId },
    });

    if (!systemContainer) {
      throw new NotFoundException(`System container ${containerId} not found`);
    }

    return this.buildOpenClawConfig(
      // `||` (not `??`) so an empty-string primaryModel also falls back.
      systemContainer.primaryModel || DEFAULT_PRIMARY_MODEL,
      systemContainer.gatewayPort,
      []
    );
  }

  // Dispatch config generation by container type; for user containers this
  // resolves the owning userId first, then delegates to generateUserConfig.
  async generateConfigForContainer(
    type: ContainerType,
    containerId: string
  ): Promise<OpenClawConfig> {
    if (type === "system") {
      return this.generateSystemConfig(containerId);
    }

    const userContainer = await this.prisma.userContainer.findUnique({
      where: { id: containerId },
      select: { userId: true },
    });

    if (!userContainer) {
      throw new NotFoundException(`User container ${containerId} not found`);
    }

    return this.generateUserConfig(userContainer.userId);
  }

  // Validate a container's bearer token.
  // Checks user containers first, then system containers; returns null when
  // no token matches. NOTE(review): both loops intentionally scan every row
  // without early exit and compare via hashed timingSafeEqual — presumably to
  // keep work independent of where a match occurs; confirm before "optimizing".
  async validateContainerToken(token: string): Promise<ContainerTokenValidation | null> {
    if (!token) {
      return null;
    }

    const [userContainers, systemContainers] = await Promise.all([
      this.prisma.userContainer.findMany({
        select: {
          id: true,
          gatewayToken: true,
        },
      }),
      this.prisma.systemContainer.findMany({
        select: {
          id: true,
          gatewayToken: true,
        },
      }),
    ]);

    let match: ContainerTokenValidation | null = null;

    for (const container of userContainers) {
      const storedToken = this.decryptContainerToken(container.gatewayToken);
      // First match wins; later matches are ignored but still decrypted/compared.
      if (!match && storedToken && this.tokensEqual(storedToken, token)) {
        match = { type: "user", id: container.id };
      }
    }

    for (const container of systemContainers) {
      const storedToken = this.decryptContainerToken(container.gatewayToken);
      if (!match && storedToken && this.tokensEqual(storedToken, token)) {
        match = { type: "system", id: container.id };
      }
    }

    return match;
  }

  // Assemble the openclaw.json document from a resolved primary model, a
  // gateway port (null falls back to DEFAULT_GATEWAY_PORT), and providers.
  private buildOpenClawConfig(
    primaryModel: string,
    gatewayPort: number | null,
    providers: LlmProvider[]
  ): OpenClawConfig {
    return {
      gateway: {
        mode: "local",
        port: gatewayPort ?? DEFAULT_GATEWAY_PORT,
        bind: "lan",
        auth: { mode: "token" },
        http: {
          endpoints: {
            chatCompletions: { enabled: true },
          },
        },
      },
      agents: {
        defaults: {
          model: {
            primary: primaryModel,
          },
        },
      },
      models: {
        providers: this.buildProviderConfig(providers),
      },
    };
  }

  // Map provider rows to the openclaw providers section, keyed by provider
  // name. apiKey/baseUrl are only emitted when present; apiKey is decrypted
  // if stored encrypted. Duplicate provider names: the later row overwrites.
  private buildProviderConfig(providers: LlmProvider[]): Record<string, OpenClawProviderConfig> {
    const providerConfig: Record<string, OpenClawProviderConfig> = {};

    for (const provider of providers) {
      const config: OpenClawProviderConfig = {
        models: this.extractModels(provider.models),
      };

      const apiKey = this.decryptIfNeeded(provider.apiKey);
      if (apiKey) {
        config.apiKey = apiKey;
      }

      if (provider.baseUrl) {
        config.baseUrl = provider.baseUrl;
      }

      providerConfig[provider.name] = config;
    }

    return providerConfig;
  }

  // Normalize a provider's stored `models` JSON (string entries or objects
  // with a string `id`) into the openclaw model map; anything else is dropped.
  private extractModels(models: unknown): OpenClawModelMap {
    const modelMap: OpenClawModelMap = {};

    if (!Array.isArray(models)) {
      return modelMap;
    }

    for (const modelEntry of models) {
      if (typeof modelEntry === "string") {
        modelMap[modelEntry] = {};
        continue;
      }

      if (this.hasModelId(modelEntry)) {
        modelMap[modelEntry.id] = {};
      }
    }

    return modelMap;
  }

  // Pick "provider/model" from the first provider (createdAt asc) that has
  // at least one model; null when no provider does.
  private resolvePrimaryModelFromProviders(providers: LlmProvider[]): string | null {
    for (const provider of providers) {
      const modelIds = Object.keys(this.extractModels(provider.models));
      const firstModelId = modelIds[0];

      if (firstModelId) {
        return `${provider.name}/${firstModelId}`;
      }
    }

    return null;
  }

  // Decrypt a value only when CryptoService recognizes it as encrypted;
  // plaintext values pass through, empty/null become undefined.
  private decryptIfNeeded(value: string | null | undefined): string | undefined {
    if (!value) {
      return undefined;
    }

    if (this.crypto.isEncrypted(value)) {
      return this.crypto.decrypt(value);
    }

    return value;
  }

  // Like decryptIfNeeded, but a decryption failure yields null instead of
  // throwing — a corrupt stored token must not break validation of others.
  private decryptContainerToken(value: string): string | null {
    try {
      return this.decryptIfNeeded(value) ?? null;
    } catch {
      return null;
    }
  }

  // Constant-time token comparison: hash both sides to fixed length first,
  // since timingSafeEqual requires equal-length buffers.
  private tokensEqual(left: string, right: string): boolean {
    const leftDigest = createHash("sha256").update(left, "utf8").digest();
    const rightDigest = createHash("sha256").update(right, "utf8").digest();
    return timingSafeEqual(leftDigest, rightDigest);
  }

  // Type guard: entry is an object carrying a string `id`.
  private hasModelId(modelEntry: unknown): modelEntry is { id: string } {
    if (typeof modelEntry !== "object" || modelEntry === null || !("id" in modelEntry)) {
      return false;
    }

    return typeof (modelEntry as { id?: unknown }).id === "string";
  }
}
|
||||
102
apps/api/src/agent-memory/agent-memory.controller.spec.ts
Normal file
102
apps/api/src/agent-memory/agent-memory.controller.spec.ts
Normal file
@@ -0,0 +1,102 @@
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { AgentMemoryController } from "./agent-memory.controller";
|
||||
import { AgentMemoryService } from "./agent-memory.service";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
|
||||
describe("AgentMemoryController", () => {
|
||||
let controller: AgentMemoryController;
|
||||
|
||||
const mockAgentMemoryService = {
|
||||
upsert: vi.fn(),
|
||||
findAll: vi.fn(),
|
||||
findOne: vi.fn(),
|
||||
remove: vi.fn(),
|
||||
};
|
||||
|
||||
const mockGuard = { canActivate: vi.fn(() => true) };
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
controllers: [AgentMemoryController],
|
||||
providers: [
|
||||
{
|
||||
provide: AgentMemoryService,
|
||||
useValue: mockAgentMemoryService,
|
||||
},
|
||||
],
|
||||
})
|
||||
.overrideGuard(AuthGuard)
|
||||
.useValue(mockGuard)
|
||||
.overrideGuard(WorkspaceGuard)
|
||||
.useValue(mockGuard)
|
||||
.overrideGuard(PermissionGuard)
|
||||
.useValue(mockGuard)
|
||||
.compile();
|
||||
|
||||
controller = module.get<AgentMemoryController>(AgentMemoryController);
|
||||
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
const workspaceId = "workspace-1";
|
||||
const agentId = "agent-1";
|
||||
const key = "context";
|
||||
|
||||
describe("upsert", () => {
|
||||
it("should upsert a memory entry", async () => {
|
||||
const dto = { value: { foo: "bar" } };
|
||||
const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: dto.value };
|
||||
|
||||
mockAgentMemoryService.upsert.mockResolvedValue(mockEntry);
|
||||
|
||||
const result = await controller.upsert(agentId, key, dto, workspaceId);
|
||||
|
||||
expect(mockAgentMemoryService.upsert).toHaveBeenCalledWith(workspaceId, agentId, key, dto);
|
||||
expect(result).toEqual(mockEntry);
|
||||
});
|
||||
});
|
||||
|
||||
describe("findAll", () => {
|
||||
it("should list all memory entries for an agent", async () => {
|
||||
const mockEntries = [
|
||||
{ id: "mem-1", key: "a", value: 1 },
|
||||
{ id: "mem-2", key: "b", value: 2 },
|
||||
];
|
||||
|
||||
mockAgentMemoryService.findAll.mockResolvedValue(mockEntries);
|
||||
|
||||
const result = await controller.findAll(agentId, workspaceId);
|
||||
|
||||
expect(mockAgentMemoryService.findAll).toHaveBeenCalledWith(workspaceId, agentId);
|
||||
expect(result).toEqual(mockEntries);
|
||||
});
|
||||
});
|
||||
|
||||
describe("findOne", () => {
|
||||
it("should get a single memory entry", async () => {
|
||||
const mockEntry = { id: "mem-1", key, value: "v" };
|
||||
|
||||
mockAgentMemoryService.findOne.mockResolvedValue(mockEntry);
|
||||
|
||||
const result = await controller.findOne(agentId, key, workspaceId);
|
||||
|
||||
expect(mockAgentMemoryService.findOne).toHaveBeenCalledWith(workspaceId, agentId, key);
|
||||
expect(result).toEqual(mockEntry);
|
||||
});
|
||||
});
|
||||
|
||||
describe("remove", () => {
|
||||
it("should delete a memory entry", async () => {
|
||||
const mockResponse = { message: "Memory entry deleted successfully" };
|
||||
|
||||
mockAgentMemoryService.remove.mockResolvedValue(mockResponse);
|
||||
|
||||
const result = await controller.remove(agentId, key, workspaceId);
|
||||
|
||||
expect(mockAgentMemoryService.remove).toHaveBeenCalledWith(workspaceId, agentId, key);
|
||||
expect(result).toEqual(mockResponse);
|
||||
});
|
||||
});
|
||||
});
|
||||
89
apps/api/src/agent-memory/agent-memory.controller.ts
Normal file
89
apps/api/src/agent-memory/agent-memory.controller.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import {
  Controller,
  Get,
  Put,
  Delete,
  Body,
  Param,
  UseGuards,
  HttpCode,
  HttpStatus,
} from "@nestjs/common";
import { AgentMemoryService } from "./agent-memory.service";
import { UpsertAgentMemoryDto } from "./dto";
import { AuthGuard } from "../auth/guards/auth.guard";
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
import { Workspace, Permission, RequirePermission } from "../common/decorators";

/**
 * Controller for per-agent key/value memory endpoints.
 * All endpoints require authentication and workspace context; every handler
 * is a thin delegation to AgentMemoryService with (workspaceId, agentId, key).
 *
 * Guards are applied in order:
 * 1. AuthGuard - Verifies user authentication
 * 2. WorkspaceGuard - Validates workspace access
 * 3. PermissionGuard - Checks role-based permissions
 */
@Controller("agents/:agentId/memory")
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
export class AgentMemoryController {
  constructor(private readonly agentMemoryService: AgentMemoryService) {}

  /**
   * PUT /api/agents/:agentId/memory/:key
   * Upsert a memory entry for an agent (creates or replaces the value).
   * Requires: MEMBER role or higher
   */
  @Put(":key")
  @RequirePermission(Permission.WORKSPACE_MEMBER)
  async upsert(
    @Param("agentId") agentId: string,
    @Param("key") key: string,
    @Body() dto: UpsertAgentMemoryDto,
    @Workspace() workspaceId: string
  ) {
    return this.agentMemoryService.upsert(workspaceId, agentId, key, dto);
  }

  /**
   * GET /api/agents/:agentId/memory
   * List all memory entries for an agent
   * Requires: Any workspace member (including GUEST)
   */
  @Get()
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findAll(@Param("agentId") agentId: string, @Workspace() workspaceId: string) {
    return this.agentMemoryService.findAll(workspaceId, agentId);
  }

  /**
   * GET /api/agents/:agentId/memory/:key
   * Get a single memory entry by key (404 when the key does not exist).
   * Requires: Any workspace member (including GUEST)
   */
  @Get(":key")
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findOne(
    @Param("agentId") agentId: string,
    @Param("key") key: string,
    @Workspace() workspaceId: string
  ) {
    return this.agentMemoryService.findOne(workspaceId, agentId, key);
  }

  /**
   * DELETE /api/agents/:agentId/memory/:key
   * Remove a memory entry.
   * Responds 200 (not the default 204 for DELETE) so the body message is kept.
   * Requires: MEMBER role or higher
   */
  @Delete(":key")
  @HttpCode(HttpStatus.OK)
  @RequirePermission(Permission.WORKSPACE_MEMBER)
  async remove(
    @Param("agentId") agentId: string,
    @Param("key") key: string,
    @Workspace() workspaceId: string
  ) {
    return this.agentMemoryService.remove(workspaceId, agentId, key);
  }
}
|
||||
198
apps/api/src/agent-memory/agent-memory.integration.spec.ts
Normal file
198
apps/api/src/agent-memory/agent-memory.integration.spec.ts
Normal file
@@ -0,0 +1,198 @@
|
||||
import { beforeAll, beforeEach, describe, expect, it, afterAll } from "vitest";
import { randomUUID as uuid } from "crypto";
import { Test, TestingModule } from "@nestjs/testing";
import { NotFoundException } from "@nestjs/common";
import { PrismaClient } from "@prisma/client";
import { AgentMemoryService } from "./agent-memory.service";
import { PrismaService } from "../prisma/prisma.service";

// Opt-in: these tests hit a real database and only run when both
// RUN_DB_TESTS=true and DATABASE_URL are set; otherwise the suite is skipped.
const shouldRunDbIntegrationTests =
  process.env.RUN_DB_TESTS === "true" && Boolean(process.env.DATABASE_URL);
const describeFn = shouldRunDbIntegrationTests ? describe : describe.skip;

// Create a workspace plus its owning user in one nested write.
// The Date.now() suffix keeps names/emails unique across repeated runs.
async function createWorkspace(
  prisma: PrismaClient,
  label: string
): Promise<{ workspaceId: string; ownerId: string }> {
  const workspace = await prisma.workspace.create({
    data: {
      name: `${label} ${Date.now()}`,
      owner: {
        create: {
          email: `${label.toLowerCase().replace(/\s+/g, "-")}-${Date.now()}@example.com`,
          name: `${label} Owner`,
        },
      },
    },
  });

  return {
    workspaceId: workspace.id,
    ownerId: workspace.ownerId,
  };
}

describeFn("AgentMemoryService Integration", () => {
  let moduleRef: TestingModule;
  let prisma: PrismaClient;
  let service: AgentMemoryService;
  // Flag checked by every hook/test so that a failed beforeAll does not
  // cascade into confusing secondary failures.
  let setupComplete = false;

  // Two workspaces so tests can verify cross-workspace isolation.
  let workspaceAId: string;
  let workspaceAOwnerId: string;
  let workspaceBId: string;
  let workspaceBOwnerId: string;

  beforeAll(async () => {
    prisma = new PrismaClient();
    await prisma.$connect();

    const workspaceA = await createWorkspace(prisma, "Agent Memory Integration A");
    workspaceAId = workspaceA.workspaceId;
    workspaceAOwnerId = workspaceA.ownerId;

    const workspaceB = await createWorkspace(prisma, "Agent Memory Integration B");
    workspaceBId = workspaceB.workspaceId;
    workspaceBOwnerId = workspaceB.ownerId;

    // Wire the real PrismaClient in place of PrismaService so the service
    // under test talks to the actual database.
    moduleRef = await Test.createTestingModule({
      providers: [
        AgentMemoryService,
        {
          provide: PrismaService,
          useValue: prisma,
        },
      ],
    }).compile();

    service = moduleRef.get<AgentMemoryService>(AgentMemoryService);
    setupComplete = true;
  });

  // Start each test from a clean slate: drop memory rows in both workspaces.
  beforeEach(async () => {
    if (!setupComplete) {
      return;
    }

    await prisma.agentMemory.deleteMany({
      where: {
        workspaceId: {
          in: [workspaceAId, workspaceBId],
        },
      },
    });
  });

  // Teardown order matters: memory rows, then workspaces, then owner users
  // (FK dependencies), then close the module and disconnect.
  afterAll(async () => {
    if (!prisma) {
      return;
    }

    // Filter out ids that never got assigned (e.g. beforeAll failed midway).
    const workspaceIds = [workspaceAId, workspaceBId].filter(
      (id): id is string => typeof id === "string"
    );
    const ownerIds = [workspaceAOwnerId, workspaceBOwnerId].filter(
      (id): id is string => typeof id === "string"
    );

    if (workspaceIds.length > 0) {
      await prisma.agentMemory.deleteMany({
        where: {
          workspaceId: {
            in: workspaceIds,
          },
        },
      });
      await prisma.workspace.deleteMany({ where: { id: { in: workspaceIds } } });
    }

    if (ownerIds.length > 0) {
      await prisma.user.deleteMany({ where: { id: { in: ownerIds } } });
    }

    if (moduleRef) {
      await moduleRef.close();
    }
    await prisma.$disconnect();
  });

  it("upserts and lists memory entries", async () => {
    if (!setupComplete) {
      return;
    }

    const agentId = `agent-${uuid()}`;

    const entry = await service.upsert(workspaceAId, agentId, "session-context", {
      value: { intent: "create-tests", depth: "integration" },
    });

    expect(entry.workspaceId).toBe(workspaceAId);
    expect(entry.agentId).toBe(agentId);
    expect(entry.key).toBe("session-context");

    const listed = await service.findAll(workspaceAId, agentId);

    expect(listed).toHaveLength(1);
    expect(listed[0]?.id).toBe(entry.id);
    expect(listed[0]?.value).toMatchObject({ intent: "create-tests" });
  });

  it("updates existing key via upsert without creating duplicates", async () => {
    if (!setupComplete) {
      return;
    }

    const agentId = `agent-${uuid()}`;

    const first = await service.upsert(workspaceAId, agentId, "preferences", {
      value: { model: "fast" },
    });

    const second = await service.upsert(workspaceAId, agentId, "preferences", {
      value: { model: "accurate" },
    });

    // Same row id proves update-in-place rather than insert.
    expect(second.id).toBe(first.id);
    expect(second.value).toMatchObject({ model: "accurate" });

    const rowCount = await prisma.agentMemory.count({
      where: {
        workspaceId: workspaceAId,
        agentId,
        key: "preferences",
      },
    });

    expect(rowCount).toBe(1);
  });

  it("lists keys in sorted order and isolates by workspace", async () => {
    if (!setupComplete) {
      return;
    }

    const agentId = `agent-${uuid()}`;

    // Insert out of order in A, and a same-key entry in B with a different value.
    await service.upsert(workspaceAId, agentId, "beta", { value: { v: 2 } });
    await service.upsert(workspaceAId, agentId, "alpha", { value: { v: 1 } });
    await service.upsert(workspaceBId, agentId, "alpha", { value: { v: 99 } });

    const workspaceAEntries = await service.findAll(workspaceAId, agentId);
    const workspaceBEntries = await service.findAll(workspaceBId, agentId);

    expect(workspaceAEntries.map((row) => row.key)).toEqual(["alpha", "beta"]);
    expect(workspaceBEntries).toHaveLength(1);
    expect(workspaceBEntries[0]?.value).toMatchObject({ v: 99 });
  });

  it("throws NotFoundException when requesting unknown key", async () => {
    if (!setupComplete) {
      return;
    }

    await expect(service.findOne(workspaceAId, `agent-${uuid()}`, "missing")).rejects.toThrow(
      NotFoundException
    );
  });
});
|
||||
13
apps/api/src/agent-memory/agent-memory.module.ts
Normal file
13
apps/api/src/agent-memory/agent-memory.module.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { Module } from "@nestjs/common";
import { AgentMemoryController } from "./agent-memory.controller";
import { AgentMemoryService } from "./agent-memory.service";
import { PrismaModule } from "../prisma/prisma.module";
import { AuthModule } from "../auth/auth.module";

/**
 * Module wiring for per-agent key/value memory.
 * Imports PrismaModule for persistence and AuthModule for the controller's
 * guards; exports AgentMemoryService for use by other feature modules.
 */
@Module({
  imports: [PrismaModule, AuthModule],
  controllers: [AgentMemoryController],
  providers: [AgentMemoryService],
  exports: [AgentMemoryService],
})
export class AgentMemoryModule {}
|
||||
126
apps/api/src/agent-memory/agent-memory.service.spec.ts
Normal file
126
apps/api/src/agent-memory/agent-memory.service.spec.ts
Normal file
@@ -0,0 +1,126 @@
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { AgentMemoryService } from "./agent-memory.service";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { NotFoundException } from "@nestjs/common";
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
|
||||
describe("AgentMemoryService", () => {
|
||||
let service: AgentMemoryService;
|
||||
|
||||
const mockPrismaService = {
|
||||
agentMemory: {
|
||||
upsert: vi.fn(),
|
||||
findMany: vi.fn(),
|
||||
findUnique: vi.fn(),
|
||||
delete: vi.fn(),
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
AgentMemoryService,
|
||||
{
|
||||
provide: PrismaService,
|
||||
useValue: mockPrismaService,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<AgentMemoryService>(AgentMemoryService);
|
||||
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
const workspaceId = "workspace-1";
|
||||
const agentId = "agent-1";
|
||||
const key = "session-context";
|
||||
|
||||
describe("upsert", () => {
|
||||
it("should upsert a memory entry", async () => {
|
||||
const dto = { value: { data: "some context" } };
|
||||
const mockEntry = {
|
||||
id: "mem-1",
|
||||
workspaceId,
|
||||
agentId,
|
||||
key,
|
||||
value: dto.value,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
mockPrismaService.agentMemory.upsert.mockResolvedValue(mockEntry);
|
||||
|
||||
const result = await service.upsert(workspaceId, agentId, key, dto);
|
||||
|
||||
expect(mockPrismaService.agentMemory.upsert).toHaveBeenCalledWith({
|
||||
where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
|
||||
create: { workspaceId, agentId, key, value: dto.value },
|
||||
update: { value: dto.value },
|
||||
});
|
||||
expect(result).toEqual(mockEntry);
|
||||
});
|
||||
});
|
||||
|
||||
describe("findAll", () => {
|
||||
it("should return all memory entries for an agent", async () => {
|
||||
const mockEntries = [
|
||||
{ id: "mem-1", key: "a", value: 1 },
|
||||
{ id: "mem-2", key: "b", value: 2 },
|
||||
];
|
||||
|
||||
mockPrismaService.agentMemory.findMany.mockResolvedValue(mockEntries);
|
||||
|
||||
const result = await service.findAll(workspaceId, agentId);
|
||||
|
||||
expect(mockPrismaService.agentMemory.findMany).toHaveBeenCalledWith({
|
||||
where: { workspaceId, agentId },
|
||||
orderBy: { key: "asc" },
|
||||
});
|
||||
expect(result).toEqual(mockEntries);
|
||||
});
|
||||
});
|
||||
|
||||
describe("findOne", () => {
|
||||
it("should return a memory entry by key", async () => {
|
||||
const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: "ctx" };
|
||||
|
||||
mockPrismaService.agentMemory.findUnique.mockResolvedValue(mockEntry);
|
||||
|
||||
const result = await service.findOne(workspaceId, agentId, key);
|
||||
|
||||
expect(mockPrismaService.agentMemory.findUnique).toHaveBeenCalledWith({
|
||||
where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
|
||||
});
|
||||
expect(result).toEqual(mockEntry);
|
||||
});
|
||||
|
||||
it("should throw NotFoundException when key not found", async () => {
|
||||
mockPrismaService.agentMemory.findUnique.mockResolvedValue(null);
|
||||
|
||||
await expect(service.findOne(workspaceId, agentId, key)).rejects.toThrow(NotFoundException);
|
||||
});
|
||||
});
|
||||
|
||||
describe("remove", () => {
|
||||
it("should delete a memory entry", async () => {
|
||||
const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: "x" };
|
||||
|
||||
mockPrismaService.agentMemory.findUnique.mockResolvedValue(mockEntry);
|
||||
mockPrismaService.agentMemory.delete.mockResolvedValue(mockEntry);
|
||||
|
||||
const result = await service.remove(workspaceId, agentId, key);
|
||||
|
||||
expect(mockPrismaService.agentMemory.delete).toHaveBeenCalledWith({
|
||||
where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
|
||||
});
|
||||
expect(result).toEqual({ message: "Memory entry deleted successfully" });
|
||||
});
|
||||
|
||||
it("should throw NotFoundException when key not found", async () => {
|
||||
mockPrismaService.agentMemory.findUnique.mockResolvedValue(null);
|
||||
|
||||
await expect(service.remove(workspaceId, agentId, key)).rejects.toThrow(NotFoundException);
|
||||
});
|
||||
});
|
||||
});
|
||||
79
apps/api/src/agent-memory/agent-memory.service.ts
Normal file
79
apps/api/src/agent-memory/agent-memory.service.ts
Normal file
@@ -0,0 +1,79 @@
|
||||
import { Injectable, NotFoundException } from "@nestjs/common";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { Prisma } from "@prisma/client";
|
||||
import type { UpsertAgentMemoryDto } from "./dto";
|
||||
|
||||
@Injectable()
|
||||
export class AgentMemoryService {
|
||||
constructor(private readonly prisma: PrismaService) {}
|
||||
|
||||
/**
|
||||
* Upsert a memory entry for an agent.
|
||||
*/
|
||||
async upsert(workspaceId: string, agentId: string, key: string, dto: UpsertAgentMemoryDto) {
|
||||
return this.prisma.agentMemory.upsert({
|
||||
where: {
|
||||
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||
},
|
||||
create: {
|
||||
workspaceId,
|
||||
agentId,
|
||||
key,
|
||||
value: dto.value as Prisma.InputJsonValue,
|
||||
},
|
||||
update: {
|
||||
value: dto.value as Prisma.InputJsonValue,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* List all memory entries for an agent in a workspace.
|
||||
*/
|
||||
async findAll(workspaceId: string, agentId: string) {
|
||||
return this.prisma.agentMemory.findMany({
|
||||
where: { workspaceId, agentId },
|
||||
orderBy: { key: "asc" },
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single memory entry by key.
|
||||
*/
|
||||
async findOne(workspaceId: string, agentId: string, key: string) {
|
||||
const entry = await this.prisma.agentMemory.findUnique({
|
||||
where: {
|
||||
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||
},
|
||||
});
|
||||
|
||||
if (!entry) {
|
||||
throw new NotFoundException(`Memory key "${key}" not found for agent "${agentId}"`);
|
||||
}
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a memory entry by key.
|
||||
*/
|
||||
async remove(workspaceId: string, agentId: string, key: string) {
|
||||
const entry = await this.prisma.agentMemory.findUnique({
|
||||
where: {
|
||||
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||
},
|
||||
});
|
||||
|
||||
if (!entry) {
|
||||
throw new NotFoundException(`Memory key "${key}" not found for agent "${agentId}"`);
|
||||
}
|
||||
|
||||
await this.prisma.agentMemory.delete({
|
||||
where: {
|
||||
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||
},
|
||||
});
|
||||
|
||||
return { message: "Memory entry deleted successfully" };
|
||||
}
|
||||
}
|
||||
1
apps/api/src/agent-memory/dto/index.ts
Normal file
1
apps/api/src/agent-memory/dto/index.ts
Normal file
@@ -0,0 +1 @@
|
||||
// Barrel file re-exporting the agent-memory DTOs.
export * from "./upsert-agent-memory.dto";
|
||||
10
apps/api/src/agent-memory/dto/upsert-agent-memory.dto.ts
Normal file
10
apps/api/src/agent-memory/dto/upsert-agent-memory.dto.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { IsNotEmpty } from "class-validator";

/**
 * DTO for upserting an agent memory entry.
 * The value accepts any JSON-serializable data.
 *
 * NOTE(review): @IsNotEmpty rejects "", null, and undefined but accepts 0 and
 * false — so the JSON value "" cannot be stored while 0/false can; confirm
 * rejecting empty strings is the intended contract.
 */
export class UpsertAgentMemoryDto {
  @IsNotEmpty({ message: "value must not be empty" })
  value!: unknown;
}
|
||||
@@ -2,6 +2,7 @@ import { Module } from "@nestjs/common";
|
||||
import { APP_INTERCEPTOR, APP_GUARD } from "@nestjs/core";
|
||||
import { ThrottlerModule } from "@nestjs/throttler";
|
||||
import { BullModule } from "@nestjs/bullmq";
|
||||
import { ScheduleModule } from "@nestjs/schedule";
|
||||
import { ThrottlerValkeyStorageService, ThrottlerApiKeyGuard } from "./common/throttler";
|
||||
import { CsrfGuard } from "./common/guards/csrf.guard";
|
||||
import { CsrfService } from "./common/services/csrf.service";
|
||||
@@ -27,6 +28,8 @@ import { LlmUsageModule } from "./llm-usage/llm-usage.module";
|
||||
import { BrainModule } from "./brain/brain.module";
|
||||
import { CronModule } from "./cron/cron.module";
|
||||
import { AgentTasksModule } from "./agent-tasks/agent-tasks.module";
|
||||
import { FindingsModule } from "./findings/findings.module";
|
||||
import { AgentMemoryModule } from "./agent-memory/agent-memory.module";
|
||||
import { ValkeyModule } from "./valkey/valkey.module";
|
||||
import { BullMqModule } from "./bullmq/bullmq.module";
|
||||
import { StitcherModule } from "./stitcher/stitcher.module";
|
||||
@@ -37,6 +40,7 @@ import { JobStepsModule } from "./job-steps/job-steps.module";
|
||||
import { CoordinatorIntegrationModule } from "./coordinator-integration/coordinator-integration.module";
|
||||
import { FederationModule } from "./federation/federation.module";
|
||||
import { CredentialsModule } from "./credentials/credentials.module";
|
||||
import { CryptoModule } from "./crypto/crypto.module";
|
||||
import { MosaicTelemetryModule } from "./mosaic-telemetry";
|
||||
import { SpeechModule } from "./speech/speech.module";
|
||||
import { DashboardModule } from "./dashboard/dashboard.module";
|
||||
@@ -45,7 +49,16 @@ import { PersonalitiesModule } from "./personalities/personalities.module";
|
||||
import { WorkspacesModule } from "./workspaces/workspaces.module";
|
||||
import { AdminModule } from "./admin/admin.module";
|
||||
import { TeamsModule } from "./teams/teams.module";
|
||||
import { ImportModule } from "./import/import.module";
|
||||
import { ConversationArchiveModule } from "./conversation-archive/conversation-archive.module";
|
||||
import { RlsContextInterceptor } from "./common/interceptors/rls-context.interceptor";
|
||||
import { AgentConfigModule } from "./agent-config/agent-config.module";
|
||||
import { ContainerLifecycleModule } from "./container-lifecycle/container-lifecycle.module";
|
||||
import { ContainerReaperModule } from "./container-reaper/container-reaper.module";
|
||||
import { FleetSettingsModule } from "./fleet-settings/fleet-settings.module";
|
||||
import { OnboardingModule } from "./onboarding/onboarding.module";
|
||||
import { ChatProxyModule } from "./chat-proxy/chat-proxy.module";
|
||||
import { OrchestratorModule } from "./orchestrator/orchestrator.module";
|
||||
|
||||
@Module({
|
||||
imports: [
|
||||
@@ -76,6 +89,7 @@ import { RlsContextInterceptor } from "./common/interceptors/rls-context.interce
|
||||
};
|
||||
})(),
|
||||
}),
|
||||
ScheduleModule.forRoot(),
|
||||
TelemetryModule,
|
||||
PrismaModule,
|
||||
DatabaseModule,
|
||||
@@ -99,12 +113,15 @@ import { RlsContextInterceptor } from "./common/interceptors/rls-context.interce
|
||||
BrainModule,
|
||||
CronModule,
|
||||
AgentTasksModule,
|
||||
FindingsModule,
|
||||
AgentMemoryModule,
|
||||
RunnerJobsModule,
|
||||
JobEventsModule,
|
||||
JobStepsModule,
|
||||
CoordinatorIntegrationModule,
|
||||
FederationModule,
|
||||
CredentialsModule,
|
||||
CryptoModule,
|
||||
MosaicTelemetryModule,
|
||||
SpeechModule,
|
||||
DashboardModule,
|
||||
@@ -113,6 +130,15 @@ import { RlsContextInterceptor } from "./common/interceptors/rls-context.interce
|
||||
WorkspacesModule,
|
||||
AdminModule,
|
||||
TeamsModule,
|
||||
ImportModule,
|
||||
ConversationArchiveModule,
|
||||
AgentConfigModule,
|
||||
ContainerLifecycleModule,
|
||||
ContainerReaperModule,
|
||||
FleetSettingsModule,
|
||||
OnboardingModule,
|
||||
ChatProxyModule,
|
||||
OrchestratorModule,
|
||||
],
|
||||
controllers: [AppController, CsrfController],
|
||||
providers: [
|
||||
|
||||
102
apps/api/src/chat-proxy/chat-proxy.controller.ts
Normal file
102
apps/api/src/chat-proxy/chat-proxy.controller.ts
Normal file
@@ -0,0 +1,102 @@
|
||||
import {
|
||||
Body,
|
||||
Controller,
|
||||
HttpException,
|
||||
Logger,
|
||||
Post,
|
||||
Req,
|
||||
Res,
|
||||
UnauthorizedException,
|
||||
UseGuards,
|
||||
} from "@nestjs/common";
|
||||
import type { Response } from "express";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import type { MaybeAuthenticatedRequest } from "../auth/types/better-auth-request.interface";
|
||||
import { ChatStreamDto } from "./chat-proxy.dto";
|
||||
import { ChatProxyService } from "./chat-proxy.service";
|
||||
|
||||
@Controller("chat")
|
||||
@UseGuards(AuthGuard)
|
||||
export class ChatProxyController {
|
||||
private readonly logger = new Logger(ChatProxyController.name);
|
||||
|
||||
constructor(private readonly chatProxyService: ChatProxyService) {}
|
||||
|
||||
// POST /api/chat/stream
|
||||
// Request: { messages: Array<{role, content}> }
|
||||
// Response: SSE stream of chat completion events
|
||||
@Post("stream")
|
||||
async streamChat(
|
||||
@Body() body: ChatStreamDto,
|
||||
@Req() req: MaybeAuthenticatedRequest,
|
||||
@Res() res: Response
|
||||
): Promise<void> {
|
||||
const userId = req.user?.id;
|
||||
if (!userId) {
|
||||
throw new UnauthorizedException("No authenticated user found on request");
|
||||
}
|
||||
|
||||
const abortController = new AbortController();
|
||||
req.once("close", () => {
|
||||
abortController.abort();
|
||||
});
|
||||
|
||||
res.setHeader("Content-Type", "text/event-stream");
|
||||
res.setHeader("Cache-Control", "no-cache");
|
||||
res.setHeader("Connection", "keep-alive");
|
||||
res.setHeader("X-Accel-Buffering", "no");
|
||||
|
||||
try {
|
||||
const upstreamResponse = await this.chatProxyService.proxyChat(
|
||||
userId,
|
||||
body.messages,
|
||||
abortController.signal
|
||||
);
|
||||
|
||||
const upstreamContentType = upstreamResponse.headers.get("content-type");
|
||||
if (upstreamContentType) {
|
||||
res.setHeader("Content-Type", upstreamContentType);
|
||||
}
|
||||
|
||||
if (!upstreamResponse.body) {
|
||||
throw new Error("OpenClaw response did not include a stream body");
|
||||
}
|
||||
|
||||
for await (const chunk of upstreamResponse.body as unknown as AsyncIterable<Uint8Array>) {
|
||||
if (res.writableEnded || res.destroyed) {
|
||||
break;
|
||||
}
|
||||
|
||||
res.write(Buffer.from(chunk));
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
this.logStreamError(error);
|
||||
|
||||
if (!res.writableEnded && !res.destroyed) {
|
||||
res.write("event: error\n");
|
||||
res.write(`data: ${JSON.stringify({ error: this.toSafeClientMessage(error) })}\n\n`);
|
||||
}
|
||||
} finally {
|
||||
if (!res.writableEnded && !res.destroyed) {
|
||||
res.end();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private toSafeClientMessage(error: unknown): string {
|
||||
if (error instanceof HttpException && error.getStatus() < 500) {
|
||||
return "Chat request was rejected";
|
||||
}
|
||||
|
||||
return "Chat stream failed";
|
||||
}
|
||||
|
||||
private logStreamError(error: unknown): void {
|
||||
if (error instanceof Error) {
|
||||
this.logger.warn(`Chat stream failed: ${error.message}`);
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.warn(`Chat stream failed: ${String(error)}`);
|
||||
}
|
||||
}
|
||||
25
apps/api/src/chat-proxy/chat-proxy.dto.ts
Normal file
25
apps/api/src/chat-proxy/chat-proxy.dto.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import { Type } from "class-transformer";
|
||||
import { ArrayMinSize, IsArray, IsNotEmpty, IsString, ValidateNested } from "class-validator";
|
||||
|
||||
export interface ChatMessage {
|
||||
role: string;
|
||||
content: string;
|
||||
}
|
||||
|
||||
export class ChatMessageDto implements ChatMessage {
|
||||
@IsString({ message: "role must be a string" })
|
||||
@IsNotEmpty({ message: "role is required" })
|
||||
role!: string;
|
||||
|
||||
@IsString({ message: "content must be a string" })
|
||||
@IsNotEmpty({ message: "content is required" })
|
||||
content!: string;
|
||||
}
|
||||
|
||||
export class ChatStreamDto {
|
||||
@IsArray({ message: "messages must be an array" })
|
||||
@ArrayMinSize(1, { message: "messages must contain at least one message" })
|
||||
@ValidateNested({ each: true })
|
||||
@Type(() => ChatMessageDto)
|
||||
messages!: ChatMessageDto[];
|
||||
}
|
||||
15
apps/api/src/chat-proxy/chat-proxy.module.ts
Normal file
15
apps/api/src/chat-proxy/chat-proxy.module.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { AuthModule } from "../auth/auth.module";
|
||||
import { AgentConfigModule } from "../agent-config/agent-config.module";
|
||||
import { ContainerLifecycleModule } from "../container-lifecycle/container-lifecycle.module";
|
||||
import { PrismaModule } from "../prisma/prisma.module";
|
||||
import { ChatProxyController } from "./chat-proxy.controller";
|
||||
import { ChatProxyService } from "./chat-proxy.service";
|
||||
|
||||
@Module({
|
||||
imports: [AuthModule, PrismaModule, ContainerLifecycleModule, AgentConfigModule],
|
||||
controllers: [ChatProxyController],
|
||||
providers: [ChatProxyService],
|
||||
exports: [ChatProxyService],
|
||||
})
|
||||
export class ChatProxyModule {}
|
||||
108
apps/api/src/chat-proxy/chat-proxy.service.spec.ts
Normal file
108
apps/api/src/chat-proxy/chat-proxy.service.spec.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
import { ServiceUnavailableException } from "@nestjs/common";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { ChatProxyService } from "./chat-proxy.service";
|
||||
|
||||
describe("ChatProxyService", () => {
|
||||
const userId = "user-123";
|
||||
|
||||
const prisma = {
|
||||
userAgentConfig: {
|
||||
findUnique: vi.fn(),
|
||||
},
|
||||
};
|
||||
|
||||
const containerLifecycle = {
|
||||
ensureRunning: vi.fn(),
|
||||
touch: vi.fn(),
|
||||
};
|
||||
|
||||
let service: ChatProxyService;
|
||||
let fetchMock: ReturnType<typeof vi.fn>;
|
||||
|
||||
beforeEach(() => {
|
||||
fetchMock = vi.fn();
|
||||
vi.stubGlobal("fetch", fetchMock);
|
||||
service = new ChatProxyService(prisma as never, containerLifecycle as never);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.unstubAllGlobals();
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
describe("getContainerUrl", () => {
|
||||
it("calls ensureRunning and touch for the user", async () => {
|
||||
containerLifecycle.ensureRunning.mockResolvedValue({
|
||||
url: "http://mosaic-user-user-123:19000",
|
||||
token: "gateway-token",
|
||||
});
|
||||
containerLifecycle.touch.mockResolvedValue(undefined);
|
||||
|
||||
const url = await service.getContainerUrl(userId);
|
||||
|
||||
expect(url).toBe("http://mosaic-user-user-123:19000");
|
||||
expect(containerLifecycle.ensureRunning).toHaveBeenCalledWith(userId);
|
||||
expect(containerLifecycle.touch).toHaveBeenCalledWith(userId);
|
||||
});
|
||||
});
|
||||
|
||||
describe("proxyChat", () => {
|
||||
it("forwards the request to the user's OpenClaw container", async () => {
|
||||
containerLifecycle.ensureRunning.mockResolvedValue({
|
||||
url: "http://mosaic-user-user-123:19000",
|
||||
token: "gateway-token",
|
||||
});
|
||||
containerLifecycle.touch.mockResolvedValue(undefined);
|
||||
fetchMock.mockResolvedValue(new Response("event: token\ndata: hello\n\n"));
|
||||
|
||||
const messages = [{ role: "user", content: "Hello from Mosaic" }];
|
||||
const response = await service.proxyChat(userId, messages);
|
||||
|
||||
expect(response).toBeInstanceOf(Response);
|
||||
expect(fetchMock).toHaveBeenCalledWith(
|
||||
"http://mosaic-user-user-123:19000/v1/chat/completions",
|
||||
expect.objectContaining({
|
||||
method: "POST",
|
||||
headers: {
|
||||
Authorization: "Bearer gateway-token",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
})
|
||||
);
|
||||
|
||||
const [, request] = fetchMock.mock.calls[0] as [string, RequestInit];
|
||||
const parsedBody = JSON.parse(String(request.body));
|
||||
expect(parsedBody).toEqual({
|
||||
messages,
|
||||
model: "openclaw:default",
|
||||
stream: true,
|
||||
});
|
||||
});
|
||||
|
||||
it("throws ServiceUnavailableException on connection refused errors", async () => {
|
||||
containerLifecycle.ensureRunning.mockResolvedValue({
|
||||
url: "http://mosaic-user-user-123:19000",
|
||||
token: "gateway-token",
|
||||
});
|
||||
containerLifecycle.touch.mockResolvedValue(undefined);
|
||||
fetchMock.mockRejectedValue(new Error("connect ECONNREFUSED 127.0.0.1:19000"));
|
||||
|
||||
await expect(service.proxyChat(userId, [])).rejects.toBeInstanceOf(
|
||||
ServiceUnavailableException
|
||||
);
|
||||
});
|
||||
|
||||
it("throws ServiceUnavailableException on timeout errors", async () => {
|
||||
containerLifecycle.ensureRunning.mockResolvedValue({
|
||||
url: "http://mosaic-user-user-123:19000",
|
||||
token: "gateway-token",
|
||||
});
|
||||
containerLifecycle.touch.mockResolvedValue(undefined);
|
||||
fetchMock.mockRejectedValue(new Error("The operation was aborted due to timeout"));
|
||||
|
||||
await expect(service.proxyChat(userId, [])).rejects.toBeInstanceOf(
|
||||
ServiceUnavailableException
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
110
apps/api/src/chat-proxy/chat-proxy.service.ts
Normal file
110
apps/api/src/chat-proxy/chat-proxy.service.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
import {
|
||||
BadGatewayException,
|
||||
Injectable,
|
||||
Logger,
|
||||
ServiceUnavailableException,
|
||||
} from "@nestjs/common";
|
||||
import { ContainerLifecycleService } from "../container-lifecycle/container-lifecycle.service";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import type { ChatMessage } from "./chat-proxy.dto";
|
||||
|
||||
const DEFAULT_OPENCLAW_MODEL = "openclaw:default";
|
||||
|
||||
interface ContainerConnection {
|
||||
url: string;
|
||||
token: string;
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
export class ChatProxyService {
|
||||
private readonly logger = new Logger(ChatProxyService.name);
|
||||
|
||||
constructor(
|
||||
private readonly prisma: PrismaService,
|
||||
private readonly containerLifecycle: ContainerLifecycleService
|
||||
) {}
|
||||
|
||||
// Get the user's OpenClaw container URL and mark it active.
|
||||
async getContainerUrl(userId: string): Promise<string> {
|
||||
const { url } = await this.getContainerConnection(userId);
|
||||
return url;
|
||||
}
|
||||
|
||||
// Proxy chat request to OpenClaw.
|
||||
async proxyChat(
|
||||
userId: string,
|
||||
messages: ChatMessage[],
|
||||
signal?: AbortSignal
|
||||
): Promise<Response> {
|
||||
const { url: containerUrl, token: gatewayToken } = await this.getContainerConnection(userId);
|
||||
const model = await this.getPreferredModel(userId);
|
||||
const requestInit: RequestInit = {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${gatewayToken}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
messages,
|
||||
model,
|
||||
stream: true,
|
||||
}),
|
||||
};
|
||||
|
||||
if (signal) {
|
||||
requestInit.signal = signal;
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(`${containerUrl}/v1/chat/completions`, requestInit);
|
||||
|
||||
if (!response.ok) {
|
||||
const detail = await this.readResponseText(response);
|
||||
const status = `${String(response.status)} ${response.statusText}`.trim();
|
||||
this.logger.warn(
|
||||
detail ? `OpenClaw returned ${status}: ${detail}` : `OpenClaw returned ${status}`
|
||||
);
|
||||
throw new BadGatewayException(`OpenClaw returned ${status}`);
|
||||
}
|
||||
|
||||
return response;
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof BadGatewayException) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
this.logger.warn(`Failed to proxy chat request: ${message}`);
|
||||
throw new ServiceUnavailableException("Failed to proxy chat to OpenClaw");
|
||||
}
|
||||
}
|
||||
|
||||
private async getContainerConnection(userId: string): Promise<ContainerConnection> {
|
||||
const connection = await this.containerLifecycle.ensureRunning(userId);
|
||||
await this.containerLifecycle.touch(userId);
|
||||
return connection;
|
||||
}
|
||||
|
||||
private async getPreferredModel(userId: string): Promise<string> {
|
||||
const config = await this.prisma.userAgentConfig.findUnique({
|
||||
where: { userId },
|
||||
select: { primaryModel: true },
|
||||
});
|
||||
|
||||
const primaryModel = config?.primaryModel?.trim();
|
||||
if (!primaryModel) {
|
||||
return DEFAULT_OPENCLAW_MODEL;
|
||||
}
|
||||
|
||||
return primaryModel;
|
||||
}
|
||||
|
||||
private async readResponseText(response: Response): Promise<string | null> {
|
||||
try {
|
||||
const text = (await response.text()).trim();
|
||||
return text.length > 0 ? text : null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -87,6 +87,17 @@ describe("CsrfGuard", () => {
|
||||
});
|
||||
|
||||
describe("State-changing methods requiring CSRF", () => {
|
||||
it("should allow POST with Bearer auth without CSRF token", () => {
|
||||
const context = createContext(
|
||||
"POST",
|
||||
{},
|
||||
{ authorization: "Bearer api-token" },
|
||||
false,
|
||||
"user-123"
|
||||
);
|
||||
expect(guard.canActivate(context)).toBe(true);
|
||||
});
|
||||
|
||||
it("should reject POST without CSRF token", () => {
|
||||
const context = createContext("POST", {}, {}, false, "user-123");
|
||||
expect(() => guard.canActivate(context)).toThrow(ForbiddenException);
|
||||
|
||||
@@ -57,6 +57,11 @@ export class CsrfGuard implements CanActivate {
|
||||
return true;
|
||||
}
|
||||
|
||||
const authHeader = request.headers.authorization;
|
||||
if (typeof authHeader === "string" && authHeader.startsWith("Bearer ")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Get CSRF token from cookie and header
|
||||
const cookies = request.cookies as Record<string, string> | undefined;
|
||||
const cookieToken = cookies?.["csrf-token"];
|
||||
@@ -106,14 +111,9 @@ export class CsrfGuard implements CanActivate {
|
||||
|
||||
throw new ForbiddenException("CSRF token not bound to session");
|
||||
}
|
||||
} else {
|
||||
this.logger.debug({
|
||||
event: "CSRF_SKIP_SESSION_BINDING",
|
||||
method: request.method,
|
||||
path: request.path,
|
||||
reason: "User context not yet available (global guard runs before AuthGuard)",
|
||||
});
|
||||
}
|
||||
// Note: when userId is absent, the double-submit cookie check above is
|
||||
// sufficient CSRF protection. AuthGuard populates request.user afterward.
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,12 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { ConfigModule } from "@nestjs/config";
|
||||
import { PrismaModule } from "../prisma/prisma.module";
|
||||
import { CryptoModule } from "../crypto/crypto.module";
|
||||
import { ContainerLifecycleService } from "./container-lifecycle.service";
|
||||
|
||||
@Module({
|
||||
imports: [ConfigModule, PrismaModule, CryptoModule],
|
||||
providers: [ContainerLifecycleService],
|
||||
exports: [ContainerLifecycleService],
|
||||
})
|
||||
export class ContainerLifecycleModule {}
|
||||
@@ -0,0 +1,593 @@
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import type { ConfigService } from "@nestjs/config";
|
||||
import type { PrismaService } from "../prisma/prisma.service";
|
||||
import type { CryptoService } from "../crypto/crypto.service";
|
||||
|
||||
interface MockUserContainerRecord {
|
||||
id: string;
|
||||
userId: string;
|
||||
containerId: string | null;
|
||||
containerName: string;
|
||||
gatewayPort: number | null;
|
||||
gatewayToken: string;
|
||||
status: string;
|
||||
lastActiveAt: Date | null;
|
||||
idleTimeoutMin: number;
|
||||
config: Record<string, unknown>;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
}
|
||||
|
||||
const dockerMock = vi.hoisted(() => {
|
||||
interface MockDockerContainerState {
|
||||
id: string;
|
||||
name: string;
|
||||
running: boolean;
|
||||
port: number;
|
||||
}
|
||||
|
||||
const containers = new Map<string, MockDockerContainerState>();
|
||||
const handles = new Map<
|
||||
string,
|
||||
{
|
||||
inspect: ReturnType<typeof vi.fn>;
|
||||
start: ReturnType<typeof vi.fn>;
|
||||
stop: ReturnType<typeof vi.fn>;
|
||||
}
|
||||
>();
|
||||
|
||||
const ensureHandle = (id: string) => {
|
||||
const existing = handles.get(id);
|
||||
if (existing) {
|
||||
return existing;
|
||||
}
|
||||
|
||||
const handle = {
|
||||
inspect: vi.fn(async () => {
|
||||
const container = containers.get(id);
|
||||
if (!container) {
|
||||
throw { statusCode: 404 };
|
||||
}
|
||||
|
||||
return {
|
||||
Id: container.id,
|
||||
State: {
|
||||
Running: container.running,
|
||||
},
|
||||
NetworkSettings: {
|
||||
Ports: {
|
||||
"18789/tcp": [{ HostPort: String(container.port) }],
|
||||
},
|
||||
},
|
||||
};
|
||||
}),
|
||||
start: vi.fn(async () => {
|
||||
const container = containers.get(id);
|
||||
if (!container) {
|
||||
throw { statusCode: 404 };
|
||||
}
|
||||
container.running = true;
|
||||
}),
|
||||
stop: vi.fn(async () => {
|
||||
const container = containers.get(id);
|
||||
if (!container) {
|
||||
throw { statusCode: 404 };
|
||||
}
|
||||
container.running = false;
|
||||
}),
|
||||
};
|
||||
|
||||
handles.set(id, handle);
|
||||
return handle;
|
||||
};
|
||||
|
||||
const listContainers = vi.fn(
|
||||
async (options?: { all?: boolean; filters?: { name?: string[] } }) => {
|
||||
const nameFilter = options?.filters?.name?.[0];
|
||||
return [...containers.values()]
|
||||
.filter((container) => (nameFilter ? container.name.includes(nameFilter) : true))
|
||||
.map((container) => ({
|
||||
Id: container.id,
|
||||
Names: [`/${container.name}`],
|
||||
}));
|
||||
}
|
||||
);
|
||||
|
||||
const getContainer = vi.fn((id: string) => ensureHandle(id));
|
||||
|
||||
const createContainer = vi.fn(
|
||||
async (options: {
|
||||
name?: string;
|
||||
HostConfig?: { PortBindings?: Record<string, Array<{ HostPort?: string }>> };
|
||||
}) => {
|
||||
const id = `ctr-${containers.size + 1}`;
|
||||
const name = options.name ?? id;
|
||||
const hostPort = options.HostConfig?.PortBindings?.["18789/tcp"]?.[0]?.HostPort;
|
||||
const port = hostPort ? Number.parseInt(hostPort, 10) : 0;
|
||||
|
||||
containers.set(id, {
|
||||
id,
|
||||
name,
|
||||
running: false,
|
||||
port,
|
||||
});
|
||||
|
||||
return ensureHandle(id);
|
||||
}
|
||||
);
|
||||
|
||||
const dockerInstance = {
|
||||
listContainers,
|
||||
getContainer,
|
||||
createContainer,
|
||||
};
|
||||
|
||||
const constructorSpy = vi.fn();
|
||||
class DockerConstructorMock {
|
||||
constructor(options?: unknown) {
|
||||
constructorSpy(options);
|
||||
return dockerInstance;
|
||||
}
|
||||
}
|
||||
|
||||
const registerContainer = (container: MockDockerContainerState) => {
|
||||
containers.set(container.id, { ...container });
|
||||
ensureHandle(container.id);
|
||||
};
|
||||
|
||||
const reset = () => {
|
||||
containers.clear();
|
||||
handles.clear();
|
||||
constructorSpy.mockClear();
|
||||
listContainers.mockClear();
|
||||
getContainer.mockClear();
|
||||
createContainer.mockClear();
|
||||
};
|
||||
|
||||
return {
|
||||
DockerConstructorMock,
|
||||
constructorSpy,
|
||||
createContainer,
|
||||
handles,
|
||||
registerContainer,
|
||||
reset,
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("dockerode", () => ({
|
||||
default: dockerMock.DockerConstructorMock,
|
||||
}));
|
||||
|
||||
import { ContainerLifecycleService } from "./container-lifecycle.service";
|
||||
|
||||
function createConfigMock(values: Record<string, string> = {}) {
|
||||
return {
|
||||
get: vi.fn((key: string) => values[key]),
|
||||
};
|
||||
}
|
||||
|
||||
function createCryptoMock() {
|
||||
return {
|
||||
generateToken: vi.fn(() => "generated-token"),
|
||||
encrypt: vi.fn((value: string) => `enc:${value}`),
|
||||
decrypt: vi.fn((value: string) => value.replace(/^enc:/, "")),
|
||||
isEncrypted: vi.fn((value: string) => value.startsWith("enc:")),
|
||||
};
|
||||
}
|
||||
|
||||
function projectRecord(
|
||||
record: MockUserContainerRecord,
|
||||
select?: Record<string, boolean>
|
||||
): Partial<MockUserContainerRecord> {
|
||||
if (!select) {
|
||||
return { ...record };
|
||||
}
|
||||
|
||||
const projection: Partial<MockUserContainerRecord> = {};
|
||||
for (const [field, enabled] of Object.entries(select)) {
|
||||
if (enabled) {
|
||||
const key = field as keyof MockUserContainerRecord;
|
||||
projection[key] = record[key];
|
||||
}
|
||||
}
|
||||
|
||||
return projection;
|
||||
}
|
||||
|
||||
function createPrismaMock(initialRecords: MockUserContainerRecord[] = []) {
|
||||
const records = new Map<string, MockUserContainerRecord>();
|
||||
for (const record of initialRecords) {
|
||||
records.set(record.userId, { ...record });
|
||||
}
|
||||
|
||||
const userContainer = {
|
||||
findUnique: vi.fn(
|
||||
async (args: {
|
||||
where: { userId?: string; id?: string };
|
||||
select?: Record<string, boolean>;
|
||||
}) => {
|
||||
let record: MockUserContainerRecord | undefined;
|
||||
if (args.where.userId) {
|
||||
record = records.get(args.where.userId);
|
||||
} else if (args.where.id) {
|
||||
record = [...records.values()].find((entry) => entry.id === args.where.id);
|
||||
}
|
||||
|
||||
if (!record) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return projectRecord(record, args.select);
|
||||
}
|
||||
),
|
||||
create: vi.fn(
|
||||
async (args: {
|
||||
data: Partial<MockUserContainerRecord> & {
|
||||
userId: string;
|
||||
containerName: string;
|
||||
gatewayToken: string;
|
||||
};
|
||||
}) => {
|
||||
const now = new Date();
|
||||
const next: MockUserContainerRecord = {
|
||||
id: args.data.id ?? `uc-${records.size + 1}`,
|
||||
userId: args.data.userId,
|
||||
containerId: args.data.containerId ?? null,
|
||||
containerName: args.data.containerName,
|
||||
gatewayPort: args.data.gatewayPort ?? null,
|
||||
gatewayToken: args.data.gatewayToken,
|
||||
status: args.data.status ?? "stopped",
|
||||
lastActiveAt: args.data.lastActiveAt ?? null,
|
||||
idleTimeoutMin: args.data.idleTimeoutMin ?? 30,
|
||||
config: args.data.config ?? {},
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
};
|
||||
|
||||
records.set(next.userId, next);
|
||||
return { ...next };
|
||||
}
|
||||
),
|
||||
update: vi.fn(
|
||||
async (args: { where: { userId: string }; data: Partial<MockUserContainerRecord> }) => {
|
||||
const record = records.get(args.where.userId);
|
||||
if (!record) {
|
||||
throw new Error(`Record ${args.where.userId} not found`);
|
||||
}
|
||||
|
||||
const updated: MockUserContainerRecord = {
|
||||
...record,
|
||||
...args.data,
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
records.set(updated.userId, updated);
|
||||
return { ...updated };
|
||||
}
|
||||
),
|
||||
updateMany: vi.fn(
|
||||
async (args: { where: { userId: string }; data: Partial<MockUserContainerRecord> }) => {
|
||||
const record = records.get(args.where.userId);
|
||||
if (!record) {
|
||||
return { count: 0 };
|
||||
}
|
||||
|
||||
const updated: MockUserContainerRecord = {
|
||||
...record,
|
||||
...args.data,
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
records.set(updated.userId, updated);
|
||||
return { count: 1 };
|
||||
}
|
||||
),
|
||||
findMany: vi.fn(
|
||||
async (args?: {
|
||||
where?: {
|
||||
status?: string;
|
||||
lastActiveAt?: { not: null };
|
||||
gatewayPort?: { not: null };
|
||||
};
|
||||
select?: Record<string, boolean>;
|
||||
}) => {
|
||||
let rows = [...records.values()];
|
||||
|
||||
if (args?.where?.status) {
|
||||
rows = rows.filter((record) => record.status === args.where?.status);
|
||||
}
|
||||
|
||||
if (args?.where?.lastActiveAt?.not === null) {
|
||||
rows = rows.filter((record) => record.lastActiveAt !== null);
|
||||
}
|
||||
|
||||
if (args?.where?.gatewayPort?.not === null) {
|
||||
rows = rows.filter((record) => record.gatewayPort !== null);
|
||||
}
|
||||
|
||||
return rows.map((record) => projectRecord(record, args?.select));
|
||||
}
|
||||
),
|
||||
};
|
||||
|
||||
return {
|
||||
prisma: {
|
||||
userContainer,
|
||||
},
|
||||
records,
|
||||
};
|
||||
}
|
||||
|
||||
function createRecord(overrides: Partial<MockUserContainerRecord>): MockUserContainerRecord {
|
||||
const now = new Date();
|
||||
return {
|
||||
id: overrides.id ?? "uc-default",
|
||||
userId: overrides.userId ?? "user-default",
|
||||
containerId: overrides.containerId ?? null,
|
||||
containerName: overrides.containerName ?? "mosaic-user-user-default",
|
||||
gatewayPort: overrides.gatewayPort ?? null,
|
||||
gatewayToken: overrides.gatewayToken ?? "enc:token-default",
|
||||
status: overrides.status ?? "stopped",
|
||||
lastActiveAt: overrides.lastActiveAt ?? null,
|
||||
idleTimeoutMin: overrides.idleTimeoutMin ?? 30,
|
||||
config: overrides.config ?? {},
|
||||
createdAt: overrides.createdAt ?? now,
|
||||
updatedAt: overrides.updatedAt ?? now,
|
||||
};
|
||||
}
|
||||
|
||||
describe("ContainerLifecycleService", () => {
|
||||
beforeEach(() => {
|
||||
dockerMock.reset();
|
||||
});
|
||||
|
||||
it("ensureRunning creates container when none exists", async () => {
|
||||
const { prisma, records } = createPrismaMock();
|
||||
const crypto = createCryptoMock();
|
||||
const config = createConfigMock();
|
||||
const service = new ContainerLifecycleService(
|
||||
prisma as unknown as PrismaService,
|
||||
crypto as unknown as CryptoService,
|
||||
config as unknown as ConfigService
|
||||
);
|
||||
|
||||
const result = await service.ensureRunning("user-1");
|
||||
|
||||
expect(result).toEqual({
|
||||
url: "http://mosaic-user-user-1:19000",
|
||||
token: "generated-token",
|
||||
});
|
||||
|
||||
const updatedRecord = records.get("user-1");
|
||||
expect(updatedRecord?.status).toBe("running");
|
||||
expect(updatedRecord?.containerId).toBe("ctr-1");
|
||||
expect(updatedRecord?.gatewayPort).toBe(19000);
|
||||
expect(updatedRecord?.gatewayToken).toBe("enc:generated-token");
|
||||
|
||||
expect(dockerMock.createContainer).toHaveBeenCalledTimes(1);
|
||||
const [createCall] = dockerMock.createContainer.mock.calls[0] as [
|
||||
{
|
||||
name: string;
|
||||
Image: string;
|
||||
Env: string[];
|
||||
HostConfig: { Binds: string[]; NetworkMode: string };
|
||||
},
|
||||
];
|
||||
expect(createCall.name).toBe("mosaic-user-user-1");
|
||||
expect(createCall.Image).toBe("alpine/openclaw:latest");
|
||||
expect(createCall.HostConfig.Binds).toEqual(["mosaic-user-user-1-state:/home/node/.openclaw"]);
|
||||
expect(createCall.HostConfig.NetworkMode).toBe("mosaic-internal");
|
||||
expect(createCall.Env).toContain("AGENT_TOKEN=generated-token");
|
||||
});
|
||||
|
||||
it("ensureRunning starts existing stopped container", async () => {
|
||||
const { prisma, records } = createPrismaMock([
|
||||
createRecord({
|
||||
id: "uc-1",
|
||||
userId: "user-2",
|
||||
containerId: "ctr-stopped",
|
||||
containerName: "mosaic-user-user-2",
|
||||
gatewayToken: "enc:existing-token",
|
||||
status: "stopped",
|
||||
}),
|
||||
]);
|
||||
const crypto = createCryptoMock();
|
||||
const config = createConfigMock();
|
||||
const service = new ContainerLifecycleService(
|
||||
prisma as unknown as PrismaService,
|
||||
crypto as unknown as CryptoService,
|
||||
config as unknown as ConfigService
|
||||
);
|
||||
|
||||
dockerMock.registerContainer({
|
||||
id: "ctr-stopped",
|
||||
name: "mosaic-user-user-2",
|
||||
running: false,
|
||||
port: 19042,
|
||||
});
|
||||
|
||||
const result = await service.ensureRunning("user-2");
|
||||
|
||||
expect(result).toEqual({
|
||||
url: "http://mosaic-user-user-2:19042",
|
||||
token: "existing-token",
|
||||
});
|
||||
|
||||
const handle = dockerMock.handles.get("ctr-stopped");
|
||||
expect(handle?.start).toHaveBeenCalledTimes(1);
|
||||
expect(records.get("user-2")?.status).toBe("running");
|
||||
expect(records.get("user-2")?.gatewayPort).toBe(19042);
|
||||
});
|
||||
|
||||
it("ensureRunning returns existing running container", async () => {
|
||||
const { prisma } = createPrismaMock([
|
||||
createRecord({
|
||||
id: "uc-2",
|
||||
userId: "user-3",
|
||||
containerId: "ctr-running",
|
||||
containerName: "mosaic-user-user-3",
|
||||
gatewayPort: 19043,
|
||||
gatewayToken: "enc:running-token",
|
||||
status: "running",
|
||||
}),
|
||||
]);
|
||||
const crypto = createCryptoMock();
|
||||
const config = createConfigMock();
|
||||
const service = new ContainerLifecycleService(
|
||||
prisma as unknown as PrismaService,
|
||||
crypto as unknown as CryptoService,
|
||||
config as unknown as ConfigService
|
||||
);
|
||||
|
||||
dockerMock.registerContainer({
|
||||
id: "ctr-running",
|
||||
name: "mosaic-user-user-3",
|
||||
running: true,
|
||||
port: 19043,
|
||||
});
|
||||
|
||||
const result = await service.ensureRunning("user-3");
|
||||
|
||||
expect(result).toEqual({
|
||||
url: "http://mosaic-user-user-3:19043",
|
||||
token: "running-token",
|
||||
});
|
||||
|
||||
expect(dockerMock.createContainer).not.toHaveBeenCalled();
|
||||
const handle = dockerMock.handles.get("ctr-running");
|
||||
expect(handle?.start).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("stop gracefully stops container and updates DB", async () => {
|
||||
const { prisma, records } = createPrismaMock([
|
||||
createRecord({
|
||||
id: "uc-stop",
|
||||
userId: "user-stop",
|
||||
containerId: "ctr-stop",
|
||||
containerName: "mosaic-user-user-stop",
|
||||
gatewayPort: 19044,
|
||||
status: "running",
|
||||
}),
|
||||
]);
|
||||
const crypto = createCryptoMock();
|
||||
const config = createConfigMock();
|
||||
const service = new ContainerLifecycleService(
|
||||
prisma as unknown as PrismaService,
|
||||
crypto as unknown as CryptoService,
|
||||
config as unknown as ConfigService
|
||||
);
|
||||
|
||||
dockerMock.registerContainer({
|
||||
id: "ctr-stop",
|
||||
name: "mosaic-user-user-stop",
|
||||
running: true,
|
||||
port: 19044,
|
||||
});
|
||||
|
||||
await service.stop("user-stop");
|
||||
|
||||
const handle = dockerMock.handles.get("ctr-stop");
|
||||
expect(handle?.stop).toHaveBeenCalledWith({ t: 10 });
|
||||
|
||||
const updatedRecord = records.get("user-stop");
|
||||
expect(updatedRecord?.status).toBe("stopped");
|
||||
expect(updatedRecord?.containerId).toBeNull();
|
||||
expect(updatedRecord?.gatewayPort).toBeNull();
|
||||
});
|
||||
|
||||
it("reapIdle stops only containers past their idle timeout", async () => {
|
||||
const now = Date.now();
|
||||
const { prisma, records } = createPrismaMock([
|
||||
createRecord({
|
||||
id: "uc-old",
|
||||
userId: "user-old",
|
||||
containerId: "ctr-old",
|
||||
containerName: "mosaic-user-user-old",
|
||||
gatewayPort: 19045,
|
||||
status: "running",
|
||||
lastActiveAt: new Date(now - 60 * 60 * 1000),
|
||||
idleTimeoutMin: 30,
|
||||
}),
|
||||
createRecord({
|
||||
id: "uc-fresh",
|
||||
userId: "user-fresh",
|
||||
containerId: "ctr-fresh",
|
||||
containerName: "mosaic-user-user-fresh",
|
||||
gatewayPort: 19046,
|
||||
status: "running",
|
||||
lastActiveAt: new Date(now - 5 * 60 * 1000),
|
||||
idleTimeoutMin: 30,
|
||||
}),
|
||||
]);
|
||||
const crypto = createCryptoMock();
|
||||
const config = createConfigMock();
|
||||
const service = new ContainerLifecycleService(
|
||||
prisma as unknown as PrismaService,
|
||||
crypto as unknown as CryptoService,
|
||||
config as unknown as ConfigService
|
||||
);
|
||||
|
||||
dockerMock.registerContainer({
|
||||
id: "ctr-old",
|
||||
name: "mosaic-user-user-old",
|
||||
running: true,
|
||||
port: 19045,
|
||||
});
|
||||
dockerMock.registerContainer({
|
||||
id: "ctr-fresh",
|
||||
name: "mosaic-user-user-fresh",
|
||||
running: true,
|
||||
port: 19046,
|
||||
});
|
||||
|
||||
const result = await service.reapIdle();
|
||||
|
||||
expect(result).toEqual({
|
||||
stopped: ["user-old"],
|
||||
});
|
||||
|
||||
expect(records.get("user-old")?.status).toBe("stopped");
|
||||
expect(records.get("user-fresh")?.status).toBe("running");
|
||||
|
||||
const oldHandle = dockerMock.handles.get("ctr-old");
|
||||
const freshHandle = dockerMock.handles.get("ctr-fresh");
|
||||
expect(oldHandle?.stop).toHaveBeenCalledTimes(1);
|
||||
expect(freshHandle?.stop).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("touch updates lastActiveAt", async () => {
|
||||
const { prisma, records } = createPrismaMock([
|
||||
createRecord({
|
||||
id: "uc-touch",
|
||||
userId: "user-touch",
|
||||
containerName: "mosaic-user-user-touch",
|
||||
lastActiveAt: null,
|
||||
}),
|
||||
]);
|
||||
const crypto = createCryptoMock();
|
||||
const config = createConfigMock();
|
||||
const service = new ContainerLifecycleService(
|
||||
prisma as unknown as PrismaService,
|
||||
crypto as unknown as CryptoService,
|
||||
config as unknown as ConfigService
|
||||
);
|
||||
|
||||
await service.touch("user-touch");
|
||||
|
||||
const updatedRecord = records.get("user-touch");
|
||||
expect(updatedRecord?.lastActiveAt).toBeInstanceOf(Date);
|
||||
});
|
||||
|
||||
it("getStatus returns null for unknown user", async () => {
|
||||
const { prisma } = createPrismaMock();
|
||||
const crypto = createCryptoMock();
|
||||
const config = createConfigMock();
|
||||
const service = new ContainerLifecycleService(
|
||||
prisma as unknown as PrismaService,
|
||||
crypto as unknown as CryptoService,
|
||||
config as unknown as ConfigService
|
||||
);
|
||||
|
||||
const status = await service.getStatus("missing-user");
|
||||
|
||||
expect(status).toBeNull();
|
||||
});
|
||||
});
|
||||
532
apps/api/src/container-lifecycle/container-lifecycle.service.ts
Normal file
532
apps/api/src/container-lifecycle/container-lifecycle.service.ts
Normal file
@@ -0,0 +1,532 @@
|
||||
import { Injectable, Logger } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import Docker from "dockerode";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { CryptoService } from "../crypto/crypto.service";
|
||||
|
||||
// Fallbacks used when the corresponding environment variables are unset.
const DEFAULT_DOCKER_SOCKET_PATH = "/var/run/docker.sock";
const DEFAULT_DOCKER_TCP_PORT = 2375;
const DEFAULT_OPENCLAW_IMAGE = "alpine/openclaw:latest";
const DEFAULT_OPENCLAW_NETWORK = "mosaic-internal";
const DEFAULT_OPENCLAW_PORT_RANGE_START = 19000;
const DEFAULT_MOSAIC_API_URL = "http://mosaic-api:3000/api";
// Docker port-map key ("<containerPort>/<proto>") for the gateway inside the container.
const OPENCLAW_GATEWAY_PORT_KEY = "18789/tcp";
// Mount target inside the container for the per-user state volume.
const OPENCLAW_STATE_PATH = "/home/node/.openclaw";
// Grace period (seconds) passed to `docker stop` before the daemon force-kills.
const CONTAINER_STOP_TIMEOUT_SECONDS = 10;

/** Minimal surface of a dockerode container object that this service uses. */
interface ContainerHandle {
  inspect(): Promise<DockerInspect>;
  start(): Promise<void>;
  stop(options?: { t?: number }): Promise<void>;
}

/** Subset of `docker inspect` output that this service reads. */
interface DockerInspect {
  Id?: string;
  State?: {
    Running?: boolean;
    Health?: {
      Status?: string;
    };
  };
  NetworkSettings?: {
    Ports?: Record<string, { HostPort?: string }[] | null>;
  };
  HostConfig?: {
    PortBindings?: Record<string, { HostPort?: string }[] | null>;
  };
}

/** Shape of a `userContainer` database row as consumed by this service. */
interface UserContainerRecord {
  id: string;
  userId: string;
  containerId: string | null;
  containerName: string;
  gatewayPort: number | null;
  gatewayToken: string;
  status: string;
  lastActiveAt: Date | null;
  idleTimeoutMin: number;
}

/** The minimal fields needed to locate an existing Docker container. */
interface ContainerLookup {
  containerId: string | null;
  containerName: string;
}
|
||||
|
||||
@Injectable()
export class ContainerLifecycleService {
  private readonly logger = new Logger(ContainerLifecycleService.name);
  // Dockerode client; transport (socket vs TCP/HTTP) is derived from DOCKER_HOST.
  private readonly docker: Docker;

  constructor(
    private readonly prisma: PrismaService,
    private readonly crypto: CryptoService,
    private readonly config: ConfigService
  ) {
    const dockerHost = this.config.get<string>("DOCKER_HOST");
    this.docker = this.createDockerClient(dockerHost);
  }

  /**
   * Ensure a user's container is running. Creates the DB record and the Docker
   * container if needed, starts it if stopped, then persists the observed
   * containerId/gatewayPort and returns the container-internal URL plus the
   * decrypted gateway token.
   */
  async ensureRunning(userId: string): Promise<{ url: string; token: string }> {
    const containerRecord = await this.getOrCreateContainerRecord(userId);
    const token = this.getGatewayToken(containerRecord.gatewayToken);
    const existingContainer = await this.resolveContainer(containerRecord);

    let container: ContainerHandle;
    if (existingContainer) {
      container = existingContainer;
      const inspect = await container.inspect();
      if (!inspect.State?.Running) {
        await container.start();
      }
    } else {
      // No live container: pick a free host port and create a fresh one.
      const port = await this.findAvailableGatewayPort();
      container = await this.createContainer(containerRecord, token, port);
      await container.start();
    }

    // Re-inspect after start so Id and the bound host port reflect reality.
    const inspect = await container.inspect();
    const containerId = inspect.Id;
    if (!containerId) {
      throw new Error(
        `Docker inspect did not return container ID for ${containerRecord.containerName}`
      );
    }

    const gatewayPort = this.extractGatewayPort(inspect);
    if (!gatewayPort) {
      throw new Error(`Could not determine gateway port for ${containerRecord.containerName}`);
    }

    const now = new Date();
    await this.prisma.userContainer.update({
      where: { userId },
      data: {
        containerId,
        gatewayPort,
        status: "running",
        lastActiveAt: now,
      },
    });

    return {
      // Internal URL addresses the container by name on the shared network.
      url: `http://${containerRecord.containerName}:${String(gatewayPort)}`,
      token,
    };
  }

  /**
   * Stop a user's container gracefully and clear its runtime columns.
   * A missing record is a no-op; Docker 404 (gone) and 304 (already stopped)
   * are tolerated so the DB still gets marked "stopped".
   */
  async stop(userId: string): Promise<void> {
    const containerRecord = await this.prisma.userContainer.findUnique({
      where: { userId },
    });

    if (!containerRecord) {
      return;
    }

    const container = await this.resolveContainer(containerRecord);
    if (container) {
      try {
        await container.stop({ t: CONTAINER_STOP_TIMEOUT_SECONDS });
      } catch (error) {
        // 404 = container already removed; 304 = already stopped. Anything
        // else is a real failure and must propagate.
        if (!this.isDockerNotFound(error) && !this.isAlreadyStopped(error)) {
          throw error;
        }
      }
    }

    await this.prisma.userContainer.update({
      where: { userId },
      data: {
        status: "stopped",
        containerId: null,
        gatewayPort: null,
      },
    });
  }

  /**
   * Stop idle containers (called by cron/scheduler). A container is idle when
   * now - lastActiveAt exceeds its per-record idleTimeoutMin. Failures to stop
   * one container are logged and do not abort the sweep.
   */
  async reapIdle(): Promise<{ stopped: string[] }> {
    const now = Date.now();
    const runningContainers = await this.prisma.userContainer.findMany({
      where: {
        status: "running",
        lastActiveAt: { not: null },
      },
      select: {
        userId: true,
        lastActiveAt: true,
        idleTimeoutMin: true,
      },
    });

    const stopped: string[] = [];
    for (const container of runningContainers) {
      const lastActiveAt = container.lastActiveAt;
      if (!lastActiveAt) {
        continue;
      }

      const idleLimitMs = container.idleTimeoutMin * 60 * 1000;
      if (now - lastActiveAt.getTime() < idleLimitMs) {
        continue;
      }

      try {
        await this.stop(container.userId);
        stopped.push(container.userId);
      } catch (error) {
        this.logger.warn(
          `Failed to stop idle container for user ${container.userId}: ${this.getErrorMessage(error)}`
        );
      }
    }

    return { stopped };
  }

  /**
   * Health check all containers marked "running" in the DB. A container is
   * healthy when Docker reports Running and its healthcheck (if any) is not
   * "unhealthy". Inspect errors are reported per user, never thrown.
   */
  async healthCheckAll(): Promise<{ userId: string; healthy: boolean; error?: string }[]> {
    const runningContainers = await this.prisma.userContainer.findMany({
      where: {
        status: "running",
      },
      select: {
        userId: true,
        containerId: true,
        containerName: true,
      },
    });

    const results: { userId: string; healthy: boolean; error?: string }[] = [];
    for (const containerRecord of runningContainers) {
      const container = await this.resolveContainer(containerRecord);
      if (!container) {
        results.push({
          userId: containerRecord.userId,
          healthy: false,
          error: "Container not found",
        });
        continue;
      }

      try {
        const inspect = await container.inspect();
        const isRunning = inspect.State?.Running === true;
        const healthState = inspect.State?.Health?.Status;
        // No healthcheck configured (healthState undefined) counts as healthy.
        const healthy = isRunning && healthState !== "unhealthy";

        if (healthy) {
          results.push({
            userId: containerRecord.userId,
            healthy: true,
          });
          continue;
        }

        results.push({
          userId: containerRecord.userId,
          healthy: false,
          error:
            healthState === "unhealthy" ? "Container healthcheck failed" : "Container not running",
        });
      } catch (error) {
        results.push({
          userId: containerRecord.userId,
          healthy: false,
          error: this.getErrorMessage(error),
        });
      }
    }

    return results;
  }

  /** Restart a container with fresh config (for config updates): stop + ensureRunning. */
  async restart(userId: string): Promise<void> {
    await this.stop(userId);
    await this.ensureRunning(userId);
  }

  /**
   * Update lastActiveAt timestamp (called on each chat request).
   * updateMany is used so a missing record is a silent no-op instead of a throw.
   */
  async touch(userId: string): Promise<void> {
    await this.prisma.userContainer.updateMany({
      where: { userId },
      data: {
        lastActiveAt: new Date(),
      },
    });
  }

  /**
   * Get container status for a user from the DB (no Docker call).
   * Returns null when the user has no container record; port/lastActive are
   * omitted rather than null when unset.
   */
  async getStatus(
    userId: string
  ): Promise<{ status: string; port?: number; lastActive?: Date } | null> {
    const container = await this.prisma.userContainer.findUnique({
      where: { userId },
      select: {
        status: true,
        gatewayPort: true,
        lastActiveAt: true,
      },
    });

    if (!container) {
      return null;
    }

    const status: { status: string; port?: number; lastActive?: Date } = {
      status: container.status,
    };

    if (container.gatewayPort !== null) {
      status.port = container.gatewayPort;
    }

    if (container.lastActiveAt !== null) {
      status.lastActive = container.lastActiveAt;
    }

    return status;
  }

  /**
   * Build a Docker client from a DOCKER_HOST-style string. Supports unix://,
   * tcp:// (treated as http), http(s)://, a bare socket path, or empty
   * (default socket).
   */
  private createDockerClient(dockerHost?: string): Docker {
    if (!dockerHost || dockerHost.trim().length === 0) {
      return new Docker({ socketPath: DEFAULT_DOCKER_SOCKET_PATH });
    }

    if (dockerHost.startsWith("unix://")) {
      return new Docker({ socketPath: dockerHost.slice("unix://".length) });
    }

    if (dockerHost.startsWith("tcp://")) {
      // URL cannot parse tcp://; rewrite the scheme just for parsing.
      const parsed = new URL(dockerHost.replace("tcp://", "http://"));
      return new Docker({
        host: parsed.hostname,
        port: this.parseInteger(parsed.port, DEFAULT_DOCKER_TCP_PORT),
        protocol: "http",
      });
    }

    if (dockerHost.startsWith("http://") || dockerHost.startsWith("https://")) {
      const parsed = new URL(dockerHost);
      const protocol = parsed.protocol.replace(":", "");
      return new Docker({
        host: parsed.hostname,
        port: this.parseInteger(parsed.port, DEFAULT_DOCKER_TCP_PORT),
        protocol: protocol === "https" ? "https" : "http",
      });
    }

    // Anything else is assumed to be a raw socket path.
    return new Docker({ socketPath: dockerHost });
  }

  /**
   * Fetch the user's container record, creating one (status "stopped", with a
   * freshly generated and encrypted gateway token) on first use.
   */
  private async getOrCreateContainerRecord(userId: string): Promise<UserContainerRecord> {
    const existingContainer = await this.prisma.userContainer.findUnique({
      where: { userId },
    });

    if (existingContainer) {
      return existingContainer;
    }

    const token = this.crypto.generateToken();
    const containerName = this.getContainerName(userId);
    return this.prisma.userContainer.create({
      data: {
        userId,
        containerName,
        gatewayToken: this.crypto.encrypt(token),
        status: "stopped",
      },
    });
  }

  /** Deterministic container name for a user. */
  private getContainerName(userId: string): string {
    return `mosaic-user-${userId}`;
  }

  /** Deterministic state-volume name for a user. */
  private getVolumeName(userId: string): string {
    return `mosaic-user-${userId}-state`;
  }

  private getOpenClawImage(): string {
    return this.config.get<string>("OPENCLAW_IMAGE") ?? DEFAULT_OPENCLAW_IMAGE;
  }

  private getOpenClawNetwork(): string {
    return this.config.get<string>("OPENCLAW_NETWORK") ?? DEFAULT_OPENCLAW_NETWORK;
  }

  private getMosaicApiUrl(): string {
    return this.config.get<string>("MOSAIC_API_URL") ?? DEFAULT_MOSAIC_API_URL;
  }

  private getPortRangeStart(): number {
    return this.parseInteger(
      this.config.get<string>("OPENCLAW_PORT_RANGE_START"),
      DEFAULT_OPENCLAW_PORT_RANGE_START
    );
  }

  /**
   * Locate an existing Docker container for a record: by stored ID first
   * (verified via inspect), then by name. Returns null when neither exists.
   */
  private async resolveContainer(record: ContainerLookup): Promise<ContainerHandle | null> {
    if (record.containerId) {
      const byId = this.docker.getContainer(record.containerId) as unknown as ContainerHandle;
      if (await this.containerExists(byId)) {
        return byId;
      }
    }

    const byName = await this.findContainerByName(record.containerName);
    if (byName) {
      return byName;
    }

    return null;
  }

  /**
   * Find a container (running or not) whose name matches. Docker's name filter
   * is a substring match, so results are re-checked against "/<name>".
   */
  private async findContainerByName(containerName: string): Promise<ContainerHandle | null> {
    const containers = await this.docker.listContainers({
      all: true,
      filters: {
        name: [containerName],
      },
    });

    const match = containers.find((container) => {
      const names = container.Names;
      // Docker prefixes names with "/"; the includes() fallback also accepts
      // partial matches — NOTE(review): could match "mosaic-user-1" against
      // "mosaic-user-10"; verify name uniqueness upstream.
      return names.some((name) => name === `/${containerName}` || name.includes(containerName));
    });

    if (!match?.Id) {
      return null;
    }

    return this.docker.getContainer(match.Id) as unknown as ContainerHandle;
  }

  /** True if inspect succeeds; false on Docker 404; rethrows anything else. */
  private async containerExists(container: ContainerHandle): Promise<boolean> {
    try {
      await container.inspect();
      return true;
    } catch (error) {
      if (this.isDockerNotFound(error)) {
        return false;
      }

      throw error;
    }
  }

  /**
   * Create (but do not start) the user's container: gateway token and API URL
   * via env, state volume bound to OPENCLAW_STATE_PATH, gateway port published
   * on the chosen host port, attached to the configured network.
   */
  private async createContainer(
    containerRecord: UserContainerRecord,
    token: string,
    gatewayPort: number
  ): Promise<ContainerHandle> {
    const container = await this.docker.createContainer({
      name: containerRecord.containerName,
      Image: this.getOpenClawImage(),
      Env: [
        `MOSAIC_API_URL=${this.getMosaicApiUrl()}`,
        `AGENT_TOKEN=${token}`,
        `AGENT_ID=${containerRecord.id}`,
      ],
      ExposedPorts: {
        [OPENCLAW_GATEWAY_PORT_KEY]: {},
      },
      HostConfig: {
        Binds: [`${this.getVolumeName(containerRecord.userId)}:${OPENCLAW_STATE_PATH}`],
        PortBindings: {
          [OPENCLAW_GATEWAY_PORT_KEY]: [{ HostPort: String(gatewayPort) }],
        },
        NetworkMode: this.getOpenClawNetwork(),
      },
    });

    return container as unknown as ContainerHandle;
  }

  /**
   * Read the host port bound to the gateway from inspect output, preferring
   * the live NetworkSettings binding over the requested HostConfig one.
   * Returns null when neither is present or parses to 0.
   */
  private extractGatewayPort(inspect: DockerInspect): number | null {
    const networkPort = inspect.NetworkSettings?.Ports?.[OPENCLAW_GATEWAY_PORT_KEY]?.[0]?.HostPort;
    if (networkPort) {
      return this.parseInteger(networkPort, 0) || null;
    }

    const hostPort = inspect.HostConfig?.PortBindings?.[OPENCLAW_GATEWAY_PORT_KEY]?.[0]?.HostPort;
    if (hostPort) {
      return this.parseInteger(hostPort, 0) || null;
    }

    return null;
  }

  /**
   * Pick the first port at or above the configured range start that no DB
   * record currently claims. NOTE(review): not race-safe — two concurrent
   * calls can pick the same port; uniqueness relies on the DB/Docker rejecting
   * the second. Confirm acceptable.
   */
  private async findAvailableGatewayPort(): Promise<number> {
    const usedPorts = await this.prisma.userContainer.findMany({
      where: {
        gatewayPort: { not: null },
      },
      select: {
        gatewayPort: true,
      },
    });

    const takenPorts = new Set<number>();
    for (const entry of usedPorts) {
      if (entry.gatewayPort !== null) {
        takenPorts.add(entry.gatewayPort);
      }
    }

    let candidate = this.getPortRangeStart();
    while (takenPorts.has(candidate)) {
      candidate += 1;
    }

    return candidate;
  }

  /** Decrypt the stored token if encrypted; legacy plaintext passes through. */
  private getGatewayToken(storedToken: string): string {
    if (this.crypto.isEncrypted(storedToken)) {
      return this.crypto.decrypt(storedToken);
    }

    return storedToken;
  }

  /** Parse a base-10 integer, falling back on empty/undefined/unparseable input. */
  private parseInteger(value: string | undefined, fallback: number): number {
    if (!value) {
      return fallback;
    }

    const parsed = Number.parseInt(value, 10);
    return Number.isFinite(parsed) ? parsed : fallback;
  }

  /** Docker 404: container does not exist. */
  private isDockerNotFound(error: unknown): boolean {
    return this.getDockerStatusCode(error) === 404;
  }

  /** Docker 304: container was already in the requested state (stopped). */
  private isAlreadyStopped(error: unknown): boolean {
    return this.getDockerStatusCode(error) === 304;
  }

  /** Extract dockerode's numeric statusCode from an error, if present. */
  private getDockerStatusCode(error: unknown): number | null {
    if (typeof error !== "object" || error === null || !("statusCode" in error)) {
      return null;
    }

    const statusCode = error.statusCode;
    return typeof statusCode === "number" ? statusCode : null;
  }

  /** Safe message extraction for logging; non-Error throwables are anonymized. */
  private getErrorMessage(error: unknown): string {
    if (error instanceof Error) {
      return error.message;
    }

    return "Unknown error";
  }
}
|
||||
10
apps/api/src/container-reaper/container-reaper.module.ts
Normal file
10
apps/api/src/container-reaper/container-reaper.module.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { ScheduleModule } from "@nestjs/schedule";
|
||||
import { ContainerLifecycleModule } from "../container-lifecycle/container-lifecycle.module";
|
||||
import { ContainerReaperService } from "./container-reaper.service";
|
||||
|
||||
// Wires the reaper cron service to the lifecycle module; ScheduleModule is
// required for the @Cron decorator in ContainerReaperService to fire.
@Module({
  imports: [ScheduleModule, ContainerLifecycleModule],
  providers: [ContainerReaperService],
})
export class ContainerReaperModule {}
|
||||
@@ -0,0 +1,45 @@
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import type { ContainerLifecycleService } from "../container-lifecycle/container-lifecycle.service";
|
||||
import { ContainerReaperService } from "./container-reaper.service";
|
||||
|
||||
describe("ContainerReaperService", () => {
|
||||
let service: ContainerReaperService;
|
||||
let containerLifecycle: Pick<ContainerLifecycleService, "reapIdle">;
|
||||
|
||||
beforeEach(() => {
|
||||
containerLifecycle = {
|
||||
reapIdle: vi.fn(),
|
||||
};
|
||||
service = new ContainerReaperService(containerLifecycle as ContainerLifecycleService);
|
||||
});
|
||||
|
||||
it("reapIdleContainers calls containerLifecycle.reapIdle()", async () => {
|
||||
vi.mocked(containerLifecycle.reapIdle).mockResolvedValue({ stopped: [] });
|
||||
|
||||
await service.reapIdleContainers();
|
||||
|
||||
expect(containerLifecycle.reapIdle).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("reapIdleContainers handles errors gracefully", async () => {
|
||||
const error = new Error("reap failure");
|
||||
vi.mocked(containerLifecycle.reapIdle).mockRejectedValue(error);
|
||||
const loggerError = vi.spyOn(service["logger"], "error").mockImplementation(() => {});
|
||||
|
||||
await expect(service.reapIdleContainers()).resolves.toBeUndefined();
|
||||
|
||||
expect(loggerError).toHaveBeenCalledWith(
|
||||
"Failed to reap idle containers",
|
||||
expect.stringContaining("reap failure")
|
||||
);
|
||||
});
|
||||
|
||||
it("reapIdleContainers logs stopped container count", async () => {
|
||||
vi.mocked(containerLifecycle.reapIdle).mockResolvedValue({ stopped: ["user-1", "user-2"] });
|
||||
const loggerLog = vi.spyOn(service["logger"], "log").mockImplementation(() => {});
|
||||
|
||||
await service.reapIdleContainers();
|
||||
|
||||
expect(loggerLog).toHaveBeenCalledWith("Stopped 2 idle containers: user-1, user-2");
|
||||
});
|
||||
});
|
||||
30
apps/api/src/container-reaper/container-reaper.service.ts
Normal file
30
apps/api/src/container-reaper/container-reaper.service.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import { Injectable, Logger } from "@nestjs/common";
|
||||
import { Cron, CronExpression } from "@nestjs/schedule";
|
||||
import { ContainerLifecycleService } from "../container-lifecycle/container-lifecycle.service";
|
||||
|
||||
@Injectable()
|
||||
export class ContainerReaperService {
|
||||
private readonly logger = new Logger(ContainerReaperService.name);
|
||||
|
||||
constructor(private readonly containerLifecycle: ContainerLifecycleService) {}
|
||||
|
||||
@Cron(CronExpression.EVERY_5_MINUTES)
|
||||
async reapIdleContainers(): Promise<void> {
|
||||
this.logger.log("Running idle container reap cycle...");
|
||||
try {
|
||||
const result = await this.containerLifecycle.reapIdle();
|
||||
if (result.stopped.length > 0) {
|
||||
this.logger.log(
|
||||
`Stopped ${String(result.stopped.length)} idle containers: ${result.stopped.join(", ")}`
|
||||
);
|
||||
} else {
|
||||
this.logger.debug("No idle containers to stop");
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.error(
|
||||
"Failed to reap idle containers",
|
||||
error instanceof Error ? error.stack : String(error)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,69 @@
|
||||
import { Controller, Post, Get, Body, Param, Query, UseGuards } from "@nestjs/common";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||
import { Workspace, RequirePermission, Permission } from "../common/decorators";
|
||||
import { ConversationArchiveService } from "./conversation-archive.service";
|
||||
import { IngestConversationDto, SearchConversationDto, ListConversationsDto } from "./dto";
|
||||
|
||||
/**
 * Controller for conversation archive endpoints.
 * All endpoints require workspace membership; permission level varies per route.
 * Guards run in order: authentication, workspace resolution, then permission.
 */
@Controller("conversations")
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
export class ConversationArchiveController {
  constructor(private readonly service: ConversationArchiveService) {}

  /**
   * POST /api/conversations/ingest
   * Ingest a conversation session log and auto-embed for semantic search.
   * Requires: MEMBER or higher
   * Returns the created archive's id.
   */
  @Post("ingest")
  @RequirePermission(Permission.WORKSPACE_MEMBER)
  async ingest(
    @Workspace() workspaceId: string,
    @Body() dto: IngestConversationDto
  ): Promise<{ id: string }> {
    return this.service.ingest(workspaceId, dto);
  }

  /**
   * POST /api/conversations/search
   * Vector similarity search across archived conversations.
   * Requires: Any workspace member
   */
  @Post("search")
  @RequirePermission(Permission.WORKSPACE_ANY)
  async search(
    @Workspace() workspaceId: string,
    @Body() dto: SearchConversationDto
  ): Promise<unknown> {
    return this.service.search(workspaceId, dto);
  }

  /**
   * GET /api/conversations
   * List conversation archives with filtering and pagination.
   * Requires: Any workspace member
   */
  @Get()
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findAll(
    @Workspace() workspaceId: string,
    @Query() query: ListConversationsDto
  ): Promise<unknown> {
    return this.service.findAll(workspaceId, query);
  }

  /**
   * GET /api/conversations/:id
   * Get a single conversation archive by ID (includes full messages).
   * Requires: Any workspace member
   * Declared after the static routes so "ingest"/"search" are not captured as :id.
   */
  @Get(":id")
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findOne(@Workspace() workspaceId: string, @Param("id") id: string): Promise<unknown> {
    return this.service.findOne(workspaceId, id);
  }
}
|
||||
@@ -0,0 +1,239 @@
|
||||
import { beforeAll, beforeEach, describe, expect, it, afterAll, vi } from "vitest";
|
||||
import { randomUUID as uuid } from "crypto";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { ConflictException } from "@nestjs/common";
|
||||
import { PrismaClient, Prisma } from "@prisma/client";
|
||||
import { EMBEDDING_DIMENSION } from "@mosaic/shared";
|
||||
import { ConversationArchiveService } from "./conversation-archive.service";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { EmbeddingService } from "../knowledge/services/embedding.service";
|
||||
|
||||
// DB-backed integration tests only run when explicitly opted in via
// RUN_DB_TESTS=true AND a DATABASE_URL is configured; otherwise the whole
// suite is skipped via describe.skip.
const shouldRunDbIntegrationTests =
  process.env.RUN_DB_TESTS === "true" && Boolean(process.env.DATABASE_URL);
const describeFn = shouldRunDbIntegrationTests ? describe : describe.skip;
|
||||
|
||||
function vector(value: number): number[] {
|
||||
return Array.from({ length: EMBEDDING_DIMENSION }, () => value);
|
||||
}
|
||||
|
||||
function toVectorLiteral(input: number[]): string {
|
||||
return `[${input.join(",")}]`;
|
||||
}
|
||||
|
||||
describeFn("ConversationArchiveService Integration", () => {
|
||||
let moduleRef: TestingModule;
|
||||
let prisma: PrismaClient;
|
||||
let service: ConversationArchiveService;
|
||||
let workspaceId: string;
|
||||
let ownerId: string;
|
||||
let setupComplete = false;
|
||||
|
||||
const embeddingServiceMock = {
|
||||
isConfigured: vi.fn(),
|
||||
generateEmbedding: vi.fn(),
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
prisma = new PrismaClient();
|
||||
await prisma.$connect();
|
||||
|
||||
const workspace = await prisma.workspace.create({
|
||||
data: {
|
||||
name: `Conversation Archive Integration ${Date.now()}`,
|
||||
owner: {
|
||||
create: {
|
||||
email: `conversation-archive-integration-${Date.now()}@example.com`,
|
||||
name: "Conversation Archive Integration Owner",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
workspaceId = workspace.id;
|
||||
ownerId = workspace.ownerId;
|
||||
|
||||
moduleRef = await Test.createTestingModule({
|
||||
providers: [
|
||||
ConversationArchiveService,
|
||||
{
|
||||
provide: PrismaService,
|
||||
useValue: prisma,
|
||||
},
|
||||
{
|
||||
provide: EmbeddingService,
|
||||
useValue: embeddingServiceMock,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = moduleRef.get<ConversationArchiveService>(ConversationArchiveService);
|
||||
setupComplete = true;
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
embeddingServiceMock.isConfigured.mockReturnValue(false);
|
||||
|
||||
if (!setupComplete) {
|
||||
return;
|
||||
}
|
||||
|
||||
await prisma.conversationArchive.deleteMany({ where: { workspaceId } });
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (!prisma) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (workspaceId) {
|
||||
await prisma.conversationArchive.deleteMany({ where: { workspaceId } });
|
||||
await prisma.workspace.deleteMany({ where: { id: workspaceId } });
|
||||
}
|
||||
if (ownerId) {
|
||||
await prisma.user.deleteMany({ where: { id: ownerId } });
|
||||
}
|
||||
|
||||
if (moduleRef) {
|
||||
await moduleRef.close();
|
||||
}
|
||||
await prisma.$disconnect();
|
||||
});
|
||||
|
||||
it("ingests a conversation log", async () => {
|
||||
if (!setupComplete) {
|
||||
return;
|
||||
}
|
||||
|
||||
const sessionId = `session-${uuid()}`;
|
||||
|
||||
const result = await service.ingest(workspaceId, {
|
||||
sessionId,
|
||||
agentId: "agent-conversation-ingest",
|
||||
messages: [
|
||||
{ role: "user", content: "Can you summarize deployment issues?" },
|
||||
{ role: "assistant", content: "Yes, three retries timed out in staging." },
|
||||
],
|
||||
summary: "Deployment retry failures discussed",
|
||||
startedAt: "2026-02-28T21:00:00.000Z",
|
||||
endedAt: "2026-02-28T21:05:00.000Z",
|
||||
metadata: { source: "integration-test" },
|
||||
});
|
||||
|
||||
expect(result.id).toBeDefined();
|
||||
|
||||
const stored = await prisma.conversationArchive.findUnique({
|
||||
where: {
|
||||
id: result.id,
|
||||
},
|
||||
});
|
||||
|
||||
expect(stored).toBeTruthy();
|
||||
expect(stored?.workspaceId).toBe(workspaceId);
|
||||
expect(stored?.sessionId).toBe(sessionId);
|
||||
expect(stored?.messageCount).toBe(2);
|
||||
expect(stored?.summary).toBe("Deployment retry failures discussed");
|
||||
});
|
||||
|
||||
it("rejects duplicate session ingest per workspace", async () => {
|
||||
if (!setupComplete) {
|
||||
return;
|
||||
}
|
||||
|
||||
const sessionId = `session-${uuid()}`;
|
||||
const dto = {
|
||||
sessionId,
|
||||
agentId: "agent-conversation-duplicate",
|
||||
messages: [{ role: "user", content: "hello" }],
|
||||
summary: "simple conversation",
|
||||
startedAt: "2026-02-28T22:00:00.000Z",
|
||||
};
|
||||
|
||||
await service.ingest(workspaceId, dto);
|
||||
|
||||
await expect(service.ingest(workspaceId, dto)).rejects.toThrow(ConflictException);
|
||||
});
|
||||
|
||||
it("rejects semantic search when embeddings are disabled", async () => {
|
||||
if (!setupComplete) {
|
||||
return;
|
||||
}
|
||||
|
||||
embeddingServiceMock.isConfigured.mockReturnValue(false);
|
||||
|
||||
await expect(
|
||||
service.search(workspaceId, {
|
||||
query: "deployment retries",
|
||||
})
|
||||
).rejects.toThrow(ConflictException);
|
||||
});
|
||||
|
||||
it("searches archived conversations by vector similarity", async () => {
|
||||
if (!setupComplete) {
|
||||
return;
|
||||
}
|
||||
|
||||
const near = vector(0.02);
|
||||
const far = vector(0.85);
|
||||
|
||||
const matching = await prisma.conversationArchive.create({
|
||||
data: {
|
||||
workspaceId,
|
||||
sessionId: `session-search-${uuid()}`,
|
||||
agentId: "agent-conversation-search-a",
|
||||
messages: [
|
||||
{ role: "user", content: "What caused deployment retries?" },
|
||||
{ role: "assistant", content: "A connection pool timeout." },
|
||||
] as unknown as Prisma.InputJsonValue,
|
||||
messageCount: 2,
|
||||
summary: "Deployment retries caused by connection pool timeout",
|
||||
startedAt: new Date("2026-02-28T23:00:00.000Z"),
|
||||
metadata: { channel: "cli" } as Prisma.InputJsonValue,
|
||||
},
|
||||
});
|
||||
|
||||
const nonMatching = await prisma.conversationArchive.create({
|
||||
data: {
|
||||
workspaceId,
|
||||
sessionId: `session-search-${uuid()}`,
|
||||
agentId: "agent-conversation-search-b",
|
||||
messages: [
|
||||
{ role: "user", content: "How is billing configured?" },
|
||||
] as unknown as Prisma.InputJsonValue,
|
||||
messageCount: 1,
|
||||
summary: "Billing and quotas conversation",
|
||||
startedAt: new Date("2026-02-28T23:10:00.000Z"),
|
||||
metadata: { channel: "cli" } as Prisma.InputJsonValue,
|
||||
},
|
||||
});
|
||||
|
||||
await prisma.$executeRaw`
|
||||
UPDATE conversation_archives
|
||||
SET embedding = ${toVectorLiteral(near)}::vector(${EMBEDDING_DIMENSION})
|
||||
WHERE id = ${matching.id}::uuid
|
||||
`;
|
||||
await prisma.$executeRaw`
|
||||
UPDATE conversation_archives
|
||||
SET embedding = ${toVectorLiteral(far)}::vector(${EMBEDDING_DIMENSION})
|
||||
WHERE id = ${nonMatching.id}::uuid
|
||||
`;
|
||||
|
||||
embeddingServiceMock.isConfigured.mockReturnValue(true);
|
||||
embeddingServiceMock.generateEmbedding.mockResolvedValue(near);
|
||||
|
||||
const result = await service.search(workspaceId, {
|
||||
query: "deployment retries timeout",
|
||||
agentId: "agent-conversation-search-a",
|
||||
similarityThreshold: 0,
|
||||
limit: 10,
|
||||
});
|
||||
|
||||
const rows = result.data as Array<{ id: string; agent_id: string; similarity: number }>;
|
||||
|
||||
expect(result.pagination.total).toBe(1);
|
||||
expect(rows).toHaveLength(1);
|
||||
expect(rows[0]?.id).toBe(matching.id);
|
||||
expect(rows[0]?.agent_id).toBe("agent-conversation-search-a");
|
||||
expect(rows[0]?.similarity).toBeGreaterThan(0);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,14 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { PrismaModule } from "../prisma/prisma.module";
|
||||
import { AuthModule } from "../auth/auth.module";
|
||||
import { KnowledgeModule } from "../knowledge/knowledge.module";
|
||||
import { ConversationArchiveService } from "./conversation-archive.service";
|
||||
import { ConversationArchiveController } from "./conversation-archive.controller";
|
||||
|
||||
/**
 * Module wiring for the conversation-archive feature.
 *
 * - PrismaModule: database access for archive records.
 * - AuthModule: presumably supplies the auth guards used by the controller — confirm.
 * - KnowledgeModule: provides EmbeddingService, which the service uses for
 *   semantic (vector) search.
 *
 * The service is exported so other modules can archive conversations directly.
 */
@Module({
  imports: [PrismaModule, AuthModule, KnowledgeModule],
  controllers: [ConversationArchiveController],
  providers: [ConversationArchiveService],
  exports: [ConversationArchiveService],
})
export class ConversationArchiveModule {}
|
||||
@@ -0,0 +1,149 @@
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { ConflictException, NotFoundException } from "@nestjs/common";
|
||||
import { ConversationArchiveService } from "./conversation-archive.service";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { EmbeddingService } from "../knowledge/services/embedding.service";
|
||||
|
||||
// In-memory stand-in for PrismaService: only the delegate methods and raw
// query helpers that ConversationArchiveService actually calls are mocked.
const mockPrisma = {
  conversationArchive: {
    findUnique: vi.fn(),
    create: vi.fn(),
    count: vi.fn(),
    findMany: vi.fn(),
    findFirst: vi.fn(),
  },
  $queryRaw: vi.fn(),
  $executeRaw: vi.fn(),
};

// Stand-in for EmbeddingService; individual tests toggle isConfigured()
// to exercise both the embedding-enabled and embedding-disabled paths.
const mockEmbedding = {
  isConfigured: vi.fn(),
  generateEmbedding: vi.fn(),
};

describe("ConversationArchiveService", () => {
  let service: ConversationArchiveService;

  // Build a fresh testing module (and clear all mocks) before every test so
  // call counts and queued return values never leak between cases.
  beforeEach(async () => {
    vi.clearAllMocks();

    const module: TestingModule = await Test.createTestingModule({
      providers: [
        ConversationArchiveService,
        { provide: PrismaService, useValue: mockPrisma },
        { provide: EmbeddingService, useValue: mockEmbedding },
      ],
    }).compile();

    service = module.get<ConversationArchiveService>(ConversationArchiveService);
  });

  describe("ingest", () => {
    const workspaceId = "ws-1";
    // Minimal valid ingest payload shared by the ingest tests.
    const dto = {
      sessionId: "sess-abc",
      agentId: "agent-xyz",
      messages: [
        { role: "user", content: "Hello" },
        { role: "assistant", content: "Hi there!" },
      ],
      summary: "A greeting conversation",
      startedAt: "2026-02-28T10:00:00Z",
    };

    it("creates a conversation archive and returns id", async () => {
      // No existing session -> duplicate check passes; embeddings disabled so
      // no async embedding write is attempted.
      mockPrisma.conversationArchive.findUnique.mockResolvedValue(null);
      mockPrisma.conversationArchive.create.mockResolvedValue({ id: "conv-1" });
      mockEmbedding.isConfigured.mockReturnValue(false);

      const result = await service.ingest(workspaceId, dto);

      expect(result).toEqual({ id: "conv-1" });
      // messageCount must be derived from the messages array length.
      expect(mockPrisma.conversationArchive.create).toHaveBeenCalledWith(
        expect.objectContaining({
          data: expect.objectContaining({
            workspaceId,
            sessionId: dto.sessionId,
            agentId: dto.agentId,
            messageCount: 2,
          }),
        })
      );
    });

    it("throws ConflictException when session already exists", async () => {
      // Duplicate-session lookup returns a record -> ingest must reject.
      mockPrisma.conversationArchive.findUnique.mockResolvedValue({ id: "existing" });

      await expect(service.ingest(workspaceId, dto)).rejects.toThrow(ConflictException);
    });
  });

  describe("findAll", () => {
    const workspaceId = "ws-1";

    it("returns paginated list", async () => {
      mockPrisma.conversationArchive.count.mockResolvedValue(5);
      mockPrisma.conversationArchive.findMany.mockResolvedValue([
        { id: "conv-1", sessionId: "sess-1" },
      ]);

      const result = await service.findAll(workspaceId, { page: 1, limit: 10 });

      expect(result.pagination.total).toBe(5);
      expect(result.data).toHaveLength(1);
    });

    it("uses default pagination when not provided", async () => {
      mockPrisma.conversationArchive.count.mockResolvedValue(0);
      mockPrisma.conversationArchive.findMany.mockResolvedValue([]);

      const result = await service.findAll(workspaceId, {});

      // Service defaults: page 1, limit 20.
      expect(result.pagination.page).toBe(1);
      expect(result.pagination.limit).toBe(20);
    });
  });

  describe("findOne", () => {
    const workspaceId = "ws-1";

    it("returns record when found", async () => {
      const record = { id: "conv-1", workspaceId, sessionId: "sess-1" };
      mockPrisma.conversationArchive.findFirst.mockResolvedValue(record);

      const result = await service.findOne(workspaceId, "conv-1");

      expect(result).toEqual(record);
    });

    it("throws NotFoundException when record does not exist", async () => {
      mockPrisma.conversationArchive.findFirst.mockResolvedValue(null);

      await expect(service.findOne(workspaceId, "missing")).rejects.toThrow(NotFoundException);
    });
  });

  describe("search", () => {
    it("throws ConflictException when embedding is not configured", async () => {
      mockEmbedding.isConfigured.mockReturnValue(false);

      await expect(service.search("ws-1", { query: "test query" })).rejects.toThrow(
        ConflictException
      );
    });

    it("performs vector search when configured", async () => {
      mockEmbedding.isConfigured.mockReturnValue(true);
      // 1536-dim vector matches the expected embedding dimension.
      mockEmbedding.generateEmbedding.mockResolvedValue(new Array(1536).fill(0.1));
      // First raw query returns the rows, second returns the COUNT(*) —
      // the order must match the service implementation.
      mockPrisma.$queryRaw
        .mockResolvedValueOnce([{ id: "conv-1", similarity: 0.9 }])
        .mockResolvedValueOnce([{ count: BigInt(1) }]);

      const result = await service.search("ws-1", { query: "greetings" });

      expect(result.data).toHaveLength(1);
      expect(result.pagination.total).toBe(1);
    });
  });
});
|
||||
@@ -0,0 +1,277 @@
|
||||
import { Injectable, Logger, NotFoundException, ConflictException } from "@nestjs/common";
|
||||
import { Prisma } from "@prisma/client";
|
||||
import { EMBEDDING_DIMENSION } from "@mosaic/shared";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { EmbeddingService } from "../knowledge/services/embedding.service";
|
||||
import type { IngestConversationDto, SearchConversationDto, ListConversationsDto } from "./dto";
|
||||
|
||||
/**
 * Shape of a raw conversation archive row returned by the $queryRaw vector
 * search. Column names are snake_case because raw SQL bypasses Prisma's
 * camelCase field mapping.
 */
interface RawConversationResult {
  id: string;
  workspace_id: string;
  session_id: string;
  agent_id: string;
  messages: unknown; // JSON column; element shape is not enforced at this layer
  message_count: number;
  summary: string;
  started_at: Date;
  ended_at: Date | null; // null when the session had no reported end time
  metadata: unknown; // free-form JSON metadata supplied at ingest
  created_at: Date;
  updated_at: Date;
  similarity: number; // computed in SQL as 1 - (embedding <=> query vector)
}
|
||||
|
||||
/**
 * Generic paginated response wrapper used by list and search endpoints.
 *
 * `totalPages` is derived as ceil(total / limit) by the producing methods.
 */
export interface PaginatedConversations<T> {
  data: T[];
  pagination: {
    page: number;
    limit: number;
    total: number;
    totalPages: number;
  };
}
|
||||
|
||||
@Injectable()
export class ConversationArchiveService {
  private readonly logger = new Logger(ConversationArchiveService.name);
  // Minimum cosine-similarity score a row must reach to be returned by
  // search() when the caller does not supply a threshold.
  private readonly defaultSimilarityThreshold = 0.5;

  constructor(
    private readonly prisma: PrismaService,
    private readonly embedding: EmbeddingService
  ) {}

  /**
   * Ingest a conversation session log.
   *
   * Creates the archive row synchronously; if embeddings are configured,
   * generation and storage of the vector happens fire-and-forget afterwards,
   * so a returned id does NOT guarantee the row is searchable yet.
   *
   * @param workspaceId - owning workspace (scopes the sessionId uniqueness check)
   * @param dto - validated ingest payload
   * @returns the id of the created archive row
   * @throws ConflictException when the (workspaceId, sessionId) pair already exists
   */
  async ingest(workspaceId: string, dto: IngestConversationDto): Promise<{ id: string }> {
    // Verify no duplicate session in this workspace (compound unique key).
    const existing = await this.prisma.conversationArchive.findUnique({
      where: { workspaceId_sessionId: { workspaceId, sessionId: dto.sessionId } },
      select: { id: true },
    });

    if (existing) {
      throw new ConflictException(
        `Conversation session '${dto.sessionId}' already exists in this workspace`
      );
    }

    const messageCount = dto.messages.length;

    // Create record first to get an ID the embedding update can target.
    const record = await this.prisma.conversationArchive.create({
      data: {
        workspaceId,
        sessionId: dto.sessionId,
        agentId: dto.agentId,
        messages: dto.messages as unknown as Prisma.InputJsonValue,
        messageCount,
        summary: dto.summary,
        startedAt: new Date(dto.startedAt),
        endedAt: dto.endedAt ? new Date(dto.endedAt) : null,
        metadata: (dto.metadata ?? {}) as Prisma.InputJsonValue,
      },
      select: { id: true },
    });

    // Generate and store embedding asynchronously (non-blocking for ingest).
    // Failures are logged, never surfaced to the caller; the row then simply
    // has a NULL embedding and is excluded from vector search.
    if (this.embedding.isConfigured()) {
      const textForEmbedding = this.buildEmbeddingText(dto.summary, dto.messages);
      this.storeEmbedding(record.id, textForEmbedding).catch((err: unknown) => {
        this.logger.error(`Failed to store embedding for conversation ${record.id}`, err);
      });
    }

    this.logger.log(`Ingested conversation ${record.id} (session: ${dto.sessionId})`);
    return { id: record.id };
  }

  /**
   * Semantic vector search across conversation archives in a workspace.
   *
   * Converts the caller's similarity threshold (higher = stricter) into a
   * distance threshold for the pgvector `<=>` operator, then runs one query
   * for the top rows and a second for the total count.
   *
   * @throws ConflictException when the embedding provider is not configured
   */
  async search(
    workspaceId: string,
    dto: SearchConversationDto
  ): Promise<PaginatedConversations<RawConversationResult>> {
    if (!this.embedding.isConfigured()) {
      throw new ConflictException("Semantic search requires OpenAI API key to be configured");
    }

    const limit = dto.limit ?? 20;
    const threshold = dto.similarityThreshold ?? this.defaultSimilarityThreshold;
    // similarity = 1 - distance, so a similarity floor becomes a distance ceiling.
    const distanceThreshold = 1 - threshold;

    const queryEmbedding = await this.embedding.generateEmbedding(dto.query);
    // pgvector accepts the '[x,y,...]' literal form, cast to ::vector below.
    const embeddingStr = `[${queryEmbedding.join(",")}]`;

    // Optional agent filter; Prisma.sql`` (empty) is a no-op fragment.
    const agentFilter = dto.agentId ? Prisma.sql`AND ca.agent_id = ${dto.agentId}` : Prisma.sql``;

    // NOTE(review): ${EMBEDDING_DIMENSION} inside ::vector(...) becomes a bind
    // parameter via $queryRaw; Postgres type modifiers normally require integer
    // literals — verify this serializes as a literal and parses at runtime.
    const rows = await this.prisma.$queryRaw<RawConversationResult[]>`
      SELECT
        ca.id,
        ca.workspace_id,
        ca.session_id,
        ca.agent_id,
        ca.messages,
        ca.message_count,
        ca.summary,
        ca.started_at,
        ca.ended_at,
        ca.metadata,
        ca.created_at,
        ca.updated_at,
        (1 - (ca.embedding <=> ${embeddingStr}::vector(${EMBEDDING_DIMENSION}))) AS similarity
      FROM conversation_archives ca
      WHERE ca.workspace_id = ${workspaceId}::uuid
        AND ca.embedding IS NOT NULL
        AND (ca.embedding <=> ${embeddingStr}::vector(${EMBEDDING_DIMENSION})) <= ${distanceThreshold}
        ${agentFilter}
      ORDER BY ca.embedding <=> ${embeddingStr}::vector(${EMBEDDING_DIMENSION})
      LIMIT ${limit}
    `;

    // Count query: filters must stay in sync with the SELECT above.
    const countResult = await this.prisma.$queryRaw<[{ count: bigint }]>`
      SELECT COUNT(*) AS count
      FROM conversation_archives ca
      WHERE ca.workspace_id = ${workspaceId}::uuid
        AND ca.embedding IS NOT NULL
        AND (ca.embedding <=> ${embeddingStr}::vector(${EMBEDDING_DIMENSION})) <= ${distanceThreshold}
        ${agentFilter}
    `;

    // COUNT(*) comes back as bigint; convert for the JSON-friendly response.
    const total = Number(countResult[0].count);

    return {
      data: rows,
      pagination: {
        page: 1, // search is not offset-paginated; always the first page
        limit,
        total,
        totalPages: Math.ceil(total / limit),
      },
    };
  }

  /**
   * List conversation archives with filtering and offset pagination.
   *
   * Filters: optional agentId equality, optional startedAt range
   * (startedAfter/startedBefore, inclusive). Results ordered newest-first
   * by startedAt. The `messages` JSON column is intentionally excluded
   * from the list projection.
   */
  async findAll(
    workspaceId: string,
    query: ListConversationsDto
  ): Promise<PaginatedConversations<object>> {
    const page = query.page ?? 1;
    const limit = query.limit ?? 20;
    const skip = (page - 1) * limit;

    const where: Prisma.ConversationArchiveWhereInput = {
      workspaceId,
      ...(query.agentId ? { agentId: query.agentId } : {}),
      ...(query.startedAfter || query.startedBefore
        ? {
            startedAt: {
              ...(query.startedAfter ? { gte: new Date(query.startedAfter) } : {}),
              ...(query.startedBefore ? { lte: new Date(query.startedBefore) } : {}),
            },
          }
        : {}),
    };

    // Run count and page fetch concurrently; both use the same filter.
    const [total, records] = await Promise.all([
      this.prisma.conversationArchive.count({ where }),
      this.prisma.conversationArchive.findMany({
        where,
        select: {
          id: true,
          workspaceId: true,
          sessionId: true,
          agentId: true,
          messageCount: true,
          summary: true,
          startedAt: true,
          endedAt: true,
          metadata: true,
          createdAt: true,
          updatedAt: true,
        },
        orderBy: { startedAt: "desc" },
        skip,
        take: limit,
      }),
    ]);

    return {
      data: records,
      pagination: {
        page,
        limit,
        total,
        totalPages: Math.ceil(total / limit),
      },
    };
  }

  /**
   * Get a single conversation archive by ID, scoped to the workspace.
   * Unlike findAll, this projection includes the full `messages` payload.
   *
   * @throws NotFoundException when no row matches (id, workspaceId)
   */
  async findOne(workspaceId: string, id: string): Promise<object> {
    const record = await this.prisma.conversationArchive.findFirst({
      where: { id, workspaceId },
      select: {
        id: true,
        workspaceId: true,
        sessionId: true,
        agentId: true,
        messages: true,
        messageCount: true,
        summary: true,
        startedAt: true,
        endedAt: true,
        metadata: true,
        createdAt: true,
        updatedAt: true,
      },
    });

    if (!record) {
      throw new NotFoundException(`Conversation archive '${id}' not found`);
    }

    return record;
  }

  /**
   * Build the text that gets embedded: the summary followed by every message
   * rendered as "role: content", one per line.
   */
  private buildEmbeddingText(
    summary: string,
    messages: { role: string; content: string }[]
  ): string {
    const messageText = messages.map((m) => `${m.role}: ${m.content}`).join("\n");
    return `${summary}\n\n${messageText}`.trim();
  }

  /**
   * Generate an embedding for `text` and write it onto the existing
   * conversation_archives row (raw SQL, since Prisma has no vector type).
   */
  private async storeEmbedding(id: string, text: string): Promise<void> {
    const vector = await this.embedding.generateEmbedding(text);
    const embeddingStr = `[${vector.join(",")}]`;

    await this.prisma.$executeRaw`
      UPDATE conversation_archives
      SET embedding = ${embeddingStr}::vector(${EMBEDDING_DIMENSION}),
          updated_at = NOW()
      WHERE id = ${id}::uuid
    `;

    this.logger.log(`Stored embedding for conversation ${id}`);
  }
}
|
||||
3
apps/api/src/conversation-archive/dto/index.ts
Normal file
3
apps/api/src/conversation-archive/dto/index.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export { IngestConversationDto, ConversationMessageDto } from "./ingest-conversation.dto";
|
||||
export { SearchConversationDto } from "./search-conversation.dto";
|
||||
export { ListConversationsDto } from "./list-conversations.dto";
|
||||
@@ -0,0 +1,64 @@
|
||||
import {
|
||||
IsString,
|
||||
IsArray,
|
||||
IsOptional,
|
||||
IsDateString,
|
||||
MinLength,
|
||||
MaxLength,
|
||||
IsObject,
|
||||
ValidateNested,
|
||||
ArrayMinSize,
|
||||
} from "class-validator";
|
||||
import { Type } from "class-transformer";
|
||||
|
||||
/**
 * A single message within a conversation session.
 */
export class ConversationMessageDto {
  // Message author role (e.g. "user" / "assistant"); any non-empty string
  // is accepted — the role vocabulary is not restricted here.
  @IsString()
  role!: string;

  // Message body; must be non-empty, no upper length bound.
  @IsString()
  @MinLength(1)
  content!: string;

  // Optional ISO-8601 timestamp of when the message was produced.
  @IsOptional()
  @IsDateString()
  timestamp?: string;
}
|
||||
|
||||
/**
 * Payload for ingesting a conversation session log.
 * sessionId must be unique per workspace (enforced by the service).
 */
export class IngestConversationDto {
  // External session identifier, unique within the workspace.
  @IsString()
  @MinLength(1)
  @MaxLength(500)
  sessionId!: string;

  // Identifier of the agent that held the conversation.
  @IsString()
  @MinLength(1)
  @MaxLength(500)
  agentId!: string;

  // At least one message; each element is validated as ConversationMessageDto.
  @IsArray()
  @ArrayMinSize(1)
  @ValidateNested({ each: true })
  @Type(() => ConversationMessageDto)
  messages!: ConversationMessageDto[];

  // Human-readable summary; also feeds embedding generation in the service.
  @IsString()
  @MinLength(1)
  summary!: string;

  // ISO-8601 session start time (required).
  @IsDateString()
  startedAt!: string;

  // ISO-8601 session end time; omit for sessions without a recorded end.
  @IsOptional()
  @IsDateString()
  endedAt?: string;

  // Arbitrary JSON metadata stored alongside the archive row.
  @IsOptional()
  @IsObject()
  metadata?: Record<string, unknown>;
}
|
||||
@@ -0,0 +1,33 @@
|
||||
import { IsString, IsOptional, MaxLength, IsInt, Min, Max, IsDateString } from "class-validator";
|
||||
import { Type } from "class-transformer";
|
||||
|
||||
/**
 * Query parameters for listing/filtering conversation archives.
 * All fields are optional; the service applies defaults page=1, limit=20.
 */
export class ListConversationsDto {
  // Filter by exact agent identifier.
  @IsOptional()
  @IsString()
  @MaxLength(500)
  agentId?: string;

  // Inclusive lower bound on startedAt (ISO-8601).
  @IsOptional()
  @IsDateString()
  startedAfter?: string;

  // Inclusive upper bound on startedAt (ISO-8601).
  @IsOptional()
  @IsDateString()
  startedBefore?: string;

  // 1-based page number; @Type coerces query-string values to number.
  @IsOptional()
  @Type(() => Number)
  @IsInt()
  @Min(1)
  page?: number;

  // Page size, capped at 100.
  @IsOptional()
  @Type(() => Number)
  @IsInt()
  @Min(1)
  @Max(100)
  limit?: number;
}
|
||||
@@ -0,0 +1,40 @@
|
||||
import {
|
||||
IsString,
|
||||
IsOptional,
|
||||
MinLength,
|
||||
MaxLength,
|
||||
IsInt,
|
||||
Min,
|
||||
Max,
|
||||
IsNumber,
|
||||
} from "class-validator";
|
||||
import { Type } from "class-transformer";
|
||||
|
||||
/**
 * Parameters for semantic (vector) search across conversation archives.
 */
export class SearchConversationDto {
  // Free-text query; embedded and compared against stored vectors.
  @IsString()
  @MinLength(1)
  @MaxLength(1000)
  query!: string;

  // Optional filter: restrict results to one agent.
  @IsOptional()
  @IsString()
  @MaxLength(500)
  agentId?: string;

  // Maximum rows to return (service default 20), capped at 100.
  @IsOptional()
  @Type(() => Number)
  @IsInt()
  @Min(1)
  @Max(100)
  limit?: number;

  // Minimum similarity score in [0, 1]; service defaults to 0.5 when omitted.
  @IsOptional()
  @Type(() => Number)
  @IsNumber()
  @Min(0)
  @Max(1)
  similarityThreshold?: number;
}
|
||||
10
apps/api/src/crypto/crypto.module.ts
Normal file
10
apps/api/src/crypto/crypto.module.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { ConfigModule } from "@nestjs/config";
|
||||
import { CryptoService } from "./crypto.service";
|
||||
|
||||
/**
 * Module exposing CryptoService (secret encryption / token generation).
 * ConfigModule is imported because CryptoService reads MOSAIC_SECRET_KEY
 * from ConfigService at construction time.
 */
@Module({
  imports: [ConfigModule],
  providers: [CryptoService],
  exports: [CryptoService],
})
export class CryptoModule {}
|
||||
71
apps/api/src/crypto/crypto.service.spec.ts
Normal file
71
apps/api/src/crypto/crypto.service.spec.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import { describe, it, expect, beforeEach } from "vitest";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { CryptoService } from "./crypto.service";
|
||||
|
||||
function createConfigService(secret?: string): ConfigService {
|
||||
return {
|
||||
get: (key: string) => {
|
||||
if (key === "MOSAIC_SECRET_KEY") {
|
||||
return secret;
|
||||
}
|
||||
return undefined;
|
||||
},
|
||||
} as unknown as ConfigService;
|
||||
}
|
||||
|
||||
describe("CryptoService", () => {
  let service: CryptoService;

  // Fresh service per test with a secret that satisfies the >= 32 chars rule.
  beforeEach(() => {
    service = new CryptoService(createConfigService("this-is-a-test-secret-key-with-32+chars"));
  });

  it("encrypt -> decrypt roundtrip", () => {
    const plaintext = "my-secret-api-key";

    const encrypted = service.encrypt(plaintext);
    const decrypted = service.decrypt(encrypted);

    expect(encrypted.startsWith("enc:")).toBe(true);
    expect(decrypted).toBe(plaintext);
  });

  it("decrypt rejects tampered ciphertext", () => {
    const encrypted = service.encrypt("sensitive-token");
    const payload = encrypted.slice(4); // strip the "enc:" prefix
    const bytes = Buffer.from(payload, "base64");

    // Flip the final byte: it falls inside the GCM auth tag, so
    // authentication must fail on decrypt.
    bytes[bytes.length - 1] = bytes[bytes.length - 1]! ^ 0xff;

    const tampered = `enc:${bytes.toString("base64")}`;

    expect(() => service.decrypt(tampered)).toThrow();
  });

  it("decrypt rejects non-encrypted string", () => {
    // No "enc:" prefix -> rejected before any crypto work.
    expect(() => service.decrypt("plain-text-value")).toThrow();
  });

  it("isEncrypted detects prefix correctly", () => {
    expect(service.isEncrypted("enc:abc")).toBe(true);
    expect(service.isEncrypted("ENC:abc")).toBe(false); // prefix check is case-sensitive
    expect(service.isEncrypted("plain-text")).toBe(false);
  });

  it("generateToken returns 64-char hex string", () => {
    const token = service.generateToken();

    expect(token).toMatch(/^[0-9a-f]{64}$/);
  });

  it("different plaintexts produce different ciphertexts (random IV)", () => {
    const encryptedA = service.encrypt("value-a");
    const encryptedB = service.encrypt("value-b");

    expect(encryptedA).not.toBe(encryptedB);
  });

  it("missing MOSAIC_SECRET_KEY throws on construction", () => {
    expect(() => new CryptoService(createConfigService(undefined))).toThrow();
  });
});
|
||||
82
apps/api/src/crypto/crypto.service.ts
Normal file
82
apps/api/src/crypto/crypto.service.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import { Injectable } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { createCipheriv, createDecipheriv, hkdfSync, randomBytes } from "crypto";
|
||||
|
||||
const ALGORITHM = "aes-256-gcm";
|
||||
const ENCRYPTED_PREFIX = "enc:";
|
||||
const IV_LENGTH = 12;
|
||||
const AUTH_TAG_LENGTH = 16;
|
||||
const DERIVED_KEY_LENGTH = 32;
|
||||
const HKDF_SALT = "mosaic.crypto.v1";
|
||||
const HKDF_INFO = "mosaic-db-secret-encryption";
|
||||
|
||||
@Injectable()
|
||||
export class CryptoService {
|
||||
private readonly key: Buffer;
|
||||
|
||||
constructor(private readonly config: ConfigService) {
|
||||
const secret = this.config.get<string>("MOSAIC_SECRET_KEY");
|
||||
|
||||
if (!secret) {
|
||||
throw new Error("MOSAIC_SECRET_KEY environment variable is required");
|
||||
}
|
||||
|
||||
if (secret.length < 32) {
|
||||
throw new Error("MOSAIC_SECRET_KEY must be at least 32 characters");
|
||||
}
|
||||
|
||||
this.key = Buffer.from(
|
||||
hkdfSync(
|
||||
"sha256",
|
||||
Buffer.from(secret, "utf8"),
|
||||
Buffer.from(HKDF_SALT, "utf8"),
|
||||
Buffer.from(HKDF_INFO, "utf8"),
|
||||
DERIVED_KEY_LENGTH
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
encrypt(plaintext: string): string {
|
||||
const iv = randomBytes(IV_LENGTH);
|
||||
const cipher = createCipheriv(ALGORITHM, this.key, iv);
|
||||
const ciphertext = Buffer.concat([cipher.update(plaintext, "utf8"), cipher.final()]);
|
||||
const authTag = cipher.getAuthTag();
|
||||
const payload = Buffer.concat([iv, ciphertext, authTag]).toString("base64");
|
||||
|
||||
return `${ENCRYPTED_PREFIX}${payload}`;
|
||||
}
|
||||
|
||||
decrypt(encrypted: string): string {
|
||||
if (!this.isEncrypted(encrypted)) {
|
||||
throw new Error("Value is not encrypted");
|
||||
}
|
||||
|
||||
const payloadBase64 = encrypted.slice(ENCRYPTED_PREFIX.length);
|
||||
|
||||
try {
|
||||
const payload = Buffer.from(payloadBase64, "base64");
|
||||
if (payload.length < IV_LENGTH + AUTH_TAG_LENGTH) {
|
||||
throw new Error("Encrypted payload is too short");
|
||||
}
|
||||
|
||||
const iv = payload.subarray(0, IV_LENGTH);
|
||||
const authTag = payload.subarray(payload.length - AUTH_TAG_LENGTH);
|
||||
const ciphertext = payload.subarray(IV_LENGTH, payload.length - AUTH_TAG_LENGTH);
|
||||
|
||||
const decipher = createDecipheriv(ALGORITHM, this.key, iv);
|
||||
decipher.setAuthTag(authTag);
|
||||
|
||||
return Buffer.concat([decipher.update(ciphertext), decipher.final()]).toString("utf8");
|
||||
} catch {
|
||||
throw new Error("Failed to decrypt value");
|
||||
}
|
||||
}
|
||||
|
||||
isEncrypted(value: string): boolean {
|
||||
return value.startsWith(ENCRYPTED_PREFIX);
|
||||
}
|
||||
|
||||
generateToken(): string {
|
||||
return randomBytes(32).toString("hex");
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,7 @@ import { DashboardService } from "./dashboard.service";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||
import { Workspace, Permission, RequirePermission } from "../common/decorators";
|
||||
import type { DashboardSummaryDto } from "./dto";
|
||||
import { DashboardSummaryDto } from "./dto";
|
||||
|
||||
/**
|
||||
* Controller for dashboard endpoints.
|
||||
|
||||
33
apps/api/src/findings/dto/create-finding.dto.ts
Normal file
33
apps/api/src/findings/dto/create-finding.dto.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { IsObject, IsOptional, IsString, IsUUID, MaxLength, MinLength } from "class-validator";
|
||||
|
||||
/**
 * Payload for creating a finding (a structured result reported by an agent).
 */
export class CreateFindingDto {
  // Optional link to the task that produced this finding.
  @IsOptional()
  @IsUUID("4", { message: "taskId must be a valid UUID" })
  taskId?: string;

  // Identifier of the reporting agent.
  @IsString({ message: "agentId must be a string" })
  @MinLength(1, { message: "agentId must not be empty" })
  @MaxLength(255, { message: "agentId must not exceed 255 characters" })
  agentId!: string;

  // Free-form finding category (vocabulary not restricted here).
  @IsString({ message: "type must be a string" })
  @MinLength(1, { message: "type must not be empty" })
  @MaxLength(100, { message: "type must not exceed 100 characters" })
  type!: string;

  // Short human-readable title.
  @IsString({ message: "title must be a string" })
  @MinLength(1, { message: "title must not be empty" })
  @MaxLength(255, { message: "title must not exceed 255 characters" })
  title!: string;

  // Structured finding payload; shape is type-specific and not validated here.
  @IsObject({ message: "data must be an object" })
  data!: Record<string, unknown>;

  // Long-form summary (up to 20k chars).
  @IsString({ message: "summary must be a string" })
  @MinLength(1, { message: "summary must not be empty" })
  @MaxLength(20000, { message: "summary must not exceed 20000 characters" })
  summary!: string;
}
|
||||
3
apps/api/src/findings/dto/index.ts
Normal file
3
apps/api/src/findings/dto/index.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export { CreateFindingDto } from "./create-finding.dto";
|
||||
export { QueryFindingsDto } from "./query-findings.dto";
|
||||
export { SearchFindingsDto } from "./search-findings.dto";
|
||||
32
apps/api/src/findings/dto/query-findings.dto.ts
Normal file
32
apps/api/src/findings/dto/query-findings.dto.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { Type } from "class-transformer";
|
||||
import { IsInt, IsOptional, IsString, IsUUID, Max, Min } from "class-validator";
|
||||
|
||||
/**
 * Query parameters for listing findings with filters and pagination.
 */
export class QueryFindingsDto {
  // 1-based page number; @Type coerces query-string values to number.
  @IsOptional()
  @Type(() => Number)
  @IsInt({ message: "page must be an integer" })
  @Min(1, { message: "page must be at least 1" })
  page?: number;

  // Page size, capped at 100.
  @IsOptional()
  @Type(() => Number)
  @IsInt({ message: "limit must be an integer" })
  @Min(1, { message: "limit must be at least 1" })
  @Max(100, { message: "limit must not exceed 100" })
  limit?: number;

  // Filter by reporting agent.
  @IsOptional()
  @IsString({ message: "agentId must be a string" })
  agentId?: string;

  // Filter by finding category.
  @IsOptional()
  @IsString({ message: "type must be a string" })
  type?: string;

  // Filter by originating task.
  @IsOptional()
  @IsUUID("4", { message: "taskId must be a valid UUID" })
  taskId?: string;
}
|
||||
52
apps/api/src/findings/dto/search-findings.dto.ts
Normal file
52
apps/api/src/findings/dto/search-findings.dto.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import { Type } from "class-transformer";
|
||||
import {
|
||||
IsInt,
|
||||
IsNumber,
|
||||
IsOptional,
|
||||
IsString,
|
||||
IsUUID,
|
||||
Max,
|
||||
MaxLength,
|
||||
Min,
|
||||
} from "class-validator";
|
||||
|
||||
/**
|
||||
* DTO for finding semantic similarity search
|
||||
*/
|
||||
export class SearchFindingsDto {
|
||||
@IsString({ message: "query must be a string" })
|
||||
@MaxLength(1000, { message: "query must not exceed 1000 characters" })
|
||||
query!: string;
|
||||
|
||||
@IsOptional()
|
||||
@Type(() => Number)
|
||||
@IsInt({ message: "page must be an integer" })
|
||||
@Min(1, { message: "page must be at least 1" })
|
||||
page?: number;
|
||||
|
||||
@IsOptional()
|
||||
@Type(() => Number)
|
||||
@IsInt({ message: "limit must be an integer" })
|
||||
@Min(1, { message: "limit must be at least 1" })
|
||||
@Max(100, { message: "limit must not exceed 100" })
|
||||
limit?: number;
|
||||
|
||||
@IsOptional()
|
||||
@Type(() => Number)
|
||||
@IsNumber({}, { message: "similarityThreshold must be a number" })
|
||||
@Min(0, { message: "similarityThreshold must be at least 0" })
|
||||
@Max(1, { message: "similarityThreshold must not exceed 1" })
|
||||
similarityThreshold?: number;
|
||||
|
||||
@IsOptional()
|
||||
@IsString({ message: "agentId must be a string" })
|
||||
agentId?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString({ message: "type must be a string" })
|
||||
type?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsUUID("4", { message: "taskId must be a valid UUID" })
|
||||
taskId?: string;
|
||||
}
|
||||
195
apps/api/src/findings/findings.controller.spec.ts
Normal file
195
apps/api/src/findings/findings.controller.spec.ts
Normal file
@@ -0,0 +1,195 @@
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
import { FindingsController } from "./findings.controller";
|
||||
import { FindingsService } from "./findings.service";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||
import { CreateFindingDto, QueryFindingsDto, SearchFindingsDto } from "./dto";
|
||||
|
||||
describe("FindingsController", () => {
|
||||
let controller: FindingsController;
|
||||
let service: FindingsService;
|
||||
|
||||
const mockFindingsService = {
|
||||
create: vi.fn(),
|
||||
findAll: vi.fn(),
|
||||
findOne: vi.fn(),
|
||||
search: vi.fn(),
|
||||
remove: vi.fn(),
|
||||
};
|
||||
|
||||
const mockAuthGuard = {
|
||||
canActivate: vi.fn(() => true),
|
||||
};
|
||||
|
||||
const mockWorkspaceGuard = {
|
||||
canActivate: vi.fn(() => true),
|
||||
};
|
||||
|
||||
const mockPermissionGuard = {
|
||||
canActivate: vi.fn(() => true),
|
||||
};
|
||||
|
||||
const workspaceId = "550e8400-e29b-41d4-a716-446655440001";
|
||||
const findingId = "550e8400-e29b-41d4-a716-446655440002";
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
controllers: [FindingsController],
|
||||
providers: [
|
||||
{
|
||||
provide: FindingsService,
|
||||
useValue: mockFindingsService,
|
||||
},
|
||||
],
|
||||
})
|
||||
.overrideGuard(AuthGuard)
|
||||
.useValue(mockAuthGuard)
|
||||
.overrideGuard(WorkspaceGuard)
|
||||
.useValue(mockWorkspaceGuard)
|
||||
.overrideGuard(PermissionGuard)
|
||||
.useValue(mockPermissionGuard)
|
||||
.compile();
|
||||
|
||||
controller = module.get<FindingsController>(FindingsController);
|
||||
service = module.get<FindingsService>(FindingsService);
|
||||
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it("should be defined", () => {
|
||||
expect(controller).toBeDefined();
|
||||
});
|
||||
|
||||
describe("create", () => {
|
||||
it("should create a finding", async () => {
|
||||
const createDto: CreateFindingDto = {
|
||||
agentId: "research-agent",
|
||||
type: "security",
|
||||
title: "SQL injection risk",
|
||||
data: { severity: "high" },
|
||||
summary: "Potential SQL injection in search endpoint.",
|
||||
};
|
||||
|
||||
const createdFinding = {
|
||||
id: findingId,
|
||||
workspaceId,
|
||||
taskId: null,
|
||||
...createDto,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
mockFindingsService.create.mockResolvedValue(createdFinding);
|
||||
|
||||
const result = await controller.create(createDto, workspaceId);
|
||||
|
||||
expect(result).toEqual(createdFinding);
|
||||
expect(service.create).toHaveBeenCalledWith(workspaceId, createDto);
|
||||
});
|
||||
});
|
||||
|
||||
describe("findAll", () => {
|
||||
it("should return paginated findings", async () => {
|
||||
const query: QueryFindingsDto = {
|
||||
page: 1,
|
||||
limit: 10,
|
||||
type: "security",
|
||||
};
|
||||
|
||||
const response = {
|
||||
data: [],
|
||||
meta: {
|
||||
total: 0,
|
||||
page: 1,
|
||||
limit: 10,
|
||||
totalPages: 0,
|
||||
},
|
||||
};
|
||||
|
||||
mockFindingsService.findAll.mockResolvedValue(response);
|
||||
|
||||
const result = await controller.findAll(query, workspaceId);
|
||||
|
||||
expect(result).toEqual(response);
|
||||
expect(service.findAll).toHaveBeenCalledWith(workspaceId, query);
|
||||
});
|
||||
});
|
||||
|
||||
describe("findOne", () => {
|
||||
it("should return a finding", async () => {
|
||||
const finding = {
|
||||
id: findingId,
|
||||
workspaceId,
|
||||
taskId: null,
|
||||
agentId: "research-agent",
|
||||
type: "security",
|
||||
title: "SQL injection risk",
|
||||
data: { severity: "high" },
|
||||
summary: "Potential SQL injection in search endpoint.",
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
mockFindingsService.findOne.mockResolvedValue(finding);
|
||||
|
||||
const result = await controller.findOne(findingId, workspaceId);
|
||||
|
||||
expect(result).toEqual(finding);
|
||||
expect(service.findOne).toHaveBeenCalledWith(findingId, workspaceId);
|
||||
});
|
||||
});
|
||||
|
||||
describe("search", () => {
|
||||
it("should perform semantic search", async () => {
|
||||
const searchDto: SearchFindingsDto = {
|
||||
query: "sql injection",
|
||||
limit: 5,
|
||||
};
|
||||
|
||||
const response = {
|
||||
data: [
|
||||
{
|
||||
id: findingId,
|
||||
workspaceId,
|
||||
taskId: null,
|
||||
agentId: "research-agent",
|
||||
type: "security",
|
||||
title: "SQL injection risk",
|
||||
data: { severity: "high" },
|
||||
summary: "Potential SQL injection in search endpoint.",
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
score: 0.91,
|
||||
},
|
||||
],
|
||||
meta: {
|
||||
total: 1,
|
||||
page: 1,
|
||||
limit: 5,
|
||||
totalPages: 1,
|
||||
},
|
||||
query: "sql injection",
|
||||
};
|
||||
|
||||
mockFindingsService.search.mockResolvedValue(response);
|
||||
|
||||
const result = await controller.search(searchDto, workspaceId);
|
||||
|
||||
expect(result).toEqual(response);
|
||||
expect(service.search).toHaveBeenCalledWith(workspaceId, searchDto);
|
||||
});
|
||||
});
|
||||
|
||||
describe("remove", () => {
|
||||
it("should delete a finding", async () => {
|
||||
const response = { message: "Finding deleted successfully" };
|
||||
mockFindingsService.remove.mockResolvedValue(response);
|
||||
|
||||
const result = await controller.remove(findingId, workspaceId);
|
||||
|
||||
expect(result).toEqual(response);
|
||||
expect(service.remove).toHaveBeenCalledWith(findingId, workspaceId);
|
||||
});
|
||||
});
|
||||
});
|
||||
81
apps/api/src/findings/findings.controller.ts
Normal file
81
apps/api/src/findings/findings.controller.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
import { Body, Controller, Delete, Get, Param, Post, Query, UseGuards } from "@nestjs/common";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||
import { Workspace, Permission, RequirePermission } from "../common/decorators";
|
||||
import { CreateFindingDto, QueryFindingsDto, SearchFindingsDto } from "./dto";
|
||||
import {
|
||||
FindingsService,
|
||||
FindingsSearchResponse,
|
||||
PaginatedFindingsResponse,
|
||||
} from "./findings.service";
|
||||
|
||||
/**
|
||||
* Controller for findings endpoints
|
||||
* All endpoints require authentication and workspace context
|
||||
*/
|
||||
@Controller("findings")
|
||||
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
|
||||
export class FindingsController {
|
||||
constructor(private readonly findingsService: FindingsService) {}
|
||||
|
||||
/**
|
||||
* POST /api/findings
|
||||
* Create a new finding and embed its summary
|
||||
* Requires: MEMBER role or higher
|
||||
*/
|
||||
@Post()
|
||||
@RequirePermission(Permission.WORKSPACE_MEMBER)
|
||||
async create(@Body() createFindingDto: CreateFindingDto, @Workspace() workspaceId: string) {
|
||||
return this.findingsService.create(workspaceId, createFindingDto);
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/findings
|
||||
* Get paginated findings with optional filters
|
||||
* Requires: Any workspace member
|
||||
*/
|
||||
@Get()
|
||||
@RequirePermission(Permission.WORKSPACE_ANY)
|
||||
async findAll(
|
||||
@Query() query: QueryFindingsDto,
|
||||
@Workspace() workspaceId: string
|
||||
): Promise<PaginatedFindingsResponse> {
|
||||
return this.findingsService.findAll(workspaceId, query);
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/findings/:id
|
||||
* Get a single finding by ID
|
||||
* Requires: Any workspace member
|
||||
*/
|
||||
@Get(":id")
|
||||
@RequirePermission(Permission.WORKSPACE_ANY)
|
||||
async findOne(@Param("id") id: string, @Workspace() workspaceId: string) {
|
||||
return this.findingsService.findOne(id, workspaceId);
|
||||
}
|
||||
|
||||
/**
|
||||
* POST /api/findings/search
|
||||
* Semantic search findings by vector similarity
|
||||
* Requires: Any workspace member
|
||||
*/
|
||||
@Post("search")
|
||||
@RequirePermission(Permission.WORKSPACE_ANY)
|
||||
async search(
|
||||
@Body() searchDto: SearchFindingsDto,
|
||||
@Workspace() workspaceId: string
|
||||
): Promise<FindingsSearchResponse> {
|
||||
return this.findingsService.search(workspaceId, searchDto);
|
||||
}
|
||||
|
||||
/**
|
||||
* DELETE /api/findings/:id
|
||||
* Delete a finding
|
||||
* Requires: ADMIN role or higher
|
||||
*/
|
||||
@Delete(":id")
|
||||
@RequirePermission(Permission.WORKSPACE_ADMIN)
|
||||
async remove(@Param("id") id: string, @Workspace() workspaceId: string) {
|
||||
return this.findingsService.remove(id, workspaceId);
|
||||
}
|
||||
}
|
||||
226
apps/api/src/findings/findings.integration.spec.ts
Normal file
226
apps/api/src/findings/findings.integration.spec.ts
Normal file
@@ -0,0 +1,226 @@
|
||||
import { beforeAll, beforeEach, describe, expect, it, afterAll, vi } from "vitest";
|
||||
import { randomUUID as uuid } from "crypto";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { BadRequestException, NotFoundException } from "@nestjs/common";
|
||||
import { PrismaClient, Prisma } from "@prisma/client";
|
||||
import { FindingsService } from "./findings.service";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { EmbeddingService } from "../knowledge/services/embedding.service";
|
||||
|
||||
const shouldRunDbIntegrationTests =
|
||||
process.env.RUN_DB_TESTS === "true" && Boolean(process.env.DATABASE_URL);
|
||||
const describeFn = shouldRunDbIntegrationTests ? describe : describe.skip;
|
||||
|
||||
const EMBEDDING_DIMENSION = 1536;
|
||||
|
||||
function vector(value: number): number[] {
|
||||
return Array.from({ length: EMBEDDING_DIMENSION }, () => value);
|
||||
}
|
||||
|
||||
function toVectorLiteral(input: number[]): string {
|
||||
return `[${input.join(",")}]`;
|
||||
}
|
||||
|
||||
describeFn("FindingsService Integration", () => {
|
||||
let moduleRef: TestingModule;
|
||||
let prisma: PrismaClient;
|
||||
let service: FindingsService;
|
||||
let workspaceId: string;
|
||||
let ownerId: string;
|
||||
let setupComplete = false;
|
||||
|
||||
const embeddingServiceMock = {
|
||||
isConfigured: vi.fn(),
|
||||
generateEmbedding: vi.fn(),
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
prisma = new PrismaClient();
|
||||
await prisma.$connect();
|
||||
|
||||
const workspace = await prisma.workspace.create({
|
||||
data: {
|
||||
name: `Findings Integration ${Date.now()}`,
|
||||
owner: {
|
||||
create: {
|
||||
email: `findings-integration-${Date.now()}@example.com`,
|
||||
name: "Findings Integration Owner",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
workspaceId = workspace.id;
|
||||
ownerId = workspace.ownerId;
|
||||
|
||||
moduleRef = await Test.createTestingModule({
|
||||
providers: [
|
||||
FindingsService,
|
||||
{
|
||||
provide: PrismaService,
|
||||
useValue: prisma,
|
||||
},
|
||||
{
|
||||
provide: EmbeddingService,
|
||||
useValue: embeddingServiceMock,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = moduleRef.get<FindingsService>(FindingsService);
|
||||
setupComplete = true;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
embeddingServiceMock.isConfigured.mockReturnValue(false);
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (!prisma) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (workspaceId) {
|
||||
await prisma.finding.deleteMany({ where: { workspaceId } });
|
||||
await prisma.workspace.deleteMany({ where: { id: workspaceId } });
|
||||
}
|
||||
if (ownerId) {
|
||||
await prisma.user.deleteMany({ where: { id: ownerId } });
|
||||
}
|
||||
|
||||
if (moduleRef) {
|
||||
await moduleRef.close();
|
||||
}
|
||||
await prisma.$disconnect();
|
||||
});
|
||||
|
||||
it("creates, lists, fetches, and deletes findings", async () => {
|
||||
if (!setupComplete) {
|
||||
return;
|
||||
}
|
||||
|
||||
const created = await service.create(workspaceId, {
|
||||
agentId: "agent-findings-crud",
|
||||
type: "security",
|
||||
title: "Unescaped SQL fragment",
|
||||
data: { severity: "high" },
|
||||
summary: "Potential injection risk in dynamic query path.",
|
||||
});
|
||||
|
||||
expect(created.id).toBeDefined();
|
||||
expect(created.workspaceId).toBe(workspaceId);
|
||||
expect(created.taskId).toBeNull();
|
||||
|
||||
const listed = await service.findAll(workspaceId, {
|
||||
page: 1,
|
||||
limit: 10,
|
||||
agentId: "agent-findings-crud",
|
||||
});
|
||||
|
||||
expect(listed.meta.total).toBeGreaterThanOrEqual(1);
|
||||
expect(listed.data.some((row) => row.id === created.id)).toBe(true);
|
||||
|
||||
const found = await service.findOne(created.id, workspaceId);
|
||||
expect(found.id).toBe(created.id);
|
||||
expect(found.title).toBe("Unescaped SQL fragment");
|
||||
|
||||
await expect(service.findOne(created.id, uuid())).rejects.toThrow(NotFoundException);
|
||||
|
||||
await expect(service.remove(created.id, workspaceId)).resolves.toEqual({
|
||||
message: "Finding deleted successfully",
|
||||
});
|
||||
|
||||
await expect(service.findOne(created.id, workspaceId)).rejects.toThrow(NotFoundException);
|
||||
});
|
||||
|
||||
it("rejects create when taskId does not exist in workspace", async () => {
|
||||
if (!setupComplete) {
|
||||
return;
|
||||
}
|
||||
|
||||
await expect(
|
||||
service.create(workspaceId, {
|
||||
taskId: uuid(),
|
||||
agentId: "agent-findings-missing-task",
|
||||
type: "bug",
|
||||
title: "Invalid task id",
|
||||
data: { source: "integration-test" },
|
||||
summary: "Should fail when task relation is not found.",
|
||||
})
|
||||
).rejects.toThrow(NotFoundException);
|
||||
});
|
||||
|
||||
it("rejects vector search when embeddings are disabled", async () => {
|
||||
if (!setupComplete) {
|
||||
return;
|
||||
}
|
||||
|
||||
embeddingServiceMock.isConfigured.mockReturnValue(false);
|
||||
|
||||
await expect(
|
||||
service.search(workspaceId, {
|
||||
query: "security issue",
|
||||
})
|
||||
).rejects.toThrow(BadRequestException);
|
||||
});
|
||||
|
||||
it("searches findings by vector similarity with filters", async () => {
|
||||
if (!setupComplete) {
|
||||
return;
|
||||
}
|
||||
|
||||
const near = vector(0.01);
|
||||
const far = vector(0.9);
|
||||
|
||||
const matchedFinding = await prisma.finding.create({
|
||||
data: {
|
||||
workspaceId,
|
||||
agentId: "agent-findings-search-a",
|
||||
type: "incident",
|
||||
title: "Authentication bypass",
|
||||
data: { score: 0.9 } as Prisma.InputJsonValue,
|
||||
summary: "Bypass risk found in login checks.",
|
||||
},
|
||||
});
|
||||
|
||||
const otherFinding = await prisma.finding.create({
|
||||
data: {
|
||||
workspaceId,
|
||||
agentId: "agent-findings-search-b",
|
||||
type: "incident",
|
||||
title: "Retry timeout",
|
||||
data: { score: 0.2 } as Prisma.InputJsonValue,
|
||||
summary: "Timeout issue in downstream retries.",
|
||||
},
|
||||
});
|
||||
|
||||
await prisma.$executeRaw`
|
||||
UPDATE findings
|
||||
SET embedding = ${toVectorLiteral(near)}::vector(1536)
|
||||
WHERE id = ${matchedFinding.id}::uuid
|
||||
`;
|
||||
await prisma.$executeRaw`
|
||||
UPDATE findings
|
||||
SET embedding = ${toVectorLiteral(far)}::vector(1536)
|
||||
WHERE id = ${otherFinding.id}::uuid
|
||||
`;
|
||||
|
||||
embeddingServiceMock.isConfigured.mockReturnValue(true);
|
||||
embeddingServiceMock.generateEmbedding.mockResolvedValue(near);
|
||||
|
||||
const result = await service.search(workspaceId, {
|
||||
query: "authentication bypass risk",
|
||||
agentId: "agent-findings-search-a",
|
||||
limit: 10,
|
||||
similarityThreshold: 0,
|
||||
});
|
||||
|
||||
expect(result.query).toBe("authentication bypass risk");
|
||||
expect(result.meta.total).toBe(1);
|
||||
expect(result.data).toHaveLength(1);
|
||||
expect(result.data[0]?.id).toBe(matchedFinding.id);
|
||||
expect(result.data[0]?.agentId).toBe("agent-findings-search-a");
|
||||
expect(result.data.find((row) => row.id === otherFinding.id)).toBeUndefined();
|
||||
});
|
||||
});
|
||||
14
apps/api/src/findings/findings.module.ts
Normal file
14
apps/api/src/findings/findings.module.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { PrismaModule } from "../prisma/prisma.module";
|
||||
import { AuthModule } from "../auth/auth.module";
|
||||
import { KnowledgeModule } from "../knowledge/knowledge.module";
|
||||
import { FindingsController } from "./findings.controller";
|
||||
import { FindingsService } from "./findings.service";
|
||||
|
||||
@Module({
|
||||
imports: [PrismaModule, AuthModule, KnowledgeModule],
|
||||
controllers: [FindingsController],
|
||||
providers: [FindingsService],
|
||||
exports: [FindingsService],
|
||||
})
|
||||
export class FindingsModule {}
|
||||
300
apps/api/src/findings/findings.service.spec.ts
Normal file
300
apps/api/src/findings/findings.service.spec.ts
Normal file
@@ -0,0 +1,300 @@
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||
import { BadRequestException, NotFoundException } from "@nestjs/common";
|
||||
import { FindingsService } from "./findings.service";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { EmbeddingService } from "../knowledge/services/embedding.service";
|
||||
|
||||
describe("FindingsService", () => {
|
||||
let service: FindingsService;
|
||||
let prisma: PrismaService;
|
||||
let embeddingService: EmbeddingService;
|
||||
|
||||
const mockWorkspaceId = "550e8400-e29b-41d4-a716-446655440001";
|
||||
const mockFindingId = "550e8400-e29b-41d4-a716-446655440002";
|
||||
const mockTaskId = "550e8400-e29b-41d4-a716-446655440003";
|
||||
|
||||
const mockPrismaService = {
|
||||
finding: {
|
||||
create: vi.fn(),
|
||||
findMany: vi.fn(),
|
||||
findUnique: vi.fn(),
|
||||
count: vi.fn(),
|
||||
delete: vi.fn(),
|
||||
},
|
||||
agentTask: {
|
||||
findUnique: vi.fn(),
|
||||
},
|
||||
$queryRaw: vi.fn(),
|
||||
$executeRaw: vi.fn(),
|
||||
};
|
||||
|
||||
const mockEmbeddingService = {
|
||||
isConfigured: vi.fn(),
|
||||
generateEmbedding: vi.fn(),
|
||||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
const module: TestingModule = await Test.createTestingModule({
|
||||
providers: [
|
||||
FindingsService,
|
||||
{
|
||||
provide: PrismaService,
|
||||
useValue: mockPrismaService,
|
||||
},
|
||||
{
|
||||
provide: EmbeddingService,
|
||||
useValue: mockEmbeddingService,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = module.get<FindingsService>(FindingsService);
|
||||
prisma = module.get<PrismaService>(PrismaService);
|
||||
embeddingService = module.get<EmbeddingService>(EmbeddingService);
|
||||
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it("should be defined", () => {
|
||||
expect(service).toBeDefined();
|
||||
});
|
||||
|
||||
describe("create", () => {
|
||||
it("should create a finding and store embedding when configured", async () => {
|
||||
const createDto = {
|
||||
taskId: mockTaskId,
|
||||
agentId: "research-agent",
|
||||
type: "security",
|
||||
title: "SQL injection risk",
|
||||
data: { severity: "high" },
|
||||
summary: "Potential SQL injection in search endpoint.",
|
||||
};
|
||||
|
||||
const createdFinding = {
|
||||
id: mockFindingId,
|
||||
workspaceId: mockWorkspaceId,
|
||||
...createDto,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
mockPrismaService.agentTask.findUnique.mockResolvedValue({
|
||||
id: mockTaskId,
|
||||
workspaceId: mockWorkspaceId,
|
||||
});
|
||||
mockPrismaService.finding.create.mockResolvedValue(createdFinding);
|
||||
mockPrismaService.finding.findUnique.mockResolvedValue(createdFinding);
|
||||
mockEmbeddingService.isConfigured.mockReturnValue(true);
|
||||
mockEmbeddingService.generateEmbedding.mockResolvedValue([0.1, 0.2, 0.3]);
|
||||
mockPrismaService.$executeRaw.mockResolvedValue(1);
|
||||
|
||||
const result = await service.create(mockWorkspaceId, createDto);
|
||||
|
||||
expect(result).toEqual(createdFinding);
|
||||
expect(prisma.finding.create).toHaveBeenCalledWith({
|
||||
data: expect.objectContaining({
|
||||
workspaceId: mockWorkspaceId,
|
||||
taskId: mockTaskId,
|
||||
agentId: "research-agent",
|
||||
type: "security",
|
||||
title: "SQL injection risk",
|
||||
}),
|
||||
select: expect.any(Object),
|
||||
});
|
||||
expect(embeddingService.generateEmbedding).toHaveBeenCalledWith(createDto.summary);
|
||||
expect(prisma.$executeRaw).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should create a finding without embedding when not configured", async () => {
|
||||
const createDto = {
|
||||
agentId: "research-agent",
|
||||
type: "security",
|
||||
title: "SQL injection risk",
|
||||
data: { severity: "high" },
|
||||
summary: "Potential SQL injection in search endpoint.",
|
||||
};
|
||||
|
||||
const createdFinding = {
|
||||
id: mockFindingId,
|
||||
workspaceId: mockWorkspaceId,
|
||||
taskId: null,
|
||||
...createDto,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
mockPrismaService.finding.create.mockResolvedValue(createdFinding);
|
||||
mockEmbeddingService.isConfigured.mockReturnValue(false);
|
||||
|
||||
const result = await service.create(mockWorkspaceId, createDto);
|
||||
|
||||
expect(result).toEqual(createdFinding);
|
||||
expect(embeddingService.generateEmbedding).not.toHaveBeenCalled();
|
||||
expect(prisma.$executeRaw).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe("findAll", () => {
|
||||
it("should return paginated findings with filters", async () => {
|
||||
const findings = [
|
||||
{
|
||||
id: mockFindingId,
|
||||
workspaceId: mockWorkspaceId,
|
||||
taskId: null,
|
||||
agentId: "research-agent",
|
||||
type: "security",
|
||||
title: "SQL injection risk",
|
||||
data: { severity: "high" },
|
||||
summary: "Potential SQL injection in search endpoint.",
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
];
|
||||
|
||||
mockPrismaService.finding.findMany.mockResolvedValue(findings);
|
||||
mockPrismaService.finding.count.mockResolvedValue(1);
|
||||
|
||||
const result = await service.findAll(mockWorkspaceId, {
|
||||
page: 1,
|
||||
limit: 10,
|
||||
type: "security",
|
||||
agentId: "research-agent",
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
data: findings,
|
||||
meta: {
|
||||
total: 1,
|
||||
page: 1,
|
||||
limit: 10,
|
||||
totalPages: 1,
|
||||
},
|
||||
});
|
||||
expect(prisma.finding.findMany).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
where: {
|
||||
workspaceId: mockWorkspaceId,
|
||||
type: "security",
|
||||
agentId: "research-agent",
|
||||
},
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("findOne", () => {
|
||||
it("should return a finding", async () => {
|
||||
const finding = {
|
||||
id: mockFindingId,
|
||||
workspaceId: mockWorkspaceId,
|
||||
taskId: null,
|
||||
agentId: "research-agent",
|
||||
type: "security",
|
||||
title: "SQL injection risk",
|
||||
data: { severity: "high" },
|
||||
summary: "Potential SQL injection in search endpoint.",
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
mockPrismaService.finding.findUnique.mockResolvedValue(finding);
|
||||
|
||||
const result = await service.findOne(mockFindingId, mockWorkspaceId);
|
||||
|
||||
expect(result).toEqual(finding);
|
||||
expect(prisma.finding.findUnique).toHaveBeenCalledWith({
|
||||
where: {
|
||||
id: mockFindingId,
|
||||
workspaceId: mockWorkspaceId,
|
||||
},
|
||||
select: expect.any(Object),
|
||||
});
|
||||
});
|
||||
|
||||
it("should throw when finding does not exist", async () => {
|
||||
mockPrismaService.finding.findUnique.mockResolvedValue(null);
|
||||
|
||||
await expect(service.findOne(mockFindingId, mockWorkspaceId)).rejects.toThrow(
|
||||
NotFoundException
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("search", () => {
|
||||
it("should throw BadRequestException when embeddings are not configured", async () => {
|
||||
mockEmbeddingService.isConfigured.mockReturnValue(false);
|
||||
|
||||
await expect(
|
||||
service.search(mockWorkspaceId, {
|
||||
query: "sql injection",
|
||||
})
|
||||
).rejects.toThrow(BadRequestException);
|
||||
});
|
||||
|
||||
it("should return similarity-ranked search results", async () => {
|
||||
mockEmbeddingService.isConfigured.mockReturnValue(true);
|
||||
mockEmbeddingService.generateEmbedding.mockResolvedValue([0.1, 0.2, 0.3]);
|
||||
mockPrismaService.$queryRaw
|
||||
.mockResolvedValueOnce([
|
||||
{
|
||||
id: mockFindingId,
|
||||
workspace_id: mockWorkspaceId,
|
||||
task_id: null,
|
||||
agent_id: "research-agent",
|
||||
type: "security",
|
||||
title: "SQL injection risk",
|
||||
data: { severity: "high" },
|
||||
summary: "Potential SQL injection in search endpoint.",
|
||||
created_at: new Date(),
|
||||
updated_at: new Date(),
|
||||
score: 0.91,
|
||||
},
|
||||
])
|
||||
.mockResolvedValueOnce([{ count: BigInt(1) }]);
|
||||
|
||||
const result = await service.search(mockWorkspaceId, {
|
||||
query: "sql injection",
|
||||
page: 1,
|
||||
limit: 5,
|
||||
similarityThreshold: 0.5,
|
||||
});
|
||||
|
||||
expect(result.query).toBe("sql injection");
|
||||
expect(result.data).toHaveLength(1);
|
||||
expect(result.data[0].score).toBe(0.91);
|
||||
expect(result.meta.total).toBe(1);
|
||||
expect(prisma.$queryRaw).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe("remove", () => {
|
||||
it("should delete a finding", async () => {
|
||||
mockPrismaService.finding.findUnique.mockResolvedValue({
|
||||
id: mockFindingId,
|
||||
workspaceId: mockWorkspaceId,
|
||||
});
|
||||
mockPrismaService.finding.delete.mockResolvedValue({
|
||||
id: mockFindingId,
|
||||
});
|
||||
|
||||
const result = await service.remove(mockFindingId, mockWorkspaceId);
|
||||
|
||||
expect(result).toEqual({ message: "Finding deleted successfully" });
|
||||
expect(prisma.finding.delete).toHaveBeenCalledWith({
|
||||
where: {
|
||||
id: mockFindingId,
|
||||
workspaceId: mockWorkspaceId,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("should throw when finding does not exist", async () => {
|
||||
mockPrismaService.finding.findUnique.mockResolvedValue(null);
|
||||
|
||||
await expect(service.remove(mockFindingId, mockWorkspaceId)).rejects.toThrow(
|
||||
NotFoundException
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
337
apps/api/src/findings/findings.service.ts
Normal file
337
apps/api/src/findings/findings.service.ts
Normal file
@@ -0,0 +1,337 @@
|
||||
import { BadRequestException, Injectable, Logger, NotFoundException } from "@nestjs/common";
|
||||
import { Prisma } from "@prisma/client";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { EmbeddingService } from "../knowledge/services/embedding.service";
|
||||
import type { CreateFindingDto, QueryFindingsDto, SearchFindingsDto } from "./dto";
|
||||
|
||||
const findingSelect = {
|
||||
id: true,
|
||||
workspaceId: true,
|
||||
taskId: true,
|
||||
agentId: true,
|
||||
type: true,
|
||||
title: true,
|
||||
data: true,
|
||||
summary: true,
|
||||
createdAt: true,
|
||||
updatedAt: true,
|
||||
} satisfies Prisma.FindingSelect;
|
||||
|
||||
type FindingRecord = Prisma.FindingGetPayload<{ select: typeof findingSelect }>;
|
||||
|
||||
interface RawFindingSearchResult {
|
||||
id: string;
|
||||
workspace_id: string;
|
||||
task_id: string | null;
|
||||
agent_id: string;
|
||||
type: string;
|
||||
title: string;
|
||||
data: Prisma.JsonValue;
|
||||
summary: string;
|
||||
created_at: Date;
|
||||
updated_at: Date;
|
||||
score: number;
|
||||
}
|
||||
|
||||
export interface FindingSearchResult extends FindingRecord {
|
||||
score: number;
|
||||
}
|
||||
|
||||
interface PaginatedMeta {
|
||||
total: number;
|
||||
page: number;
|
||||
limit: number;
|
||||
totalPages: number;
|
||||
}
|
||||
|
||||
export interface PaginatedFindingsResponse {
|
||||
data: FindingRecord[];
|
||||
meta: PaginatedMeta;
|
||||
}
|
||||
|
||||
export interface FindingsSearchResponse {
|
||||
data: FindingSearchResult[];
|
||||
meta: PaginatedMeta;
|
||||
query: string;
|
||||
similarityThreshold: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Service for managing structured findings with vector search support
|
||||
*/
|
||||
@Injectable()
|
||||
export class FindingsService {
|
||||
private readonly logger = new Logger(FindingsService.name);
|
||||
private readonly defaultSimilarityThreshold: number;
|
||||
|
||||
constructor(
|
||||
private readonly prisma: PrismaService,
|
||||
private readonly embeddingService: EmbeddingService
|
||||
) {
|
||||
const parsedThreshold = Number.parseFloat(process.env.FINDINGS_SIMILARITY_THRESHOLD ?? "0.5");
|
||||
|
||||
this.defaultSimilarityThreshold =
|
||||
Number.isFinite(parsedThreshold) && parsedThreshold >= 0 && parsedThreshold <= 1
|
||||
? parsedThreshold
|
||||
: 0.5;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a finding and generate its embedding from the summary when available
|
||||
*/
|
||||
async create(workspaceId: string, createFindingDto: CreateFindingDto): Promise<FindingRecord> {
|
||||
if (createFindingDto.taskId) {
|
||||
const task = await this.prisma.agentTask.findUnique({
|
||||
where: {
|
||||
id: createFindingDto.taskId,
|
||||
workspaceId,
|
||||
},
|
||||
select: { id: true },
|
||||
});
|
||||
|
||||
if (!task) {
|
||||
throw new NotFoundException(`Agent task with ID ${createFindingDto.taskId} not found`);
|
||||
}
|
||||
}
|
||||
|
||||
const createInput: Prisma.FindingUncheckedCreateInput = {
|
||||
workspaceId,
|
||||
agentId: createFindingDto.agentId,
|
||||
type: createFindingDto.type,
|
||||
title: createFindingDto.title,
|
||||
data: createFindingDto.data as Prisma.InputJsonValue,
|
||||
summary: createFindingDto.summary,
|
||||
};
|
||||
|
||||
if (createFindingDto.taskId) {
|
||||
createInput.taskId = createFindingDto.taskId;
|
||||
}
|
||||
|
||||
const finding = await this.prisma.finding.create({
|
||||
data: createInput,
|
||||
select: findingSelect,
|
||||
});
|
||||
|
||||
await this.generateAndStoreEmbedding(finding.id, workspaceId, finding.summary);
|
||||
|
||||
if (this.embeddingService.isConfigured()) {
|
||||
return this.findOne(finding.id, workspaceId);
|
||||
}
|
||||
|
||||
return finding;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get paginated findings with optional filters
|
||||
*/
|
||||
async findAll(workspaceId: string, query: QueryFindingsDto): Promise<PaginatedFindingsResponse> {
|
||||
const page = query.page ?? 1;
|
||||
const limit = query.limit ?? 50;
|
||||
const skip = (page - 1) * limit;
|
||||
|
||||
const where: Prisma.FindingWhereInput = {
|
||||
workspaceId,
|
||||
};
|
||||
|
||||
if (query.agentId) {
|
||||
where.agentId = query.agentId;
|
||||
}
|
||||
|
||||
if (query.type) {
|
||||
where.type = query.type;
|
||||
}
|
||||
|
||||
if (query.taskId) {
|
||||
where.taskId = query.taskId;
|
||||
}
|
||||
|
||||
const [data, total] = await Promise.all([
|
||||
this.prisma.finding.findMany({
|
||||
where,
|
||||
select: findingSelect,
|
||||
orderBy: {
|
||||
createdAt: "desc",
|
||||
},
|
||||
skip,
|
||||
take: limit,
|
||||
}),
|
||||
this.prisma.finding.count({ where }),
|
||||
]);
|
||||
|
||||
return {
|
||||
data,
|
||||
meta: {
|
||||
total,
|
||||
page,
|
||||
limit,
|
||||
totalPages: Math.ceil(total / limit),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single finding by ID
|
||||
*/
|
||||
async findOne(id: string, workspaceId: string): Promise<FindingRecord> {
|
||||
const finding = await this.prisma.finding.findUnique({
|
||||
where: {
|
||||
id,
|
||||
workspaceId,
|
||||
},
|
||||
select: findingSelect,
|
||||
});
|
||||
|
||||
if (!finding) {
|
||||
throw new NotFoundException(`Finding with ID ${id} not found`);
|
||||
}
|
||||
|
||||
return finding;
|
||||
}
|
||||
|
||||
/**
|
||||
* Semantic search findings using vector similarity
|
||||
*/
|
||||
async search(workspaceId: string, searchDto: SearchFindingsDto): Promise<FindingsSearchResponse> {
|
||||
if (!this.embeddingService.isConfigured()) {
|
||||
throw new BadRequestException(
|
||||
"Finding vector search requires OPENAI_API_KEY to be configured"
|
||||
);
|
||||
}
|
||||
|
||||
const page = searchDto.page ?? 1;
|
||||
const limit = searchDto.limit ?? 20;
|
||||
const offset = (page - 1) * limit;
|
||||
const similarityThreshold = searchDto.similarityThreshold ?? this.defaultSimilarityThreshold;
|
||||
const distanceThreshold = 1 - similarityThreshold;
|
||||
|
||||
const queryEmbedding = await this.embeddingService.generateEmbedding(searchDto.query);
|
||||
const embeddingString = `[${queryEmbedding.join(",")}]`;
|
||||
|
||||
const agentFilter = searchDto.agentId
|
||||
? Prisma.sql`AND f.agent_id = ${searchDto.agentId}`
|
||||
: Prisma.sql``;
|
||||
const typeFilter = searchDto.type ? Prisma.sql`AND f.type = ${searchDto.type}` : Prisma.sql``;
|
||||
const taskFilter = searchDto.taskId
|
||||
? Prisma.sql`AND f.task_id = ${searchDto.taskId}::uuid`
|
||||
: Prisma.sql``;
|
||||
|
||||
const searchResults = await this.prisma.$queryRaw<RawFindingSearchResult[]>`
|
||||
SELECT
|
||||
f.id,
|
||||
f.workspace_id,
|
||||
f.task_id,
|
||||
f.agent_id,
|
||||
f.type,
|
||||
f.title,
|
||||
f.data,
|
||||
f.summary,
|
||||
f.created_at,
|
||||
f.updated_at,
|
||||
(1 - (f.embedding <=> ${embeddingString}::vector)) AS score
|
||||
FROM findings f
|
||||
WHERE f.workspace_id = ${workspaceId}::uuid
|
||||
AND f.embedding IS NOT NULL
|
||||
${agentFilter}
|
||||
${typeFilter}
|
||||
${taskFilter}
|
||||
AND (f.embedding <=> ${embeddingString}::vector) <= ${distanceThreshold}
|
||||
ORDER BY f.embedding <=> ${embeddingString}::vector
|
||||
LIMIT ${limit}
|
||||
OFFSET ${offset}
|
||||
`;
|
||||
|
||||
const countResult = await this.prisma.$queryRaw<[{ count: bigint }]>`
|
||||
SELECT COUNT(*) as count
|
||||
FROM findings f
|
||||
WHERE f.workspace_id = ${workspaceId}::uuid
|
||||
AND f.embedding IS NOT NULL
|
||||
${agentFilter}
|
||||
${typeFilter}
|
||||
${taskFilter}
|
||||
AND (f.embedding <=> ${embeddingString}::vector) <= ${distanceThreshold}
|
||||
`;
|
||||
|
||||
const total = Number(countResult[0].count);
|
||||
|
||||
const data: FindingSearchResult[] = searchResults.map((row) => ({
|
||||
id: row.id,
|
||||
workspaceId: row.workspace_id,
|
||||
taskId: row.task_id,
|
||||
agentId: row.agent_id,
|
||||
type: row.type,
|
||||
title: row.title,
|
||||
data: row.data,
|
||||
summary: row.summary,
|
||||
createdAt: row.created_at,
|
||||
updatedAt: row.updated_at,
|
||||
score: row.score,
|
||||
}));
|
||||
|
||||
return {
|
||||
data,
|
||||
meta: {
|
||||
total,
|
||||
page,
|
||||
limit,
|
||||
totalPages: Math.ceil(total / limit),
|
||||
},
|
||||
query: searchDto.query,
|
||||
similarityThreshold,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a finding
|
||||
*/
|
||||
async remove(id: string, workspaceId: string): Promise<{ message: string }> {
|
||||
const existingFinding = await this.prisma.finding.findUnique({
|
||||
where: {
|
||||
id,
|
||||
workspaceId,
|
||||
},
|
||||
select: { id: true },
|
||||
});
|
||||
|
||||
if (!existingFinding) {
|
||||
throw new NotFoundException(`Finding with ID ${id} not found`);
|
||||
}
|
||||
|
||||
await this.prisma.finding.delete({
|
||||
where: {
|
||||
id,
|
||||
workspaceId,
|
||||
},
|
||||
});
|
||||
|
||||
return { message: "Finding deleted successfully" };
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate and persist embedding for a finding summary
|
||||
*/
|
||||
private async generateAndStoreEmbedding(
|
||||
findingId: string,
|
||||
workspaceId: string,
|
||||
summary: string
|
||||
): Promise<void> {
|
||||
if (!this.embeddingService.isConfigured()) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const embedding = await this.embeddingService.generateEmbedding(summary);
|
||||
const embeddingString = `[${embedding.join(",")}]`;
|
||||
|
||||
await this.prisma.$executeRaw`
|
||||
UPDATE findings
|
||||
SET embedding = ${embeddingString}::vector,
|
||||
updated_at = NOW()
|
||||
WHERE id = ${findingId}::uuid
|
||||
AND workspace_id = ${workspaceId}::uuid
|
||||
`;
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
this.logger.warn(`Failed to generate embedding for finding ${findingId}: ${message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
115
apps/api/src/fleet-settings/fleet-settings.controller.ts
Normal file
115
apps/api/src/fleet-settings/fleet-settings.controller.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
import {
|
||||
Body,
|
||||
Controller,
|
||||
Delete,
|
||||
Get,
|
||||
HttpCode,
|
||||
HttpStatus,
|
||||
Param,
|
||||
Patch,
|
||||
Post,
|
||||
Put,
|
||||
UseGuards,
|
||||
} from "@nestjs/common";
|
||||
import type { AuthUser } from "@mosaic/shared";
|
||||
import { CurrentUser } from "../auth/decorators/current-user.decorator";
|
||||
import { AdminGuard } from "../auth/guards/admin.guard";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import {
|
||||
CreateProviderDto,
|
||||
ResetPasswordDto,
|
||||
UpdateAgentConfigDto,
|
||||
UpdateOidcDto,
|
||||
UpdateProviderDto,
|
||||
} from "./fleet-settings.dto";
|
||||
import { FleetSettingsService } from "./fleet-settings.service";
|
||||
|
||||
/**
 * HTTP API for fleet-wide settings.
 *
 * Exposes per-user LLM provider CRUD and agent configuration, plus
 * admin-only OIDC configuration and breakglass password reset. All routes
 * require AuthGuard; the OIDC and breakglass routes additionally stack
 * AdminGuard. Mutating routes return 204 No Content.
 */
@Controller("fleet-settings")
@UseGuards(AuthGuard)
export class FleetSettingsController {
  constructor(private readonly fleetSettingsService: FleetSettingsService) {}

  // --- Provider endpoints (user-scoped) ---
  // GET /api/fleet-settings/providers — list user's providers
  @Get("providers")
  async listProviders(@CurrentUser() user: AuthUser) {
    return this.fleetSettingsService.listProviders(user.id);
  }

  // GET /api/fleet-settings/providers/:id — get single provider
  @Get("providers/:id")
  async getProvider(@CurrentUser() user: AuthUser, @Param("id") id: string) {
    return this.fleetSettingsService.getProvider(user.id, id);
  }

  // POST /api/fleet-settings/providers — create provider
  @Post("providers")
  async createProvider(@CurrentUser() user: AuthUser, @Body() dto: CreateProviderDto) {
    return this.fleetSettingsService.createProvider(user.id, dto);
  }

  // PATCH /api/fleet-settings/providers/:id — update provider
  @Patch("providers/:id")
  @HttpCode(HttpStatus.NO_CONTENT)
  async updateProvider(
    @CurrentUser() user: AuthUser,
    @Param("id") id: string,
    @Body() dto: UpdateProviderDto
  ) {
    await this.fleetSettingsService.updateProvider(user.id, id, dto);
  }

  // DELETE /api/fleet-settings/providers/:id — delete provider
  @Delete("providers/:id")
  @HttpCode(HttpStatus.NO_CONTENT)
  async deleteProvider(@CurrentUser() user: AuthUser, @Param("id") id: string) {
    await this.fleetSettingsService.deleteProvider(user.id, id);
  }

  // --- Agent config endpoints (user-scoped) ---
  // GET /api/fleet-settings/agent-config — get user's agent config
  @Get("agent-config")
  async getAgentConfig(@CurrentUser() user: AuthUser) {
    return this.fleetSettingsService.getAgentConfig(user.id);
  }

  // PATCH /api/fleet-settings/agent-config — update user's agent config
  @Patch("agent-config")
  @HttpCode(HttpStatus.NO_CONTENT)
  async updateAgentConfig(@CurrentUser() user: AuthUser, @Body() dto: UpdateAgentConfigDto) {
    await this.fleetSettingsService.updateAgentConfig(user.id, dto);
  }

  // --- OIDC endpoints (admin only — use AdminGuard) ---
  // GET /api/fleet-settings/oidc — get OIDC config (never includes the client secret)
  @Get("oidc")
  @UseGuards(AdminGuard)
  async getOidcConfig() {
    return this.fleetSettingsService.getOidcConfig();
  }

  // PUT /api/fleet-settings/oidc — update OIDC config
  @Put("oidc")
  @UseGuards(AdminGuard)
  @HttpCode(HttpStatus.NO_CONTENT)
  async updateOidcConfig(@Body() dto: UpdateOidcDto) {
    await this.fleetSettingsService.updateOidcConfig(dto);
  }

  // DELETE /api/fleet-settings/oidc — remove OIDC config
  @Delete("oidc")
  @UseGuards(AdminGuard)
  @HttpCode(HttpStatus.NO_CONTENT)
  async deleteOidcConfig() {
    await this.fleetSettingsService.deleteOidcConfig();
  }

  // --- Breakglass endpoints (admin only) ---
  // POST /api/fleet-settings/breakglass/reset-password — reset admin password
  @Post("breakglass/reset-password")
  @UseGuards(AdminGuard)
  @HttpCode(HttpStatus.NO_CONTENT)
  async resetBreakglassPassword(@Body() dto: ResetPasswordDto) {
    await this.fleetSettingsService.resetBreakglassPassword(dto.username, dto.newPassword);
  }
}
|
||||
122
apps/api/src/fleet-settings/fleet-settings.dto.ts
Normal file
122
apps/api/src/fleet-settings/fleet-settings.dto.ts
Normal file
@@ -0,0 +1,122 @@
|
||||
import {
|
||||
ArrayNotEmpty,
|
||||
IsArray,
|
||||
IsBoolean,
|
||||
IsNotEmpty,
|
||||
IsObject,
|
||||
IsOptional,
|
||||
IsString,
|
||||
IsUrl,
|
||||
MaxLength,
|
||||
MinLength,
|
||||
} from "class-validator";
|
||||
|
||||
/**
 * Payload for creating an LLM provider (POST /fleet-settings/providers).
 * Validation messages are surfaced to the client by class-validator.
 */
export class CreateProviderDto {
  // Short machine-friendly identifier for the provider.
  @IsString({ message: "name must be a string" })
  @IsNotEmpty({ message: "name is required" })
  @MaxLength(100, { message: "name must not exceed 100 characters" })
  name!: string;

  // Human-readable label shown in the UI.
  @IsString({ message: "displayName must be a string" })
  @IsNotEmpty({ message: "displayName is required" })
  @MaxLength(255, { message: "displayName must not exceed 255 characters" })
  displayName!: string;

  // Provider kind (e.g. "openai", "zai" — free-form string, not an enum here).
  @IsString({ message: "type must be a string" })
  @IsNotEmpty({ message: "type is required" })
  @MaxLength(100, { message: "type must not exceed 100 characters" })
  type!: string;

  // Optional API base URL; require_tld:false allows internal hosts.
  @IsOptional()
  @IsUrl(
    { require_tld: false },
    { message: "baseUrl must be a valid URL (for example: https://api.example.com/v1)" }
  )
  baseUrl?: string;

  // Plaintext API key; encrypted by the service before persistence.
  @IsOptional()
  @IsString({ message: "apiKey must be a string" })
  apiKey?: string;

  // Wire protocol; the service defaults this when omitted.
  @IsOptional()
  @IsString({ message: "apiType must be a string" })
  @MaxLength(100, { message: "apiType must not exceed 100 characters" })
  apiType?: string;

  // Arbitrary model descriptor objects; stored as JSON.
  @IsOptional()
  @IsArray({ message: "models must be an array" })
  @IsObject({ each: true, message: "each model must be an object" })
  models?: Record<string, unknown>[];
}
|
||||
|
||||
/**
 * Partial-update payload for an LLM provider (PATCH /fleet-settings/providers/:id).
 * Every field is optional; only supplied fields are written.
 */
export class UpdateProviderDto {
  @IsOptional()
  @IsString({ message: "displayName must be a string" })
  @MaxLength(255, { message: "displayName must not exceed 255 characters" })
  displayName?: string;

  // require_tld:false allows internal hosts.
  @IsOptional()
  @IsUrl(
    { require_tld: false },
    { message: "baseUrl must be a valid URL (for example: https://api.example.com/v1)" }
  )
  baseUrl?: string;

  // New plaintext key (encrypted by the service); an empty string clears it.
  @IsOptional()
  @IsString({ message: "apiKey must be a string" })
  apiKey?: string;

  @IsOptional()
  @IsBoolean({ message: "isActive must be a boolean" })
  isActive?: boolean;

  // Replaces the stored model descriptor list wholesale.
  @IsOptional()
  @IsArray({ message: "models must be an array" })
  @IsObject({ each: true, message: "each model must be an object" })
  models?: Record<string, unknown>[];
}
|
||||
|
||||
/**
 * Partial-update payload for a user's agent configuration
 * (PATCH /fleet-settings/agent-config).
 */
export class UpdateAgentConfigDto {
  @IsOptional()
  @IsString({ message: "primaryModel must be a string" })
  @MaxLength(255, { message: "primaryModel must not exceed 255 characters" })
  primaryModel?: string;

  // When provided, must be a non-empty list of model identifier strings.
  @IsOptional()
  @IsArray({ message: "fallbackModels must be an array" })
  @ArrayNotEmpty({ message: "fallbackModels cannot be empty" })
  @IsString({ each: true, message: "each fallback model must be a string" })
  fallbackModels?: string[];

  // Free-form system-prompt/personality text.
  @IsOptional()
  @IsString({ message: "personality must be a string" })
  personality?: string;
}
|
||||
|
||||
/**
 * Full OIDC configuration payload (PUT /fleet-settings/oidc, admin only).
 * All three fields are required; the clientSecret is encrypted before storage.
 */
export class UpdateOidcDto {
  // require_tld:false allows internal issuer hosts.
  @IsString({ message: "issuerUrl must be a string" })
  @IsNotEmpty({ message: "issuerUrl is required" })
  @IsUrl(
    { require_tld: false },
    { message: "issuerUrl must be a valid URL (for example: https://issuer.example.com)" }
  )
  issuerUrl!: string;

  @IsString({ message: "clientId must be a string" })
  @IsNotEmpty({ message: "clientId is required" })
  clientId!: string;

  // Plaintext secret; never returned by GET /fleet-settings/oidc.
  @IsString({ message: "clientSecret must be a string" })
  @IsNotEmpty({ message: "clientSecret is required" })
  clientSecret!: string;
}
|
||||
|
||||
/**
 * Breakglass password-reset payload
 * (POST /fleet-settings/breakglass/reset-password, admin only).
 */
export class ResetPasswordDto {
  @IsString({ message: "username must be a string" })
  @IsNotEmpty({ message: "username is required" })
  username!: string;

  // Minimum length enforced here; hashing happens in the service.
  @IsString({ message: "newPassword must be a string" })
  @MinLength(8, { message: "newPassword must be at least 8 characters" })
  newPassword!: string;
}
|
||||
14
apps/api/src/fleet-settings/fleet-settings.module.ts
Normal file
14
apps/api/src/fleet-settings/fleet-settings.module.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { AuthModule } from "../auth/auth.module";
|
||||
import { PrismaModule } from "../prisma/prisma.module";
|
||||
import { CryptoModule } from "../crypto/crypto.module";
|
||||
import { FleetSettingsController } from "./fleet-settings.controller";
|
||||
import { FleetSettingsService } from "./fleet-settings.service";
|
||||
|
||||
/**
 * Wires the fleet-settings feature: controller + service, with Auth (guards),
 * Prisma (persistence), and Crypto (secret encryption) as dependencies.
 * Exports FleetSettingsService for use by other modules.
 */
@Module({
  imports: [AuthModule, PrismaModule, CryptoModule],
  controllers: [FleetSettingsController],
  providers: [FleetSettingsService],
  exports: [FleetSettingsService],
})
export class FleetSettingsModule {}
|
||||
200
apps/api/src/fleet-settings/fleet-settings.service.spec.ts
Normal file
200
apps/api/src/fleet-settings/fleet-settings.service.spec.ts
Normal file
@@ -0,0 +1,200 @@
|
||||
import { NotFoundException } from "@nestjs/common";
|
||||
import { compare } from "bcryptjs";
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { FleetSettingsService } from "./fleet-settings.service";
|
||||
import type { PrismaService } from "../prisma/prisma.service";
|
||||
import type { CryptoService } from "../crypto/crypto.service";
|
||||
|
||||
// Unit tests for FleetSettingsService with fully mocked Prisma and Crypto
// dependencies (no database; vitest mocks only).
describe("FleetSettingsService", () => {
  let service: FleetSettingsService;

  // Mocks only the Prisma delegates/methods the service actually touches.
  const mockPrisma = {
    llmProvider: {
      findMany: vi.fn(),
      findFirst: vi.fn(),
      findUnique: vi.fn(),
      create: vi.fn(),
      update: vi.fn(),
      delete: vi.fn(),
    },
    userAgentConfig: {
      findUnique: vi.fn(),
      upsert: vi.fn(),
    },
    systemConfig: {
      findMany: vi.fn(),
      upsert: vi.fn(),
      deleteMany: vi.fn(),
    },
    breakglassUser: {
      findUnique: vi.fn(),
      update: vi.fn(),
    },
  };

  // Deterministic "encryption" so encrypted values are assertable as enc:<plain>.
  const mockCrypto = {
    encrypt: vi.fn((value: string) => `enc:${value}`),
  };

  beforeEach(() => {
    vi.clearAllMocks();
    service = new FleetSettingsService(
      mockPrisma as unknown as PrismaService,
      mockCrypto as unknown as CryptoService
    );
  });

  it("listProviders returns only providers for the given userId", async () => {
    mockPrisma.llmProvider.findMany.mockResolvedValue([
      {
        id: "prov-1",
        name: "openai-main",
        displayName: "OpenAI",
        type: "openai",
        baseUrl: "https://api.openai.com/v1",
        isActive: true,
        models: [{ id: "gpt-4.1" }],
      },
    ]);

    const result = await service.listProviders("user-1");

    // The query must be scoped to the user and select only safe fields
    // (notably: no apiKey).
    expect(mockPrisma.llmProvider.findMany).toHaveBeenCalledWith({
      where: { userId: "user-1" },
      select: {
        id: true,
        name: true,
        displayName: true,
        type: true,
        baseUrl: true,
        isActive: true,
        models: true,
      },
      orderBy: { createdAt: "asc" },
    });
    expect(result).toEqual([
      {
        id: "prov-1",
        name: "openai-main",
        displayName: "OpenAI",
        type: "openai",
        baseUrl: "https://api.openai.com/v1",
        isActive: true,
        models: [{ id: "gpt-4.1" }],
      },
    ]);
  });

  it("createProvider encrypts apiKey", async () => {
    mockPrisma.llmProvider.create.mockResolvedValue({
      id: "prov-2",
    });

    const result = await service.createProvider("user-1", {
      name: "zai-main",
      displayName: "Z.ai",
      type: "zai",
      apiKey: "plaintext-key",
      models: [],
    });

    // The stored apiKey must be the encrypted form, never the plaintext,
    // and the default apiType must be applied when the DTO omits it.
    expect(mockCrypto.encrypt).toHaveBeenCalledWith("plaintext-key");
    expect(mockPrisma.llmProvider.create).toHaveBeenCalledWith({
      data: {
        userId: "user-1",
        name: "zai-main",
        displayName: "Z.ai",
        type: "zai",
        baseUrl: null,
        apiKey: "enc:plaintext-key",
        apiType: "openai-completions",
        models: [],
      },
      select: {
        id: true,
      },
    });
    expect(result).toEqual({ id: "prov-2" });
  });

  it("updateProvider rejects if not owned by user", async () => {
    // Ownership check finds nothing → NotFoundException, no write performed.
    mockPrisma.llmProvider.findFirst.mockResolvedValue(null);

    await expect(
      service.updateProvider("user-1", "provider-1", {
        displayName: "New Name",
      })
    ).rejects.toBeInstanceOf(NotFoundException);

    expect(mockPrisma.llmProvider.update).not.toHaveBeenCalled();
  });

  it("deleteProvider rejects if not owned by user", async () => {
    mockPrisma.llmProvider.findFirst.mockResolvedValue(null);

    await expect(service.deleteProvider("user-1", "provider-1")).rejects.toBeInstanceOf(
      NotFoundException
    );

    expect(mockPrisma.llmProvider.delete).not.toHaveBeenCalled();
  });

  it("getOidcConfig never returns clientSecret", async () => {
    mockPrisma.systemConfig.findMany.mockResolvedValue([
      {
        key: "oidc.issuerUrl",
        value: "https://issuer.example.com",
      },
      {
        key: "oidc.clientId",
        value: "client-id-1",
      },
      {
        key: "oidc.clientSecret",
        value: "enc:very-secret",
      },
    ]);

    const result = await service.getOidcConfig();

    // The secret's presence only influences `configured`; its value must
    // never appear in the response.
    expect(result).toEqual({
      issuerUrl: "https://issuer.example.com",
      clientId: "client-id-1",
      configured: true,
    });
    expect(result).not.toHaveProperty("clientSecret");
  });

  it("updateOidcConfig encrypts clientSecret", async () => {
    await service.updateOidcConfig({
      issuerUrl: "https://issuer.example.com",
      clientId: "client-id-1",
      clientSecret: "super-secret",
    });

    // Three upserts total (issuer, clientId, secret); the secret row is
    // stored encrypted and flagged as such.
    expect(mockCrypto.encrypt).toHaveBeenCalledWith("super-secret");
    expect(mockPrisma.systemConfig.upsert).toHaveBeenCalledTimes(3);
    expect(mockPrisma.systemConfig.upsert).toHaveBeenCalledWith({
      where: { key: "oidc.clientSecret" },
      update: { value: "enc:super-secret", encrypted: true },
      create: { key: "oidc.clientSecret", value: "enc:super-secret", encrypted: true },
    });
  });

  it("resetBreakglassPassword hashes new password", async () => {
    mockPrisma.breakglassUser.findUnique.mockResolvedValue({
      id: "bg-1",
      username: "admin",
      passwordHash: "old-hash",
    });

    await service.resetBreakglassPassword("admin", "new-password-123");

    // The stored hash must not be the plaintext and must verify with bcrypt.
    expect(mockPrisma.breakglassUser.update).toHaveBeenCalledOnce();
    const updateCall = mockPrisma.breakglassUser.update.mock.calls[0]?.[0];
    const newHash = updateCall?.data?.passwordHash;
    expect(newHash).toBeTypeOf("string");
    expect(newHash).not.toBe("new-password-123");
    expect(await compare("new-password-123", newHash as string)).toBe(true);
  });
});
|
||||
296
apps/api/src/fleet-settings/fleet-settings.service.ts
Normal file
296
apps/api/src/fleet-settings/fleet-settings.service.ts
Normal file
@@ -0,0 +1,296 @@
|
||||
import { Injectable, NotFoundException } from "@nestjs/common";
|
||||
import { hash } from "bcryptjs";
|
||||
import type { Prisma } from "@prisma/client";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { CryptoService } from "../crypto/crypto.service";
|
||||
import type {
|
||||
CreateProviderDto,
|
||||
ResetPasswordDto,
|
||||
UpdateAgentConfigDto,
|
||||
UpdateOidcDto,
|
||||
UpdateProviderDto,
|
||||
} from "./fleet-settings.dto";
|
||||
|
||||
// bcrypt cost factor for breakglass password hashing.
const BCRYPT_ROUNDS = 12;
// apiType applied when a provider is created without one.
const DEFAULT_PROVIDER_API_TYPE = "openai-completions";
// systemConfig keys under which the OIDC settings are persisted.
const OIDC_ISSUER_KEY = "oidc.issuerUrl";
const OIDC_CLIENT_ID_KEY = "oidc.clientId";
const OIDC_CLIENT_SECRET_KEY = "oidc.clientSecret";
// All OIDC keys, used for bulk fetch and bulk delete.
const OIDC_KEYS = [OIDC_ISSUER_KEY, OIDC_CLIENT_ID_KEY, OIDC_CLIENT_SECRET_KEY] as const;
|
||||
|
||||
/**
 * Public shape of an LLM provider as returned to the API client.
 * Deliberately excludes the (encrypted) apiKey.
 */
export interface FleetProviderResponse {
  id: string;
  name: string;
  displayName: string;
  type: string;
  baseUrl: string | null;
  isActive: boolean;
  /** JSON column of model descriptors; shape not enforced here. */
  models: unknown;
}
|
||||
|
||||
/**
 * A user's agent configuration; all-null/empty defaults are returned
 * when the user has no stored config row.
 */
export interface FleetAgentConfigResponse {
  primaryModel: string | null;
  /** Normalized from a JSON column; always an array (empty when unset). */
  fallbackModels: unknown[];
  personality: string | null;
}
|
||||
|
||||
/**
 * OIDC configuration as exposed to admins. The client secret is never
 * included; its presence only contributes to `configured`.
 */
export interface OidcConfigResponse {
  issuerUrl?: string;
  clientId?: string;
  /** True only when issuerUrl, clientId, and a stored secret all exist. */
  configured: boolean;
}
|
||||
|
||||
/**
 * Business logic for fleet settings: per-user LLM provider CRUD, per-user
 * agent configuration, admin OIDC configuration (stored in systemConfig),
 * and breakglass password resets. Secrets (provider API keys, OIDC client
 * secret) are encrypted via CryptoService before persistence and are never
 * returned in read responses.
 */
@Injectable()
export class FleetSettingsService {
  constructor(
    private readonly prisma: PrismaService,
    private readonly crypto: CryptoService
  ) {}

  // --- LLM Provider CRUD (per-user scoped) ---

  /** List a user's providers (oldest first), excluding apiKey. */
  async listProviders(userId: string): Promise<FleetProviderResponse[]> {
    return this.prisma.llmProvider.findMany({
      where: { userId },
      select: {
        id: true,
        name: true,
        displayName: true,
        type: true,
        baseUrl: true,
        isActive: true,
        models: true,
      },
      orderBy: { createdAt: "asc" },
    });
  }

  /**
   * Fetch a single provider owned by the user.
   * @throws NotFoundException when it does not exist or belongs to another user.
   */
  async getProvider(userId: string, providerId: string): Promise<FleetProviderResponse> {
    const provider = await this.prisma.llmProvider.findFirst({
      where: {
        id: providerId,
        userId,
      },
      select: {
        id: true,
        name: true,
        displayName: true,
        type: true,
        baseUrl: true,
        isActive: true,
        models: true,
      },
    });

    if (!provider) {
      throw new NotFoundException(`Provider ${providerId} not found`);
    }

    return provider;
  }

  /**
   * Create a provider for the user. The apiKey (if any) is encrypted first;
   * apiType defaults to "openai-completions" when omitted.
   * @returns The new provider's id only.
   */
  async createProvider(userId: string, data: CreateProviderDto): Promise<{ id: string }> {
    const provider = await this.prisma.llmProvider.create({
      data: {
        userId,
        name: data.name,
        displayName: data.displayName,
        type: data.type,
        baseUrl: data.baseUrl ?? null,
        apiKey: data.apiKey ? this.crypto.encrypt(data.apiKey) : null,
        apiType: data.apiType ?? DEFAULT_PROVIDER_API_TYPE,
        models: (data.models ?? []) as Prisma.InputJsonValue,
      },
      select: {
        id: true,
      },
    });

    return provider;
  }

  /**
   * Partially update a provider after verifying ownership. Only fields
   * present in the DTO are written; an empty-string apiKey clears the key,
   * a non-empty one is encrypted and stored.
   * @throws NotFoundException when the provider is not owned by the user.
   */
  async updateProvider(userId: string, providerId: string, data: UpdateProviderDto): Promise<void> {
    await this.assertProviderOwnership(userId, providerId);

    const updateData: Prisma.LlmProviderUpdateInput = {};
    if (data.displayName !== undefined) {
      updateData.displayName = data.displayName;
    }
    if (data.baseUrl !== undefined) {
      updateData.baseUrl = data.baseUrl;
    }
    if (data.isActive !== undefined) {
      updateData.isActive = data.isActive;
    }
    if (data.models !== undefined) {
      updateData.models = data.models as Prisma.InputJsonValue;
    }
    if (data.apiKey !== undefined) {
      updateData.apiKey = data.apiKey.length > 0 ? this.crypto.encrypt(data.apiKey) : null;
    }

    await this.prisma.llmProvider.update({
      where: { id: providerId },
      data: updateData,
    });
  }

  /**
   * Delete a provider after verifying ownership.
   * @throws NotFoundException when the provider is not owned by the user.
   */
  async deleteProvider(userId: string, providerId: string): Promise<void> {
    await this.assertProviderOwnership(userId, providerId);

    await this.prisma.llmProvider.delete({
      where: { id: providerId },
    });
  }

  // --- User Agent Config ---

  /**
   * Read the user's agent config, returning null/empty defaults when no
   * row exists yet.
   */
  async getAgentConfig(userId: string): Promise<FleetAgentConfigResponse> {
    const config = await this.prisma.userAgentConfig.findUnique({
      where: { userId },
      select: {
        primaryModel: true,
        fallbackModels: true,
        personality: true,
      },
    });

    if (!config) {
      return {
        primaryModel: null,
        fallbackModels: [],
        personality: null,
      };
    }

    return {
      primaryModel: config.primaryModel,
      // fallbackModels is a JSON column; coerce non-array values to [].
      fallbackModels: this.normalizeJsonArray(config.fallbackModels),
      personality: config.personality,
    };
  }

  /**
   * Upsert the user's agent config. Only DTO-supplied fields are written on
   * update; on create, omitted fields fall back to column defaults (and
   * fallbackModels to []).
   */
  async updateAgentConfig(userId: string, data: UpdateAgentConfigDto): Promise<void> {
    const updateData: Prisma.UserAgentConfigUpdateInput = {};
    if (data.primaryModel !== undefined) {
      updateData.primaryModel = data.primaryModel;
    }
    if (data.personality !== undefined) {
      updateData.personality = data.personality;
    }
    if (data.fallbackModels !== undefined) {
      updateData.fallbackModels = data.fallbackModels as Prisma.InputJsonValue;
    }

    // NOTE(review): scalar `userId` with the checked CreateInput type usually
    // requires UncheckedCreateInput in Prisma — confirm against the schema.
    const createData: Prisma.UserAgentConfigCreateInput = {
      userId,
      fallbackModels: (data.fallbackModels ?? []) as Prisma.InputJsonValue,
      ...(data.primaryModel !== undefined ? { primaryModel: data.primaryModel } : {}),
      ...(data.personality !== undefined ? { personality: data.personality } : {}),
    };

    await this.prisma.userAgentConfig.upsert({
      where: { userId },
      create: createData,
      update: updateData,
    });
  }

  // --- OIDC Config (admin only) ---

  /**
   * Read OIDC config from systemConfig. The client secret is never returned;
   * it only contributes to the `configured` flag.
   */
  async getOidcConfig(): Promise<OidcConfigResponse> {
    const entries = await this.prisma.systemConfig.findMany({
      where: {
        key: {
          in: [...OIDC_KEYS],
        },
      },
      select: {
        key: true,
        value: true,
      },
    });

    const byKey = new Map(entries.map((entry) => [entry.key, entry.value]));
    const issuerUrl = byKey.get(OIDC_ISSUER_KEY);
    const clientId = byKey.get(OIDC_CLIENT_ID_KEY);
    const hasSecret = byKey.has(OIDC_CLIENT_SECRET_KEY);

    return {
      ...(issuerUrl ? { issuerUrl } : {}),
      ...(clientId ? { clientId } : {}),
      configured: Boolean(issuerUrl && clientId && hasSecret),
    };
  }

  /**
   * Upsert the full OIDC config (three systemConfig rows in parallel).
   * Only the client secret is stored encrypted.
   */
  async updateOidcConfig(data: UpdateOidcDto): Promise<void> {
    const encryptedSecret = this.crypto.encrypt(data.clientSecret);

    await Promise.all([
      this.upsertSystemConfig(OIDC_ISSUER_KEY, data.issuerUrl, false),
      this.upsertSystemConfig(OIDC_CLIENT_ID_KEY, data.clientId, false),
      this.upsertSystemConfig(OIDC_CLIENT_SECRET_KEY, encryptedSecret, true),
    ]);
  }

  /** Remove all OIDC systemConfig rows. */
  async deleteOidcConfig(): Promise<void> {
    await this.prisma.systemConfig.deleteMany({
      where: {
        key: {
          in: [...OIDC_KEYS],
        },
      },
    });
  }

  // --- Breakglass (admin only) ---

  /**
   * Reset a breakglass user's password, storing a bcrypt hash (cost 12).
   * @throws NotFoundException when the username does not exist.
   */
  async resetBreakglassPassword(
    username: ResetPasswordDto["username"],
    newPassword: ResetPasswordDto["newPassword"]
  ): Promise<void> {
    const user = await this.prisma.breakglassUser.findUnique({
      where: { username },
      select: { id: true },
    });

    if (!user) {
      throw new NotFoundException(`Breakglass user ${username} not found`);
    }

    const passwordHash = await hash(newPassword, BCRYPT_ROUNDS);

    await this.prisma.breakglassUser.update({
      where: { id: user.id },
      data: { passwordHash },
    });
  }

  /**
   * Throw NotFoundException unless the provider exists and belongs to the
   * user. Used as the guard before update/delete.
   */
  private async assertProviderOwnership(userId: string, providerId: string): Promise<void> {
    const provider = await this.prisma.llmProvider.findFirst({
      where: {
        id: providerId,
        userId,
      },
      select: {
        id: true,
      },
    });

    if (!provider) {
      throw new NotFoundException(`Provider ${providerId} not found`);
    }
  }

  /** Insert-or-update a single systemConfig key/value row. */
  private async upsertSystemConfig(key: string, value: string, encrypted: boolean): Promise<void> {
    await this.prisma.systemConfig.upsert({
      where: { key },
      update: { value, encrypted },
      create: { key, value, encrypted },
    });
  }

  /** Coerce a JSON-column value to an array; non-arrays become []. */
  private normalizeJsonArray(value: unknown): unknown[] {
    return Array.isArray(value) ? value : [];
  }
}
|
||||
89
apps/api/src/import/dto/import-project.dto.ts
Normal file
89
apps/api/src/import/dto/import-project.dto.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import { IsNumber, IsOptional, IsString, MaxLength, MinLength } from "class-validator";
|
||||
|
||||
/**
 * DTO for a single jarvis-brain project record.
 * This matches the project object shape consumed by scripts/migrate-brain.ts.
 */
export class ImportProjectDto {
  // Stable jarvis-brain identifier; ImportService uses it (as metadata.brainId)
  // for idempotent de-duplication on import.
  @IsString({ message: "id must be a string" })
  @MinLength(1, { message: "id must not be empty" })
  @MaxLength(255, { message: "id must not exceed 255 characters" })
  id!: string;

  @IsString({ message: "name must be a string" })
  @MinLength(1, { message: "name must not be empty" })
  @MaxLength(255, { message: "name must not exceed 255 characters" })
  name!: string;

  @IsOptional()
  @IsString({ message: "description must be a string" })
  description?: string | null;

  // Free-form domain label; ImportService slugifies it to resolve/create a Domain row.
  @IsOptional()
  @IsString({ message: "domain must be a string" })
  domain?: string | null;

  // Raw jarvis-brain status; mapped to ProjectStatus by ImportService (unknown values
  // fall back to PLANNING with a reported issue).
  @IsOptional()
  @IsString({ message: "status must be a string" })
  status?: string | null;

  // jarvis-brain project priority can be a number, string, or null
  @IsOptional()
  priority?: number | string | null;

  @IsOptional()
  @IsNumber({}, { message: "progress must be a number" })
  progress?: number | null;

  @IsOptional()
  @IsString({ message: "repo must be a string" })
  repo?: string | null;

  @IsOptional()
  @IsString({ message: "branch must be a string" })
  branch?: string | null;

  @IsOptional()
  @IsString({ message: "current_milestone must be a string" })
  current_milestone?: string | null;

  @IsOptional()
  @IsString({ message: "next_milestone must be a string" })
  next_milestone?: string | null;

  @IsOptional()
  @IsString({ message: "blocker must be a string" })
  blocker?: string | null;

  @IsOptional()
  @IsString({ message: "owner must be a string" })
  owner?: string | null;

  @IsOptional()
  @IsString({ message: "docs_path must be a string" })
  docs_path?: string | null;

  // Date-like fields stay raw strings here; ImportService.normalizeDate parses them
  // and reports (rather than rejects) invalid values.
  @IsOptional()
  @IsString({ message: "created must be a string" })
  created?: string | null;

  @IsOptional()
  @IsString({ message: "updated must be a string" })
  updated?: string | null;

  @IsOptional()
  @IsString({ message: "target_date must be a string" })
  target_date?: string | null;

  @IsOptional()
  @IsString({ message: "notes must be a string" })
  notes?: string | null;

  @IsOptional()
  @IsString({ message: "notes_nontechnical must be a string" })
  notes_nontechnical?: string | null;

  // Parent project reference (jarvis-brain id); preserved in metadata on import.
  @IsOptional()
  @IsString({ message: "parent must be a string" })
  parent?: string | null;
}
|
||||
5
apps/api/src/import/dto/import-response.dto.ts
Normal file
5
apps/api/src/import/dto/import-response.dto.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
/** Summary of one import run; the import continues past individual failures. */
export interface ImportResponseDto {
  /** Number of records newly created. */
  imported: number;
  /** Number of records not created (already present, duplicate in payload, or failed). */
  skipped: number;
  /** Human-readable per-record problems collected during the run. */
  errors: string[];
}
|
||||
76
apps/api/src/import/dto/import-task.dto.ts
Normal file
76
apps/api/src/import/dto/import-task.dto.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import { IsArray, IsNumber, IsOptional, IsString, MaxLength, MinLength } from "class-validator";
|
||||
|
||||
/**
 * DTO for a single jarvis-brain task record.
 * This matches the task object shape consumed by scripts/migrate-brain.ts.
 */
export class ImportTaskDto {
  // Stable jarvis-brain identifier; ImportService uses it (as metadata.brainId)
  // for idempotent de-duplication on import.
  @IsString({ message: "id must be a string" })
  @MinLength(1, { message: "id must not be empty" })
  @MaxLength(255, { message: "id must not exceed 255 characters" })
  id!: string;

  @IsString({ message: "title must be a string" })
  @MinLength(1, { message: "title must not be empty" })
  @MaxLength(255, { message: "title must not exceed 255 characters" })
  title!: string;

  // Free-form domain label; ImportService slugifies it to resolve/create a Domain row.
  @IsOptional()
  @IsString({ message: "domain must be a string" })
  domain?: string | null;

  // jarvis-brain project id this task belongs to; resolved against previously
  // imported projects (missing references are reported, not fatal).
  @IsOptional()
  @IsString({ message: "project must be a string" })
  project?: string | null;

  @IsOptional()
  @IsArray({ message: "related must be an array" })
  @IsString({ each: true, message: "related items must be strings" })
  related?: string[];

  // Raw jarvis-brain priority; mapped to TaskPriority by ImportService (unknown
  // values fall back to MEDIUM with a reported issue).
  @IsOptional()
  @IsString({ message: "priority must be a string" })
  priority?: string | null;

  // Raw jarvis-brain status; mapped to TaskStatus by ImportService (unknown values
  // fall back to NOT_STARTED with a reported issue).
  @IsOptional()
  @IsString({ message: "status must be a string" })
  status?: string | null;

  @IsOptional()
  @IsNumber({}, { message: "progress must be a number" })
  progress?: number | null;

  // Date-like fields stay raw strings here; ImportService.normalizeDate parses them.
  @IsOptional()
  @IsString({ message: "due must be a string" })
  due?: string | null;

  @IsOptional()
  @IsArray({ message: "blocks must be an array" })
  @IsString({ each: true, message: "blocks items must be strings" })
  blocks?: string[];

  @IsOptional()
  @IsArray({ message: "blocked_by must be an array" })
  @IsString({ each: true, message: "blocked_by items must be strings" })
  blocked_by?: string[];

  @IsOptional()
  @IsString({ message: "assignee must be a string" })
  assignee?: string | null;

  @IsOptional()
  @IsString({ message: "created must be a string" })
  created?: string | null;

  @IsOptional()
  @IsString({ message: "updated must be a string" })
  updated?: string | null;

  // Notes become the imported task's description; also kept verbatim in metadata.
  @IsOptional()
  @IsString({ message: "notes must be a string" })
  notes?: string | null;

  @IsOptional()
  @IsString({ message: "notes_nontechnical must be a string" })
  notes_nontechnical?: string | null;
}
|
||||
3
apps/api/src/import/dto/index.ts
Normal file
3
apps/api/src/import/dto/index.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export { ImportTaskDto } from "./import-task.dto";
|
||||
export { ImportProjectDto } from "./import-project.dto";
|
||||
export type { ImportResponseDto } from "./import-response.dto";
|
||||
33
apps/api/src/import/import.controller.ts
Normal file
33
apps/api/src/import/import.controller.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { Body, Controller, ParseArrayPipe, Post, UseGuards } from "@nestjs/common";
|
||||
import type { AuthUser } from "@mosaic/shared";
|
||||
import { CurrentUser } from "../auth/decorators/current-user.decorator";
|
||||
import { AdminGuard } from "../auth/guards/admin.guard";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import { Workspace } from "../common/decorators";
|
||||
import { WorkspaceGuard } from "../common/guards";
|
||||
import { ImportProjectDto, type ImportResponseDto, ImportTaskDto } from "./dto";
|
||||
import { ImportService } from "./import.service";
|
||||
|
||||
/**
 * Admin-only endpoints for importing jarvis-brain records into a workspace.
 * All routes require an authenticated admin user with a resolved workspace.
 */
@Controller("import")
@UseGuards(AuthGuard, WorkspaceGuard, AdminGuard)
export class ImportController {
  constructor(private readonly importService: ImportService) {}

  // POST /api/import/tasks — body: ImportTaskDto[] (each element validated by ParseArrayPipe).
  @Post("tasks")
  async importTasks(
    @Body(new ParseArrayPipe({ items: ImportTaskDto })) taskPayload: ImportTaskDto[],
    @Workspace() workspaceId: string,
    @CurrentUser() user: AuthUser
  ): Promise<ImportResponseDto> {
    return this.importService.importTasks(workspaceId, user.id, taskPayload);
  }

  // POST /api/import/projects — body: ImportProjectDto[] (each element validated by ParseArrayPipe).
  @Post("projects")
  async importProjects(
    @Body(new ParseArrayPipe({ items: ImportProjectDto })) projectPayload: ImportProjectDto[],
    @Workspace() workspaceId: string,
    @CurrentUser() user: AuthUser
  ): Promise<ImportResponseDto> {
    return this.importService.importProjects(workspaceId, user.id, projectPayload);
  }
}
|
||||
13
apps/api/src/import/import.module.ts
Normal file
13
apps/api/src/import/import.module.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { AuthModule } from "../auth/auth.module";
|
||||
import { PrismaModule } from "../prisma/prisma.module";
|
||||
import { ImportController } from "./import.controller";
|
||||
import { ImportService } from "./import.service";
|
||||
|
||||
/**
 * Wires up the jarvis-brain import feature: the admin-only controller plus the
 * import service. Exports ImportService so other modules can reuse the import logic.
 */
@Module({
  imports: [PrismaModule, AuthModule],
  controllers: [ImportController],
  providers: [ImportService],
  exports: [ImportService],
})
export class ImportModule {}
|
||||
251
apps/api/src/import/import.service.spec.ts
Normal file
251
apps/api/src/import/import.service.spec.ts
Normal file
@@ -0,0 +1,251 @@
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { ProjectStatus, TaskPriority, TaskStatus } from "@prisma/client";
|
||||
import { ImportService } from "./import.service";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
|
||||
// Unit tests for ImportService. PrismaService is fully mocked and
// withWorkspaceContext is short-circuited to invoke its callback with the mock
// itself, so no database is involved.
describe("ImportService", () => {
  let service: ImportService;

  const mockPrismaService = {
    withWorkspaceContext: vi.fn(),
    domain: {
      findUnique: vi.fn(),
      create: vi.fn(),
    },
    project: {
      findFirst: vi.fn(),
      create: vi.fn(),
    },
    task: {
      findFirst: vi.fn(),
      create: vi.fn(),
    },
  };

  const workspaceId = "550e8400-e29b-41d4-a716-446655440001";
  const userId = "550e8400-e29b-41d4-a716-446655440002";

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        ImportService,
        {
          provide: PrismaService,
          useValue: mockPrismaService,
        },
      ],
    }).compile();

    service = module.get<ImportService>(ImportService);
    vi.clearAllMocks();

    // Pass the mock straight through as the transaction client.
    mockPrismaService.withWorkspaceContext.mockImplementation(
      async (_userId: string, _workspaceId: string, fn: (client: unknown) => Promise<unknown>) => {
        return fn(mockPrismaService);
      }
    );
  });

  it("should be defined", () => {
    expect(service).toBeDefined();
  });

  describe("importTasks", () => {
    it("maps status/priority/domain and imports a task", async () => {
      // No existing task/domain/project → everything gets created fresh.
      mockPrismaService.task.findFirst.mockResolvedValue(null);
      mockPrismaService.domain.findUnique.mockResolvedValue(null);
      mockPrismaService.domain.create.mockResolvedValue({ id: "domain-id" });
      mockPrismaService.project.findFirst.mockResolvedValue(null);
      mockPrismaService.task.create.mockResolvedValue({ id: "task-id" });

      const result = await service.importTasks(workspaceId, userId, [
        {
          id: "task-1",
          title: "Import me",
          domain: "Platform Ops",
          status: "in-progress",
          priority: "critical",
          project: null,
          related: [],
          blocks: [],
          blocked_by: [],
          progress: 42,
          due: "2026-03-15",
          created: "2026-03-01T10:00:00.000Z",
          updated: "2026-03-05T12:00:00.000Z",
          assignee: null,
          notes: "notes",
          notes_nontechnical: "non technical",
        },
      ]);

      expect(result).toEqual({ imported: 1, skipped: 0, errors: [] });
      // "in-progress" → IN_PROGRESS, "critical" → HIGH, domain auto-created.
      expect(mockPrismaService.task.create).toHaveBeenCalledWith(
        expect.objectContaining({
          data: expect.objectContaining({
            title: "Import me",
            status: TaskStatus.IN_PROGRESS,
            priority: TaskPriority.HIGH,
            domainId: "domain-id",
          }),
        })
      );
    });

    it("skips existing task by brainId", async () => {
      // findFirst hit means the brainId was already imported → skipped, no create.
      mockPrismaService.task.findFirst.mockResolvedValue({ id: "existing-task-id" });

      const result = await service.importTasks(workspaceId, userId, [
        {
          id: "task-1",
          title: "Existing",
          domain: null,
          status: "pending",
          priority: "medium",
          project: null,
          related: [],
          blocks: [],
          blocked_by: [],
          progress: null,
          due: null,
          created: null,
          updated: null,
          assignee: null,
          notes: null,
          notes_nontechnical: null,
        },
      ]);

      expect(result.imported).toBe(0);
      expect(result.skipped).toBe(1);
      expect(mockPrismaService.task.create).not.toHaveBeenCalled();
    });

    it("collects mapping/missing-project errors while importing", async () => {
      mockPrismaService.task.findFirst.mockResolvedValue(null);
      mockPrismaService.project.findFirst.mockResolvedValue(null);
      mockPrismaService.task.create.mockResolvedValue({ id: "task-id" });

      const result = await service.importTasks(workspaceId, userId, [
        {
          id: "task-1",
          title: "Needs project",
          domain: null,
          status: "mystery-status",
          priority: "mystery-priority",
          project: "brain-project-1",
          related: [],
          blocks: [],
          blocked_by: [],
          progress: null,
          due: null,
          created: null,
          updated: null,
          assignee: null,
          notes: null,
          notes_nontechnical: null,
        },
      ]);

      // Mapping problems and a dangling project reference are reported but do
      // not prevent the task itself from being imported with fallback values.
      expect(result.imported).toBe(1);
      expect(result.errors).toEqual(
        expect.arrayContaining([
          expect.stringContaining('Unknown task status "mystery-status"'),
          expect.stringContaining('Unknown task priority "mystery-priority"'),
          expect.stringContaining('referenced project "brain-project-1" not found'),
        ])
      );
      expect(mockPrismaService.task.create).toHaveBeenCalledWith(
        expect.objectContaining({
          data: expect.objectContaining({
            status: TaskStatus.NOT_STARTED,
            priority: TaskPriority.MEDIUM,
            projectId: null,
          }),
        })
      );
    });
  });

  describe("importProjects", () => {
    it("maps status/domain and imports a project", async () => {
      mockPrismaService.project.findFirst.mockResolvedValue(null);
      mockPrismaService.domain.findUnique.mockResolvedValue(null);
      mockPrismaService.domain.create.mockResolvedValue({ id: "domain-id" });
      mockPrismaService.project.create.mockResolvedValue({ id: "project-id" });

      const result = await service.importProjects(workspaceId, userId, [
        {
          id: "project-1",
          name: "Project One",
          description: "desc",
          domain: "Backend",
          status: "in-progress",
          priority: "high",
          progress: 50,
          repo: "git@example.com/repo",
          branch: "main",
          current_milestone: "MS21",
          next_milestone: "MS22",
          blocker: null,
          owner: "owner",
          docs_path: "docs/PRD.md",
          created: "2026-03-01",
          updated: "2026-03-05",
          target_date: "2026-04-01",
          notes: "notes",
          notes_nontechnical: "non tech",
          parent: null,
        },
      ]);

      expect(result).toEqual({ imported: 1, skipped: 0, errors: [] });
      // "in-progress" → ACTIVE for projects; domain auto-created.
      expect(mockPrismaService.project.create).toHaveBeenCalledWith(
        expect.objectContaining({
          data: expect.objectContaining({
            name: "Project One",
            status: ProjectStatus.ACTIVE,
            domainId: "domain-id",
          }),
        })
      );
    });

    it("captures create failures as errors", async () => {
      mockPrismaService.project.findFirst.mockResolvedValue(null);
      mockPrismaService.project.create.mockRejectedValue(new Error("db failed"));

      const result = await service.importProjects(workspaceId, userId, [
        {
          id: "project-1",
          name: "Project One",
          description: null,
          domain: null,
          status: "planning",
          priority: null,
          progress: null,
          repo: null,
          branch: null,
          current_milestone: null,
          next_milestone: null,
          blocker: null,
          owner: null,
          docs_path: null,
          created: null,
          updated: null,
          target_date: null,
          notes: null,
          notes_nontechnical: null,
          parent: null,
        },
      ]);

      // A DB failure counts the record as skipped and surfaces the message.
      expect(result.imported).toBe(0);
      expect(result.skipped).toBe(1);
      expect(result.errors).toEqual([
        expect.stringContaining("project project-1: failed to import: db failed"),
      ]);
    });
  });
});
|
||||
496
apps/api/src/import/import.service.ts
Normal file
496
apps/api/src/import/import.service.ts
Normal file
@@ -0,0 +1,496 @@
|
||||
import { Injectable } from "@nestjs/common";
|
||||
import { Prisma, PrismaClient, ProjectStatus, TaskPriority, TaskStatus } from "@prisma/client";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import type { ImportProjectDto, ImportResponseDto, ImportTaskDto } from "./dto";
|
||||
|
||||
/** Result of mapping a raw jarvis-brain task status; `issue` is non-null when the raw value was unrecognized and a fallback was used. */
interface TaskStatusMapping {
  status: TaskStatus;
  issue: string | null;
}

/** Result of mapping a raw jarvis-brain task priority; `issue` is non-null when the raw value was unrecognized and a fallback was used. */
interface TaskPriorityMapping {
  priority: TaskPriority;
  issue: string | null;
}

/** Result of mapping a raw jarvis-brain project status; `issue` is non-null when the raw value was unrecognized and a fallback was used. */
interface ProjectStatusMapping {
  status: ProjectStatus;
  issue: string | null;
}
||||
|
||||
/**
 * Imports jarvis-brain task and project records into a workspace.
 *
 * Both import methods are idempotent and best-effort: records are
 * de-duplicated by the jarvis-brain id stored under metadata.brainId,
 * duplicates within one request body are skipped, and per-record problems
 * are collected into the response's `errors` array instead of aborting
 * the whole run.
 */
@Injectable()
export class ImportService {
  constructor(private readonly prisma: PrismaService) {}

  /**
   * Imports a batch of jarvis-brain tasks into the given workspace.
   *
   * @param workspaceId - Target workspace.
   * @param userId - Acting user; becomes each task's creatorId and scopes the workspace context.
   * @param taskPayload - Validated jarvis-brain task records.
   * @returns Counts of imported/skipped records plus accumulated error messages.
   */
  async importTasks(
    workspaceId: string,
    userId: string,
    taskPayload: ImportTaskDto[]
  ): Promise<ImportResponseDto> {
    const errors: string[] = [];
    let imported = 0;
    let skipped = 0;

    const importTimestamp = new Date().toISOString();
    // Guards against the same brainId appearing twice in one request body.
    const seenBrainTaskIds = new Set<string>();
    // Per-request caches so repeated domain/project references hit the DB once.
    const domainIdBySlug = new Map<string, string>();
    const projectIdByBrainId = new Map<string, string | null>();

    await this.prisma.withWorkspaceContext(userId, workspaceId, async (tx: PrismaClient) => {
      for (const [index, task] of taskPayload.entries()) {
        const brainId = task.id.trim();

        if (seenBrainTaskIds.has(brainId)) {
          skipped += 1;
          errors.push(`task ${brainId}: duplicate item in request body`);
          continue;
        }
        seenBrainTaskIds.add(brainId);

        try {
          // Idempotency: a task previously imported with this brainId is skipped.
          const existingTask = await tx.task.findFirst({
            where: {
              workspaceId,
              metadata: {
                path: ["brainId"],
                equals: brainId,
              },
            },
            select: { id: true },
          });

          if (existingTask) {
            skipped += 1;
            continue;
          }

          // Unknown raw values map to fallbacks and are reported, not fatal.
          const mappedStatus = this.mapTaskStatus(task.status ?? null);
          if (mappedStatus.issue) {
            errors.push(`task ${brainId}: ${mappedStatus.issue}`);
          }

          const mappedPriority = this.mapTaskPriority(task.priority ?? null);
          if (mappedPriority.issue) {
            errors.push(`task ${brainId}: ${mappedPriority.issue}`);
          }

          const projectBrainId = task.project?.trim() ? task.project.trim() : null;
          const projectId = await this.resolveProjectId(
            tx,
            workspaceId,
            projectBrainId,
            projectIdByBrainId,
            brainId,
            errors
          );

          const domainId = await this.resolveDomainId(
            tx,
            workspaceId,
            task.domain ?? null,
            importTimestamp,
            domainIdBySlug
          );

          // Missing/invalid dates fall back: created → now, updated → created.
          const createdAt =
            this.normalizeDate(task.created ?? null, `task ${brainId}.created`, errors) ??
            new Date();
          const updatedAt =
            this.normalizeDate(task.updated ?? null, `task ${brainId}.updated`, errors) ??
            createdAt;
          const dueDate = this.normalizeDate(task.due ?? null, `task ${brainId}.due`, errors);
          // Completed tasks use their last-updated time as the completion time.
          const completedAt = mappedStatus.status === TaskStatus.COMPLETED ? updatedAt : null;

          // Raw jarvis-brain values are preserved in metadata for traceability.
          const metadata = this.asJsonValue({
            source: "jarvis-brain",
            brainId,
            brainDomain: task.domain ?? null,
            brainProjectId: projectBrainId,
            rawStatus: task.status ?? null,
            rawPriority: task.priority ?? null,
            related: task.related ?? [],
            blocks: task.blocks ?? [],
            blockedBy: task.blocked_by ?? [],
            assignee: task.assignee ?? null,
            progress: task.progress ?? null,
            notes: task.notes ?? null,
            notesNonTechnical: task.notes_nontechnical ?? null,
            importedAt: importTimestamp,
          });

          await tx.task.create({
            data: {
              workspaceId,
              title: task.title,
              description: task.notes ?? null,
              status: mappedStatus.status,
              priority: mappedPriority.priority,
              dueDate,
              creatorId: userId,
              projectId,
              domainId,
              metadata,
              createdAt,
              updatedAt,
              completedAt,
            },
          });

          imported += 1;
        } catch (error) {
          // A failed record counts as skipped; the loop continues with the rest.
          skipped += 1;
          errors.push(
            `task ${brainId || `index-${String(index)}`}: failed to import: ${this.getErrorMessage(error)}`
          );
        }
      }
    });

    return {
      imported,
      skipped,
      errors,
    };
  }

  /**
   * Imports a batch of jarvis-brain projects into the given workspace.
   *
   * @param workspaceId - Target workspace.
   * @param userId - Acting user; becomes each project's creatorId and scopes the workspace context.
   * @param projectPayload - Validated jarvis-brain project records.
   * @returns Counts of imported/skipped records plus accumulated error messages.
   */
  async importProjects(
    workspaceId: string,
    userId: string,
    projectPayload: ImportProjectDto[]
  ): Promise<ImportResponseDto> {
    const errors: string[] = [];
    let imported = 0;
    let skipped = 0;

    const importTimestamp = new Date().toISOString();
    const seenBrainProjectIds = new Set<string>();
    const domainIdBySlug = new Map<string, string>();

    await this.prisma.withWorkspaceContext(userId, workspaceId, async (tx: PrismaClient) => {
      for (const [index, project] of projectPayload.entries()) {
        const brainId = project.id.trim();

        if (seenBrainProjectIds.has(brainId)) {
          skipped += 1;
          errors.push(`project ${brainId}: duplicate item in request body`);
          continue;
        }
        seenBrainProjectIds.add(brainId);

        try {
          // Idempotency: a project previously imported with this brainId is skipped.
          const existingProject = await tx.project.findFirst({
            where: {
              workspaceId,
              metadata: {
                path: ["brainId"],
                equals: brainId,
              },
            },
            select: { id: true },
          });

          if (existingProject) {
            skipped += 1;
            continue;
          }

          const mappedStatus = this.mapProjectStatus(project.status ?? null);
          if (mappedStatus.issue) {
            errors.push(`project ${brainId}: ${mappedStatus.issue}`);
          }

          const domainId = await this.resolveDomainId(
            tx,
            workspaceId,
            project.domain ?? null,
            importTimestamp,
            domainIdBySlug
          );

          const createdAt =
            this.normalizeDate(project.created ?? null, `project ${brainId}.created`, errors) ??
            new Date();
          const updatedAt =
            this.normalizeDate(project.updated ?? null, `project ${brainId}.updated`, errors) ??
            createdAt;
          // startDate mirrors the brain "created" date; endDate mirrors "target_date".
          const startDate = this.normalizeDate(
            project.created ?? null,
            `project ${brainId}.startDate`,
            errors
          );
          const endDate = this.normalizeDate(
            project.target_date ?? null,
            `project ${brainId}.target_date`,
            errors
          );

          // Raw jarvis-brain values are preserved in metadata for traceability.
          const metadata = this.asJsonValue({
            source: "jarvis-brain",
            brainId,
            brainDomain: project.domain ?? null,
            rawStatus: project.status ?? null,
            rawPriority: project.priority ?? null,
            progress: project.progress ?? null,
            repo: project.repo ?? null,
            branch: project.branch ?? null,
            currentMilestone: project.current_milestone ?? null,
            nextMilestone: project.next_milestone ?? null,
            blocker: project.blocker ?? null,
            owner: project.owner ?? null,
            docsPath: project.docs_path ?? null,
            targetDate: project.target_date ?? null,
            notes: project.notes ?? null,
            notesNonTechnical: project.notes_nontechnical ?? null,
            parent: project.parent ?? null,
            importedAt: importTimestamp,
          });

          await tx.project.create({
            data: {
              workspaceId,
              name: project.name,
              description: project.description ?? null,
              status: mappedStatus.status,
              startDate,
              endDate,
              creatorId: userId,
              domainId,
              metadata,
              createdAt,
              updatedAt,
            },
          });

          imported += 1;
        } catch (error) {
          skipped += 1;
          errors.push(
            `project ${brainId || `index-${String(index)}`}: failed to import: ${this.getErrorMessage(error)}`
          );
        }
      }
    });

    return {
      imported,
      skipped,
      errors,
    };
  }

  /**
   * Resolves a jarvis-brain project reference to an internal project id.
   * Results (including "not found" as null) are memoized in projectIdByBrainId,
   * so the not-found error is reported only for the first task referencing it.
   */
  private async resolveProjectId(
    tx: PrismaClient,
    workspaceId: string,
    projectBrainId: string | null,
    projectIdByBrainId: Map<string, string | null>,
    taskBrainId: string,
    errors: string[]
  ): Promise<string | null> {
    if (!projectBrainId) {
      return null;
    }

    if (projectIdByBrainId.has(projectBrainId)) {
      return projectIdByBrainId.get(projectBrainId) ?? null;
    }

    const existingProject = await tx.project.findFirst({
      where: {
        workspaceId,
        metadata: {
          path: ["brainId"],
          equals: projectBrainId,
        },
      },
      select: { id: true },
    });

    if (!existingProject) {
      projectIdByBrainId.set(projectBrainId, null);
      errors.push(`task ${taskBrainId}: referenced project "${projectBrainId}" not found`);
      return null;
    }

    projectIdByBrainId.set(projectBrainId, existingProject.id);
    return existingProject.id;
  }

  /**
   * Resolves a free-form jarvis-brain domain label to a Domain row, creating
   * the domain on first sight. Lookup is by slugified label, memoized in
   * domainIdBySlug. Returns null when the label is empty/unslugifiable.
   */
  private async resolveDomainId(
    tx: PrismaClient,
    workspaceId: string,
    rawDomain: string | null,
    importTimestamp: string,
    domainIdBySlug: Map<string, string>
  ): Promise<string | null> {
    const domainSlug = this.normalizeDomain(rawDomain);
    if (!domainSlug) {
      return null;
    }

    const cachedId = domainIdBySlug.get(domainSlug);
    if (cachedId) {
      return cachedId;
    }

    const existingDomain = await tx.domain.findUnique({
      where: {
        workspaceId_slug: {
          workspaceId,
          slug: domainSlug,
        },
      },
      select: { id: true },
    });

    if (existingDomain) {
      domainIdBySlug.set(domainSlug, existingDomain.id);
      return existingDomain.id;
    }

    // Display name keeps the original label; the slug is only a lookup key.
    const trimmedDomainName = rawDomain?.trim();
    const domainName =
      trimmedDomainName && trimmedDomainName.length > 0 ? trimmedDomainName : domainSlug;
    const createdDomain = await tx.domain.create({
      data: {
        workspaceId,
        slug: domainSlug,
        name: domainName,
        metadata: this.asJsonValue({
          source: "jarvis-brain",
          brainId: domainName,
          sourceValues: [domainName],
          importedAt: importTimestamp,
        }),
      },
      select: { id: true },
    });

    domainIdBySlug.set(domainSlug, createdDomain.id);
    return createdDomain.id;
  }

  /** Normalizes a raw status/priority string for case-insensitive matching. */
  private normalizeKey(value: string | null | undefined): string {
    return value?.trim().toLowerCase() ?? "";
  }

  /** Maps a raw jarvis-brain task status to TaskStatus; unknown values fall back to NOT_STARTED with an issue. */
  private mapTaskStatus(rawStatus: string | null): TaskStatusMapping {
    const statusKey = this.normalizeKey(rawStatus);

    switch (statusKey) {
      case "done":
        return { status: TaskStatus.COMPLETED, issue: null };
      case "in-progress":
        return { status: TaskStatus.IN_PROGRESS, issue: null };
      case "backlog":
      case "pending":
      case "scheduled":
      case "not-started":
      case "planned":
        return { status: TaskStatus.NOT_STARTED, issue: null };
      case "blocked":
      case "on-hold":
        return { status: TaskStatus.PAUSED, issue: null };
      case "cancelled":
        return { status: TaskStatus.ARCHIVED, issue: null };
      default:
        return {
          status: TaskStatus.NOT_STARTED,
          issue: `Unknown task status "${rawStatus ?? "null"}" mapped to NOT_STARTED`,
        };
    }
  }

  /** Maps a raw jarvis-brain task priority to TaskPriority; unknown values fall back to MEDIUM with an issue. */
  private mapTaskPriority(rawPriority: string | null): TaskPriorityMapping {
    const priorityKey = this.normalizeKey(rawPriority);

    switch (priorityKey) {
      case "critical":
      case "high":
        return { priority: TaskPriority.HIGH, issue: null };
      case "medium":
        return { priority: TaskPriority.MEDIUM, issue: null };
      case "low":
        return { priority: TaskPriority.LOW, issue: null };
      default:
        return {
          priority: TaskPriority.MEDIUM,
          issue: `Unknown task priority "${rawPriority ?? "null"}" mapped to MEDIUM`,
        };
    }
  }

  /** Maps a raw jarvis-brain project status to ProjectStatus; unknown values fall back to PLANNING with an issue. */
  private mapProjectStatus(rawStatus: string | null): ProjectStatusMapping {
    const statusKey = this.normalizeKey(rawStatus);

    switch (statusKey) {
      case "active":
      case "in-progress":
        return { status: ProjectStatus.ACTIVE, issue: null };
      case "backlog":
      case "planning":
        return { status: ProjectStatus.PLANNING, issue: null };
      case "paused":
      case "blocked":
        return { status: ProjectStatus.PAUSED, issue: null };
      case "archived":
      case "maintenance":
        return { status: ProjectStatus.ARCHIVED, issue: null };
      default:
        return {
          status: ProjectStatus.PLANNING,
          issue: `Unknown project status "${rawStatus ?? "null"}" mapped to PLANNING`,
        };
    }
  }

  /**
   * Slugifies a free-form domain label (lowercase, runs of non-alphanumerics
   * collapsed to "-", edges trimmed). Returns null for empty/unusable labels.
   */
  private normalizeDomain(rawDomain: string | null | undefined): string | null {
    if (!rawDomain) {
      return null;
    }

    const trimmed = rawDomain.trim();
    if (trimmed.length === 0) {
      return null;
    }

    const slug = trimmed
      .toLowerCase()
      .replace(/[^a-z0-9]+/g, "-")
      .replace(/^-+|-+$/g, "");

    return slug.length > 0 ? slug : null;
  }

  /**
   * Parses a raw date string into a Date. Bare YYYY-MM-DD values are pinned
   * to UTC midnight to avoid timezone-dependent Date parsing. Invalid values
   * are reported into `errors` and yield null rather than throwing.
   */
  private normalizeDate(rawValue: string | null, context: string, errors: string[]): Date | null {
    if (!rawValue) {
      return null;
    }

    const trimmed = rawValue.trim();
    if (trimmed.length === 0) {
      return null;
    }

    const value = /^\d{4}-\d{2}-\d{2}$/.test(trimmed) ? `${trimmed}T00:00:00.000Z` : trimmed;
    const parsedDate = new Date(value);

    if (Number.isNaN(parsedDate.getTime())) {
      errors.push(`${context}: invalid date "${rawValue}"`);
      return null;
    }

    return parsedDate;
  }

  /** Narrows a plain record to Prisma's JSON input type (cast only, no validation). */
  private asJsonValue(value: Record<string, unknown>): Prisma.InputJsonValue {
    return value as Prisma.InputJsonValue;
  }

  /** Extracts a human-readable message from an unknown thrown value. */
  private getErrorMessage(error: unknown): string {
    if (error instanceof Error) {
      return error.message;
    }

    return String(error);
  }
}
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Controller, Get, Param, Query } from "@nestjs/common";
|
||||
import type { LlmUsageLog } from "@prisma/client";
|
||||
import { LlmUsageService } from "./llm-usage.service";
|
||||
import type { UsageAnalyticsQueryDto, UsageAnalyticsResponseDto } from "./dto";
|
||||
import { UsageAnalyticsQueryDto, UsageAnalyticsResponseDto } from "./dto";
|
||||
|
||||
/**
|
||||
* LLM Usage Controller
|
||||
|
||||
63
apps/api/src/onboarding/onboarding.controller.ts
Normal file
63
apps/api/src/onboarding/onboarding.controller.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { Body, Controller, Get, HttpCode, HttpStatus, Post, UseGuards } from "@nestjs/common";
|
||||
import {
|
||||
AddProviderDto,
|
||||
ConfigureOidcDto,
|
||||
CreateBreakglassDto,
|
||||
TestProviderDto,
|
||||
} from "./onboarding.dto";
|
||||
import { OnboardingGuard } from "./onboarding.guard";
|
||||
import { OnboardingService } from "./onboarding.service";
|
||||
|
||||
@Controller("onboarding")
|
||||
export class OnboardingController {
|
||||
constructor(private readonly onboardingService: OnboardingService) {}
|
||||
|
||||
// GET /api/onboarding/status — returns { completed: boolean }
|
||||
@Get("status")
|
||||
async status(): Promise<{ completed: boolean }> {
|
||||
return {
|
||||
completed: await this.onboardingService.isCompleted(),
|
||||
};
|
||||
}
|
||||
|
||||
// POST /api/onboarding/breakglass — body: { username, password } → create admin
|
||||
@Post("breakglass")
|
||||
@UseGuards(OnboardingGuard)
|
||||
async createBreakglass(
|
||||
@Body() body: CreateBreakglassDto
|
||||
): Promise<{ id: string; username: string }> {
|
||||
return this.onboardingService.createBreakglassUser(body.username, body.password);
|
||||
}
|
||||
|
||||
// POST /api/onboarding/oidc — body: { issuerUrl, clientId, clientSecret } → save OIDC
|
||||
@Post("oidc")
|
||||
@UseGuards(OnboardingGuard)
|
||||
@HttpCode(HttpStatus.NO_CONTENT)
|
||||
async configureOidc(@Body() body: ConfigureOidcDto): Promise<void> {
|
||||
await this.onboardingService.configureOidc(body.issuerUrl, body.clientId, body.clientSecret);
|
||||
}
|
||||
|
||||
// POST /api/onboarding/provider — body: { name, displayName, type, baseUrl?, apiKey?, models? } → add provider
|
||||
@Post("provider")
|
||||
@UseGuards(OnboardingGuard)
|
||||
async addProvider(@Body() body: AddProviderDto): Promise<{ id: string }> {
|
||||
const userId = await this.onboardingService.getBreakglassUserId();
|
||||
|
||||
return this.onboardingService.addProvider(userId, body);
|
||||
}
|
||||
|
||||
// POST /api/onboarding/provider/test — body: { type, baseUrl?, apiKey? } → test connection
|
||||
@Post("provider/test")
|
||||
@UseGuards(OnboardingGuard)
|
||||
async testProvider(@Body() body: TestProviderDto): Promise<{ success: boolean; error?: string }> {
|
||||
return this.onboardingService.testProvider(body.type, body.baseUrl, body.apiKey);
|
||||
}
|
||||
|
||||
// POST /api/onboarding/complete — mark done
|
||||
@Post("complete")
|
||||
@UseGuards(OnboardingGuard)
|
||||
@HttpCode(HttpStatus.NO_CONTENT)
|
||||
async complete(): Promise<void> {
|
||||
await this.onboardingService.complete();
|
||||
}
|
||||
}
|
||||
71
apps/api/src/onboarding/onboarding.dto.ts
Normal file
71
apps/api/src/onboarding/onboarding.dto.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import { Type } from "class-transformer";
|
||||
import { IsArray, IsOptional, IsString, IsUrl, MinLength, ValidateNested } from "class-validator";
|
||||
|
||||
export class CreateBreakglassDto {
|
||||
@IsString()
|
||||
@MinLength(3)
|
||||
username!: string;
|
||||
|
||||
@IsString()
|
||||
@MinLength(8)
|
||||
password!: string;
|
||||
}
|
||||
|
||||
export class ConfigureOidcDto {
|
||||
@IsString()
|
||||
@IsUrl({ require_tld: false })
|
||||
issuerUrl!: string;
|
||||
|
||||
@IsString()
|
||||
clientId!: string;
|
||||
|
||||
@IsString()
|
||||
clientSecret!: string;
|
||||
}
|
||||
|
||||
export class ProviderModelDto {
|
||||
@IsString()
|
||||
id!: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
name?: string;
|
||||
}
|
||||
|
||||
export class AddProviderDto {
|
||||
@IsString()
|
||||
name!: string;
|
||||
|
||||
@IsString()
|
||||
displayName!: string;
|
||||
|
||||
@IsString()
|
||||
type!: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
baseUrl?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
apiKey?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsArray()
|
||||
@ValidateNested({ each: true })
|
||||
@Type(() => ProviderModelDto)
|
||||
models?: ProviderModelDto[];
|
||||
}
|
||||
|
||||
export class TestProviderDto {
|
||||
@IsString()
|
||||
type!: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
baseUrl?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
apiKey?: string;
|
||||
}
|
||||
17
apps/api/src/onboarding/onboarding.guard.ts
Normal file
17
apps/api/src/onboarding/onboarding.guard.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { CanActivate, ExecutionContext, ForbiddenException, Injectable } from "@nestjs/common";
|
||||
import { OnboardingService } from "./onboarding.service";
|
||||
|
||||
@Injectable()
|
||||
export class OnboardingGuard implements CanActivate {
|
||||
constructor(private readonly onboardingService: OnboardingService) {}
|
||||
|
||||
async canActivate(_context: ExecutionContext): Promise<boolean> {
|
||||
const completed = await this.onboardingService.isCompleted();
|
||||
|
||||
if (completed) {
|
||||
throw new ForbiddenException("Onboarding already completed");
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
15
apps/api/src/onboarding/onboarding.module.ts
Normal file
15
apps/api/src/onboarding/onboarding.module.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { ConfigModule } from "@nestjs/config";
|
||||
import { PrismaModule } from "../prisma/prisma.module";
|
||||
import { CryptoModule } from "../crypto/crypto.module";
|
||||
import { OnboardingController } from "./onboarding.controller";
|
||||
import { OnboardingService } from "./onboarding.service";
|
||||
import { OnboardingGuard } from "./onboarding.guard";
|
||||
|
||||
@Module({
|
||||
imports: [PrismaModule, CryptoModule, ConfigModule],
|
||||
controllers: [OnboardingController],
|
||||
providers: [OnboardingService, OnboardingGuard],
|
||||
exports: [OnboardingService],
|
||||
})
|
||||
export class OnboardingModule {}
|
||||
206
apps/api/src/onboarding/onboarding.service.spec.ts
Normal file
206
apps/api/src/onboarding/onboarding.service.spec.ts
Normal file
@@ -0,0 +1,206 @@
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { hash } from "bcryptjs";
|
||||
import { OnboardingService } from "./onboarding.service";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { CryptoService } from "../crypto/crypto.service";
|
||||
|
||||
vi.mock("bcryptjs", () => ({
|
||||
hash: vi.fn(),
|
||||
}));
|
||||
|
||||
describe("OnboardingService", () => {
|
||||
let service: OnboardingService;
|
||||
|
||||
const mockPrismaService = {
|
||||
systemConfig: {
|
||||
findUnique: vi.fn(),
|
||||
upsert: vi.fn(),
|
||||
},
|
||||
breakglassUser: {
|
||||
count: vi.fn(),
|
||||
create: vi.fn(),
|
||||
findFirst: vi.fn(),
|
||||
},
|
||||
llmProvider: {
|
||||
create: vi.fn(),
|
||||
},
|
||||
};
|
||||
|
||||
const mockCryptoService = {
|
||||
encrypt: vi.fn(),
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
service = new OnboardingService(
|
||||
mockPrismaService as unknown as PrismaService,
|
||||
mockCryptoService as unknown as CryptoService
|
||||
);
|
||||
});
|
||||
|
||||
it("isCompleted returns false when no config exists", async () => {
|
||||
mockPrismaService.systemConfig.findUnique.mockResolvedValue(null);
|
||||
|
||||
await expect(service.isCompleted()).resolves.toBe(false);
|
||||
expect(mockPrismaService.systemConfig.findUnique).toHaveBeenCalledWith({
|
||||
where: { key: "onboarding.completed" },
|
||||
});
|
||||
});
|
||||
|
||||
it("isCompleted returns true when completed", async () => {
|
||||
mockPrismaService.systemConfig.findUnique.mockResolvedValue({
|
||||
id: "cfg-1",
|
||||
key: "onboarding.completed",
|
||||
value: "true",
|
||||
encrypted: false,
|
||||
updatedAt: new Date(),
|
||||
});
|
||||
|
||||
await expect(service.isCompleted()).resolves.toBe(true);
|
||||
});
|
||||
|
||||
it("createBreakglassUser hashes password and creates record", async () => {
|
||||
const mockedHash = vi.mocked(hash);
|
||||
mockedHash.mockResolvedValue("hashed-password");
|
||||
|
||||
mockPrismaService.breakglassUser.count.mockResolvedValue(0);
|
||||
mockPrismaService.breakglassUser.create.mockResolvedValue({
|
||||
id: "breakglass-1",
|
||||
username: "admin",
|
||||
});
|
||||
|
||||
const result = await service.createBreakglassUser("admin", "supersecret123");
|
||||
|
||||
expect(mockedHash).toHaveBeenCalledWith("supersecret123", 12);
|
||||
expect(mockPrismaService.breakglassUser.create).toHaveBeenCalledWith({
|
||||
data: {
|
||||
username: "admin",
|
||||
passwordHash: "hashed-password",
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
username: true,
|
||||
},
|
||||
});
|
||||
expect(result).toEqual({ id: "breakglass-1", username: "admin" });
|
||||
});
|
||||
|
||||
it("createBreakglassUser rejects if user already exists", async () => {
|
||||
mockPrismaService.breakglassUser.count.mockResolvedValue(1);
|
||||
|
||||
await expect(service.createBreakglassUser("admin", "supersecret123")).rejects.toThrow(
|
||||
"Breakglass user already exists"
|
||||
);
|
||||
});
|
||||
|
||||
it("configureOidc encrypts secret and saves to SystemConfig", async () => {
|
||||
mockCryptoService.encrypt.mockReturnValue("enc:oidc-secret");
|
||||
mockPrismaService.systemConfig.upsert.mockResolvedValue({
|
||||
id: "cfg",
|
||||
key: "oidc.clientSecret",
|
||||
value: "enc:oidc-secret",
|
||||
encrypted: true,
|
||||
updatedAt: new Date(),
|
||||
});
|
||||
|
||||
await service.configureOidc("https://auth.example.com", "client-id", "client-secret");
|
||||
|
||||
expect(mockCryptoService.encrypt).toHaveBeenCalledWith("client-secret");
|
||||
expect(mockPrismaService.systemConfig.upsert).toHaveBeenCalledTimes(3);
|
||||
expect(mockPrismaService.systemConfig.upsert).toHaveBeenCalledWith({
|
||||
where: { key: "oidc.issuerUrl" },
|
||||
create: {
|
||||
key: "oidc.issuerUrl",
|
||||
value: "https://auth.example.com",
|
||||
encrypted: false,
|
||||
},
|
||||
update: {
|
||||
value: "https://auth.example.com",
|
||||
encrypted: false,
|
||||
},
|
||||
});
|
||||
expect(mockPrismaService.systemConfig.upsert).toHaveBeenCalledWith({
|
||||
where: { key: "oidc.clientId" },
|
||||
create: {
|
||||
key: "oidc.clientId",
|
||||
value: "client-id",
|
||||
encrypted: false,
|
||||
},
|
||||
update: {
|
||||
value: "client-id",
|
||||
encrypted: false,
|
||||
},
|
||||
});
|
||||
expect(mockPrismaService.systemConfig.upsert).toHaveBeenCalledWith({
|
||||
where: { key: "oidc.clientSecret" },
|
||||
create: {
|
||||
key: "oidc.clientSecret",
|
||||
value: "enc:oidc-secret",
|
||||
encrypted: true,
|
||||
},
|
||||
update: {
|
||||
value: "enc:oidc-secret",
|
||||
encrypted: true,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("addProvider encrypts apiKey and creates LlmProvider", async () => {
|
||||
mockCryptoService.encrypt.mockReturnValue("enc:api-key");
|
||||
mockPrismaService.llmProvider.create.mockResolvedValue({
|
||||
id: "provider-1",
|
||||
});
|
||||
|
||||
const result = await service.addProvider("breakglass-1", {
|
||||
name: "my-openai",
|
||||
displayName: "OpenAI",
|
||||
type: "openai",
|
||||
baseUrl: "https://api.openai.com/v1",
|
||||
apiKey: "sk-test",
|
||||
models: [{ id: "gpt-4o-mini", name: "GPT-4o Mini" }],
|
||||
});
|
||||
|
||||
expect(mockCryptoService.encrypt).toHaveBeenCalledWith("sk-test");
|
||||
expect(mockPrismaService.llmProvider.create).toHaveBeenCalledWith({
|
||||
data: {
|
||||
userId: "breakglass-1",
|
||||
name: "my-openai",
|
||||
displayName: "OpenAI",
|
||||
type: "openai",
|
||||
baseUrl: "https://api.openai.com/v1",
|
||||
apiKey: "enc:api-key",
|
||||
models: [{ id: "gpt-4o-mini", name: "GPT-4o Mini" }],
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
},
|
||||
});
|
||||
expect(result).toEqual({ id: "provider-1" });
|
||||
});
|
||||
|
||||
it("complete sets SystemConfig flag", async () => {
|
||||
mockPrismaService.systemConfig.upsert.mockResolvedValue({
|
||||
id: "cfg-1",
|
||||
key: "onboarding.completed",
|
||||
value: "true",
|
||||
encrypted: false,
|
||||
updatedAt: new Date(),
|
||||
});
|
||||
|
||||
await service.complete();
|
||||
|
||||
expect(mockPrismaService.systemConfig.upsert).toHaveBeenCalledWith({
|
||||
where: { key: "onboarding.completed" },
|
||||
create: {
|
||||
key: "onboarding.completed",
|
||||
value: "true",
|
||||
encrypted: false,
|
||||
},
|
||||
update: {
|
||||
value: "true",
|
||||
encrypted: false,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
191
apps/api/src/onboarding/onboarding.service.ts
Normal file
191
apps/api/src/onboarding/onboarding.service.ts
Normal file
@@ -0,0 +1,191 @@
|
||||
import { BadRequestException, ConflictException, Injectable } from "@nestjs/common";
|
||||
import type { InputJsonValue } from "@prisma/client/runtime/library";
|
||||
import { hash } from "bcryptjs";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { CryptoService } from "../crypto/crypto.service";
|
||||
|
||||
const BCRYPT_ROUNDS = 12;
|
||||
const TEST_PROVIDER_TIMEOUT_MS = 8000;
|
||||
|
||||
const ONBOARDING_COMPLETED_KEY = "onboarding.completed";
|
||||
const OIDC_ISSUER_URL_KEY = "oidc.issuerUrl";
|
||||
const OIDC_CLIENT_ID_KEY = "oidc.clientId";
|
||||
const OIDC_CLIENT_SECRET_KEY = "oidc.clientSecret";
|
||||
|
||||
interface ProviderModelInput {
|
||||
id: string;
|
||||
name?: string;
|
||||
}
|
||||
|
||||
interface AddProviderInput {
|
||||
name: string;
|
||||
displayName: string;
|
||||
type: string;
|
||||
baseUrl?: string;
|
||||
apiKey?: string;
|
||||
models?: ProviderModelInput[];
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
export class OnboardingService {
|
||||
constructor(
|
||||
private readonly prisma: PrismaService,
|
||||
private readonly crypto: CryptoService
|
||||
) {}
|
||||
|
||||
// Check if onboarding is completed
|
||||
async isCompleted(): Promise<boolean> {
|
||||
const completedFlag = await this.prisma.systemConfig.findUnique({
|
||||
where: { key: ONBOARDING_COMPLETED_KEY },
|
||||
});
|
||||
|
||||
return completedFlag?.value === "true";
|
||||
}
|
||||
|
||||
// Step 1: Create breakglass admin user
|
||||
async createBreakglassUser(
|
||||
username: string,
|
||||
password: string
|
||||
): Promise<{ id: string; username: string }> {
|
||||
const breakglassCount = await this.prisma.breakglassUser.count();
|
||||
if (breakglassCount > 0) {
|
||||
throw new ConflictException("Breakglass user already exists");
|
||||
}
|
||||
|
||||
const passwordHash = await hash(password, BCRYPT_ROUNDS);
|
||||
|
||||
return this.prisma.breakglassUser.create({
|
||||
data: {
|
||||
username,
|
||||
passwordHash,
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
username: true,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Step 2: Configure OIDC provider (optional)
|
||||
async configureOidc(issuerUrl: string, clientId: string, clientSecret: string): Promise<void> {
|
||||
const encryptedSecret = this.crypto.encrypt(clientSecret);
|
||||
|
||||
await Promise.all([
|
||||
this.upsertSystemConfig(OIDC_ISSUER_URL_KEY, issuerUrl, false),
|
||||
this.upsertSystemConfig(OIDC_CLIENT_ID_KEY, clientId, false),
|
||||
this.upsertSystemConfig(OIDC_CLIENT_SECRET_KEY, encryptedSecret, true),
|
||||
]);
|
||||
}
|
||||
|
||||
// Step 3: Add first LLM provider
|
||||
async addProvider(userId: string, data: AddProviderInput): Promise<{ id: string }> {
|
||||
const encryptedApiKey = data.apiKey ? this.crypto.encrypt(data.apiKey) : undefined;
|
||||
|
||||
return this.prisma.llmProvider.create({
|
||||
data: {
|
||||
userId,
|
||||
name: data.name,
|
||||
displayName: data.displayName,
|
||||
type: data.type,
|
||||
baseUrl: data.baseUrl ?? null,
|
||||
apiKey: encryptedApiKey ?? null,
|
||||
models: (data.models ?? []) as unknown as InputJsonValue,
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Step 3b: Test LLM provider connection
|
||||
async testProvider(
|
||||
type: string,
|
||||
baseUrl?: string,
|
||||
apiKey?: string
|
||||
): Promise<{ success: boolean; error?: string }> {
|
||||
const normalizedType = type.trim().toLowerCase();
|
||||
if (!normalizedType) {
|
||||
return { success: false, error: "Provider type is required" };
|
||||
}
|
||||
|
||||
let probeUrl: string;
|
||||
try {
|
||||
probeUrl = this.buildProbeUrl(normalizedType, baseUrl);
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
return { success: false, error: message };
|
||||
}
|
||||
|
||||
const headers: Record<string, string> = {
|
||||
Accept: "application/json",
|
||||
};
|
||||
if (apiKey) {
|
||||
headers.Authorization = `Bearer ${apiKey}`;
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(probeUrl, {
|
||||
method: "GET",
|
||||
headers,
|
||||
signal: AbortSignal.timeout(TEST_PROVIDER_TIMEOUT_MS),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return {
|
||||
success: false,
|
||||
error: `Provider returned ${String(response.status)} ${response.statusText}`.trim(),
|
||||
};
|
||||
}
|
||||
|
||||
return { success: true };
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
return { success: false, error: message };
|
||||
}
|
||||
}
|
||||
|
||||
// Step 4: Mark onboarding complete
|
||||
async complete(): Promise<void> {
|
||||
await this.upsertSystemConfig(ONBOARDING_COMPLETED_KEY, "true", false);
|
||||
}
|
||||
|
||||
async getBreakglassUserId(): Promise<string> {
|
||||
const user = await this.prisma.breakglassUser.findFirst({
|
||||
where: { isActive: true },
|
||||
orderBy: { createdAt: "asc" },
|
||||
select: { id: true },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
throw new BadRequestException("Create a breakglass user before adding a provider");
|
||||
}
|
||||
|
||||
return user.id;
|
||||
}
|
||||
|
||||
private async upsertSystemConfig(key: string, value: string, encrypted: boolean): Promise<void> {
|
||||
await this.prisma.systemConfig.upsert({
|
||||
where: { key },
|
||||
create: { key, value, encrypted },
|
||||
update: { value, encrypted },
|
||||
});
|
||||
}
|
||||
|
||||
private buildProbeUrl(type: string, baseUrl?: string): string {
|
||||
const resolvedBaseUrl = baseUrl ?? this.getDefaultProviderBaseUrl(type);
|
||||
const normalizedBaseUrl = resolvedBaseUrl.endsWith("/")
|
||||
? resolvedBaseUrl
|
||||
: `${resolvedBaseUrl}/`;
|
||||
const endpointPath = type === "ollama" ? "api/tags" : "models";
|
||||
|
||||
return new URL(endpointPath, normalizedBaseUrl).toString();
|
||||
}
|
||||
|
||||
private getDefaultProviderBaseUrl(type: string): string {
|
||||
if (type === "ollama") {
|
||||
return "http://localhost:11434";
|
||||
}
|
||||
|
||||
return "https://api.openai.com/v1";
|
||||
}
|
||||
}
|
||||
194
apps/api/src/orchestrator/orchestrator.controller.spec.ts
Normal file
194
apps/api/src/orchestrator/orchestrator.controller.spec.ts
Normal file
@@ -0,0 +1,194 @@
|
||||
import { beforeEach, describe, expect, it, vi, afterEach } from "vitest";
|
||||
import type { Response } from "express";
|
||||
import { AgentStatus } from "@prisma/client";
|
||||
import { OrchestratorController } from "./orchestrator.controller";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
|
||||
describe("OrchestratorController", () => {
|
||||
const mockPrismaService = {
|
||||
agent: {
|
||||
findMany: vi.fn(),
|
||||
},
|
||||
};
|
||||
|
||||
let controller: OrchestratorController;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
controller = new OrchestratorController(mockPrismaService as unknown as PrismaService);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
describe("getAgents", () => {
|
||||
it("returns active agents with API widget shape", async () => {
|
||||
mockPrismaService.agent.findMany.mockResolvedValue([
|
||||
{
|
||||
id: "agent-1",
|
||||
name: "Planner",
|
||||
status: AgentStatus.WORKING,
|
||||
role: "planner",
|
||||
createdAt: new Date("2026-02-28T10:00:00.000Z"),
|
||||
},
|
||||
]);
|
||||
|
||||
const result = await controller.getAgents();
|
||||
|
||||
expect(result).toEqual([
|
||||
{
|
||||
id: "agent-1",
|
||||
name: "Planner",
|
||||
status: AgentStatus.WORKING,
|
||||
type: "planner",
|
||||
createdAt: new Date("2026-02-28T10:00:00.000Z"),
|
||||
},
|
||||
]);
|
||||
|
||||
expect(mockPrismaService.agent.findMany).toHaveBeenCalledWith({
|
||||
where: {
|
||||
status: {
|
||||
not: AgentStatus.TERMINATED,
|
||||
},
|
||||
},
|
||||
orderBy: {
|
||||
createdAt: "desc",
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
status: true,
|
||||
role: true,
|
||||
createdAt: true,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("falls back to type=agent when role is missing", async () => {
|
||||
mockPrismaService.agent.findMany.mockResolvedValue([
|
||||
{
|
||||
id: "agent-2",
|
||||
name: null,
|
||||
status: AgentStatus.IDLE,
|
||||
role: null,
|
||||
createdAt: new Date("2026-02-28T11:00:00.000Z"),
|
||||
},
|
||||
]);
|
||||
|
||||
const result = await controller.getAgents();
|
||||
|
||||
expect(result[0]).toMatchObject({
|
||||
id: "agent-2",
|
||||
type: "agent",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("streamEvents", () => {
|
||||
it("sets SSE headers and writes initial data payload", async () => {
|
||||
const onHandlers: Record<string, (() => void) | undefined> = {};
|
||||
const mockRes = {
|
||||
setHeader: vi.fn(),
|
||||
write: vi.fn(),
|
||||
end: vi.fn(),
|
||||
on: vi.fn((event: string, handler: () => void) => {
|
||||
onHandlers[event] = handler;
|
||||
return mockRes;
|
||||
}),
|
||||
} as unknown as Response;
|
||||
|
||||
mockPrismaService.agent.findMany.mockResolvedValue([
|
||||
{
|
||||
id: "agent-1",
|
||||
name: "Worker",
|
||||
status: AgentStatus.WORKING,
|
||||
role: "worker",
|
||||
createdAt: new Date("2026-02-28T12:00:00.000Z"),
|
||||
},
|
||||
]);
|
||||
|
||||
await controller.streamEvents(mockRes);
|
||||
|
||||
expect(mockRes.setHeader).toHaveBeenCalledWith("Content-Type", "text/event-stream");
|
||||
expect(mockRes.setHeader).toHaveBeenCalledWith("Cache-Control", "no-cache");
|
||||
expect(mockRes.setHeader).toHaveBeenCalledWith("Connection", "keep-alive");
|
||||
expect(mockRes.setHeader).toHaveBeenCalledWith("X-Accel-Buffering", "no");
|
||||
|
||||
expect(mockRes.write).toHaveBeenCalledWith(
|
||||
expect.stringContaining('"type":"agents:updated"')
|
||||
);
|
||||
expect(typeof onHandlers.close).toBe("function");
|
||||
});
|
||||
|
||||
it("polls every 5 seconds and only emits when payload changes", async () => {
|
||||
vi.useFakeTimers();
|
||||
|
||||
const onHandlers: Record<string, (() => void) | undefined> = {};
|
||||
const mockRes = {
|
||||
setHeader: vi.fn(),
|
||||
write: vi.fn(),
|
||||
end: vi.fn(),
|
||||
on: vi.fn((event: string, handler: () => void) => {
|
||||
onHandlers[event] = handler;
|
||||
return mockRes;
|
||||
}),
|
||||
} as unknown as Response;
|
||||
|
||||
const firstPayload = [
|
||||
{
|
||||
id: "agent-1",
|
||||
name: "Worker",
|
||||
status: AgentStatus.WORKING,
|
||||
role: "worker",
|
||||
createdAt: new Date("2026-02-28T12:00:00.000Z"),
|
||||
},
|
||||
];
|
||||
const secondPayload = [
|
||||
{
|
||||
id: "agent-1",
|
||||
name: "Worker",
|
||||
status: AgentStatus.WAITING,
|
||||
role: "worker",
|
||||
createdAt: new Date("2026-02-28T12:00:00.000Z"),
|
||||
},
|
||||
];
|
||||
|
||||
mockPrismaService.agent.findMany
|
||||
.mockResolvedValueOnce(firstPayload)
|
||||
.mockResolvedValueOnce(firstPayload)
|
||||
.mockResolvedValueOnce(secondPayload);
|
||||
|
||||
await controller.streamEvents(mockRes);
|
||||
|
||||
// 1 initial data event
|
||||
const getDataEventCalls = () =>
|
||||
mockRes.write.mock.calls.filter(
|
||||
(call) => typeof call[0] === "string" && call[0].startsWith("data: ")
|
||||
);
|
||||
|
||||
expect(getDataEventCalls()).toHaveLength(1);
|
||||
|
||||
// No change after first poll => no new data event
|
||||
await vi.advanceTimersByTimeAsync(5000);
|
||||
expect(getDataEventCalls()).toHaveLength(1);
|
||||
|
||||
// Status changed on second poll => emits new data event
|
||||
await vi.advanceTimersByTimeAsync(5000);
|
||||
expect(getDataEventCalls()).toHaveLength(2);
|
||||
|
||||
onHandlers.close?.();
|
||||
expect(mockRes.end).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("security", () => {
|
||||
it("uses AuthGuard at the controller level", () => {
|
||||
const guards = Reflect.getMetadata("__guards__", OrchestratorController) as unknown[];
|
||||
const guardClasses = guards.map((guard) => guard);
|
||||
|
||||
expect(guardClasses).toContain(AuthGuard);
|
||||
});
|
||||
});
|
||||
});
|
||||
115
apps/api/src/orchestrator/orchestrator.controller.ts
Normal file
115
apps/api/src/orchestrator/orchestrator.controller.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
import { Controller, Get, Res, UseGuards } from "@nestjs/common";
|
||||
import { AgentStatus } from "@prisma/client";
|
||||
import type { Response } from "express";
|
||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
|
||||
const AGENT_POLL_INTERVAL_MS = 5_000;
|
||||
const SSE_HEARTBEAT_MS = 15_000;
|
||||
|
||||
interface OrchestratorAgentDto {
|
||||
id: string;
|
||||
name: string | null;
|
||||
status: AgentStatus;
|
||||
type: string;
|
||||
createdAt: Date;
|
||||
}
|
||||
|
||||
@Controller("orchestrator")
|
||||
@UseGuards(AuthGuard)
|
||||
export class OrchestratorController {
|
||||
constructor(private readonly prisma: PrismaService) {}
|
||||
|
||||
@Get("agents")
|
||||
async getAgents(): Promise<OrchestratorAgentDto[]> {
|
||||
return this.fetchActiveAgents();
|
||||
}
|
||||
|
||||
@Get("events")
|
||||
async streamEvents(@Res() res: Response): Promise<void> {
|
||||
res.setHeader("Content-Type", "text/event-stream");
|
||||
res.setHeader("Cache-Control", "no-cache");
|
||||
res.setHeader("Connection", "keep-alive");
|
||||
res.setHeader("X-Accel-Buffering", "no");
|
||||
|
||||
if (typeof res.flushHeaders === "function") {
|
||||
res.flushHeaders();
|
||||
}
|
||||
|
||||
let isClosed = false;
|
||||
let previousSnapshot = "";
|
||||
|
||||
const emitSnapshotIfChanged = async (): Promise<void> => {
|
||||
if (isClosed) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const agents = await this.fetchActiveAgents();
|
||||
const snapshot = JSON.stringify(agents);
|
||||
|
||||
if (snapshot !== previousSnapshot) {
|
||||
previousSnapshot = snapshot;
|
||||
res.write(
|
||||
`data: ${JSON.stringify({
|
||||
type: "agents:updated",
|
||||
agents,
|
||||
timestamp: new Date().toISOString(),
|
||||
})}\n\n`
|
||||
);
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
res.write(`event: error\n`);
|
||||
res.write(`data: ${JSON.stringify({ error: message })}\n\n`);
|
||||
}
|
||||
};
|
||||
|
||||
await emitSnapshotIfChanged();
|
||||
|
||||
const pollInterval = setInterval(() => {
|
||||
void emitSnapshotIfChanged();
|
||||
}, AGENT_POLL_INTERVAL_MS);
|
||||
|
||||
const heartbeatInterval = setInterval(() => {
|
||||
if (!isClosed) {
|
||||
res.write(": keepalive\n\n");
|
||||
}
|
||||
}, SSE_HEARTBEAT_MS);
|
||||
|
||||
res.on("close", () => {
|
||||
isClosed = true;
|
||||
clearInterval(pollInterval);
|
||||
clearInterval(heartbeatInterval);
|
||||
res.end();
|
||||
});
|
||||
}
|
||||
|
||||
private async fetchActiveAgents(): Promise<OrchestratorAgentDto[]> {
|
||||
const agents = await this.prisma.agent.findMany({
|
||||
where: {
|
||||
status: {
|
||||
not: AgentStatus.TERMINATED,
|
||||
},
|
||||
},
|
||||
orderBy: {
|
||||
createdAt: "desc",
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
name: true,
|
||||
status: true,
|
||||
role: true,
|
||||
createdAt: true,
|
||||
},
|
||||
});
|
||||
|
||||
return agents.map((agent) => ({
|
||||
id: agent.id,
|
||||
name: agent.name,
|
||||
status: agent.status,
|
||||
type: agent.role ?? "agent",
|
||||
createdAt: agent.createdAt,
|
||||
}));
|
||||
}
|
||||
}
|
||||
10
apps/api/src/orchestrator/orchestrator.module.ts
Normal file
10
apps/api/src/orchestrator/orchestrator.module.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { AuthModule } from "../auth/auth.module";
|
||||
import { PrismaModule } from "../prisma/prisma.module";
|
||||
import { OrchestratorController } from "./orchestrator.controller";
|
||||
|
||||
@Module({
|
||||
imports: [AuthModule, PrismaModule],
|
||||
controllers: [OrchestratorController],
|
||||
})
|
||||
export class OrchestratorModule {}
|
||||
@@ -66,7 +66,9 @@ interface StartTranscriptionPayload {
|
||||
@WSGateway({
|
||||
namespace: "/speech",
|
||||
cors: {
|
||||
origin: process.env.WEB_URL ?? "http://localhost:3000",
|
||||
origin: (process.env.TRUSTED_ORIGINS ?? process.env.WEB_URL ?? "http://localhost:3000")
|
||||
.split(",")
|
||||
.map((s) => s.trim()),
|
||||
credentials: true,
|
||||
},
|
||||
})
|
||||
|
||||
@@ -50,6 +50,12 @@ export class CreateTaskDto {
|
||||
@IsUUID("4", { message: "parentId must be a valid UUID" })
|
||||
parentId?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString({ message: "assignedAgent must be a string" })
|
||||
@MinLength(1, { message: "assignedAgent must not be empty" })
|
||||
@MaxLength(255, { message: "assignedAgent must not exceed 255 characters" })
|
||||
assignedAgent?: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsInt({ message: "sortOrder must be an integer" })
|
||||
@Min(0, { message: "sortOrder must be at least 0" })
|
||||
|
||||
@@ -52,6 +52,12 @@ export class UpdateTaskDto {
|
||||
@IsUUID("4", { message: "parentId must be a valid UUID" })
|
||||
parentId?: string | null;
|
||||
|
||||
@IsOptional()
|
||||
@IsString({ message: "assignedAgent must be a string" })
|
||||
@MinLength(1, { message: "assignedAgent must not be empty" })
|
||||
@MaxLength(255, { message: "assignedAgent must not exceed 255 characters" })
|
||||
assignedAgent?: string | null;
|
||||
|
||||
@IsOptional()
|
||||
@IsInt({ message: "sortOrder must be an integer" })
|
||||
@Min(0, { message: "sortOrder must be at least 0" })
|
||||
|
||||
162
apps/api/src/tasks/tasks.assigned-agent.integration.spec.ts
Normal file
162
apps/api/src/tasks/tasks.assigned-agent.integration.spec.ts
Normal file
@@ -0,0 +1,162 @@
|
||||
import { beforeAll, beforeEach, describe, expect, it, afterAll, vi } from "vitest";
|
||||
import { randomUUID as uuid } from "crypto";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import { PrismaClient } from "@prisma/client";
|
||||
import { TasksService } from "./tasks.service";
|
||||
import { PrismaService } from "../prisma/prisma.service";
|
||||
import { ActivityService } from "../activity/activity.service";
|
||||
|
||||
const shouldRunDbIntegrationTests =
|
||||
process.env.RUN_DB_TESTS === "true" && Boolean(process.env.DATABASE_URL);
|
||||
const describeFn = shouldRunDbIntegrationTests ? describe : describe.skip;
|
||||
|
||||
describeFn("TasksService assignedAgent Integration", () => {
|
||||
let moduleRef: TestingModule;
|
||||
let prisma: PrismaClient;
|
||||
let service: TasksService;
|
||||
let workspaceId: string;
|
||||
let ownerId: string;
|
||||
let setupComplete = false;
|
||||
|
||||
const activityServiceMock = {
|
||||
logTaskCreated: vi.fn().mockResolvedValue(undefined),
|
||||
logTaskUpdated: vi.fn().mockResolvedValue(undefined),
|
||||
logTaskDeleted: vi.fn().mockResolvedValue(undefined),
|
||||
logTaskCompleted: vi.fn().mockResolvedValue(undefined),
|
||||
logTaskAssigned: vi.fn().mockResolvedValue(undefined),
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
prisma = new PrismaClient();
|
||||
await prisma.$connect();
|
||||
|
||||
const workspace = await prisma.workspace.create({
|
||||
data: {
|
||||
name: `Tasks Assigned Agent Integration ${Date.now()}`,
|
||||
owner: {
|
||||
create: {
|
||||
email: `tasks-assigned-agent-integration-${Date.now()}@example.com`,
|
||||
name: "Tasks Assigned Agent Integration Owner",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
workspaceId = workspace.id;
|
||||
ownerId = workspace.ownerId;
|
||||
|
||||
moduleRef = await Test.createTestingModule({
|
||||
providers: [
|
||||
TasksService,
|
||||
{
|
||||
provide: PrismaService,
|
||||
useValue: prisma,
|
||||
},
|
||||
{
|
||||
provide: ActivityService,
|
||||
useValue: activityServiceMock,
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
service = moduleRef.get<TasksService>(TasksService);
|
||||
setupComplete = true;
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
if (!setupComplete) {
|
||||
return;
|
||||
}
|
||||
|
||||
await prisma.task.deleteMany({ where: { workspaceId } });
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
if (!prisma) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (workspaceId) {
|
||||
await prisma.task.deleteMany({ where: { workspaceId } });
|
||||
await prisma.workspace.deleteMany({ where: { id: workspaceId } });
|
||||
}
|
||||
if (ownerId) {
|
||||
await prisma.user.deleteMany({ where: { id: ownerId } });
|
||||
}
|
||||
|
||||
if (moduleRef) {
|
||||
await moduleRef.close();
|
||||
}
|
||||
await prisma.$disconnect();
|
||||
});
|
||||
|
||||
it("persists assignedAgent on create", async () => {
|
||||
if (!setupComplete) {
|
||||
return;
|
||||
}
|
||||
|
||||
const task = await service.create(workspaceId, ownerId, {
|
||||
title: `Assigned agent create ${uuid()}`,
|
||||
assignedAgent: "fleet-worker-1",
|
||||
});
|
||||
|
||||
expect(task.assignedAgent).toBe("fleet-worker-1");
|
||||
|
||||
const stored = await prisma.task.findUnique({
|
||||
where: {
|
||||
id: task.id,
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
assignedAgent: true,
|
||||
},
|
||||
});
|
||||
|
||||
expect(stored).toMatchObject({
|
||||
id: task.id,
|
||||
assignedAgent: "fleet-worker-1",
|
||||
});
|
||||
|
||||
const listed = await service.findAll({ workspaceId, page: 1, limit: 10 }, ownerId);
|
||||
const listedTask = listed.data.find((row) => row.id === task.id);
|
||||
|
||||
expect(listedTask?.assignedAgent).toBe("fleet-worker-1");
|
||||
});
|
||||
|
||||
it("updates and clears assignedAgent", async () => {
|
||||
if (!setupComplete) {
|
||||
return;
|
||||
}
|
||||
|
||||
const created = await service.create(workspaceId, ownerId, {
|
||||
title: `Assigned agent update ${uuid()}`,
|
||||
});
|
||||
|
||||
expect(created.assignedAgent).toBeNull();
|
||||
|
||||
const updated = await service.update(created.id, workspaceId, ownerId, {
|
||||
assignedAgent: "fleet-worker-2",
|
||||
});
|
||||
|
||||
expect(updated.assignedAgent).toBe("fleet-worker-2");
|
||||
|
||||
const cleared = await service.update(created.id, workspaceId, ownerId, {
|
||||
assignedAgent: null,
|
||||
});
|
||||
|
||||
expect(cleared.assignedAgent).toBeNull();
|
||||
|
||||
const stored = await prisma.task.findUnique({
|
||||
where: {
|
||||
id: created.id,
|
||||
},
|
||||
select: {
|
||||
assignedAgent: true,
|
||||
},
|
||||
});
|
||||
|
||||
expect(stored?.assignedAgent).toBeNull();
|
||||
});
|
||||
});
|
||||
@@ -48,6 +48,7 @@ describe("TasksService", () => {
|
||||
creatorId: mockUserId,
|
||||
projectId: null,
|
||||
parentId: null,
|
||||
assignedAgent: null,
|
||||
sortOrder: 0,
|
||||
metadata: {},
|
||||
createdAt: new Date(),
|
||||
@@ -158,6 +159,28 @@ describe("TasksService", () => {
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it("should include assignedAgent when provided", async () => {
|
||||
const createDto = {
|
||||
title: "Agent-owned Task",
|
||||
assignedAgent: "fleet-worker-1",
|
||||
};
|
||||
|
||||
mockPrismaService.task.create.mockResolvedValue({
|
||||
...mockTask,
|
||||
assignedAgent: createDto.assignedAgent,
|
||||
});
|
||||
|
||||
await service.create(mockWorkspaceId, mockUserId, createDto);
|
||||
|
||||
expect(prisma.task.create).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.objectContaining({
|
||||
assignedAgent: createDto.assignedAgent,
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("findAll", () => {
|
||||
@@ -469,6 +492,26 @@ describe("TasksService", () => {
|
||||
service.update(mockTaskId, mockWorkspaceId, mockUserId, { title: "Test" })
|
||||
).rejects.toThrow(NotFoundException);
|
||||
});
|
||||
|
||||
it("should update assignedAgent when provided", async () => {
|
||||
const updateDto = { assignedAgent: "fleet-worker-2" };
|
||||
|
||||
mockPrismaService.task.findUnique.mockResolvedValue(mockTask);
|
||||
mockPrismaService.task.update.mockResolvedValue({
|
||||
...mockTask,
|
||||
assignedAgent: updateDto.assignedAgent,
|
||||
});
|
||||
|
||||
await service.update(mockTaskId, mockWorkspaceId, mockUserId, updateDto);
|
||||
|
||||
expect(prisma.task.update).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.objectContaining({
|
||||
assignedAgent: updateDto.assignedAgent,
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("remove", () => {
|
||||
|
||||
@@ -67,6 +67,9 @@ export class TasksService {
|
||||
metadata: createTaskDto.metadata
|
||||
? (createTaskDto.metadata as unknown as Prisma.InputJsonValue)
|
||||
: {},
|
||||
...(createTaskDto.assignedAgent !== undefined && {
|
||||
assignedAgent: createTaskDto.assignedAgent,
|
||||
}),
|
||||
...(assigneeConnection && { assignee: assigneeConnection }),
|
||||
...(projectConnection && { project: projectConnection }),
|
||||
...(parentConnection && { parent: parentConnection }),
|
||||
@@ -291,6 +294,9 @@ export class TasksService {
|
||||
if (updateTaskDto.parentId !== undefined && updateTaskDto.parentId !== null) {
|
||||
data.parent = { connect: { id: updateTaskDto.parentId } };
|
||||
}
|
||||
if (updateTaskDto.assignedAgent !== undefined) {
|
||||
data.assignedAgent = updateTaskDto.assignedAgent;
|
||||
}
|
||||
|
||||
// Handle completedAt based on status changes
|
||||
if (updateTaskDto.status) {
|
||||
|
||||
@@ -63,7 +63,9 @@ interface AuthenticatedSocket extends Socket {
|
||||
@WSGateway({
|
||||
namespace: "/terminal",
|
||||
cors: {
|
||||
origin: process.env.WEB_URL ?? "http://localhost:3000",
|
||||
origin: (process.env.TRUSTED_ORIGINS ?? process.env.WEB_URL ?? "http://localhost:3000")
|
||||
.split(",")
|
||||
.map((s) => s.trim()),
|
||||
credentials: true,
|
||||
},
|
||||
})
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user