Compare commits
86 Commits
feat/ms19-
...
feat/ms22-
| Author | SHA1 | Date | |
|---|---|---|---|
| 496244c8ef | |||
| a3a0d7afca | |||
| ab2b68c93c | |||
| c1ec0ad7ef | |||
| e5b772f7cb | |||
| 7a46c81897 | |||
| 3688f89c37 | |||
| e59e517d5c | |||
| fab833a710 | |||
| 4294deda49 | |||
| 2fe858d61a | |||
| 512a29a240 | |||
| 8ea3c3ee67 | |||
| c4a6be5b6b | |||
| f4c1c9d816 | |||
| ac67697fe4 | |||
| 6521f655a8 | |||
| 0e74b03d9c | |||
| a925f91062 | |||
| 7106512fa9 | |||
| 1df20f0e13 | |||
| 8dab20c022 | |||
| 7073057e8d | |||
| 5e7346adc7 | |||
| d07a840f25 | |||
| 4b2e48af9c | |||
| 7b390d8be2 | |||
| e8502577b8 | |||
| af68f84dcd | |||
| b57f549d39 | |||
| 2c8d0a8daf | |||
| c939a541a7 | |||
| 895ea7fd14 | |||
| e93e7ffaa9 | |||
| 307639eca0 | |||
| 31814f181a | |||
| 5cd6b8622d | |||
| 20c9e68e1b | |||
| 127bf61fe2 | |||
| f99107fbfc | |||
| 5b782bafc9 | |||
| 85d3f930f3 | |||
| 0e6734bdae | |||
| 5bcaaeddd9 | |||
| 676a2a288b | |||
| ac16d6ed88 | |||
| 8388d49786 | |||
| 20f914ea85 | |||
| 1b84741f1a | |||
| ffc10c9a45 | |||
| 62d9ac0e5a | |||
| 8098504fb8 | |||
| 128431ba58 | |||
| d2c51eda91 | |||
| 78b643a945 | |||
| f93503ebcf | |||
| c0e679ab7c | |||
| 6ac63fe755 | |||
| 1667f28d71 | |||
| 66fe475fa1 | |||
| d39ab6aafc | |||
| 147e8ac574 | |||
| c38bfae16c | |||
| 36b4d8323d | |||
| 833662a64f | |||
| b3922e1d5b | |||
| 78b71a0ecc | |||
| dd0568cf15 | |||
| 8964226163 | |||
| 11f22a7e96 | |||
| edcff6a0e0 | |||
| e3cba37e8c | |||
| 21bf7e050f | |||
| 83d5aee53a | |||
| cc5b108b2f | |||
| 5ed0a859da | |||
| bf299bb672 | |||
| ad99cb9a03 | |||
| d05b870f08 | |||
| 1aaf5618ce | |||
| 9b2520ce1f | |||
| b110c469c4 | |||
| 859dcfc4b7 | |||
| 13aa52aa53 | |||
| 417c6ab49c | |||
| 8128eb7fbe |
30
.env.example
30
.env.example
@@ -79,7 +79,7 @@ OIDC_CLIENT_ID=your-client-id-here
|
|||||||
OIDC_CLIENT_SECRET=your-client-secret-here
|
OIDC_CLIENT_SECRET=your-client-secret-here
|
||||||
# Redirect URI must match what's configured in Authentik
|
# Redirect URI must match what's configured in Authentik
|
||||||
# Development: http://localhost:3001/auth/oauth2/callback/authentik
|
# Development: http://localhost:3001/auth/oauth2/callback/authentik
|
||||||
# Production: https://api.mosaicstack.dev/auth/oauth2/callback/authentik
|
# Production: https://mosaic-api.woltje.com/auth/oauth2/callback/authentik
|
||||||
OIDC_REDIRECT_URI=http://localhost:3001/auth/oauth2/callback/authentik
|
OIDC_REDIRECT_URI=http://localhost:3001/auth/oauth2/callback/authentik
|
||||||
|
|
||||||
# Authentik PostgreSQL Database
|
# Authentik PostgreSQL Database
|
||||||
@@ -314,17 +314,19 @@ COORDINATOR_ENABLED=true
|
|||||||
# TTL is in seconds, limits are per TTL window
|
# TTL is in seconds, limits are per TTL window
|
||||||
|
|
||||||
# Global rate limit (applies to all endpoints unless overridden)
|
# Global rate limit (applies to all endpoints unless overridden)
|
||||||
RATE_LIMIT_TTL=60 # Time window in seconds
|
# Time window in seconds
|
||||||
RATE_LIMIT_GLOBAL_LIMIT=100 # Requests per window
|
RATE_LIMIT_TTL=60
|
||||||
|
# Requests per window
|
||||||
|
RATE_LIMIT_GLOBAL_LIMIT=100
|
||||||
|
|
||||||
# Webhook endpoints (/stitcher/webhook, /stitcher/dispatch)
|
# Webhook endpoints (/stitcher/webhook, /stitcher/dispatch) — requests per minute
|
||||||
RATE_LIMIT_WEBHOOK_LIMIT=60 # Requests per minute
|
RATE_LIMIT_WEBHOOK_LIMIT=60
|
||||||
|
|
||||||
# Coordinator endpoints (/coordinator/*)
|
# Coordinator endpoints (/coordinator/*) — requests per minute
|
||||||
RATE_LIMIT_COORDINATOR_LIMIT=100 # Requests per minute
|
RATE_LIMIT_COORDINATOR_LIMIT=100
|
||||||
|
|
||||||
# Health check endpoints (/coordinator/health)
|
# Health check endpoints (/coordinator/health) — requests per minute (higher for monitoring)
|
||||||
RATE_LIMIT_HEALTH_LIMIT=300 # Requests per minute (higher for monitoring)
|
RATE_LIMIT_HEALTH_LIMIT=300
|
||||||
|
|
||||||
# Storage backend for rate limiting (redis or memory)
|
# Storage backend for rate limiting (redis or memory)
|
||||||
# redis: Uses Valkey for distributed rate limiting (recommended for production)
|
# redis: Uses Valkey for distributed rate limiting (recommended for production)
|
||||||
@@ -359,17 +361,17 @@ RATE_LIMIT_STORAGE=redis
|
|||||||
# a single workspace.
|
# a single workspace.
|
||||||
MATRIX_HOMESERVER_URL=http://synapse:8008
|
MATRIX_HOMESERVER_URL=http://synapse:8008
|
||||||
MATRIX_ACCESS_TOKEN=
|
MATRIX_ACCESS_TOKEN=
|
||||||
MATRIX_BOT_USER_ID=@mosaic-bot:matrix.example.com
|
MATRIX_BOT_USER_ID=@mosaic-bot:matrix.woltje.com
|
||||||
MATRIX_SERVER_NAME=matrix.example.com
|
MATRIX_SERVER_NAME=matrix.woltje.com
|
||||||
# MATRIX_CONTROL_ROOM_ID=!roomid:matrix.example.com
|
# MATRIX_CONTROL_ROOM_ID=!roomid:matrix.woltje.com
|
||||||
# MATRIX_WORKSPACE_ID=your-workspace-uuid
|
# MATRIX_WORKSPACE_ID=your-workspace-uuid
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Matrix / Synapse Deployment
|
# Matrix / Synapse Deployment
|
||||||
# ======================
|
# ======================
|
||||||
# Domains for Traefik routing to Matrix services
|
# Domains for Traefik routing to Matrix services
|
||||||
MATRIX_DOMAIN=matrix.example.com
|
MATRIX_DOMAIN=matrix.woltje.com
|
||||||
ELEMENT_DOMAIN=chat.example.com
|
ELEMENT_DOMAIN=chat.woltje.com
|
||||||
|
|
||||||
# Synapse database (created automatically by synapse-db-init in the swarm compose)
|
# Synapse database (created automatically by synapse-db-init in the swarm compose)
|
||||||
SYNAPSE_POSTGRES_DB=synapse
|
SYNAPSE_POSTGRES_DB=synapse
|
||||||
|
|||||||
@@ -1,14 +1,90 @@
|
|||||||
{
|
{
|
||||||
"schema_version": 1,
|
"schema_version": 1,
|
||||||
"mission_id": "prd-implementation-20260222",
|
"mission_id": "ms21-multi-tenant-rbac-data-migration-20260228",
|
||||||
"name": "PRD implementation",
|
"name": "MS21 Multi-Tenant RBAC Data Migration",
|
||||||
"description": "",
|
"description": "Build multi-tenant user/workspace/team management, break-glass auth, RBAC UI enforcement, and migrate jarvis-brain data into Mosaic Stack",
|
||||||
"project_path": "/home/jwoltje/src/mosaic-stack",
|
"project_path": "/home/jwoltje/src/mosaic-stack",
|
||||||
"created_at": "2026-02-23T03:20:55Z",
|
"created_at": "2026-02-28T17:10:22Z",
|
||||||
"status": "active",
|
"status": "active",
|
||||||
"task_prefix": "",
|
"task_prefix": "MS21",
|
||||||
"quality_gates": "",
|
"quality_gates": "pnpm lint && pnpm build && pnpm test",
|
||||||
"milestone_version": "0.0.1",
|
"milestone_version": "0.0.21",
|
||||||
"milestones": [],
|
"milestones": [
|
||||||
"sessions": []
|
{
|
||||||
|
"id": "phase-1",
|
||||||
|
"name": "Schema and Admin API",
|
||||||
|
"status": "pending",
|
||||||
|
"branch": "schema-and-admin-api",
|
||||||
|
"issue_ref": "",
|
||||||
|
"started_at": "",
|
||||||
|
"completed_at": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "phase-2",
|
||||||
|
"name": "Break-Glass Authentication",
|
||||||
|
"status": "pending",
|
||||||
|
"branch": "break-glass-authentication",
|
||||||
|
"issue_ref": "",
|
||||||
|
"started_at": "",
|
||||||
|
"completed_at": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "phase-3",
|
||||||
|
"name": "Data Migration",
|
||||||
|
"status": "pending",
|
||||||
|
"branch": "data-migration",
|
||||||
|
"issue_ref": "",
|
||||||
|
"started_at": "",
|
||||||
|
"completed_at": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "phase-4",
|
||||||
|
"name": "Admin UI",
|
||||||
|
"status": "pending",
|
||||||
|
"branch": "admin-ui",
|
||||||
|
"issue_ref": "",
|
||||||
|
"started_at": "",
|
||||||
|
"completed_at": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "phase-5",
|
||||||
|
"name": "RBAC UI Enforcement",
|
||||||
|
"status": "pending",
|
||||||
|
"branch": "rbac-ui-enforcement",
|
||||||
|
"issue_ref": "",
|
||||||
|
"started_at": "",
|
||||||
|
"completed_at": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "phase-6",
|
||||||
|
"name": "Verification",
|
||||||
|
"status": "pending",
|
||||||
|
"branch": "verification",
|
||||||
|
"issue_ref": "",
|
||||||
|
"started_at": "",
|
||||||
|
"completed_at": ""
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"sessions": [
|
||||||
|
{
|
||||||
|
"session_id": "sess-001",
|
||||||
|
"runtime": "unknown",
|
||||||
|
"started_at": "2026-02-28T17:48:51Z",
|
||||||
|
"ended_at": "",
|
||||||
|
"ended_reason": "",
|
||||||
|
"milestone_at_end": "",
|
||||||
|
"tasks_completed": [],
|
||||||
|
"last_task_id": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"session_id": "sess-002",
|
||||||
|
"runtime": "unknown",
|
||||||
|
"started_at": "2026-02-28T20:30:13Z",
|
||||||
|
"ended_at": "",
|
||||||
|
"ended_reason": "",
|
||||||
|
"milestone_at_end": "",
|
||||||
|
"tasks_completed": [],
|
||||||
|
"last_task_id": ""
|
||||||
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
8
.mosaic/orchestrator/session.lock
Normal file
8
.mosaic/orchestrator/session.lock
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"session_id": "sess-002",
|
||||||
|
"runtime": "unknown",
|
||||||
|
"pid": 3178395,
|
||||||
|
"started_at": "2026-02-28T20:30:13Z",
|
||||||
|
"project_path": "/tmp/ms21-ui-001",
|
||||||
|
"milestone_id": ""
|
||||||
|
}
|
||||||
@@ -34,3 +34,9 @@ CVE-2026-26996 # HIGH: minimatch DoS via specially crafted glob patterns (needs
|
|||||||
# OpenBao 2.5.0 compiled with Go 1.25.6, fix needs Go >= 1.25.7.
|
# OpenBao 2.5.0 compiled with Go 1.25.6, fix needs Go >= 1.25.7.
|
||||||
# Cannot build OpenBao from source (large project). Waiting for upstream release.
|
# Cannot build OpenBao from source (large project). Waiting for upstream release.
|
||||||
CVE-2025-68121 # CRITICAL: crypto/tls session resumption
|
CVE-2025-68121 # CRITICAL: crypto/tls session resumption
|
||||||
|
|
||||||
|
# === multer CVEs (upstream via @nestjs/platform-express) ===
|
||||||
|
# multer <2.1.0 — waiting on NestJS to update their dependency
|
||||||
|
# These are DoS vulnerabilities in file upload handling
|
||||||
|
GHSA-xf7r-hgr6-v32p # HIGH: DoS via incomplete cleanup
|
||||||
|
GHSA-v52c-386h-88mc # HIGH: DoS via resource exhaustion
|
||||||
|
|||||||
@@ -1,234 +0,0 @@
|
|||||||
# API Pipeline - Mosaic Stack
|
|
||||||
# Quality gates, build, and Docker publish for @mosaic/api
|
|
||||||
#
|
|
||||||
# Triggers on: apps/api/**, packages/**, root configs
|
|
||||||
# Security chain: source audit + Trivy container scan
|
|
||||||
|
|
||||||
when:
|
|
||||||
- event: [push, pull_request, manual]
|
|
||||||
path:
|
|
||||||
include:
|
|
||||||
- "apps/api/**"
|
|
||||||
- "packages/**"
|
|
||||||
- "pnpm-lock.yaml"
|
|
||||||
- "pnpm-workspace.yaml"
|
|
||||||
- "turbo.json"
|
|
||||||
- "package.json"
|
|
||||||
- ".woodpecker/api.yml"
|
|
||||||
- ".trivyignore"
|
|
||||||
|
|
||||||
variables:
|
|
||||||
- &node_image "node:24-alpine"
|
|
||||||
- &install_deps |
|
|
||||||
corepack enable
|
|
||||||
pnpm install --frozen-lockfile
|
|
||||||
- &use_deps |
|
|
||||||
corepack enable
|
|
||||||
- &kaniko_setup |
|
|
||||||
mkdir -p /kaniko/.docker
|
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
|
||||||
|
|
||||||
services:
|
|
||||||
postgres:
|
|
||||||
image: postgres:17.7-alpine3.22
|
|
||||||
environment:
|
|
||||||
POSTGRES_DB: test_db
|
|
||||||
POSTGRES_USER: test_user
|
|
||||||
POSTGRES_PASSWORD: test_password
|
|
||||||
|
|
||||||
steps:
|
|
||||||
# === Quality Gates ===
|
|
||||||
|
|
||||||
install:
|
|
||||||
image: *node_image
|
|
||||||
commands:
|
|
||||||
- *install_deps
|
|
||||||
|
|
||||||
security-audit:
|
|
||||||
image: *node_image
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm audit --audit-level=high
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
lint:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/api" lint
|
|
||||||
depends_on:
|
|
||||||
- prisma-generate
|
|
||||||
- build-shared
|
|
||||||
|
|
||||||
prisma-generate:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/api" prisma:generate
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
build-shared:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/shared" build
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
typecheck:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/api" typecheck
|
|
||||||
depends_on:
|
|
||||||
- prisma-generate
|
|
||||||
- build-shared
|
|
||||||
|
|
||||||
prisma-migrate:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/api" prisma migrate deploy
|
|
||||||
depends_on:
|
|
||||||
- prisma-generate
|
|
||||||
|
|
||||||
test:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
|
|
||||||
ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/api" exec vitest run --exclude 'src/auth/auth-rls.integration.spec.ts' --exclude 'src/credentials/user-credential.model.spec.ts' --exclude 'src/job-events/job-events.performance.spec.ts' --exclude 'src/knowledge/services/fulltext-search.spec.ts' --exclude 'src/mosaic-telemetry/mosaic-telemetry.module.spec.ts'
|
|
||||||
depends_on:
|
|
||||||
- prisma-migrate
|
|
||||||
|
|
||||||
# === Build ===
|
|
||||||
|
|
||||||
build:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
NODE_ENV: "production"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm turbo build --filter=@mosaic/api
|
|
||||||
depends_on:
|
|
||||||
- lint
|
|
||||||
- typecheck
|
|
||||||
- test
|
|
||||||
- security-audit
|
|
||||||
|
|
||||||
# === Docker Build & Push ===
|
|
||||||
|
|
||||||
docker-build-api:
|
|
||||||
image: gcr.io/kaniko-project/executor:debug
|
|
||||||
environment:
|
|
||||||
GITEA_USER:
|
|
||||||
from_secret: gitea_username
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
|
||||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
|
||||||
commands:
|
|
||||||
- *kaniko_setup
|
|
||||||
- |
|
|
||||||
DESTINATIONS=""
|
|
||||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:$CI_COMMIT_TAG"
|
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:latest"
|
|
||||||
fi
|
|
||||||
/kaniko/executor --context . --dockerfile apps/api/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
|
||||||
when:
|
|
||||||
- branch: [main]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- build
|
|
||||||
|
|
||||||
# === Container Security Scan ===
|
|
||||||
|
|
||||||
security-trivy-api:
|
|
||||||
image: aquasec/trivy:latest
|
|
||||||
environment:
|
|
||||||
GITEA_USER:
|
|
||||||
from_secret: gitea_username
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
|
||||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
|
||||||
commands:
|
|
||||||
- |
|
|
||||||
if [ -n "$$CI_COMMIT_TAG" ]; then
|
|
||||||
SCAN_TAG="$$CI_COMMIT_TAG"
|
|
||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
|
||||||
SCAN_TAG="latest"
|
|
||||||
else
|
|
||||||
SCAN_TAG="latest"
|
|
||||||
fi
|
|
||||||
mkdir -p ~/.docker
|
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
|
||||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
|
|
||||||
--ignorefile .trivyignore \
|
|
||||||
git.mosaicstack.dev/mosaic/stack-api:$$SCAN_TAG
|
|
||||||
when:
|
|
||||||
- branch: [main]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- docker-build-api
|
|
||||||
|
|
||||||
# === Package Linking ===
|
|
||||||
|
|
||||||
link-packages:
|
|
||||||
image: alpine:3
|
|
||||||
environment:
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
commands:
|
|
||||||
- apk add --no-cache curl
|
|
||||||
- sleep 10
|
|
||||||
- |
|
|
||||||
set -e
|
|
||||||
link_package() {
|
|
||||||
PKG="$$1"
|
|
||||||
echo "Linking $$PKG..."
|
|
||||||
for attempt in 1 2 3; do
|
|
||||||
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
|
||||||
-H "Authorization: token $$GITEA_TOKEN" \
|
|
||||||
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
|
||||||
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
|
||||||
echo " Linked $$PKG"
|
|
||||||
return 0
|
|
||||||
elif [ "$$STATUS" = "400" ]; then
|
|
||||||
echo " $$PKG already linked"
|
|
||||||
return 0
|
|
||||||
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
|
||||||
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
|
||||||
sleep 5
|
|
||||||
else
|
|
||||||
echo " FAILED: $$PKG status $$STATUS"
|
|
||||||
cat /tmp/link-response.txt
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
}
|
|
||||||
link_package "stack-api"
|
|
||||||
when:
|
|
||||||
- branch: [main]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- security-trivy-api
|
|
||||||
337
.woodpecker/ci.yml
Normal file
337
.woodpecker/ci.yml
Normal file
@@ -0,0 +1,337 @@
|
|||||||
|
# Unified CI Pipeline - Mosaic Stack
|
||||||
|
# Single install, parallel quality gates, sequential deploy
|
||||||
|
#
|
||||||
|
# Replaces: api.yml, orchestrator.yml, web.yml
|
||||||
|
# Keeps: coordinator.yml (Python), infra.yml (separate concerns)
|
||||||
|
#
|
||||||
|
# Flow:
|
||||||
|
# install → security-audit
|
||||||
|
# → prisma-generate → lint + typecheck (parallel)
|
||||||
|
# → prisma-migrate → test
|
||||||
|
# → build (after all gates pass)
|
||||||
|
# → docker builds (main only, parallel)
|
||||||
|
# → trivy scans (main only, parallel)
|
||||||
|
# → package linking (main only)
|
||||||
|
|
||||||
|
when:
|
||||||
|
- event: [push, pull_request, manual]
|
||||||
|
path:
|
||||||
|
include:
|
||||||
|
- "apps/api/**"
|
||||||
|
- "apps/orchestrator/**"
|
||||||
|
- "apps/web/**"
|
||||||
|
- "packages/**"
|
||||||
|
- "pnpm-lock.yaml"
|
||||||
|
- "pnpm-workspace.yaml"
|
||||||
|
- "turbo.json"
|
||||||
|
- "package.json"
|
||||||
|
- ".woodpecker/ci.yml"
|
||||||
|
- ".trivyignore"
|
||||||
|
|
||||||
|
variables:
|
||||||
|
- &node_image "node:24-alpine"
|
||||||
|
- &install_deps |
|
||||||
|
corepack enable
|
||||||
|
pnpm install --frozen-lockfile
|
||||||
|
- &use_deps |
|
||||||
|
corepack enable
|
||||||
|
- &turbo_env
|
||||||
|
TURBO_API:
|
||||||
|
from_secret: turbo_api
|
||||||
|
TURBO_TOKEN:
|
||||||
|
from_secret: turbo_token
|
||||||
|
TURBO_TEAM:
|
||||||
|
from_secret: turbo_team
|
||||||
|
- &kaniko_setup |
|
||||||
|
mkdir -p /kaniko/.docker
|
||||||
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
||||||
|
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:17.7-alpine3.22
|
||||||
|
environment:
|
||||||
|
POSTGRES_DB: test_db
|
||||||
|
POSTGRES_USER: test_user
|
||||||
|
POSTGRES_PASSWORD: test_password
|
||||||
|
|
||||||
|
steps:
|
||||||
|
# ─── Install (once) ─────────────────────────────────────────
|
||||||
|
install:
|
||||||
|
image: *node_image
|
||||||
|
commands:
|
||||||
|
- *install_deps
|
||||||
|
|
||||||
|
# ─── Security Audit (once) ──────────────────────────────────
|
||||||
|
security-audit:
|
||||||
|
image: *node_image
|
||||||
|
commands:
|
||||||
|
- *use_deps
|
||||||
|
- pnpm audit --audit-level=high
|
||||||
|
depends_on:
|
||||||
|
- install
|
||||||
|
|
||||||
|
# ─── Prisma Generate ────────────────────────────────────────
|
||||||
|
prisma-generate:
|
||||||
|
image: *node_image
|
||||||
|
environment:
|
||||||
|
SKIP_ENV_VALIDATION: "true"
|
||||||
|
commands:
|
||||||
|
- *use_deps
|
||||||
|
- pnpm --filter "@mosaic/api" prisma:generate
|
||||||
|
depends_on:
|
||||||
|
- install
|
||||||
|
|
||||||
|
# ─── Lint (all packages) ────────────────────────────────────
|
||||||
|
lint:
|
||||||
|
image: *node_image
|
||||||
|
environment:
|
||||||
|
SKIP_ENV_VALIDATION: "true"
|
||||||
|
<<: *turbo_env
|
||||||
|
commands:
|
||||||
|
- *use_deps
|
||||||
|
- pnpm turbo lint
|
||||||
|
depends_on:
|
||||||
|
- prisma-generate
|
||||||
|
|
||||||
|
# ─── Typecheck (all packages, parallel with lint) ───────────
|
||||||
|
typecheck:
|
||||||
|
image: *node_image
|
||||||
|
environment:
|
||||||
|
SKIP_ENV_VALIDATION: "true"
|
||||||
|
<<: *turbo_env
|
||||||
|
commands:
|
||||||
|
- *use_deps
|
||||||
|
- pnpm turbo typecheck
|
||||||
|
depends_on:
|
||||||
|
- prisma-generate
|
||||||
|
|
||||||
|
# ─── Prisma Migrate (test DB) ──────────────────────────────
|
||||||
|
prisma-migrate:
|
||||||
|
image: *node_image
|
||||||
|
environment:
|
||||||
|
SKIP_ENV_VALIDATION: "true"
|
||||||
|
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
|
||||||
|
commands:
|
||||||
|
- *use_deps
|
||||||
|
- pnpm --filter "@mosaic/api" prisma migrate deploy
|
||||||
|
depends_on:
|
||||||
|
- prisma-generate
|
||||||
|
|
||||||
|
# ─── Test (all packages) ───────────────────────────────────
|
||||||
|
test:
|
||||||
|
image: *node_image
|
||||||
|
environment:
|
||||||
|
SKIP_ENV_VALIDATION: "true"
|
||||||
|
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
|
||||||
|
ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
|
||||||
|
<<: *turbo_env
|
||||||
|
commands:
|
||||||
|
- *use_deps
|
||||||
|
- pnpm --filter "@mosaic/api" exec vitest run --exclude 'src/auth/auth-rls.integration.spec.ts' --exclude 'src/credentials/user-credential.model.spec.ts' --exclude 'src/job-events/job-events.performance.spec.ts' --exclude 'src/knowledge/services/fulltext-search.spec.ts' --exclude 'src/mosaic-telemetry/mosaic-telemetry.module.spec.ts'
|
||||||
|
- pnpm turbo test --filter=@mosaic/orchestrator --filter=@mosaic/web
|
||||||
|
depends_on:
|
||||||
|
- prisma-migrate
|
||||||
|
|
||||||
|
# ─── Build (all packages) ──────────────────────────────────
|
||||||
|
build:
|
||||||
|
image: *node_image
|
||||||
|
environment:
|
||||||
|
SKIP_ENV_VALIDATION: "true"
|
||||||
|
NODE_ENV: "production"
|
||||||
|
<<: *turbo_env
|
||||||
|
commands:
|
||||||
|
- *use_deps
|
||||||
|
- pnpm turbo build
|
||||||
|
depends_on:
|
||||||
|
- lint
|
||||||
|
- typecheck
|
||||||
|
- test
|
||||||
|
- security-audit
|
||||||
|
|
||||||
|
# ─── Docker Builds (main only, parallel) ───────────────────
|
||||||
|
|
||||||
|
docker-build-api:
|
||||||
|
image: gcr.io/kaniko-project/executor:debug
|
||||||
|
environment:
|
||||||
|
GITEA_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||||
|
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||||
|
commands:
|
||||||
|
- *kaniko_setup
|
||||||
|
- |
|
||||||
|
DESTINATIONS=""
|
||||||
|
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||||
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:$CI_COMMIT_TAG"
|
||||||
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:latest"
|
||||||
|
fi
|
||||||
|
/kaniko/executor --context . --dockerfile apps/api/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- build
|
||||||
|
|
||||||
|
docker-build-orchestrator:
|
||||||
|
image: gcr.io/kaniko-project/executor:debug
|
||||||
|
environment:
|
||||||
|
GITEA_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||||
|
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||||
|
commands:
|
||||||
|
- *kaniko_setup
|
||||||
|
- |
|
||||||
|
DESTINATIONS=""
|
||||||
|
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||||
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:$CI_COMMIT_TAG"
|
||||||
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:latest"
|
||||||
|
fi
|
||||||
|
/kaniko/executor --context . --dockerfile apps/orchestrator/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- build
|
||||||
|
|
||||||
|
docker-build-web:
|
||||||
|
image: gcr.io/kaniko-project/executor:debug
|
||||||
|
environment:
|
||||||
|
GITEA_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||||
|
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||||
|
commands:
|
||||||
|
- *kaniko_setup
|
||||||
|
- |
|
||||||
|
DESTINATIONS=""
|
||||||
|
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||||
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:$CI_COMMIT_TAG"
|
||||||
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:latest"
|
||||||
|
fi
|
||||||
|
/kaniko/executor --context . --dockerfile apps/web/Dockerfile --snapshot-mode=redo --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- build
|
||||||
|
|
||||||
|
# ─── Container Security Scans (main only) ──────────────────
|
||||||
|
|
||||||
|
security-trivy-api:
|
||||||
|
image: aquasec/trivy:latest
|
||||||
|
environment:
|
||||||
|
GITEA_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||||
|
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||||
|
commands:
|
||||||
|
- |
|
||||||
|
if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
|
||||||
|
mkdir -p ~/.docker
|
||||||
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
|
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-api:$$SCAN_TAG
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- docker-build-api
|
||||||
|
|
||||||
|
security-trivy-orchestrator:
|
||||||
|
image: aquasec/trivy:latest
|
||||||
|
environment:
|
||||||
|
GITEA_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||||
|
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||||
|
commands:
|
||||||
|
- |
|
||||||
|
if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
|
||||||
|
mkdir -p ~/.docker
|
||||||
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
|
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-orchestrator:$$SCAN_TAG
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- docker-build-orchestrator
|
||||||
|
|
||||||
|
security-trivy-web:
|
||||||
|
image: aquasec/trivy:latest
|
||||||
|
environment:
|
||||||
|
GITEA_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||||
|
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||||
|
commands:
|
||||||
|
- |
|
||||||
|
if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
|
||||||
|
mkdir -p ~/.docker
|
||||||
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
|
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-web:$$SCAN_TAG
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- docker-build-web
|
||||||
|
|
||||||
|
# ─── Package Linking (main only, once) ─────────────────────
|
||||||
|
|
||||||
|
link-packages:
|
||||||
|
image: alpine:3
|
||||||
|
environment:
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
commands:
|
||||||
|
- apk add --no-cache curl
|
||||||
|
- sleep 10
|
||||||
|
- |
|
||||||
|
set -e
|
||||||
|
link_package() {
|
||||||
|
PKG="$$1"
|
||||||
|
echo "Linking $$PKG..."
|
||||||
|
for attempt in 1 2 3; do
|
||||||
|
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
||||||
|
-H "Authorization: token $$GITEA_TOKEN" \
|
||||||
|
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
||||||
|
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
||||||
|
echo " Linked $$PKG"
|
||||||
|
return 0
|
||||||
|
elif [ "$$STATUS" = "400" ]; then
|
||||||
|
echo " $$PKG already linked"
|
||||||
|
return 0
|
||||||
|
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
||||||
|
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
||||||
|
sleep 5
|
||||||
|
else
|
||||||
|
echo " FAILED: $$PKG status $$STATUS"
|
||||||
|
cat /tmp/link-response.txt
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
}
|
||||||
|
link_package "stack-api"
|
||||||
|
link_package "stack-orchestrator"
|
||||||
|
link_package "stack-web"
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- security-trivy-api
|
||||||
|
- security-trivy-orchestrator
|
||||||
|
- security-trivy-web
|
||||||
@@ -1,191 +0,0 @@
|
|||||||
# Orchestrator Pipeline - Mosaic Stack
|
|
||||||
# Quality gates, build, and Docker publish for @mosaic/orchestrator
|
|
||||||
#
|
|
||||||
# Triggers on: apps/orchestrator/**, packages/**, root configs
|
|
||||||
# Security chain: source audit + Trivy container scan
|
|
||||||
|
|
||||||
when:
|
|
||||||
- event: [push, pull_request, manual]
|
|
||||||
path:
|
|
||||||
include:
|
|
||||||
- "apps/orchestrator/**"
|
|
||||||
- "packages/**"
|
|
||||||
- "pnpm-lock.yaml"
|
|
||||||
- "pnpm-workspace.yaml"
|
|
||||||
- "turbo.json"
|
|
||||||
- "package.json"
|
|
||||||
- ".woodpecker/orchestrator.yml"
|
|
||||||
- ".trivyignore"
|
|
||||||
|
|
||||||
variables:
|
|
||||||
- &node_image "node:24-alpine"
|
|
||||||
- &install_deps |
|
|
||||||
corepack enable
|
|
||||||
pnpm install --frozen-lockfile
|
|
||||||
- &use_deps |
|
|
||||||
corepack enable
|
|
||||||
- &kaniko_setup |
|
|
||||||
mkdir -p /kaniko/.docker
|
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
|
||||||
|
|
||||||
steps:
|
|
||||||
# === Quality Gates ===
|
|
||||||
|
|
||||||
install:
|
|
||||||
image: *node_image
|
|
||||||
commands:
|
|
||||||
- *install_deps
|
|
||||||
|
|
||||||
security-audit:
|
|
||||||
image: *node_image
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm audit --audit-level=high
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
lint:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/orchestrator" lint
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
typecheck:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/orchestrator" typecheck
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
test:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/orchestrator" test
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
# === Build ===
|
|
||||||
|
|
||||||
build:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
NODE_ENV: "production"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm turbo build --filter=@mosaic/orchestrator
|
|
||||||
depends_on:
|
|
||||||
- lint
|
|
||||||
- typecheck
|
|
||||||
- test
|
|
||||||
- security-audit
|
|
||||||
|
|
||||||
# === Docker Build & Push ===
|
|
||||||
|
|
||||||
docker-build-orchestrator:
|
|
||||||
image: gcr.io/kaniko-project/executor:debug
|
|
||||||
environment:
|
|
||||||
GITEA_USER:
|
|
||||||
from_secret: gitea_username
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
|
||||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
|
||||||
commands:
|
|
||||||
- *kaniko_setup
|
|
||||||
- |
|
|
||||||
DESTINATIONS=""
|
|
||||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:$CI_COMMIT_TAG"
|
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:latest"
|
|
||||||
fi
|
|
||||||
/kaniko/executor --context . --dockerfile apps/orchestrator/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
|
||||||
when:
|
|
||||||
- branch: [main]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- build
|
|
||||||
|
|
||||||
# === Container Security Scan ===
|
|
||||||
|
|
||||||
security-trivy-orchestrator:
|
|
||||||
image: aquasec/trivy:latest
|
|
||||||
environment:
|
|
||||||
GITEA_USER:
|
|
||||||
from_secret: gitea_username
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
|
||||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
|
||||||
commands:
|
|
||||||
- |
|
|
||||||
if [ -n "$$CI_COMMIT_TAG" ]; then
|
|
||||||
SCAN_TAG="$$CI_COMMIT_TAG"
|
|
||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
|
||||||
SCAN_TAG="latest"
|
|
||||||
else
|
|
||||||
SCAN_TAG="latest"
|
|
||||||
fi
|
|
||||||
mkdir -p ~/.docker
|
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
|
||||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
|
|
||||||
--ignorefile .trivyignore \
|
|
||||||
git.mosaicstack.dev/mosaic/stack-orchestrator:$$SCAN_TAG
|
|
||||||
when:
|
|
||||||
- branch: [main]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- docker-build-orchestrator
|
|
||||||
|
|
||||||
# === Package Linking ===
|
|
||||||
|
|
||||||
link-packages:
|
|
||||||
image: alpine:3
|
|
||||||
environment:
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
commands:
|
|
||||||
- apk add --no-cache curl
|
|
||||||
- sleep 10
|
|
||||||
- |
|
|
||||||
set -e
|
|
||||||
link_package() {
|
|
||||||
PKG="$$1"
|
|
||||||
echo "Linking $$PKG..."
|
|
||||||
for attempt in 1 2 3; do
|
|
||||||
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
|
||||||
-H "Authorization: token $$GITEA_TOKEN" \
|
|
||||||
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
|
||||||
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
|
||||||
echo " Linked $$PKG"
|
|
||||||
return 0
|
|
||||||
elif [ "$$STATUS" = "400" ]; then
|
|
||||||
echo " $$PKG already linked"
|
|
||||||
return 0
|
|
||||||
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
|
||||||
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
|
||||||
sleep 5
|
|
||||||
else
|
|
||||||
echo " FAILED: $$PKG status $$STATUS"
|
|
||||||
cat /tmp/link-response.txt
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
}
|
|
||||||
link_package "stack-orchestrator"
|
|
||||||
when:
|
|
||||||
- branch: [main]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- security-trivy-orchestrator
|
|
||||||
@@ -1,202 +0,0 @@
|
|||||||
# Web Pipeline - Mosaic Stack
|
|
||||||
# Quality gates, build, and Docker publish for @mosaic/web
|
|
||||||
#
|
|
||||||
# Triggers on: apps/web/**, packages/**, root configs
|
|
||||||
# Security chain: source audit + Trivy container scan
|
|
||||||
|
|
||||||
when:
|
|
||||||
- event: [push, pull_request, manual]
|
|
||||||
path:
|
|
||||||
include:
|
|
||||||
- "apps/web/**"
|
|
||||||
- "packages/**"
|
|
||||||
- "pnpm-lock.yaml"
|
|
||||||
- "pnpm-workspace.yaml"
|
|
||||||
- "turbo.json"
|
|
||||||
- "package.json"
|
|
||||||
- ".woodpecker/web.yml"
|
|
||||||
- ".trivyignore"
|
|
||||||
|
|
||||||
variables:
|
|
||||||
- &node_image "node:24-alpine"
|
|
||||||
- &install_deps |
|
|
||||||
corepack enable
|
|
||||||
pnpm install --frozen-lockfile
|
|
||||||
- &use_deps |
|
|
||||||
corepack enable
|
|
||||||
- &kaniko_setup |
|
|
||||||
mkdir -p /kaniko/.docker
|
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
|
||||||
|
|
||||||
steps:
|
|
||||||
# === Quality Gates ===
|
|
||||||
|
|
||||||
install:
|
|
||||||
image: *node_image
|
|
||||||
commands:
|
|
||||||
- *install_deps
|
|
||||||
|
|
||||||
security-audit:
|
|
||||||
image: *node_image
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm audit --audit-level=high
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
build-shared:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/shared" build
|
|
||||||
- pnpm --filter "@mosaic/ui" build
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
lint:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/web" lint
|
|
||||||
depends_on:
|
|
||||||
- build-shared
|
|
||||||
|
|
||||||
typecheck:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/web" typecheck
|
|
||||||
depends_on:
|
|
||||||
- build-shared
|
|
||||||
|
|
||||||
test:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/web" test
|
|
||||||
depends_on:
|
|
||||||
- build-shared
|
|
||||||
|
|
||||||
# === Build ===
|
|
||||||
|
|
||||||
build:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
NODE_ENV: "production"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm turbo build --filter=@mosaic/web
|
|
||||||
depends_on:
|
|
||||||
- lint
|
|
||||||
- typecheck
|
|
||||||
- test
|
|
||||||
- security-audit
|
|
||||||
|
|
||||||
# === Docker Build & Push ===
|
|
||||||
|
|
||||||
docker-build-web:
|
|
||||||
image: gcr.io/kaniko-project/executor:debug
|
|
||||||
environment:
|
|
||||||
GITEA_USER:
|
|
||||||
from_secret: gitea_username
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
|
||||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
|
||||||
commands:
|
|
||||||
- *kaniko_setup
|
|
||||||
- |
|
|
||||||
DESTINATIONS=""
|
|
||||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:$CI_COMMIT_TAG"
|
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:latest"
|
|
||||||
fi
|
|
||||||
/kaniko/executor --context . --dockerfile apps/web/Dockerfile --snapshot-mode=redo --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
|
|
||||||
when:
|
|
||||||
- branch: [main]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- build
|
|
||||||
|
|
||||||
# === Container Security Scan ===
|
|
||||||
|
|
||||||
security-trivy-web:
|
|
||||||
image: aquasec/trivy:latest
|
|
||||||
environment:
|
|
||||||
GITEA_USER:
|
|
||||||
from_secret: gitea_username
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
|
||||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
|
||||||
commands:
|
|
||||||
- |
|
|
||||||
if [ -n "$$CI_COMMIT_TAG" ]; then
|
|
||||||
SCAN_TAG="$$CI_COMMIT_TAG"
|
|
||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
|
||||||
SCAN_TAG="latest"
|
|
||||||
else
|
|
||||||
SCAN_TAG="latest"
|
|
||||||
fi
|
|
||||||
mkdir -p ~/.docker
|
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
|
||||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
|
|
||||||
--ignorefile .trivyignore \
|
|
||||||
git.mosaicstack.dev/mosaic/stack-web:$$SCAN_TAG
|
|
||||||
when:
|
|
||||||
- branch: [main]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- docker-build-web
|
|
||||||
|
|
||||||
# === Package Linking ===
|
|
||||||
|
|
||||||
link-packages:
|
|
||||||
image: alpine:3
|
|
||||||
environment:
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
commands:
|
|
||||||
- apk add --no-cache curl
|
|
||||||
- sleep 10
|
|
||||||
- |
|
|
||||||
set -e
|
|
||||||
link_package() {
|
|
||||||
PKG="$$1"
|
|
||||||
echo "Linking $$PKG..."
|
|
||||||
for attempt in 1 2 3; do
|
|
||||||
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
|
||||||
-H "Authorization: token $$GITEA_TOKEN" \
|
|
||||||
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
|
||||||
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
|
||||||
echo " Linked $$PKG"
|
|
||||||
return 0
|
|
||||||
elif [ "$$STATUS" = "400" ]; then
|
|
||||||
echo " $$PKG already linked"
|
|
||||||
return 0
|
|
||||||
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
|
||||||
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
|
||||||
sleep 5
|
|
||||||
else
|
|
||||||
echo " FAILED: $$PKG status $$STATUS"
|
|
||||||
cat /tmp/link-response.txt
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
}
|
|
||||||
link_package "stack-web"
|
|
||||||
when:
|
|
||||||
- branch: [main]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- security-trivy-web
|
|
||||||
15
AGENTS.md
15
AGENTS.md
@@ -46,6 +46,21 @@ pnpm lint
|
|||||||
pnpm build
|
pnpm build
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Versioning Protocol (HARD GATE)
|
||||||
|
|
||||||
|
**This project is ALPHA. All versions MUST be `0.0.x`.**
|
||||||
|
|
||||||
|
- The `0.1.0` release is FORBIDDEN until Jason explicitly authorizes it.
|
||||||
|
- Every milestone bump increments the patch: `0.0.20` → `0.0.21` → `0.0.22`, etc.
|
||||||
|
- ALL package.json files in the monorepo MUST stay in sync at the same version.
|
||||||
|
- Use `scripts/version-bump.sh <version>` to bump — it enforces the alpha constraint and updates all packages atomically.
|
||||||
|
- The script rejects any version >= `0.1.0`.
|
||||||
|
- When creating a release tag, the tag MUST match the package version: `v0.0.x`.
|
||||||
|
|
||||||
|
**Milestone-to-version mapping** is defined in the PRD (`docs/PRD.md`) under "Delivery/Milestone Intent". Agents MUST use the version from that table when tagging a milestone release.
|
||||||
|
|
||||||
|
**Violation of this protocol is a blocking error.** If an agent attempts to set a version >= `0.1.0`, stop and escalate.
|
||||||
|
|
||||||
## Standards and Quality
|
## Standards and Quality
|
||||||
|
|
||||||
- Enforce strict typing and no unsafe shortcuts.
|
- Enforce strict typing and no unsafe shortcuts.
|
||||||
|
|||||||
@@ -18,6 +18,12 @@ COPY turbo.json ./
|
|||||||
# ======================
|
# ======================
|
||||||
FROM base AS deps
|
FROM base AS deps
|
||||||
|
|
||||||
|
# Install build tools for native addons (node-pty requires node-gyp compilation)
|
||||||
|
# and OpenSSL for Prisma engine detection
|
||||||
|
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
python3 make g++ openssl \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
# Copy all package.json files for workspace resolution
|
# Copy all package.json files for workspace resolution
|
||||||
COPY packages/shared/package.json ./packages/shared/
|
COPY packages/shared/package.json ./packages/shared/
|
||||||
COPY packages/ui/package.json ./packages/ui/
|
COPY packages/ui/package.json ./packages/ui/
|
||||||
@@ -25,7 +31,11 @@ COPY packages/config/package.json ./packages/config/
|
|||||||
COPY apps/api/package.json ./apps/api/
|
COPY apps/api/package.json ./apps/api/
|
||||||
|
|
||||||
# Install dependencies (no cache mount — Kaniko builds are ephemeral in CI)
|
# Install dependencies (no cache mount — Kaniko builds are ephemeral in CI)
|
||||||
RUN pnpm install --frozen-lockfile
|
# Then explicitly rebuild node-pty from source since pnpm may skip postinstall
|
||||||
|
# scripts or fail to find prebuilt binaries for this Node.js version
|
||||||
|
RUN pnpm install --frozen-lockfile \
|
||||||
|
&& cd node_modules/.pnpm/node-pty@*/node_modules/node-pty \
|
||||||
|
&& npx node-gyp rebuild 2>&1 || true
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Builder stage
|
# Builder stage
|
||||||
@@ -58,7 +68,11 @@ FROM node:24-slim AS production
|
|||||||
ADD https://github.com/Yelp/dumb-init/releases/download/v1.2.5/dumb-init_1.2.5_x86_64 /usr/local/bin/dumb-init
|
ADD https://github.com/Yelp/dumb-init/releases/download/v1.2.5/dumb-init_1.2.5_x86_64 /usr/local/bin/dumb-init
|
||||||
|
|
||||||
# Single RUN to minimize Kaniko filesystem snapshots (each RUN = full snapshot)
|
# Single RUN to minimize Kaniko filesystem snapshots (each RUN = full snapshot)
|
||||||
RUN rm -rf /usr/local/lib/node_modules/npm /usr/local/bin/npm /usr/local/bin/npx \
|
# - openssl: Prisma engine detection requires libssl
|
||||||
|
# - No build tools needed here — native addons are compiled in the deps stage
|
||||||
|
RUN apt-get update && apt-get install -y --no-install-recommends openssl \
|
||||||
|
&& rm -rf /var/lib/apt/lists/* \
|
||||||
|
&& rm -rf /usr/local/lib/node_modules/npm /usr/local/bin/npm /usr/local/bin/npx \
|
||||||
&& chmod 755 /usr/local/bin/dumb-init \
|
&& chmod 755 /usr/local/bin/dumb-init \
|
||||||
&& groupadd -g 1001 nodejs && useradd -m -u 1001 -g nodejs nestjs
|
&& groupadd -g 1001 nodejs && useradd -m -u 1001 -g nodejs nestjs
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/api",
|
"name": "@mosaic/api",
|
||||||
"version": "0.0.1",
|
"version": "0.0.20",
|
||||||
"private": true,
|
"private": true,
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "nest build",
|
"build": "nest build",
|
||||||
@@ -52,6 +52,7 @@
|
|||||||
"adm-zip": "^0.5.16",
|
"adm-zip": "^0.5.16",
|
||||||
"archiver": "^7.0.1",
|
"archiver": "^7.0.1",
|
||||||
"axios": "^1.13.5",
|
"axios": "^1.13.5",
|
||||||
|
"bcryptjs": "^3.0.3",
|
||||||
"better-auth": "^1.4.17",
|
"better-auth": "^1.4.17",
|
||||||
"bullmq": "^5.67.2",
|
"bullmq": "^5.67.2",
|
||||||
"class-transformer": "^0.5.1",
|
"class-transformer": "^0.5.1",
|
||||||
@@ -85,6 +86,7 @@
|
|||||||
"@swc/core": "^1.10.18",
|
"@swc/core": "^1.10.18",
|
||||||
"@types/adm-zip": "^0.5.7",
|
"@types/adm-zip": "^0.5.7",
|
||||||
"@types/archiver": "^7.0.0",
|
"@types/archiver": "^7.0.0",
|
||||||
|
"@types/bcryptjs": "^3.0.0",
|
||||||
"@types/cookie-parser": "^1.4.10",
|
"@types/cookie-parser": "^1.4.10",
|
||||||
"@types/express": "^5.0.1",
|
"@types/express": "^5.0.1",
|
||||||
"@types/highlight.js": "^10.1.0",
|
"@types/highlight.js": "^10.1.0",
|
||||||
|
|||||||
@@ -0,0 +1,3 @@
|
|||||||
|
-- AlterTable: add tone and formality_level columns to personalities
|
||||||
|
ALTER TABLE "personalities" ADD COLUMN "tone" TEXT NOT NULL DEFAULT 'neutral';
|
||||||
|
ALTER TABLE "personalities" ADD COLUMN "formality_level" "FormalityLevel" NOT NULL DEFAULT 'NEUTRAL';
|
||||||
@@ -0,0 +1,24 @@
|
|||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "agent_memories" (
|
||||||
|
"id" UUID NOT NULL,
|
||||||
|
"workspace_id" UUID NOT NULL,
|
||||||
|
"agent_id" TEXT NOT NULL,
|
||||||
|
"key" TEXT NOT NULL,
|
||||||
|
"value" JSONB NOT NULL,
|
||||||
|
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "agent_memories_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "agent_memories_workspace_id_agent_id_key_key" ON "agent_memories"("workspace_id", "agent_id", "key");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "agent_memories_workspace_id_idx" ON "agent_memories"("workspace_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "agent_memories_agent_id_idx" ON "agent_memories"("agent_id");
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "agent_memories" ADD CONSTRAINT "agent_memories_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
@@ -0,0 +1,33 @@
|
|||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "conversation_archives" (
|
||||||
|
"id" UUID NOT NULL,
|
||||||
|
"workspace_id" UUID NOT NULL,
|
||||||
|
"session_id" TEXT NOT NULL,
|
||||||
|
"agent_id" TEXT NOT NULL,
|
||||||
|
"messages" JSONB NOT NULL,
|
||||||
|
"message_count" INTEGER NOT NULL,
|
||||||
|
"summary" TEXT NOT NULL,
|
||||||
|
"embedding" vector(1536),
|
||||||
|
"started_at" TIMESTAMPTZ NOT NULL,
|
||||||
|
"ended_at" TIMESTAMPTZ,
|
||||||
|
"metadata" JSONB NOT NULL DEFAULT '{}',
|
||||||
|
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "conversation_archives_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "conversation_archives_workspace_id_session_id_key" ON "conversation_archives"("workspace_id", "session_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "conversation_archives_workspace_id_idx" ON "conversation_archives"("workspace_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "conversation_archives_agent_id_idx" ON "conversation_archives"("agent_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "conversation_archives_started_at_idx" ON "conversation_archives"("started_at");
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "conversation_archives" ADD CONSTRAINT "conversation_archives_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
@@ -0,0 +1,109 @@
|
|||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "SystemConfig" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"key" TEXT NOT NULL,
|
||||||
|
"value" TEXT NOT NULL,
|
||||||
|
"encrypted" BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "SystemConfig_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "BreakglassUser" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"username" TEXT NOT NULL,
|
||||||
|
"passwordHash" TEXT NOT NULL,
|
||||||
|
"isActive" BOOLEAN NOT NULL DEFAULT true,
|
||||||
|
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "BreakglassUser_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "LlmProvider" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"userId" TEXT NOT NULL,
|
||||||
|
"name" TEXT NOT NULL,
|
||||||
|
"displayName" TEXT NOT NULL,
|
||||||
|
"type" TEXT NOT NULL,
|
||||||
|
"baseUrl" TEXT,
|
||||||
|
"apiKey" TEXT,
|
||||||
|
"apiType" TEXT NOT NULL DEFAULT 'openai-completions',
|
||||||
|
"models" JSONB NOT NULL DEFAULT '[]',
|
||||||
|
"isActive" BOOLEAN NOT NULL DEFAULT true,
|
||||||
|
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "LlmProvider_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "UserContainer" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"userId" TEXT NOT NULL,
|
||||||
|
"containerId" TEXT,
|
||||||
|
"containerName" TEXT NOT NULL,
|
||||||
|
"gatewayPort" INTEGER,
|
||||||
|
"gatewayToken" TEXT NOT NULL,
|
||||||
|
"status" TEXT NOT NULL DEFAULT 'stopped',
|
||||||
|
"lastActiveAt" TIMESTAMP(3),
|
||||||
|
"idleTimeoutMin" INTEGER NOT NULL DEFAULT 30,
|
||||||
|
"config" JSONB NOT NULL DEFAULT '{}',
|
||||||
|
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "UserContainer_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "SystemContainer" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"name" TEXT NOT NULL,
|
||||||
|
"role" TEXT NOT NULL,
|
||||||
|
"containerId" TEXT,
|
||||||
|
"gatewayPort" INTEGER,
|
||||||
|
"gatewayToken" TEXT NOT NULL,
|
||||||
|
"status" TEXT NOT NULL DEFAULT 'stopped',
|
||||||
|
"primaryModel" TEXT NOT NULL,
|
||||||
|
"isActive" BOOLEAN NOT NULL DEFAULT true,
|
||||||
|
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "SystemContainer_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "UserAgentConfig" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"userId" TEXT NOT NULL,
|
||||||
|
"primaryModel" TEXT,
|
||||||
|
"fallbackModels" JSONB NOT NULL DEFAULT '[]',
|
||||||
|
"personality" TEXT,
|
||||||
|
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "UserAgentConfig_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "SystemConfig_key_key" ON "SystemConfig"("key");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "BreakglassUser_username_key" ON "BreakglassUser"("username");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "LlmProvider_userId_idx" ON "LlmProvider"("userId");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "LlmProvider_userId_name_key" ON "LlmProvider"("userId", "name");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "UserContainer_userId_key" ON "UserContainer"("userId");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "SystemContainer_name_key" ON "SystemContainer"("name");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "UserAgentConfig_userId_key" ON "UserAgentConfig"("userId");
|
||||||
@@ -0,0 +1,37 @@
|
|||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "findings" (
|
||||||
|
"id" UUID NOT NULL,
|
||||||
|
"workspace_id" UUID NOT NULL,
|
||||||
|
"task_id" UUID,
|
||||||
|
"agent_id" TEXT NOT NULL,
|
||||||
|
"type" TEXT NOT NULL,
|
||||||
|
"title" TEXT NOT NULL,
|
||||||
|
"data" JSONB NOT NULL,
|
||||||
|
"summary" TEXT NOT NULL,
|
||||||
|
"embedding" vector(1536),
|
||||||
|
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "findings_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "findings_id_workspace_id_key" ON "findings"("id", "workspace_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "findings_workspace_id_idx" ON "findings"("workspace_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "findings_agent_id_idx" ON "findings"("agent_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "findings_type_idx" ON "findings"("type");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "findings_task_id_idx" ON "findings"("task_id");
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "findings" ADD CONSTRAINT "findings_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "findings" ADD CONSTRAINT "findings_task_id_fkey" FOREIGN KEY ("task_id") REFERENCES "agent_tasks"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
-- AlterTable
|
||||||
|
ALTER TABLE "tasks" ADD COLUMN "assigned_agent" TEXT;
|
||||||
@@ -3,6 +3,7 @@
|
|||||||
|
|
||||||
generator client {
|
generator client {
|
||||||
provider = "prisma-client-js"
|
provider = "prisma-client-js"
|
||||||
|
binaryTargets = ["native", "debian-openssl-3.0.x"]
|
||||||
previewFeatures = ["postgresqlExtensions"]
|
previewFeatures = ["postgresqlExtensions"]
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -226,6 +227,14 @@ model User {
|
|||||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||||
|
|
||||||
|
// MS21: Admin, local auth, and invitation fields
|
||||||
|
deactivatedAt DateTime? @map("deactivated_at") @db.Timestamptz
|
||||||
|
isLocalAuth Boolean @default(false) @map("is_local_auth")
|
||||||
|
passwordHash String? @map("password_hash")
|
||||||
|
invitedBy String? @map("invited_by") @db.Uuid
|
||||||
|
invitationToken String? @unique @map("invitation_token")
|
||||||
|
invitedAt DateTime? @map("invited_at") @db.Timestamptz
|
||||||
|
|
||||||
// Relations
|
// Relations
|
||||||
ownedWorkspaces Workspace[] @relation("WorkspaceOwner")
|
ownedWorkspaces Workspace[] @relation("WorkspaceOwner")
|
||||||
workspaceMemberships WorkspaceMember[]
|
workspaceMemberships WorkspaceMember[]
|
||||||
@@ -289,6 +298,8 @@ model Workspace {
|
|||||||
agents Agent[]
|
agents Agent[]
|
||||||
agentSessions AgentSession[]
|
agentSessions AgentSession[]
|
||||||
agentTasks AgentTask[]
|
agentTasks AgentTask[]
|
||||||
|
findings Finding[]
|
||||||
|
agentMemories AgentMemory[]
|
||||||
userLayouts UserLayout[]
|
userLayouts UserLayout[]
|
||||||
knowledgeEntries KnowledgeEntry[]
|
knowledgeEntries KnowledgeEntry[]
|
||||||
knowledgeTags KnowledgeTag[]
|
knowledgeTags KnowledgeTag[]
|
||||||
@@ -303,6 +314,7 @@ model Workspace {
|
|||||||
llmUsageLogs LlmUsageLog[]
|
llmUsageLogs LlmUsageLog[]
|
||||||
userCredentials UserCredential[]
|
userCredentials UserCredential[]
|
||||||
terminalSessions TerminalSession[]
|
terminalSessions TerminalSession[]
|
||||||
|
conversationArchives ConversationArchive[]
|
||||||
|
|
||||||
@@index([ownerId])
|
@@index([ownerId])
|
||||||
@@map("workspaces")
|
@@map("workspaces")
|
||||||
@@ -367,6 +379,7 @@ model Task {
|
|||||||
creatorId String @map("creator_id") @db.Uuid
|
creatorId String @map("creator_id") @db.Uuid
|
||||||
projectId String? @map("project_id") @db.Uuid
|
projectId String? @map("project_id") @db.Uuid
|
||||||
parentId String? @map("parent_id") @db.Uuid
|
parentId String? @map("parent_id") @db.Uuid
|
||||||
|
assignedAgent String? @map("assigned_agent")
|
||||||
domainId String? @map("domain_id") @db.Uuid
|
domainId String? @map("domain_id") @db.Uuid
|
||||||
sortOrder Int @default(0) @map("sort_order")
|
sortOrder Int @default(0) @map("sort_order")
|
||||||
metadata Json @default("{}")
|
metadata Json @default("{}")
|
||||||
@@ -680,6 +693,7 @@ model AgentTask {
|
|||||||
createdBy User @relation("AgentTaskCreator", fields: [createdById], references: [id], onDelete: Cascade)
|
createdBy User @relation("AgentTaskCreator", fields: [createdById], references: [id], onDelete: Cascade)
|
||||||
createdById String @map("created_by_id") @db.Uuid
|
createdById String @map("created_by_id") @db.Uuid
|
||||||
runnerJobs RunnerJob[]
|
runnerJobs RunnerJob[]
|
||||||
|
findings Finding[]
|
||||||
|
|
||||||
@@unique([id, workspaceId])
|
@@unique([id, workspaceId])
|
||||||
@@index([workspaceId])
|
@@index([workspaceId])
|
||||||
@@ -689,6 +703,33 @@ model AgentTask {
|
|||||||
@@map("agent_tasks")
|
@@map("agent_tasks")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
model Finding {
|
||||||
|
id String @id @default(uuid()) @db.Uuid
|
||||||
|
workspaceId String @map("workspace_id") @db.Uuid
|
||||||
|
taskId String? @map("task_id") @db.Uuid
|
||||||
|
|
||||||
|
agentId String @map("agent_id")
|
||||||
|
type String
|
||||||
|
title String
|
||||||
|
data Json
|
||||||
|
summary String @db.Text
|
||||||
|
// Note: vector dimension (1536) must match EMBEDDING_DIMENSION constant in @mosaic/shared
|
||||||
|
embedding Unsupported("vector(1536)")?
|
||||||
|
|
||||||
|
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||||
|
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||||
|
|
||||||
|
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||||
|
task AgentTask? @relation(fields: [taskId], references: [id], onDelete: SetNull)
|
||||||
|
|
||||||
|
@@unique([id, workspaceId])
|
||||||
|
@@index([workspaceId])
|
||||||
|
@@index([agentId])
|
||||||
|
@@index([type])
|
||||||
|
@@index([taskId])
|
||||||
|
@@map("findings")
|
||||||
|
}
|
||||||
|
|
||||||
model AgentSession {
|
model AgentSession {
|
||||||
id String @id @default(uuid()) @db.Uuid
|
id String @id @default(uuid()) @db.Uuid
|
||||||
workspaceId String @map("workspace_id") @db.Uuid
|
workspaceId String @map("workspace_id") @db.Uuid
|
||||||
@@ -726,6 +767,23 @@ model AgentSession {
|
|||||||
@@map("agent_sessions")
|
@@map("agent_sessions")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
model AgentMemory {
|
||||||
|
id String @id @default(uuid()) @db.Uuid
|
||||||
|
workspaceId String @map("workspace_id") @db.Uuid
|
||||||
|
agentId String @map("agent_id")
|
||||||
|
key String
|
||||||
|
value Json
|
||||||
|
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||||
|
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||||
|
|
||||||
|
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
|
@@unique([workspaceId, agentId, key])
|
||||||
|
@@index([workspaceId])
|
||||||
|
@@index([agentId])
|
||||||
|
@@map("agent_memories")
|
||||||
|
}
|
||||||
|
|
||||||
model WidgetDefinition {
|
model WidgetDefinition {
|
||||||
id String @id @default(uuid()) @db.Uuid
|
id String @id @default(uuid()) @db.Uuid
|
||||||
|
|
||||||
@@ -1067,6 +1125,10 @@ model Personality {
|
|||||||
displayName String @map("display_name")
|
displayName String @map("display_name")
|
||||||
description String? @db.Text
|
description String? @db.Text
|
||||||
|
|
||||||
|
// Tone and formality
|
||||||
|
tone String @default("neutral")
|
||||||
|
formalityLevel FormalityLevel @default(NEUTRAL) @map("formality_level")
|
||||||
|
|
||||||
// System prompt
|
// System prompt
|
||||||
systemPrompt String @map("system_prompt") @db.Text
|
systemPrompt String @map("system_prompt") @db.Text
|
||||||
|
|
||||||
@@ -1533,3 +1595,111 @@ model TerminalSession {
|
|||||||
@@index([workspaceId, status])
|
@@index([workspaceId, status])
|
||||||
@@map("terminal_sessions")
|
@@map("terminal_sessions")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================
|
||||||
|
// CONVERSATION ARCHIVE MODULE
|
||||||
|
// ============================================
|
||||||
|
|
||||||
|
model ConversationArchive {
|
||||||
|
id String @id @default(uuid()) @db.Uuid
|
||||||
|
workspaceId String @map("workspace_id") @db.Uuid
|
||||||
|
sessionId String @map("session_id")
|
||||||
|
agentId String @map("agent_id")
|
||||||
|
messages Json
|
||||||
|
messageCount Int @map("message_count")
|
||||||
|
summary String @db.Text
|
||||||
|
// Note: vector dimension (1536) must match EMBEDDING_DIMENSION constant in @mosaic/shared
|
||||||
|
embedding Unsupported("vector(1536)")?
|
||||||
|
startedAt DateTime @map("started_at") @db.Timestamptz
|
||||||
|
endedAt DateTime? @map("ended_at") @db.Timestamptz
|
||||||
|
metadata Json @default("{}")
|
||||||
|
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||||
|
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||||
|
|
||||||
|
// Relations
|
||||||
|
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
|
@@unique([workspaceId, sessionId])
|
||||||
|
@@index([workspaceId])
|
||||||
|
@@index([agentId])
|
||||||
|
@@index([startedAt])
|
||||||
|
@@map("conversation_archives")
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================
|
||||||
|
// AGENT FLEET MODULE
|
||||||
|
// ============================================
|
||||||
|
|
||||||
|
model SystemConfig {
|
||||||
|
id String @id @default(cuid())
|
||||||
|
key String @unique
|
||||||
|
value String
|
||||||
|
encrypted Boolean @default(false)
|
||||||
|
updatedAt DateTime @updatedAt
|
||||||
|
}
|
||||||
|
|
||||||
|
model BreakglassUser {
|
||||||
|
id String @id @default(cuid())
|
||||||
|
username String @unique
|
||||||
|
passwordHash String
|
||||||
|
isActive Boolean @default(true)
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
updatedAt DateTime @updatedAt
|
||||||
|
}
|
||||||
|
|
||||||
|
model LlmProvider {
|
||||||
|
id String @id @default(cuid())
|
||||||
|
userId String
|
||||||
|
name String
|
||||||
|
displayName String
|
||||||
|
type String
|
||||||
|
baseUrl String?
|
||||||
|
apiKey String?
|
||||||
|
apiType String @default("openai-completions")
|
||||||
|
models Json @default("[]")
|
||||||
|
isActive Boolean @default(true)
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
updatedAt DateTime @updatedAt
|
||||||
|
|
||||||
|
@@unique([userId, name])
|
||||||
|
@@index([userId])
|
||||||
|
}
|
||||||
|
|
||||||
|
model UserContainer {
|
||||||
|
id String @id @default(cuid())
|
||||||
|
userId String @unique
|
||||||
|
containerId String?
|
||||||
|
containerName String
|
||||||
|
gatewayPort Int?
|
||||||
|
gatewayToken String
|
||||||
|
status String @default("stopped")
|
||||||
|
lastActiveAt DateTime?
|
||||||
|
idleTimeoutMin Int @default(30)
|
||||||
|
config Json @default("{}")
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
updatedAt DateTime @updatedAt
|
||||||
|
}
|
||||||
|
|
||||||
|
model SystemContainer {
|
||||||
|
id String @id @default(cuid())
|
||||||
|
name String @unique
|
||||||
|
role String
|
||||||
|
containerId String?
|
||||||
|
gatewayPort Int?
|
||||||
|
gatewayToken String
|
||||||
|
status String @default("stopped")
|
||||||
|
primaryModel String
|
||||||
|
isActive Boolean @default(true)
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
updatedAt DateTime @updatedAt
|
||||||
|
}
|
||||||
|
|
||||||
|
model UserAgentConfig {
|
||||||
|
id String @id @default(cuid())
|
||||||
|
userId String @unique
|
||||||
|
primaryModel String?
|
||||||
|
fallbackModels Json @default("[]")
|
||||||
|
personality String?
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
updatedAt DateTime @updatedAt
|
||||||
|
}
|
||||||
|
|||||||
258
apps/api/src/admin/admin.controller.spec.ts
Normal file
258
apps/api/src/admin/admin.controller.spec.ts
Normal file
@@ -0,0 +1,258 @@
|
|||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { AdminController } from "./admin.controller";
|
||||||
|
import { AdminService } from "./admin.service";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { AdminGuard } from "../auth/guards/admin.guard";
|
||||||
|
import { WorkspaceMemberRole } from "@prisma/client";
|
||||||
|
import type { ExecutionContext } from "@nestjs/common";
|
||||||
|
|
||||||
|
describe("AdminController", () => {
|
||||||
|
let controller: AdminController;
|
||||||
|
let service: AdminService;
|
||||||
|
|
||||||
|
const mockAdminService = {
|
||||||
|
listUsers: vi.fn(),
|
||||||
|
inviteUser: vi.fn(),
|
||||||
|
updateUser: vi.fn(),
|
||||||
|
deactivateUser: vi.fn(),
|
||||||
|
createWorkspace: vi.fn(),
|
||||||
|
updateWorkspace: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockAuthGuard = {
|
||||||
|
canActivate: vi.fn((context: ExecutionContext) => {
|
||||||
|
const request = context.switchToHttp().getRequest();
|
||||||
|
request.user = {
|
||||||
|
id: "550e8400-e29b-41d4-a716-446655440001",
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Admin User",
|
||||||
|
};
|
||||||
|
return true;
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockAdminGuard = {
|
||||||
|
canActivate: vi.fn(() => true),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockAdminId = "550e8400-e29b-41d4-a716-446655440001";
|
||||||
|
const mockUserId = "550e8400-e29b-41d4-a716-446655440002";
|
||||||
|
const mockWorkspaceId = "550e8400-e29b-41d4-a716-446655440003";
|
||||||
|
|
||||||
|
const mockAdminUser = {
|
||||||
|
id: mockAdminId,
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Admin User",
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockUserResponse = {
|
||||||
|
id: mockUserId,
|
||||||
|
name: "Test User",
|
||||||
|
email: "test@example.com",
|
||||||
|
emailVerified: false,
|
||||||
|
image: null,
|
||||||
|
createdAt: new Date("2026-01-01"),
|
||||||
|
deactivatedAt: null,
|
||||||
|
isLocalAuth: false,
|
||||||
|
invitedAt: null,
|
||||||
|
invitedBy: null,
|
||||||
|
workspaceMemberships: [],
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockWorkspaceResponse = {
|
||||||
|
id: mockWorkspaceId,
|
||||||
|
name: "Test Workspace",
|
||||||
|
ownerId: mockAdminId,
|
||||||
|
settings: {},
|
||||||
|
createdAt: new Date("2026-01-01"),
|
||||||
|
updatedAt: new Date("2026-01-01"),
|
||||||
|
memberCount: 1,
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
controllers: [AdminController],
|
||||||
|
providers: [
|
||||||
|
{
|
||||||
|
provide: AdminService,
|
||||||
|
useValue: mockAdminService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
})
|
||||||
|
.overrideGuard(AuthGuard)
|
||||||
|
.useValue(mockAuthGuard)
|
||||||
|
.overrideGuard(AdminGuard)
|
||||||
|
.useValue(mockAdminGuard)
|
||||||
|
.compile();
|
||||||
|
|
||||||
|
controller = module.get<AdminController>(AdminController);
|
||||||
|
service = module.get<AdminService>(AdminService);
|
||||||
|
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should be defined", () => {
|
||||||
|
expect(controller).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("listUsers", () => {
|
||||||
|
it("should return paginated users", async () => {
|
||||||
|
const paginatedResult = {
|
||||||
|
data: [mockUserResponse],
|
||||||
|
meta: { total: 1, page: 1, limit: 50, totalPages: 1 },
|
||||||
|
};
|
||||||
|
mockAdminService.listUsers.mockResolvedValue(paginatedResult);
|
||||||
|
|
||||||
|
const result = await controller.listUsers({ page: 1, limit: 50 });
|
||||||
|
|
||||||
|
expect(result).toEqual(paginatedResult);
|
||||||
|
expect(service.listUsers).toHaveBeenCalledWith(1, 50);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use default pagination", async () => {
|
||||||
|
const paginatedResult = {
|
||||||
|
data: [],
|
||||||
|
meta: { total: 0, page: 1, limit: 50, totalPages: 0 },
|
||||||
|
};
|
||||||
|
mockAdminService.listUsers.mockResolvedValue(paginatedResult);
|
||||||
|
|
||||||
|
await controller.listUsers({});
|
||||||
|
|
||||||
|
expect(service.listUsers).toHaveBeenCalledWith(undefined, undefined);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("inviteUser", () => {
|
||||||
|
it("should invite a user", async () => {
|
||||||
|
const inviteDto = { email: "new@example.com" };
|
||||||
|
const invitationResponse = {
|
||||||
|
userId: "new-id",
|
||||||
|
invitationToken: "token",
|
||||||
|
email: "new@example.com",
|
||||||
|
invitedAt: new Date(),
|
||||||
|
};
|
||||||
|
mockAdminService.inviteUser.mockResolvedValue(invitationResponse);
|
||||||
|
|
||||||
|
const result = await controller.inviteUser(inviteDto, mockAdminUser);
|
||||||
|
|
||||||
|
expect(result).toEqual(invitationResponse);
|
||||||
|
expect(service.inviteUser).toHaveBeenCalledWith(inviteDto, mockAdminId);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should invite a user with workspace and role", async () => {
|
||||||
|
const inviteDto = {
|
||||||
|
email: "new@example.com",
|
||||||
|
workspaceId: mockWorkspaceId,
|
||||||
|
role: WorkspaceMemberRole.ADMIN,
|
||||||
|
};
|
||||||
|
mockAdminService.inviteUser.mockResolvedValue({
|
||||||
|
userId: "new-id",
|
||||||
|
invitationToken: "token",
|
||||||
|
email: "new@example.com",
|
||||||
|
invitedAt: new Date(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await controller.inviteUser(inviteDto, mockAdminUser);
|
||||||
|
|
||||||
|
expect(service.inviteUser).toHaveBeenCalledWith(inviteDto, mockAdminId);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("updateUser", () => {
|
||||||
|
it("should update a user", async () => {
|
||||||
|
const updateDto = { name: "Updated Name" };
|
||||||
|
mockAdminService.updateUser.mockResolvedValue({
|
||||||
|
...mockUserResponse,
|
||||||
|
name: "Updated Name",
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await controller.updateUser(mockUserId, updateDto);
|
||||||
|
|
||||||
|
expect(result.name).toBe("Updated Name");
|
||||||
|
expect(service.updateUser).toHaveBeenCalledWith(mockUserId, updateDto);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should deactivate a user via update", async () => {
|
||||||
|
const deactivatedAt = "2026-02-28T00:00:00.000Z";
|
||||||
|
const updateDto = { deactivatedAt };
|
||||||
|
mockAdminService.updateUser.mockResolvedValue({
|
||||||
|
...mockUserResponse,
|
||||||
|
deactivatedAt: new Date(deactivatedAt),
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await controller.updateUser(mockUserId, updateDto);
|
||||||
|
|
||||||
|
expect(result.deactivatedAt).toEqual(new Date(deactivatedAt));
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("deactivateUser", () => {
|
||||||
|
it("should soft-delete a user", async () => {
|
||||||
|
mockAdminService.deactivateUser.mockResolvedValue({
|
||||||
|
...mockUserResponse,
|
||||||
|
deactivatedAt: new Date(),
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await controller.deactivateUser(mockUserId);
|
||||||
|
|
||||||
|
expect(result.deactivatedAt).toBeDefined();
|
||||||
|
expect(service.deactivateUser).toHaveBeenCalledWith(mockUserId);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("createWorkspace", () => {
|
||||||
|
it("should create a workspace", async () => {
|
||||||
|
const createDto = { name: "New Workspace", ownerId: mockAdminId };
|
||||||
|
mockAdminService.createWorkspace.mockResolvedValue(mockWorkspaceResponse);
|
||||||
|
|
||||||
|
const result = await controller.createWorkspace(createDto);
|
||||||
|
|
||||||
|
expect(result).toEqual(mockWorkspaceResponse);
|
||||||
|
expect(service.createWorkspace).toHaveBeenCalledWith(createDto);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should create workspace with settings", async () => {
|
||||||
|
const createDto = {
|
||||||
|
name: "New Workspace",
|
||||||
|
ownerId: mockAdminId,
|
||||||
|
settings: { feature: true },
|
||||||
|
};
|
||||||
|
mockAdminService.createWorkspace.mockResolvedValue({
|
||||||
|
...mockWorkspaceResponse,
|
||||||
|
settings: { feature: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await controller.createWorkspace(createDto);
|
||||||
|
|
||||||
|
expect(result.settings).toEqual({ feature: true });
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("updateWorkspace", () => {
|
||||||
|
it("should update a workspace", async () => {
|
||||||
|
const updateDto = { name: "Updated Workspace" };
|
||||||
|
mockAdminService.updateWorkspace.mockResolvedValue({
|
||||||
|
...mockWorkspaceResponse,
|
||||||
|
name: "Updated Workspace",
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await controller.updateWorkspace(mockWorkspaceId, updateDto);
|
||||||
|
|
||||||
|
expect(result.name).toBe("Updated Workspace");
|
||||||
|
expect(service.updateWorkspace).toHaveBeenCalledWith(mockWorkspaceId, updateDto);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should update workspace settings", async () => {
|
||||||
|
const updateDto = { settings: { notifications: false } };
|
||||||
|
mockAdminService.updateWorkspace.mockResolvedValue({
|
||||||
|
...mockWorkspaceResponse,
|
||||||
|
settings: { notifications: false },
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await controller.updateWorkspace(mockWorkspaceId, updateDto);
|
||||||
|
|
||||||
|
expect(result.settings).toEqual({ notifications: false });
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
64
apps/api/src/admin/admin.controller.ts
Normal file
64
apps/api/src/admin/admin.controller.ts
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
import {
|
||||||
|
Controller,
|
||||||
|
Get,
|
||||||
|
Post,
|
||||||
|
Patch,
|
||||||
|
Delete,
|
||||||
|
Body,
|
||||||
|
Param,
|
||||||
|
Query,
|
||||||
|
UseGuards,
|
||||||
|
ParseUUIDPipe,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { AdminService } from "./admin.service";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { AdminGuard } from "../auth/guards/admin.guard";
|
||||||
|
import { CurrentUser } from "../auth/decorators/current-user.decorator";
|
||||||
|
import type { AuthUser } from "@mosaic/shared";
|
||||||
|
import { InviteUserDto } from "./dto/invite-user.dto";
|
||||||
|
import { UpdateUserDto } from "./dto/update-user.dto";
|
||||||
|
import { CreateWorkspaceDto } from "./dto/create-workspace.dto";
|
||||||
|
import { UpdateWorkspaceDto } from "./dto/update-workspace.dto";
|
||||||
|
import { QueryUsersDto } from "./dto/query-users.dto";
|
||||||
|
|
||||||
|
@Controller("admin")
|
||||||
|
@UseGuards(AuthGuard, AdminGuard)
|
||||||
|
export class AdminController {
|
||||||
|
constructor(private readonly adminService: AdminService) {}
|
||||||
|
|
||||||
|
@Get("users")
|
||||||
|
async listUsers(@Query() query: QueryUsersDto) {
|
||||||
|
return this.adminService.listUsers(query.page, query.limit);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Post("users/invite")
|
||||||
|
async inviteUser(@Body() dto: InviteUserDto, @CurrentUser() user: AuthUser) {
|
||||||
|
return this.adminService.inviteUser(dto, user.id);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Patch("users/:id")
|
||||||
|
async updateUser(
|
||||||
|
@Param("id", new ParseUUIDPipe({ version: "4" })) id: string,
|
||||||
|
@Body() dto: UpdateUserDto
|
||||||
|
) {
|
||||||
|
return this.adminService.updateUser(id, dto);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Delete("users/:id")
|
||||||
|
async deactivateUser(@Param("id", new ParseUUIDPipe({ version: "4" })) id: string) {
|
||||||
|
return this.adminService.deactivateUser(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Post("workspaces")
|
||||||
|
async createWorkspace(@Body() dto: CreateWorkspaceDto) {
|
||||||
|
return this.adminService.createWorkspace(dto);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Patch("workspaces/:id")
|
||||||
|
async updateWorkspace(
|
||||||
|
@Param("id", new ParseUUIDPipe({ version: "4" })) id: string,
|
||||||
|
@Body() dto: UpdateWorkspaceDto
|
||||||
|
) {
|
||||||
|
return this.adminService.updateWorkspace(id, dto);
|
||||||
|
}
|
||||||
|
}
|
||||||
13
apps/api/src/admin/admin.module.ts
Normal file
13
apps/api/src/admin/admin.module.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
import { Module } from "@nestjs/common";
|
||||||
|
import { AdminController } from "./admin.controller";
|
||||||
|
import { AdminService } from "./admin.service";
|
||||||
|
import { PrismaModule } from "../prisma/prisma.module";
|
||||||
|
import { AuthModule } from "../auth/auth.module";
|
||||||
|
|
||||||
|
@Module({
|
||||||
|
imports: [PrismaModule, AuthModule],
|
||||||
|
controllers: [AdminController],
|
||||||
|
providers: [AdminService],
|
||||||
|
exports: [AdminService],
|
||||||
|
})
|
||||||
|
export class AdminModule {}
|
||||||
477
apps/api/src/admin/admin.service.spec.ts
Normal file
477
apps/api/src/admin/admin.service.spec.ts
Normal file
@@ -0,0 +1,477 @@
|
|||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { AdminService } from "./admin.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { BadRequestException, ConflictException, NotFoundException } from "@nestjs/common";
|
||||||
|
import { WorkspaceMemberRole } from "@prisma/client";
|
||||||
|
|
||||||
|
describe("AdminService", () => {
|
||||||
|
let service: AdminService;
|
||||||
|
|
||||||
|
const mockPrismaService = {
|
||||||
|
user: {
|
||||||
|
findMany: vi.fn(),
|
||||||
|
findUnique: vi.fn(),
|
||||||
|
count: vi.fn(),
|
||||||
|
create: vi.fn(),
|
||||||
|
update: vi.fn(),
|
||||||
|
},
|
||||||
|
workspace: {
|
||||||
|
findUnique: vi.fn(),
|
||||||
|
create: vi.fn(),
|
||||||
|
update: vi.fn(),
|
||||||
|
},
|
||||||
|
workspaceMember: {
|
||||||
|
create: vi.fn(),
|
||||||
|
},
|
||||||
|
session: {
|
||||||
|
deleteMany: vi.fn(),
|
||||||
|
},
|
||||||
|
$transaction: vi.fn(async (ops) => {
|
||||||
|
if (typeof ops === "function") {
|
||||||
|
return ops(mockPrismaService);
|
||||||
|
}
|
||||||
|
return Promise.all(ops);
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockAdminId = "550e8400-e29b-41d4-a716-446655440001";
|
||||||
|
const mockUserId = "550e8400-e29b-41d4-a716-446655440002";
|
||||||
|
const mockWorkspaceId = "550e8400-e29b-41d4-a716-446655440003";
|
||||||
|
|
||||||
|
const mockUser = {
|
||||||
|
id: mockUserId,
|
||||||
|
name: "Test User",
|
||||||
|
email: "test@example.com",
|
||||||
|
emailVerified: false,
|
||||||
|
image: null,
|
||||||
|
createdAt: new Date("2026-01-01"),
|
||||||
|
updatedAt: new Date("2026-01-01"),
|
||||||
|
deactivatedAt: null,
|
||||||
|
isLocalAuth: false,
|
||||||
|
passwordHash: null,
|
||||||
|
invitedBy: null,
|
||||||
|
invitationToken: null,
|
||||||
|
invitedAt: null,
|
||||||
|
authProviderId: null,
|
||||||
|
preferences: {},
|
||||||
|
workspaceMemberships: [
|
||||||
|
{
|
||||||
|
workspaceId: mockWorkspaceId,
|
||||||
|
userId: mockUserId,
|
||||||
|
role: WorkspaceMemberRole.MEMBER,
|
||||||
|
joinedAt: new Date("2026-01-01"),
|
||||||
|
workspace: { id: mockWorkspaceId, name: "Test Workspace" },
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockWorkspace = {
|
||||||
|
id: mockWorkspaceId,
|
||||||
|
name: "Test Workspace",
|
||||||
|
ownerId: mockAdminId,
|
||||||
|
settings: {},
|
||||||
|
createdAt: new Date("2026-01-01"),
|
||||||
|
updatedAt: new Date("2026-01-01"),
|
||||||
|
matrixRoomId: null,
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
AdminService,
|
||||||
|
{
|
||||||
|
provide: PrismaService,
|
||||||
|
useValue: mockPrismaService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
service = module.get<AdminService>(AdminService);
|
||||||
|
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should be defined", () => {
|
||||||
|
expect(service).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("listUsers", () => {
|
||||||
|
it("should return paginated users with memberships", async () => {
|
||||||
|
mockPrismaService.user.findMany.mockResolvedValue([mockUser]);
|
||||||
|
mockPrismaService.user.count.mockResolvedValue(1);
|
||||||
|
|
||||||
|
const result = await service.listUsers(1, 50);
|
||||||
|
|
||||||
|
expect(result.data).toHaveLength(1);
|
||||||
|
expect(result.data[0]?.id).toBe(mockUserId);
|
||||||
|
expect(result.data[0]?.workspaceMemberships).toHaveLength(1);
|
||||||
|
expect(result.meta).toEqual({
|
||||||
|
total: 1,
|
||||||
|
page: 1,
|
||||||
|
limit: 50,
|
||||||
|
totalPages: 1,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use default pagination when not provided", async () => {
|
||||||
|
mockPrismaService.user.findMany.mockResolvedValue([]);
|
||||||
|
mockPrismaService.user.count.mockResolvedValue(0);
|
||||||
|
|
||||||
|
await service.listUsers();
|
||||||
|
|
||||||
|
expect(mockPrismaService.user.findMany).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
skip: 0,
|
||||||
|
take: 50,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should calculate pagination correctly", async () => {
|
||||||
|
mockPrismaService.user.findMany.mockResolvedValue([]);
|
||||||
|
mockPrismaService.user.count.mockResolvedValue(150);
|
||||||
|
|
||||||
|
const result = await service.listUsers(3, 25);
|
||||||
|
|
||||||
|
expect(mockPrismaService.user.findMany).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
skip: 50,
|
||||||
|
take: 25,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
expect(result.meta.totalPages).toBe(6);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("inviteUser", () => {
|
||||||
|
it("should create a user with invitation token", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
const createdUser = {
|
||||||
|
id: "new-user-id",
|
||||||
|
email: "new@example.com",
|
||||||
|
name: "new",
|
||||||
|
invitationToken: "some-token",
|
||||||
|
};
|
||||||
|
mockPrismaService.user.create.mockResolvedValue(createdUser);
|
||||||
|
|
||||||
|
const result = await service.inviteUser({ email: "new@example.com" }, mockAdminId);
|
||||||
|
|
||||||
|
expect(result.email).toBe("new@example.com");
|
||||||
|
expect(result.invitationToken).toBeDefined();
|
||||||
|
expect(result.userId).toBe("new-user-id");
|
||||||
|
expect(mockPrismaService.user.create).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
data: expect.objectContaining({
|
||||||
|
email: "new@example.com",
|
||||||
|
invitedBy: mockAdminId,
|
||||||
|
invitationToken: expect.any(String),
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should add user to workspace when workspaceId provided", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
mockPrismaService.workspace.findUnique.mockResolvedValue(mockWorkspace);
|
||||||
|
const createdUser = { id: "new-user-id", email: "new@example.com", name: "new" };
|
||||||
|
mockPrismaService.user.create.mockResolvedValue(createdUser);
|
||||||
|
|
||||||
|
await service.inviteUser(
|
||||||
|
{
|
||||||
|
email: "new@example.com",
|
||||||
|
workspaceId: mockWorkspaceId,
|
||||||
|
role: WorkspaceMemberRole.ADMIN,
|
||||||
|
},
|
||||||
|
mockAdminId
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(mockPrismaService.workspaceMember.create).toHaveBeenCalledWith({
|
||||||
|
data: {
|
||||||
|
workspaceId: mockWorkspaceId,
|
||||||
|
userId: "new-user-id",
|
||||||
|
role: WorkspaceMemberRole.ADMIN,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw ConflictException if email already exists", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
|
||||||
|
await expect(service.inviteUser({ email: "test@example.com" }, mockAdminId)).rejects.toThrow(
|
||||||
|
ConflictException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException if workspace does not exist", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
mockPrismaService.workspace.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
service.inviteUser({ email: "new@example.com", workspaceId: "non-existent" }, mockAdminId)
|
||||||
|
).rejects.toThrow(NotFoundException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use email prefix as default name", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
const createdUser = { id: "new-user-id", email: "jane.doe@example.com", name: "jane.doe" };
|
||||||
|
mockPrismaService.user.create.mockResolvedValue(createdUser);
|
||||||
|
|
||||||
|
await service.inviteUser({ email: "jane.doe@example.com" }, mockAdminId);
|
||||||
|
|
||||||
|
expect(mockPrismaService.user.create).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
data: expect.objectContaining({
|
||||||
|
name: "jane.doe",
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use provided name when given", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
const createdUser = { id: "new-user-id", email: "j@example.com", name: "Jane Doe" };
|
||||||
|
mockPrismaService.user.create.mockResolvedValue(createdUser);
|
||||||
|
|
||||||
|
await service.inviteUser({ email: "j@example.com", name: "Jane Doe" }, mockAdminId);
|
||||||
|
|
||||||
|
expect(mockPrismaService.user.create).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
data: expect.objectContaining({
|
||||||
|
name: "Jane Doe",
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("updateUser", () => {
|
||||||
|
it("should update user fields", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
mockPrismaService.user.update.mockResolvedValue({
|
||||||
|
...mockUser,
|
||||||
|
name: "Updated Name",
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await service.updateUser(mockUserId, { name: "Updated Name" });
|
||||||
|
|
||||||
|
expect(result.name).toBe("Updated Name");
|
||||||
|
expect(mockPrismaService.user.update).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
where: { id: mockUserId },
|
||||||
|
data: { name: "Updated Name" },
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set deactivatedAt when provided", async () => {
|
||||||
|
const deactivatedAt = "2026-02-28T00:00:00.000Z";
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
mockPrismaService.user.update.mockResolvedValue({
|
||||||
|
...mockUser,
|
||||||
|
deactivatedAt: new Date(deactivatedAt),
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await service.updateUser(mockUserId, { deactivatedAt });
|
||||||
|
|
||||||
|
expect(result.deactivatedAt).toEqual(new Date(deactivatedAt));
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should clear deactivatedAt when set to null", async () => {
|
||||||
|
const deactivatedUser = { ...mockUser, deactivatedAt: new Date() };
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(deactivatedUser);
|
||||||
|
mockPrismaService.user.update.mockResolvedValue({
|
||||||
|
...deactivatedUser,
|
||||||
|
deactivatedAt: null,
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await service.updateUser(mockUserId, { deactivatedAt: null });
|
||||||
|
|
||||||
|
expect(result.deactivatedAt).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException if user does not exist", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await expect(service.updateUser("non-existent", { name: "Test" })).rejects.toThrow(
|
||||||
|
NotFoundException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should update emailVerified", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
mockPrismaService.user.update.mockResolvedValue({
|
||||||
|
...mockUser,
|
||||||
|
emailVerified: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await service.updateUser(mockUserId, { emailVerified: true });
|
||||||
|
|
||||||
|
expect(result.emailVerified).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should update preferences", async () => {
|
||||||
|
const prefs = { theme: "dark" };
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
mockPrismaService.user.update.mockResolvedValue({
|
||||||
|
...mockUser,
|
||||||
|
preferences: prefs,
|
||||||
|
});
|
||||||
|
|
||||||
|
await service.updateUser(mockUserId, { preferences: prefs });
|
||||||
|
|
||||||
|
expect(mockPrismaService.user.update).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
data: expect.objectContaining({ preferences: prefs }),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("deactivateUser", () => {
|
||||||
|
it("should set deactivatedAt and invalidate sessions", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
mockPrismaService.user.update.mockResolvedValue({
|
||||||
|
...mockUser,
|
||||||
|
deactivatedAt: new Date(),
|
||||||
|
});
|
||||||
|
mockPrismaService.session.deleteMany.mockResolvedValue({ count: 3 });
|
||||||
|
|
||||||
|
const result = await service.deactivateUser(mockUserId);
|
||||||
|
|
||||||
|
expect(result.deactivatedAt).toBeDefined();
|
||||||
|
expect(mockPrismaService.user.update).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
where: { id: mockUserId },
|
||||||
|
data: { deactivatedAt: expect.any(Date) },
|
||||||
|
})
|
||||||
|
);
|
||||||
|
expect(mockPrismaService.session.deleteMany).toHaveBeenCalledWith({ where: { userId: mockUserId } });
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException if user does not exist", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await expect(service.deactivateUser("non-existent")).rejects.toThrow(NotFoundException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw BadRequestException if user is already deactivated", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue({
|
||||||
|
...mockUser,
|
||||||
|
deactivatedAt: new Date(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(service.deactivateUser(mockUserId)).rejects.toThrow(BadRequestException);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("createWorkspace", () => {
|
||||||
|
it("should create a workspace with owner membership", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
mockPrismaService.workspace.create.mockResolvedValue(mockWorkspace);
|
||||||
|
|
||||||
|
const result = await service.createWorkspace({
|
||||||
|
name: "New Workspace",
|
||||||
|
ownerId: mockAdminId,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.name).toBe("Test Workspace");
|
||||||
|
expect(result.memberCount).toBe(1);
|
||||||
|
expect(mockPrismaService.workspace.create).toHaveBeenCalled();
|
||||||
|
expect(mockPrismaService.workspaceMember.create).toHaveBeenCalledWith({
|
||||||
|
data: {
|
||||||
|
workspaceId: mockWorkspace.id,
|
||||||
|
userId: mockAdminId,
|
||||||
|
role: WorkspaceMemberRole.OWNER,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException if owner does not exist", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
service.createWorkspace({ name: "New Workspace", ownerId: "non-existent" })
|
||||||
|
).rejects.toThrow(NotFoundException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should pass settings when provided", async () => {
|
||||||
|
const settings = { theme: "dark", features: ["chat"] };
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
mockPrismaService.workspace.create.mockResolvedValue({
|
||||||
|
...mockWorkspace,
|
||||||
|
settings,
|
||||||
|
});
|
||||||
|
|
||||||
|
await service.createWorkspace({
|
||||||
|
name: "New Workspace",
|
||||||
|
ownerId: mockAdminId,
|
||||||
|
settings,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(mockPrismaService.workspace.create).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
data: expect.objectContaining({ settings }),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("updateWorkspace", () => {
|
||||||
|
it("should update workspace name", async () => {
|
||||||
|
mockPrismaService.workspace.findUnique.mockResolvedValue(mockWorkspace);
|
||||||
|
mockPrismaService.workspace.update.mockResolvedValue({
|
||||||
|
...mockWorkspace,
|
||||||
|
name: "Updated Workspace",
|
||||||
|
_count: { members: 3 },
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await service.updateWorkspace(mockWorkspaceId, {
|
||||||
|
name: "Updated Workspace",
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.name).toBe("Updated Workspace");
|
||||||
|
expect(result.memberCount).toBe(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should update workspace settings", async () => {
|
||||||
|
const newSettings = { notifications: true };
|
||||||
|
mockPrismaService.workspace.findUnique.mockResolvedValue(mockWorkspace);
|
||||||
|
mockPrismaService.workspace.update.mockResolvedValue({
|
||||||
|
...mockWorkspace,
|
||||||
|
settings: newSettings,
|
||||||
|
_count: { members: 1 },
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await service.updateWorkspace(mockWorkspaceId, {
|
||||||
|
settings: newSettings,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.settings).toEqual(newSettings);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException if workspace does not exist", async () => {
|
||||||
|
mockPrismaService.workspace.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await expect(service.updateWorkspace("non-existent", { name: "Test" })).rejects.toThrow(
|
||||||
|
NotFoundException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should only update provided fields", async () => {
|
||||||
|
mockPrismaService.workspace.findUnique.mockResolvedValue(mockWorkspace);
|
||||||
|
mockPrismaService.workspace.update.mockResolvedValue({
|
||||||
|
...mockWorkspace,
|
||||||
|
_count: { members: 1 },
|
||||||
|
});
|
||||||
|
|
||||||
|
await service.updateWorkspace(mockWorkspaceId, { name: "Only Name" });
|
||||||
|
|
||||||
|
expect(mockPrismaService.workspace.update).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
data: { name: "Only Name" },
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
309
apps/api/src/admin/admin.service.ts
Normal file
309
apps/api/src/admin/admin.service.ts
Normal file
@@ -0,0 +1,309 @@
|
|||||||
|
import {
|
||||||
|
BadRequestException,
|
||||||
|
ConflictException,
|
||||||
|
Injectable,
|
||||||
|
Logger,
|
||||||
|
NotFoundException,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { Prisma, WorkspaceMemberRole } from "@prisma/client";
|
||||||
|
import { randomUUID } from "node:crypto";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import type { InviteUserDto } from "./dto/invite-user.dto";
|
||||||
|
import type { UpdateUserDto } from "./dto/update-user.dto";
|
||||||
|
import type { CreateWorkspaceDto } from "./dto/create-workspace.dto";
|
||||||
|
import type {
|
||||||
|
AdminUserResponse,
|
||||||
|
AdminWorkspaceResponse,
|
||||||
|
InvitationResponse,
|
||||||
|
PaginatedResponse,
|
||||||
|
} from "./types/admin.types";
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class AdminService {
|
||||||
|
private readonly logger = new Logger(AdminService.name);
|
||||||
|
|
||||||
|
constructor(private readonly prisma: PrismaService) {}
|
||||||
|
|
||||||
|
async listUsers(page = 1, limit = 50): Promise<PaginatedResponse<AdminUserResponse>> {
|
||||||
|
const skip = (page - 1) * limit;
|
||||||
|
|
||||||
|
const [users, total] = await Promise.all([
|
||||||
|
this.prisma.user.findMany({
|
||||||
|
include: {
|
||||||
|
workspaceMemberships: {
|
||||||
|
include: {
|
||||||
|
workspace: { select: { id: true, name: true } },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
orderBy: { createdAt: "desc" },
|
||||||
|
skip,
|
||||||
|
take: limit,
|
||||||
|
}),
|
||||||
|
this.prisma.user.count(),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
data: users.map((user) => ({
|
||||||
|
id: user.id,
|
||||||
|
name: user.name,
|
||||||
|
email: user.email,
|
||||||
|
emailVerified: user.emailVerified,
|
||||||
|
image: user.image,
|
||||||
|
createdAt: user.createdAt,
|
||||||
|
deactivatedAt: user.deactivatedAt,
|
||||||
|
isLocalAuth: user.isLocalAuth,
|
||||||
|
invitedAt: user.invitedAt,
|
||||||
|
invitedBy: user.invitedBy,
|
||||||
|
workspaceMemberships: user.workspaceMemberships.map((m) => ({
|
||||||
|
workspaceId: m.workspaceId,
|
||||||
|
workspaceName: m.workspace.name,
|
||||||
|
role: m.role,
|
||||||
|
joinedAt: m.joinedAt,
|
||||||
|
})),
|
||||||
|
})),
|
||||||
|
meta: {
|
||||||
|
total,
|
||||||
|
page,
|
||||||
|
limit,
|
||||||
|
totalPages: Math.ceil(total / limit),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async inviteUser(dto: InviteUserDto, inviterId: string): Promise<InvitationResponse> {
|
||||||
|
const existing = await this.prisma.user.findUnique({
|
||||||
|
where: { email: dto.email },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (existing) {
|
||||||
|
throw new ConflictException(`User with email ${dto.email} already exists`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dto.workspaceId) {
|
||||||
|
const workspace = await this.prisma.workspace.findUnique({
|
||||||
|
where: { id: dto.workspaceId },
|
||||||
|
});
|
||||||
|
if (!workspace) {
|
||||||
|
throw new NotFoundException(`Workspace ${dto.workspaceId} not found`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const invitationToken = randomUUID();
|
||||||
|
const now = new Date();
|
||||||
|
|
||||||
|
const user = await this.prisma.$transaction(async (tx) => {
|
||||||
|
const created = await tx.user.create({
|
||||||
|
data: {
|
||||||
|
email: dto.email,
|
||||||
|
name: dto.name ?? dto.email.split("@")[0] ?? dto.email,
|
||||||
|
emailVerified: false,
|
||||||
|
invitedBy: inviterId,
|
||||||
|
invitationToken,
|
||||||
|
invitedAt: now,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (dto.workspaceId) {
|
||||||
|
await tx.workspaceMember.create({
|
||||||
|
data: {
|
||||||
|
workspaceId: dto.workspaceId,
|
||||||
|
userId: created.id,
|
||||||
|
role: dto.role ?? WorkspaceMemberRole.MEMBER,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return created;
|
||||||
|
});
|
||||||
|
|
||||||
|
this.logger.log(`User invited: ${user.email} by ${inviterId}`);
|
||||||
|
|
||||||
|
return {
|
||||||
|
userId: user.id,
|
||||||
|
invitationToken,
|
||||||
|
email: user.email,
|
||||||
|
invitedAt: now,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async updateUser(id: string, dto: UpdateUserDto): Promise<AdminUserResponse> {
|
||||||
|
const existing = await this.prisma.user.findUnique({ where: { id } });
|
||||||
|
if (!existing) {
|
||||||
|
throw new NotFoundException(`User ${id} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const data: Prisma.UserUpdateInput = {};
|
||||||
|
|
||||||
|
if (dto.name !== undefined) {
|
||||||
|
data.name = dto.name;
|
||||||
|
}
|
||||||
|
if (dto.emailVerified !== undefined) {
|
||||||
|
data.emailVerified = dto.emailVerified;
|
||||||
|
}
|
||||||
|
if (dto.preferences !== undefined) {
|
||||||
|
data.preferences = dto.preferences as Prisma.InputJsonValue;
|
||||||
|
}
|
||||||
|
if (dto.deactivatedAt !== undefined) {
|
||||||
|
data.deactivatedAt = dto.deactivatedAt ? new Date(dto.deactivatedAt) : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const user = await this.prisma.user.update({
|
||||||
|
where: { id },
|
||||||
|
data,
|
||||||
|
include: {
|
||||||
|
workspaceMemberships: {
|
||||||
|
include: {
|
||||||
|
workspace: { select: { id: true, name: true } },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
this.logger.log(`User updated: ${id}`);
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: user.id,
|
||||||
|
name: user.name,
|
||||||
|
email: user.email,
|
||||||
|
emailVerified: user.emailVerified,
|
||||||
|
image: user.image,
|
||||||
|
createdAt: user.createdAt,
|
||||||
|
deactivatedAt: user.deactivatedAt,
|
||||||
|
isLocalAuth: user.isLocalAuth,
|
||||||
|
invitedAt: user.invitedAt,
|
||||||
|
invitedBy: user.invitedBy,
|
||||||
|
workspaceMemberships: user.workspaceMemberships.map((m) => ({
|
||||||
|
workspaceId: m.workspaceId,
|
||||||
|
workspaceName: m.workspace.name,
|
||||||
|
role: m.role,
|
||||||
|
joinedAt: m.joinedAt,
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async deactivateUser(id: string): Promise<AdminUserResponse> {
|
||||||
|
const existing = await this.prisma.user.findUnique({ where: { id } });
|
||||||
|
if (!existing) {
|
||||||
|
throw new NotFoundException(`User ${id} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (existing.deactivatedAt) {
|
||||||
|
throw new BadRequestException(`User ${id} is already deactivated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const [user] = await this.prisma.$transaction([
|
||||||
|
this.prisma.user.update({
|
||||||
|
where: { id },
|
||||||
|
data: { deactivatedAt: new Date() },
|
||||||
|
include: {
|
||||||
|
workspaceMemberships: {
|
||||||
|
include: {
|
||||||
|
workspace: { select: { id: true, name: true } },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
this.prisma.session.deleteMany({ where: { userId: id } }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
this.logger.log(`User deactivated and sessions invalidated: ${id}`);
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: user.id,
|
||||||
|
name: user.name,
|
||||||
|
email: user.email,
|
||||||
|
emailVerified: user.emailVerified,
|
||||||
|
image: user.image,
|
||||||
|
createdAt: user.createdAt,
|
||||||
|
deactivatedAt: user.deactivatedAt,
|
||||||
|
isLocalAuth: user.isLocalAuth,
|
||||||
|
invitedAt: user.invitedAt,
|
||||||
|
invitedBy: user.invitedBy,
|
||||||
|
workspaceMemberships: user.workspaceMemberships.map((m) => ({
|
||||||
|
workspaceId: m.workspaceId,
|
||||||
|
workspaceName: m.workspace.name,
|
||||||
|
role: m.role,
|
||||||
|
joinedAt: m.joinedAt,
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async createWorkspace(dto: CreateWorkspaceDto): Promise<AdminWorkspaceResponse> {
|
||||||
|
const owner = await this.prisma.user.findUnique({ where: { id: dto.ownerId } });
|
||||||
|
if (!owner) {
|
||||||
|
throw new NotFoundException(`User ${dto.ownerId} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const workspace = await this.prisma.$transaction(async (tx) => {
|
||||||
|
const created = await tx.workspace.create({
|
||||||
|
data: {
|
||||||
|
name: dto.name,
|
||||||
|
ownerId: dto.ownerId,
|
||||||
|
settings: dto.settings ? (dto.settings as Prisma.InputJsonValue) : {},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await tx.workspaceMember.create({
|
||||||
|
data: {
|
||||||
|
workspaceId: created.id,
|
||||||
|
userId: dto.ownerId,
|
||||||
|
role: WorkspaceMemberRole.OWNER,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return created;
|
||||||
|
});
|
||||||
|
|
||||||
|
this.logger.log(`Workspace created: ${workspace.id} with owner ${dto.ownerId}`);
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: workspace.id,
|
||||||
|
name: workspace.name,
|
||||||
|
ownerId: workspace.ownerId,
|
||||||
|
settings: workspace.settings as Record<string, unknown>,
|
||||||
|
createdAt: workspace.createdAt,
|
||||||
|
updatedAt: workspace.updatedAt,
|
||||||
|
memberCount: 1,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async updateWorkspace(
|
||||||
|
id: string,
|
||||||
|
dto: { name?: string; settings?: Record<string, unknown> }
|
||||||
|
): Promise<AdminWorkspaceResponse> {
|
||||||
|
const existing = await this.prisma.workspace.findUnique({ where: { id } });
|
||||||
|
if (!existing) {
|
||||||
|
throw new NotFoundException(`Workspace ${id} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const data: Prisma.WorkspaceUpdateInput = {};
|
||||||
|
|
||||||
|
if (dto.name !== undefined) {
|
||||||
|
data.name = dto.name;
|
||||||
|
}
|
||||||
|
if (dto.settings !== undefined) {
|
||||||
|
data.settings = dto.settings as Prisma.InputJsonValue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const workspace = await this.prisma.workspace.update({
|
||||||
|
where: { id },
|
||||||
|
data,
|
||||||
|
include: {
|
||||||
|
_count: { select: { members: true } },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
this.logger.log(`Workspace updated: ${id}`);
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: workspace.id,
|
||||||
|
name: workspace.name,
|
||||||
|
ownerId: workspace.ownerId,
|
||||||
|
settings: workspace.settings as Record<string, unknown>,
|
||||||
|
createdAt: workspace.createdAt,
|
||||||
|
updatedAt: workspace.updatedAt,
|
||||||
|
memberCount: workspace._count.members,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
15
apps/api/src/admin/dto/create-workspace.dto.ts
Normal file
15
apps/api/src/admin/dto/create-workspace.dto.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import { IsObject, IsOptional, IsString, IsUUID, MaxLength, MinLength } from "class-validator";
|
||||||
|
|
||||||
|
/**
 * Validated request body for creating a workspace through the admin API.
 * Consumed by AdminService.createWorkspace; validation runs via class-validator.
 */
export class CreateWorkspaceDto {
  // Workspace display name; required, 1-255 characters.
  @IsString({ message: "name must be a string" })
  @MinLength(1, { message: "name must not be empty" })
  @MaxLength(255, { message: "name must not exceed 255 characters" })
  name!: string;

  // ID of the user who will own the workspace; must be a v4 UUID.
  @IsUUID("4", { message: "ownerId must be a valid UUID" })
  ownerId!: string;

  // Optional free-form settings object persisted as JSON on the workspace.
  @IsOptional()
  @IsObject({ message: "settings must be an object" })
  settings?: Record<string, unknown>;
}
|
||||||
20
apps/api/src/admin/dto/invite-user.dto.ts
Normal file
20
apps/api/src/admin/dto/invite-user.dto.ts
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import { WorkspaceMemberRole } from "@prisma/client";
|
||||||
|
import { IsEmail, IsEnum, IsOptional, IsString, IsUUID, MaxLength } from "class-validator";
|
||||||
|
|
||||||
|
/**
 * Validated request body for inviting a user through the admin API.
 * Consumed by AdminService.inviteUser; may optionally place the invitee
 * into an existing workspace with a given role.
 */
export class InviteUserDto {
  // Email address of the invitee; must be unique (checked by the service).
  @IsEmail({}, { message: "email must be a valid email address" })
  email!: string;

  // Optional display name; the service falls back to the email local-part when omitted.
  @IsOptional()
  @IsString({ message: "name must be a string" })
  @MaxLength(255, { message: "name must not exceed 255 characters" })
  name?: string;

  // Optional workspace to add the invitee to; must be a v4 UUID of an existing workspace.
  @IsOptional()
  @IsUUID("4", { message: "workspaceId must be a valid UUID" })
  workspaceId?: string;

  // Optional membership role; the service defaults to MEMBER when omitted.
  @IsOptional()
  @IsEnum(WorkspaceMemberRole, { message: "role must be a valid WorkspaceMemberRole" })
  role?: WorkspaceMemberRole;
}
|
||||||
15
apps/api/src/admin/dto/manage-member.dto.ts
Normal file
15
apps/api/src/admin/dto/manage-member.dto.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import { WorkspaceMemberRole } from "@prisma/client";
|
||||||
|
import { IsEnum, IsUUID } from "class-validator";
|
||||||
|
|
||||||
|
/**
 * Validated request body for adding an existing user to a workspace.
 */
export class AddMemberDto {
  // ID of the user to add; must be a v4 UUID.
  @IsUUID("4", { message: "userId must be a valid UUID" })
  userId!: string;

  // Membership role to assign (required).
  @IsEnum(WorkspaceMemberRole, { message: "role must be a valid WorkspaceMemberRole" })
  role!: WorkspaceMemberRole;
}
|
||||||
|
|
||||||
|
/**
 * Validated request body for changing an existing workspace member's role.
 */
export class UpdateMemberRoleDto {
  // New membership role (required).
  @IsEnum(WorkspaceMemberRole, { message: "role must be a valid WorkspaceMemberRole" })
  role!: WorkspaceMemberRole;
}
|
||||||
17
apps/api/src/admin/dto/query-users.dto.ts
Normal file
17
apps/api/src/admin/dto/query-users.dto.ts
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
import { IsInt, IsOptional, Max, Min } from "class-validator";
|
||||||
|
import { Type } from "class-transformer";
|
||||||
|
|
||||||
|
/**
 * Validated query-string parameters for the admin user-listing endpoint.
 * `@Type(() => Number)` coerces the incoming string query values to numbers
 * before the numeric validators run.
 */
export class QueryUsersDto {
  // 1-based page number; optional (service defaults to 1).
  @IsOptional()
  @Type(() => Number)
  @IsInt({ message: "page must be an integer" })
  @Min(1, { message: "page must be at least 1" })
  page?: number;

  // Page size, 1-100; optional (service defaults to 50).
  @IsOptional()
  @Type(() => Number)
  @IsInt({ message: "limit must be an integer" })
  @Min(1, { message: "limit must be at least 1" })
  @Max(100, { message: "limit must not exceed 100" })
  limit?: number;
}
|
||||||
27
apps/api/src/admin/dto/update-user.dto.ts
Normal file
27
apps/api/src/admin/dto/update-user.dto.ts
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
import {
|
||||||
|
IsBoolean,
|
||||||
|
IsDateString,
|
||||||
|
IsObject,
|
||||||
|
IsOptional,
|
||||||
|
IsString,
|
||||||
|
MaxLength,
|
||||||
|
} from "class-validator";
|
||||||
|
|
||||||
|
/**
 * Validated partial-update body for a user via the admin API.
 * Every field is optional; AdminService.updateUser writes only fields that
 * are present on the DTO.
 */
export class UpdateUserDto {
  // New display name, up to 255 characters.
  @IsOptional()
  @IsString({ message: "name must be a string" })
  @MaxLength(255, { message: "name must not exceed 255 characters" })
  name?: string;

  // ISO 8601 timestamp to deactivate the user, or null to reactivate
  // (the service parses the string into a Date).
  @IsOptional()
  @IsDateString({}, { message: "deactivatedAt must be a valid ISO 8601 date string" })
  deactivatedAt?: string | null;

  // Override the email-verified flag.
  @IsOptional()
  @IsBoolean({ message: "emailVerified must be a boolean" })
  emailVerified?: boolean;

  // Free-form preferences object persisted as JSON on the user.
  @IsOptional()
  @IsObject({ message: "preferences must be an object" })
  preferences?: Record<string, unknown>;
}
|
||||||
13
apps/api/src/admin/dto/update-workspace.dto.ts
Normal file
13
apps/api/src/admin/dto/update-workspace.dto.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
import { IsObject, IsOptional, IsString, MaxLength, MinLength } from "class-validator";
|
||||||
|
|
||||||
|
/**
 * Validated partial-update body for a workspace via the admin API.
 * Both fields are optional; only provided fields are written.
 */
export class UpdateWorkspaceDto {
  // New workspace name; when provided, must be 1-255 characters.
  @IsOptional()
  @IsString({ message: "name must be a string" })
  @MinLength(1, { message: "name must not be empty" })
  @MaxLength(255, { message: "name must not exceed 255 characters" })
  name?: string;

  // Replacement settings object persisted as JSON on the workspace.
  @IsOptional()
  @IsObject({ message: "settings must be an object" })
  settings?: Record<string, unknown>;
}
|
||||||
49
apps/api/src/admin/types/admin.types.ts
Normal file
49
apps/api/src/admin/types/admin.types.ts
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
import type { WorkspaceMemberRole } from "@prisma/client";
|
||||||
|
|
||||||
|
/** User representation returned by admin endpoints, including all workspace memberships. */
export interface AdminUserResponse {
  id: string;
  name: string;
  email: string;
  emailVerified: boolean;
  image: string | null;
  createdAt: Date;
  // Non-null when the user has been deactivated.
  deactivatedAt: Date | null;
  isLocalAuth: boolean;
  // Set when the user was created via an admin invitation.
  invitedAt: Date | null;
  invitedBy: string | null;
  workspaceMemberships: WorkspaceMembershipResponse[];
}

/** One workspace membership of a user, flattened with the workspace's name. */
export interface WorkspaceMembershipResponse {
  workspaceId: string;
  workspaceName: string;
  role: WorkspaceMemberRole;
  joinedAt: Date;
}

/** Generic paginated list wrapper used by admin list endpoints. */
export interface PaginatedResponse<T> {
  data: T[];
  meta: {
    // Total number of records across all pages.
    total: number;
    // 1-based page number of this response.
    page: number;
    limit: number;
    totalPages: number;
  };
}

/** Result of inviting a user: the created record plus its one-time invitation token. */
export interface InvitationResponse {
  userId: string;
  invitationToken: string;
  email: string;
  invitedAt: Date;
}

/** Workspace representation returned by admin endpoints. */
export interface AdminWorkspaceResponse {
  id: string;
  name: string;
  ownerId: string;
  settings: Record<string, unknown>;
  createdAt: Date;
  updatedAt: Date;
  memberCount: number;
}
|
||||||
40
apps/api/src/agent-config/agent-config.controller.ts
Normal file
40
apps/api/src/agent-config/agent-config.controller.ts
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
import {
|
||||||
|
Controller,
|
||||||
|
ForbiddenException,
|
||||||
|
Get,
|
||||||
|
Param,
|
||||||
|
Req,
|
||||||
|
UnauthorizedException,
|
||||||
|
UseGuards,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { AgentConfigService } from "./agent-config.service";
|
||||||
|
import { AgentConfigGuard, type AgentConfigRequest } from "./agent-config.guard";
|
||||||
|
|
||||||
|
@Controller("internal")
|
||||||
|
@UseGuards(AgentConfigGuard)
|
||||||
|
export class AgentConfigController {
|
||||||
|
constructor(private readonly agentConfigService: AgentConfigService) {}
|
||||||
|
|
||||||
|
// GET /api/internal/agent-config/:id
|
||||||
|
// Auth: Bearer token (validated against UserContainer.gatewayToken or SystemContainer.gatewayToken)
|
||||||
|
// Returns: assembled openclaw.json
|
||||||
|
//
|
||||||
|
// The :id param is the container record ID (cuid)
|
||||||
|
// Token must match the container requesting its own config
|
||||||
|
@Get("agent-config/:id")
|
||||||
|
async getAgentConfig(
|
||||||
|
@Param("id") id: string,
|
||||||
|
@Req() request: AgentConfigRequest
|
||||||
|
): Promise<object> {
|
||||||
|
const containerAuth = request.containerAuth;
|
||||||
|
if (!containerAuth) {
|
||||||
|
throw new UnauthorizedException("Missing container authentication context");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (containerAuth.id !== id) {
|
||||||
|
throw new ForbiddenException("Token is not authorized for the requested container");
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.agentConfigService.generateConfigForContainer(containerAuth.type, id);
|
||||||
|
}
|
||||||
|
}
|
||||||
43
apps/api/src/agent-config/agent-config.guard.ts
Normal file
43
apps/api/src/agent-config/agent-config.guard.ts
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
import { CanActivate, ExecutionContext, Injectable, UnauthorizedException } from "@nestjs/common";
|
||||||
|
import type { Request } from "express";
|
||||||
|
import { AgentConfigService, type ContainerTokenValidation } from "./agent-config.service";
|
||||||
|
|
||||||
|
/**
 * Express request augmented with the container identity that
 * AgentConfigGuard attaches after validating the Bearer token.
 */
export interface AgentConfigRequest extends Request {
  // Present only after AgentConfigGuard has run successfully.
  containerAuth?: ContainerTokenValidation;
}
|
||||||
|
|
||||||
|
/**
 * Route guard for internal agent-config endpoints: extracts the Bearer token
 * from the Authorization header, validates it against known container gateway
 * tokens via AgentConfigService, and stashes the resolved identity on the
 * request as `containerAuth` for the controller to use.
 */
@Injectable()
export class AgentConfigGuard implements CanActivate {
  constructor(private readonly agentConfigService: AgentConfigService) {}

  /**
   * Allows the request only when a valid container token is presented.
   *
   * @throws UnauthorizedException when the Bearer token is missing or invalid
   */
  async canActivate(context: ExecutionContext): Promise<boolean> {
    const request = context.switchToHttp().getRequest<AgentConfigRequest>();
    const token = this.extractBearerToken(request.headers.authorization);

    if (!token) {
      throw new UnauthorizedException("Missing Bearer token");
    }

    const containerAuth = await this.agentConfigService.validateContainerToken(token);
    if (!containerAuth) {
      throw new UnauthorizedException("Invalid container token");
    }

    // Make the validated identity available to downstream handlers.
    request.containerAuth = containerAuth;
    return true;
  }

  /**
   * Parses an Authorization header of the exact form "Bearer <token>"
   * (single space; scheme matched case-insensitively). Returns null for a
   * missing header or any other shape. If the header arrives as an array,
   * only the first value is considered.
   */
  private extractBearerToken(headerValue: string | string[] | undefined): string | null {
    const normalizedHeader = Array.isArray(headerValue) ? headerValue[0] : headerValue;
    if (!normalizedHeader) {
      return null;
    }

    // split(" ") means multiple spaces between scheme and token yield an
    // empty token segment, which is rejected below.
    const [scheme, token] = normalizedHeader.split(" ");
    if (!scheme || !token || scheme.toLowerCase() !== "bearer") {
      return null;
    }

    return token;
  }
}
|
||||||
14
apps/api/src/agent-config/agent-config.module.ts
Normal file
14
apps/api/src/agent-config/agent-config.module.ts
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
import { Module } from "@nestjs/common";
|
||||||
|
import { PrismaModule } from "../prisma/prisma.module";
|
||||||
|
import { CryptoModule } from "../crypto/crypto.module";
|
||||||
|
import { AgentConfigController } from "./agent-config.controller";
|
||||||
|
import { AgentConfigService } from "./agent-config.service";
|
||||||
|
import { AgentConfigGuard } from "./agent-config.guard";
|
||||||
|
|
||||||
|
/**
 * Wires up the internal agent-config feature: the controller serving
 * container configs, the service that assembles/validates them, and the
 * guard protecting the routes. Depends on Prisma for persistence and
 * CryptoModule for token/key decryption; exports the service for reuse.
 */
@Module({
  imports: [PrismaModule, CryptoModule],
  controllers: [AgentConfigController],
  providers: [AgentConfigService, AgentConfigGuard],
  exports: [AgentConfigService],
})
export class AgentConfigModule {}
|
||||||
215
apps/api/src/agent-config/agent-config.service.spec.ts
Normal file
215
apps/api/src/agent-config/agent-config.service.spec.ts
Normal file
@@ -0,0 +1,215 @@
|
|||||||
|
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||||
|
import { AgentConfigService } from "./agent-config.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { CryptoService } from "../crypto/crypto.service";
|
||||||
|
|
||||||
|
describe("AgentConfigService", () => {
|
||||||
|
let service: AgentConfigService;
|
||||||
|
|
||||||
|
const mockPrismaService = {
|
||||||
|
userAgentConfig: {
|
||||||
|
findUnique: vi.fn(),
|
||||||
|
},
|
||||||
|
llmProvider: {
|
||||||
|
findMany: vi.fn(),
|
||||||
|
},
|
||||||
|
userContainer: {
|
||||||
|
findUnique: vi.fn(),
|
||||||
|
findMany: vi.fn(),
|
||||||
|
},
|
||||||
|
systemContainer: {
|
||||||
|
findUnique: vi.fn(),
|
||||||
|
findMany: vi.fn(),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockCryptoService = {
|
||||||
|
isEncrypted: vi.fn((value: string) => value.startsWith("enc:")),
|
||||||
|
decrypt: vi.fn((value: string) => value.replace(/^enc:/, "")),
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
|
||||||
|
service = new AgentConfigService(
|
||||||
|
mockPrismaService as unknown as PrismaService,
|
||||||
|
mockCryptoService as unknown as CryptoService
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("generateUserConfig returns valid openclaw.json structure", async () => {
|
||||||
|
mockPrismaService.userAgentConfig.findUnique.mockResolvedValue({
|
||||||
|
id: "cfg-1",
|
||||||
|
userId: "user-1",
|
||||||
|
primaryModel: "my-zai/glm-5",
|
||||||
|
});
|
||||||
|
|
||||||
|
mockPrismaService.userContainer.findUnique.mockResolvedValue({
|
||||||
|
id: "container-1",
|
||||||
|
userId: "user-1",
|
||||||
|
gatewayPort: 19001,
|
||||||
|
});
|
||||||
|
|
||||||
|
mockPrismaService.llmProvider.findMany.mockResolvedValue([
|
||||||
|
{
|
||||||
|
id: "provider-1",
|
||||||
|
userId: "user-1",
|
||||||
|
name: "my-zai",
|
||||||
|
displayName: "Z.ai",
|
||||||
|
type: "zai",
|
||||||
|
baseUrl: "https://api.z.ai/v1",
|
||||||
|
apiKey: "enc:secret-zai-key",
|
||||||
|
apiType: "openai-completions",
|
||||||
|
models: [{ id: "glm-5" }],
|
||||||
|
isActive: true,
|
||||||
|
createdAt: new Date(),
|
||||||
|
updatedAt: new Date(),
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
|
||||||
|
const result = await service.generateUserConfig("user-1");
|
||||||
|
|
||||||
|
expect(result).toEqual({
|
||||||
|
gateway: {
|
||||||
|
mode: "local",
|
||||||
|
port: 19001,
|
||||||
|
bind: "lan",
|
||||||
|
auth: { mode: "token" },
|
||||||
|
http: {
|
||||||
|
endpoints: {
|
||||||
|
chatCompletions: { enabled: true },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
agents: {
|
||||||
|
defaults: {
|
||||||
|
model: {
|
||||||
|
primary: "my-zai/glm-5",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
models: {
|
||||||
|
providers: {
|
||||||
|
"my-zai": {
|
||||||
|
apiKey: "secret-zai-key",
|
||||||
|
baseUrl: "https://api.z.ai/v1",
|
||||||
|
models: {
|
||||||
|
"glm-5": {},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Providers store their API keys encrypted (crypto-service "enc:" prefix in these
// fixtures); the generated config must contain the decrypted plaintext key.
it("generateUserConfig decrypts API keys correctly", async () => {
  mockPrismaService.userAgentConfig.findUnique.mockResolvedValue({
    id: "cfg-1",
    userId: "user-1",
    primaryModel: "openai-work/gpt-4.1",
  });

  mockPrismaService.userContainer.findUnique.mockResolvedValue({
    id: "container-1",
    userId: "user-1",
    gatewayPort: 18789,
  });

  mockPrismaService.llmProvider.findMany.mockResolvedValue([
    {
      id: "provider-1",
      userId: "user-1",
      name: "openai-work",
      displayName: "OpenAI Work",
      type: "openai",
      baseUrl: "https://api.openai.com/v1",
      apiKey: "enc:encrypted-openai-key",
      apiType: "openai-completions",
      models: [{ id: "gpt-4.1" }],
      isActive: true,
      createdAt: new Date(),
      updatedAt: new Date(),
    },
  ]);

  const result = await service.generateUserConfig("user-1");

  // The stored ciphertext is handed to the crypto service, and the provider
  // entry in the emitted config carries the decrypted value.
  expect(mockCryptoService.decrypt).toHaveBeenCalledWith("enc:encrypted-openai-key");
  expect(result.models.providers["openai-work"]?.apiKey).toBe("encrypted-openai-key");
});

// With no providers and no container port, the config falls back to an empty
// provider map and the default gateway port.
it("generateUserConfig handles user with no providers", async () => {
  mockPrismaService.userAgentConfig.findUnique.mockResolvedValue({
    id: "cfg-1",
    userId: "user-2",
    primaryModel: "openai/gpt-4o-mini",
  });

  mockPrismaService.userContainer.findUnique.mockResolvedValue({
    id: "container-2",
    userId: "user-2",
    gatewayPort: null, // no port assigned -> default applies
  });

  mockPrismaService.llmProvider.findMany.mockResolvedValue([]);

  const result = await service.generateUserConfig("user-2");

  expect(result.models.providers).toEqual({});
  expect(result.gateway.port).toBe(18789);
});

// Token matching a user container row yields a "user"-typed validation result.
it("validateContainerToken returns correct type for user container", async () => {
  mockPrismaService.userContainer.findMany.mockResolvedValue([
    {
      id: "user-container-1",
      gatewayToken: "enc:user-token-1",
    },
  ]);
  mockPrismaService.systemContainer.findMany.mockResolvedValue([]);

  const result = await service.validateContainerToken("user-token-1");

  expect(result).toEqual({
    type: "user",
    id: "user-container-1",
  });
});

// Token matching a system container row yields a "system"-typed validation result.
it("validateContainerToken returns correct type for system container", async () => {
  mockPrismaService.userContainer.findMany.mockResolvedValue([]);
  mockPrismaService.systemContainer.findMany.mockResolvedValue([
    {
      id: "system-container-1",
      gatewayToken: "enc:system-token-1",
    },
  ]);

  const result = await service.validateContainerToken("system-token-1");

  expect(result).toEqual({
    type: "system",
    id: "system-container-1",
  });
});

// A token that matches neither table resolves to null (not an exception).
it("validateContainerToken returns null for invalid token", async () => {
  mockPrismaService.userContainer.findMany.mockResolvedValue([
    {
      id: "user-container-1",
      gatewayToken: "enc:user-token-1",
    },
  ]);

  mockPrismaService.systemContainer.findMany.mockResolvedValue([
    {
      id: "system-container-1",
      gatewayToken: "enc:system-token-1",
    },
  ]);

  const result = await service.validateContainerToken("no-match");

  expect(result).toBeNull();
});
|
||||||
|
});
|
||||||
288
apps/api/src/agent-config/agent-config.service.ts
Normal file
288
apps/api/src/agent-config/agent-config.service.ts
Normal file
@@ -0,0 +1,288 @@
|
|||||||
|
import { Injectable, NotFoundException } from "@nestjs/common";
|
||||||
|
import type { LlmProvider } from "@prisma/client";
|
||||||
|
import { timingSafeEqual } from "node:crypto";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { CryptoService } from "../crypto/crypto.service";
|
||||||
|
|
||||||
|
// Fallback gateway port used when a container row has no port assigned.
const DEFAULT_GATEWAY_PORT = 18789;
// Fallback "provider/model" reference used when neither the user's agent
// config nor any of their providers yields a primary model.
const DEFAULT_PRIMARY_MODEL = "openai/gpt-4o-mini";

type ContainerType = "user" | "system";

// Result of a successful gateway-token lookup: which container table matched
// and the id of the matching row.
export interface ContainerTokenValidation {
  type: ContainerType;
  id: string;
}

// openclaw.json lists models keyed by id; the per-model value objects are
// always empty in the generated output.
type OpenClawModelMap = Record<string, Record<string, never>>;

// One provider entry in the generated config. apiKey is the decrypted
// plaintext (omitted when the provider has no key stored).
interface OpenClawProviderConfig {
  apiKey?: string;
  baseUrl?: string;
  models: OpenClawModelMap;
}

// Full shape of the generated openclaw.json document. The literal types
// ("local", "lan", true, ...) pin the fields this service always emits.
interface OpenClawConfig {
  gateway: {
    mode: "local";
    port: number;
    bind: "lan";
    auth: { mode: "token" };
    http: {
      endpoints: {
        chatCompletions: { enabled: true };
      };
    };
  };
  agents: {
    defaults: {
      model: {
        primary: string;
      };
    };
  };
  models: {
    providers: Record<string, OpenClawProviderConfig>;
  };
}
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class AgentConfigService {
|
||||||
|
constructor(
|
||||||
|
private readonly prisma: PrismaService,
|
||||||
|
private readonly crypto: CryptoService
|
||||||
|
) {}
|
||||||
|
|
||||||
|
// Generate complete openclaw.json for a user container
|
||||||
|
async generateUserConfig(userId: string): Promise<OpenClawConfig> {
|
||||||
|
const [userAgentConfig, providers, userContainer] = await Promise.all([
|
||||||
|
this.prisma.userAgentConfig.findUnique({
|
||||||
|
where: { userId },
|
||||||
|
}),
|
||||||
|
this.prisma.llmProvider.findMany({
|
||||||
|
where: {
|
||||||
|
userId,
|
||||||
|
isActive: true,
|
||||||
|
},
|
||||||
|
orderBy: {
|
||||||
|
createdAt: "asc",
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
this.prisma.userContainer.findUnique({
|
||||||
|
where: { userId },
|
||||||
|
}),
|
||||||
|
]);
|
||||||
|
|
||||||
|
if (!userContainer) {
|
||||||
|
throw new NotFoundException(`User container not found for user ${userId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const primaryModel =
|
||||||
|
userAgentConfig?.primaryModel ??
|
||||||
|
this.resolvePrimaryModelFromProviders(providers) ??
|
||||||
|
DEFAULT_PRIMARY_MODEL;
|
||||||
|
|
||||||
|
return this.buildOpenClawConfig(primaryModel, userContainer.gatewayPort, providers);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate config for a system container
|
||||||
|
async generateSystemConfig(containerId: string): Promise<OpenClawConfig> {
|
||||||
|
const systemContainer = await this.prisma.systemContainer.findUnique({
|
||||||
|
where: { id: containerId },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!systemContainer) {
|
||||||
|
throw new NotFoundException(`System container ${containerId} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.buildOpenClawConfig(
|
||||||
|
systemContainer.primaryModel || DEFAULT_PRIMARY_MODEL,
|
||||||
|
systemContainer.gatewayPort,
|
||||||
|
[]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async generateConfigForContainer(
|
||||||
|
type: ContainerType,
|
||||||
|
containerId: string
|
||||||
|
): Promise<OpenClawConfig> {
|
||||||
|
if (type === "system") {
|
||||||
|
return this.generateSystemConfig(containerId);
|
||||||
|
}
|
||||||
|
|
||||||
|
const userContainer = await this.prisma.userContainer.findUnique({
|
||||||
|
where: { id: containerId },
|
||||||
|
select: { userId: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!userContainer) {
|
||||||
|
throw new NotFoundException(`User container ${containerId} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.generateUserConfig(userContainer.userId);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate a container's bearer token
|
||||||
|
async validateContainerToken(token: string): Promise<ContainerTokenValidation | null> {
|
||||||
|
if (!token) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const [userContainers, systemContainers] = await Promise.all([
|
||||||
|
this.prisma.userContainer.findMany({
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
gatewayToken: true,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
this.prisma.systemContainer.findMany({
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
gatewayToken: true,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
]);
|
||||||
|
|
||||||
|
for (const container of userContainers) {
|
||||||
|
const storedToken = this.decryptContainerToken(container.gatewayToken);
|
||||||
|
if (storedToken && this.tokensEqual(storedToken, token)) {
|
||||||
|
return { type: "user", id: container.id };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const container of systemContainers) {
|
||||||
|
const storedToken = this.decryptContainerToken(container.gatewayToken);
|
||||||
|
if (storedToken && this.tokensEqual(storedToken, token)) {
|
||||||
|
return { type: "system", id: container.id };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
private buildOpenClawConfig(
|
||||||
|
primaryModel: string,
|
||||||
|
gatewayPort: number | null,
|
||||||
|
providers: LlmProvider[]
|
||||||
|
): OpenClawConfig {
|
||||||
|
return {
|
||||||
|
gateway: {
|
||||||
|
mode: "local",
|
||||||
|
port: gatewayPort ?? DEFAULT_GATEWAY_PORT,
|
||||||
|
bind: "lan",
|
||||||
|
auth: { mode: "token" },
|
||||||
|
http: {
|
||||||
|
endpoints: {
|
||||||
|
chatCompletions: { enabled: true },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
agents: {
|
||||||
|
defaults: {
|
||||||
|
model: {
|
||||||
|
primary: primaryModel,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
models: {
|
||||||
|
providers: this.buildProviderConfig(providers),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private buildProviderConfig(providers: LlmProvider[]): Record<string, OpenClawProviderConfig> {
|
||||||
|
const providerConfig: Record<string, OpenClawProviderConfig> = {};
|
||||||
|
|
||||||
|
for (const provider of providers) {
|
||||||
|
const config: OpenClawProviderConfig = {
|
||||||
|
models: this.extractModels(provider.models),
|
||||||
|
};
|
||||||
|
|
||||||
|
const apiKey = this.decryptIfNeeded(provider.apiKey);
|
||||||
|
if (apiKey) {
|
||||||
|
config.apiKey = apiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (provider.baseUrl) {
|
||||||
|
config.baseUrl = provider.baseUrl;
|
||||||
|
}
|
||||||
|
|
||||||
|
providerConfig[provider.name] = config;
|
||||||
|
}
|
||||||
|
|
||||||
|
return providerConfig;
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractModels(models: unknown): OpenClawModelMap {
|
||||||
|
const modelMap: OpenClawModelMap = {};
|
||||||
|
|
||||||
|
if (!Array.isArray(models)) {
|
||||||
|
return modelMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const modelEntry of models) {
|
||||||
|
if (typeof modelEntry === "string") {
|
||||||
|
modelMap[modelEntry] = {};
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.hasModelId(modelEntry)) {
|
||||||
|
modelMap[modelEntry.id] = {};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return modelMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
private resolvePrimaryModelFromProviders(providers: LlmProvider[]): string | null {
|
||||||
|
for (const provider of providers) {
|
||||||
|
const modelIds = Object.keys(this.extractModels(provider.models));
|
||||||
|
const firstModelId = modelIds[0];
|
||||||
|
|
||||||
|
if (firstModelId) {
|
||||||
|
return `${provider.name}/${firstModelId}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
private decryptIfNeeded(value: string | null | undefined): string | undefined {
|
||||||
|
if (!value) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.crypto.isEncrypted(value)) {
|
||||||
|
return this.crypto.decrypt(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
private decryptContainerToken(value: string): string | null {
|
||||||
|
try {
|
||||||
|
return this.decryptIfNeeded(value) ?? null;
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private tokensEqual(left: string, right: string): boolean {
|
||||||
|
const leftBuffer = Buffer.from(left, "utf8");
|
||||||
|
const rightBuffer = Buffer.from(right, "utf8");
|
||||||
|
|
||||||
|
if (leftBuffer.length !== rightBuffer.length) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return timingSafeEqual(leftBuffer, rightBuffer);
|
||||||
|
}
|
||||||
|
|
||||||
|
private hasModelId(modelEntry: unknown): modelEntry is { id: string } {
|
||||||
|
if (typeof modelEntry !== "object" || modelEntry === null || !("id" in modelEntry)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return typeof (modelEntry as { id?: unknown }).id === "string";
|
||||||
|
}
|
||||||
|
}
|
||||||
102
apps/api/src/agent-memory/agent-memory.controller.spec.ts
Normal file
102
apps/api/src/agent-memory/agent-memory.controller.spec.ts
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { AgentMemoryController } from "./agent-memory.controller";
|
||||||
|
import { AgentMemoryService } from "./agent-memory.service";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
|
||||||
|
describe("AgentMemoryController", () => {
  let controller: AgentMemoryController;

  // Service double: every controller endpoint delegates to exactly one of these.
  const mockAgentMemoryService = {
    upsert: vi.fn(),
    findAll: vi.fn(),
    findOne: vi.fn(),
    remove: vi.fn(),
  };

  // Single always-allow guard double reused for all three guard overrides.
  const mockGuard = { canActivate: vi.fn(() => true) };

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      controllers: [AgentMemoryController],
      providers: [
        {
          provide: AgentMemoryService,
          useValue: mockAgentMemoryService,
        },
      ],
    })
      .overrideGuard(AuthGuard)
      .useValue(mockGuard)
      .overrideGuard(WorkspaceGuard)
      .useValue(mockGuard)
      .overrideGuard(PermissionGuard)
      .useValue(mockGuard)
      .compile();

    controller = module.get<AgentMemoryController>(AgentMemoryController);

    vi.clearAllMocks();
  });

  // Shared fixture identifiers used across all endpoint tests.
  const workspaceId = "workspace-1";
  const agentId = "agent-1";
  const key = "context";

  describe("upsert", () => {
    it("should upsert a memory entry", async () => {
      const dto = { value: { foo: "bar" } };
      const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: dto.value };

      mockAgentMemoryService.upsert.mockResolvedValue(mockEntry);

      const result = await controller.upsert(agentId, key, dto, workspaceId);

      // Controller forwards args in service order: workspace first, then agent/key/dto.
      expect(mockAgentMemoryService.upsert).toHaveBeenCalledWith(workspaceId, agentId, key, dto);
      expect(result).toEqual(mockEntry);
    });
  });

  describe("findAll", () => {
    it("should list all memory entries for an agent", async () => {
      const mockEntries = [
        { id: "mem-1", key: "a", value: 1 },
        { id: "mem-2", key: "b", value: 2 },
      ];

      mockAgentMemoryService.findAll.mockResolvedValue(mockEntries);

      const result = await controller.findAll(agentId, workspaceId);

      expect(mockAgentMemoryService.findAll).toHaveBeenCalledWith(workspaceId, agentId);
      expect(result).toEqual(mockEntries);
    });
  });

  describe("findOne", () => {
    it("should get a single memory entry", async () => {
      const mockEntry = { id: "mem-1", key, value: "v" };

      mockAgentMemoryService.findOne.mockResolvedValue(mockEntry);

      const result = await controller.findOne(agentId, key, workspaceId);

      expect(mockAgentMemoryService.findOne).toHaveBeenCalledWith(workspaceId, agentId, key);
      expect(result).toEqual(mockEntry);
    });
  });

  describe("remove", () => {
    it("should delete a memory entry", async () => {
      const mockResponse = { message: "Memory entry deleted successfully" };

      mockAgentMemoryService.remove.mockResolvedValue(mockResponse);

      const result = await controller.remove(agentId, key, workspaceId);

      expect(mockAgentMemoryService.remove).toHaveBeenCalledWith(workspaceId, agentId, key);
      expect(result).toEqual(mockResponse);
    });
  });
});
|
||||||
89
apps/api/src/agent-memory/agent-memory.controller.ts
Normal file
89
apps/api/src/agent-memory/agent-memory.controller.ts
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
import {
|
||||||
|
Controller,
|
||||||
|
Get,
|
||||||
|
Put,
|
||||||
|
Delete,
|
||||||
|
Body,
|
||||||
|
Param,
|
||||||
|
UseGuards,
|
||||||
|
HttpCode,
|
||||||
|
HttpStatus,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { AgentMemoryService } from "./agent-memory.service";
|
||||||
|
import { UpsertAgentMemoryDto } from "./dto";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||||
|
import { Workspace, Permission, RequirePermission } from "../common/decorators";
|
||||||
|
|
||||||
|
/**
 * Controller for per-agent key/value memory endpoints.
 * All endpoints require authentication and workspace context.
 *
 * Guards are applied in order:
 * 1. AuthGuard - Verifies user authentication
 * 2. WorkspaceGuard - Validates workspace access
 * 3. PermissionGuard - Checks role-based permissions
 *
 * Every handler delegates directly to AgentMemoryService; the workspace id is
 * injected by the @Workspace() decorator rather than taken from the route.
 */
@Controller("agents/:agentId/memory")
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
export class AgentMemoryController {
  constructor(private readonly agentMemoryService: AgentMemoryService) {}

  /**
   * PUT /api/agents/:agentId/memory/:key
   * Upsert a memory entry for an agent (create if missing, replace value if present).
   * Requires: MEMBER role or higher
   */
  @Put(":key")
  @RequirePermission(Permission.WORKSPACE_MEMBER)
  async upsert(
    @Param("agentId") agentId: string,
    @Param("key") key: string,
    @Body() dto: UpsertAgentMemoryDto,
    @Workspace() workspaceId: string
  ) {
    return this.agentMemoryService.upsert(workspaceId, agentId, key, dto);
  }

  /**
   * GET /api/agents/:agentId/memory
   * List all memory entries for an agent
   * Requires: Any workspace member (including GUEST)
   */
  @Get()
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findAll(@Param("agentId") agentId: string, @Workspace() workspaceId: string) {
    return this.agentMemoryService.findAll(workspaceId, agentId);
  }

  /**
   * GET /api/agents/:agentId/memory/:key
   * Get a single memory entry by key
   * Requires: Any workspace member (including GUEST)
   */
  @Get(":key")
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findOne(
    @Param("agentId") agentId: string,
    @Param("key") key: string,
    @Workspace() workspaceId: string
  ) {
    return this.agentMemoryService.findOne(workspaceId, agentId, key);
  }

  /**
   * DELETE /api/agents/:agentId/memory/:key
   * Remove a memory entry
   * Requires: MEMBER role or higher
   */
  @Delete(":key")
  // Explicit 200 (rather than relying on the framework default) so the
  // deletion confirmation body is always paired with an OK status.
  @HttpCode(HttpStatus.OK)
  @RequirePermission(Permission.WORKSPACE_MEMBER)
  async remove(
    @Param("agentId") agentId: string,
    @Param("key") key: string,
    @Workspace() workspaceId: string
  ) {
    return this.agentMemoryService.remove(workspaceId, agentId, key);
  }
}
|
||||||
198
apps/api/src/agent-memory/agent-memory.integration.spec.ts
Normal file
198
apps/api/src/agent-memory/agent-memory.integration.spec.ts
Normal file
@@ -0,0 +1,198 @@
|
|||||||
|
import { beforeAll, beforeEach, describe, expect, it, afterAll } from "vitest";
|
||||||
|
import { randomUUID as uuid } from "crypto";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { NotFoundException } from "@nestjs/common";
|
||||||
|
import { PrismaClient } from "@prisma/client";
|
||||||
|
import { AgentMemoryService } from "./agent-memory.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
|
||||||
|
// Opt-in gate: these tests hit a real database, so they only run when
// RUN_DB_TESTS=true and a DATABASE_URL is configured; otherwise the whole
// suite is registered as skipped.
const shouldRunDbIntegrationTests =
  process.env.RUN_DB_TESTS === "true" && Boolean(process.env.DATABASE_URL);
const describeFn = shouldRunDbIntegrationTests ? describe : describe.skip;
|
|
||||||
|
async function createWorkspace(
|
||||||
|
prisma: PrismaClient,
|
||||||
|
label: string
|
||||||
|
): Promise<{ workspaceId: string; ownerId: string }> {
|
||||||
|
const workspace = await prisma.workspace.create({
|
||||||
|
data: {
|
||||||
|
name: `${label} ${Date.now()}`,
|
||||||
|
owner: {
|
||||||
|
create: {
|
||||||
|
email: `${label.toLowerCase().replace(/\s+/g, "-")}-${Date.now()}@example.com`,
|
||||||
|
name: `${label} Owner`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
workspaceId: workspace.id,
|
||||||
|
ownerId: workspace.ownerId,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
describeFn("AgentMemoryService Integration", () => {
  let moduleRef: TestingModule;
  let prisma: PrismaClient;
  let service: AgentMemoryService;
  // Guards every hook/test: if beforeAll failed partway, later hooks must not
  // dereference uninitialized ids.
  let setupComplete = false;

  // Two workspaces so cross-workspace isolation can be asserted.
  let workspaceAId: string;
  let workspaceAOwnerId: string;
  let workspaceBId: string;
  let workspaceBOwnerId: string;

  beforeAll(async () => {
    prisma = new PrismaClient();
    await prisma.$connect();

    const workspaceA = await createWorkspace(prisma, "Agent Memory Integration A");
    workspaceAId = workspaceA.workspaceId;
    workspaceAOwnerId = workspaceA.ownerId;

    const workspaceB = await createWorkspace(prisma, "Agent Memory Integration B");
    workspaceBId = workspaceB.workspaceId;
    workspaceBOwnerId = workspaceB.ownerId;

    // Real PrismaClient is substituted for PrismaService so the actual
    // service code runs against the live database.
    moduleRef = await Test.createTestingModule({
      providers: [
        AgentMemoryService,
        {
          provide: PrismaService,
          useValue: prisma,
        },
      ],
    }).compile();

    service = moduleRef.get<AgentMemoryService>(AgentMemoryService);
    setupComplete = true;
  });

  // Each test starts from an empty agentMemory table for both workspaces.
  beforeEach(async () => {
    if (!setupComplete) {
      return;
    }

    await prisma.agentMemory.deleteMany({
      where: {
        workspaceId: {
          in: [workspaceAId, workspaceBId],
        },
      },
    });
  });

  // Tear down in FK-safe order: memory rows, then workspaces, then owners.
  afterAll(async () => {
    if (!prisma) {
      return;
    }

    const workspaceIds = [workspaceAId, workspaceBId].filter(
      (id): id is string => typeof id === "string"
    );
    const ownerIds = [workspaceAOwnerId, workspaceBOwnerId].filter(
      (id): id is string => typeof id === "string"
    );

    if (workspaceIds.length > 0) {
      await prisma.agentMemory.deleteMany({
        where: {
          workspaceId: {
            in: workspaceIds,
          },
        },
      });
      await prisma.workspace.deleteMany({ where: { id: { in: workspaceIds } } });
    }

    if (ownerIds.length > 0) {
      await prisma.user.deleteMany({ where: { id: { in: ownerIds } } });
    }

    if (moduleRef) {
      await moduleRef.close();
    }
    await prisma.$disconnect();
  });

  it("upserts and lists memory entries", async () => {
    if (!setupComplete) {
      return;
    }

    const agentId = `agent-${uuid()}`;

    const entry = await service.upsert(workspaceAId, agentId, "session-context", {
      value: { intent: "create-tests", depth: "integration" },
    });

    expect(entry.workspaceId).toBe(workspaceAId);
    expect(entry.agentId).toBe(agentId);
    expect(entry.key).toBe("session-context");

    const listed = await service.findAll(workspaceAId, agentId);

    expect(listed).toHaveLength(1);
    expect(listed[0]?.id).toBe(entry.id);
    expect(listed[0]?.value).toMatchObject({ intent: "create-tests" });
  });

  it("updates existing key via upsert without creating duplicates", async () => {
    if (!setupComplete) {
      return;
    }

    const agentId = `agent-${uuid()}`;

    const first = await service.upsert(workspaceAId, agentId, "preferences", {
      value: { model: "fast" },
    });

    const second = await service.upsert(workspaceAId, agentId, "preferences", {
      value: { model: "accurate" },
    });

    // Second upsert must update in place: same row id, new value.
    expect(second.id).toBe(first.id);
    expect(second.value).toMatchObject({ model: "accurate" });

    const rowCount = await prisma.agentMemory.count({
      where: {
        workspaceId: workspaceAId,
        agentId,
        key: "preferences",
      },
    });

    expect(rowCount).toBe(1);
  });

  it("lists keys in sorted order and isolates by workspace", async () => {
    if (!setupComplete) {
      return;
    }

    const agentId = `agent-${uuid()}`;

    // Same agentId written to both workspaces; listing must stay per-workspace.
    await service.upsert(workspaceAId, agentId, "beta", { value: { v: 2 } });
    await service.upsert(workspaceAId, agentId, "alpha", { value: { v: 1 } });
    await service.upsert(workspaceBId, agentId, "alpha", { value: { v: 99 } });

    const workspaceAEntries = await service.findAll(workspaceAId, agentId);
    const workspaceBEntries = await service.findAll(workspaceBId, agentId);

    expect(workspaceAEntries.map((row) => row.key)).toEqual(["alpha", "beta"]);
    expect(workspaceBEntries).toHaveLength(1);
    expect(workspaceBEntries[0]?.value).toMatchObject({ v: 99 });
  });

  it("throws NotFoundException when requesting unknown key", async () => {
    if (!setupComplete) {
      return;
    }

    await expect(service.findOne(workspaceAId, `agent-${uuid()}`, "missing")).rejects.toThrow(
      NotFoundException
    );
  });
});
|
||||||
13
apps/api/src/agent-memory/agent-memory.module.ts
Normal file
13
apps/api/src/agent-memory/agent-memory.module.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
import { Module } from "@nestjs/common";
|
||||||
|
import { AgentMemoryController } from "./agent-memory.controller";
|
||||||
|
import { AgentMemoryService } from "./agent-memory.service";
|
||||||
|
import { PrismaModule } from "../prisma/prisma.module";
|
||||||
|
import { AuthModule } from "../auth/auth.module";
|
||||||
|
|
||||||
|
// Wires the per-agent memory feature: PrismaModule for persistence, AuthModule
// for the guards used by the controller. The service is exported so other
// feature modules can read/write agent memory directly.
@Module({
  imports: [PrismaModule, AuthModule],
  controllers: [AgentMemoryController],
  providers: [AgentMemoryService],
  exports: [AgentMemoryService],
})
export class AgentMemoryModule {}
|
||||||
126
apps/api/src/agent-memory/agent-memory.service.spec.ts
Normal file
126
apps/api/src/agent-memory/agent-memory.service.spec.ts
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { AgentMemoryService } from "./agent-memory.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { NotFoundException } from "@nestjs/common";
|
||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
|
||||||
|
describe("AgentMemoryService", () => {
  let service: AgentMemoryService;

  // Prisma double exposing only the agentMemory delegate methods the service uses.
  const mockPrismaService = {
    agentMemory: {
      upsert: vi.fn(),
      findMany: vi.fn(),
      findUnique: vi.fn(),
      delete: vi.fn(),
    },
  };

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        AgentMemoryService,
        {
          provide: PrismaService,
          useValue: mockPrismaService,
        },
      ],
    }).compile();

    service = module.get<AgentMemoryService>(AgentMemoryService);

    vi.clearAllMocks();
  });

  // Shared fixture identifiers used across the tests below.
  const workspaceId = "workspace-1";
  const agentId = "agent-1";
  const key = "session-context";

  describe("upsert", () => {
    it("should upsert a memory entry", async () => {
      const dto = { value: { data: "some context" } };
      const mockEntry = {
        id: "mem-1",
        workspaceId,
        agentId,
        key,
        value: dto.value,
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      mockPrismaService.agentMemory.upsert.mockResolvedValue(mockEntry);

      const result = await service.upsert(workspaceId, agentId, key, dto);

      // Upsert must target the (workspaceId, agentId, key) compound unique.
      expect(mockPrismaService.agentMemory.upsert).toHaveBeenCalledWith({
        where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
        create: { workspaceId, agentId, key, value: dto.value },
        update: { value: dto.value },
      });
      expect(result).toEqual(mockEntry);
    });
  });

  describe("findAll", () => {
    it("should return all memory entries for an agent", async () => {
      const mockEntries = [
        { id: "mem-1", key: "a", value: 1 },
        { id: "mem-2", key: "b", value: 2 },
      ];

      mockPrismaService.agentMemory.findMany.mockResolvedValue(mockEntries);

      const result = await service.findAll(workspaceId, agentId);

      // Listing is scoped to the workspace/agent pair and sorted by key.
      expect(mockPrismaService.agentMemory.findMany).toHaveBeenCalledWith({
        where: { workspaceId, agentId },
        orderBy: { key: "asc" },
      });
      expect(result).toEqual(mockEntries);
    });
  });

  describe("findOne", () => {
    it("should return a memory entry by key", async () => {
      const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: "ctx" };

      mockPrismaService.agentMemory.findUnique.mockResolvedValue(mockEntry);

      const result = await service.findOne(workspaceId, agentId, key);

      expect(mockPrismaService.agentMemory.findUnique).toHaveBeenCalledWith({
        where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
      });
      expect(result).toEqual(mockEntry);
    });

    it("should throw NotFoundException when key not found", async () => {
      mockPrismaService.agentMemory.findUnique.mockResolvedValue(null);

      await expect(service.findOne(workspaceId, agentId, key)).rejects.toThrow(NotFoundException);
    });
  });

  describe("remove", () => {
    it("should delete a memory entry", async () => {
      const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: "x" };

      // Service looks the entry up first, then deletes it.
      mockPrismaService.agentMemory.findUnique.mockResolvedValue(mockEntry);
      mockPrismaService.agentMemory.delete.mockResolvedValue(mockEntry);

      const result = await service.remove(workspaceId, agentId, key);

      expect(mockPrismaService.agentMemory.delete).toHaveBeenCalledWith({
        where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
      });
      expect(result).toEqual({ message: "Memory entry deleted successfully" });
    });

    it("should throw NotFoundException when key not found", async () => {
      mockPrismaService.agentMemory.findUnique.mockResolvedValue(null);

      await expect(service.remove(workspaceId, agentId, key)).rejects.toThrow(NotFoundException);
    });
  });
});
|
||||||
79
apps/api/src/agent-memory/agent-memory.service.ts
Normal file
79
apps/api/src/agent-memory/agent-memory.service.ts
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
import { Injectable, NotFoundException } from "@nestjs/common";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { Prisma } from "@prisma/client";
|
||||||
|
import type { UpsertAgentMemoryDto } from "./dto";
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class AgentMemoryService {
|
||||||
|
constructor(private readonly prisma: PrismaService) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Upsert a memory entry for an agent.
|
||||||
|
*/
|
||||||
|
async upsert(workspaceId: string, agentId: string, key: string, dto: UpsertAgentMemoryDto) {
|
||||||
|
return this.prisma.agentMemory.upsert({
|
||||||
|
where: {
|
||||||
|
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
workspaceId,
|
||||||
|
agentId,
|
||||||
|
key,
|
||||||
|
value: dto.value as Prisma.InputJsonValue,
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
value: dto.value as Prisma.InputJsonValue,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List all memory entries for an agent in a workspace.
|
||||||
|
*/
|
||||||
|
async findAll(workspaceId: string, agentId: string) {
|
||||||
|
return this.prisma.agentMemory.findMany({
|
||||||
|
where: { workspaceId, agentId },
|
||||||
|
orderBy: { key: "asc" },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a single memory entry by key.
|
||||||
|
*/
|
||||||
|
async findOne(workspaceId: string, agentId: string, key: string) {
|
||||||
|
const entry = await this.prisma.agentMemory.findUnique({
|
||||||
|
where: {
|
||||||
|
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!entry) {
|
||||||
|
throw new NotFoundException(`Memory key "${key}" not found for agent "${agentId}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return entry;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete a memory entry by key.
|
||||||
|
*/
|
||||||
|
async remove(workspaceId: string, agentId: string, key: string) {
|
||||||
|
const entry = await this.prisma.agentMemory.findUnique({
|
||||||
|
where: {
|
||||||
|
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!entry) {
|
||||||
|
throw new NotFoundException(`Memory key "${key}" not found for agent "${agentId}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.prisma.agentMemory.delete({
|
||||||
|
where: {
|
||||||
|
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return { message: "Memory entry deleted successfully" };
|
||||||
|
}
|
||||||
|
}
|
||||||
1
apps/api/src/agent-memory/dto/index.ts
Normal file
1
apps/api/src/agent-memory/dto/index.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export * from "./upsert-agent-memory.dto";
|
||||||
10
apps/api/src/agent-memory/dto/upsert-agent-memory.dto.ts
Normal file
10
apps/api/src/agent-memory/dto/upsert-agent-memory.dto.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import { IsNotEmpty } from "class-validator";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DTO for upserting an agent memory entry.
|
||||||
|
* The value accepts any JSON-serializable data.
|
||||||
|
*/
|
||||||
|
export class UpsertAgentMemoryDto {
|
||||||
|
@IsNotEmpty({ message: "value must not be empty" })
|
||||||
|
value!: unknown;
|
||||||
|
}
|
||||||
@@ -27,6 +27,8 @@ import { LlmUsageModule } from "./llm-usage/llm-usage.module";
|
|||||||
import { BrainModule } from "./brain/brain.module";
|
import { BrainModule } from "./brain/brain.module";
|
||||||
import { CronModule } from "./cron/cron.module";
|
import { CronModule } from "./cron/cron.module";
|
||||||
import { AgentTasksModule } from "./agent-tasks/agent-tasks.module";
|
import { AgentTasksModule } from "./agent-tasks/agent-tasks.module";
|
||||||
|
import { FindingsModule } from "./findings/findings.module";
|
||||||
|
import { AgentMemoryModule } from "./agent-memory/agent-memory.module";
|
||||||
import { ValkeyModule } from "./valkey/valkey.module";
|
import { ValkeyModule } from "./valkey/valkey.module";
|
||||||
import { BullMqModule } from "./bullmq/bullmq.module";
|
import { BullMqModule } from "./bullmq/bullmq.module";
|
||||||
import { StitcherModule } from "./stitcher/stitcher.module";
|
import { StitcherModule } from "./stitcher/stitcher.module";
|
||||||
@@ -37,11 +39,19 @@ import { JobStepsModule } from "./job-steps/job-steps.module";
|
|||||||
import { CoordinatorIntegrationModule } from "./coordinator-integration/coordinator-integration.module";
|
import { CoordinatorIntegrationModule } from "./coordinator-integration/coordinator-integration.module";
|
||||||
import { FederationModule } from "./federation/federation.module";
|
import { FederationModule } from "./federation/federation.module";
|
||||||
import { CredentialsModule } from "./credentials/credentials.module";
|
import { CredentialsModule } from "./credentials/credentials.module";
|
||||||
|
import { CryptoModule } from "./crypto/crypto.module";
|
||||||
import { MosaicTelemetryModule } from "./mosaic-telemetry";
|
import { MosaicTelemetryModule } from "./mosaic-telemetry";
|
||||||
import { SpeechModule } from "./speech/speech.module";
|
import { SpeechModule } from "./speech/speech.module";
|
||||||
import { DashboardModule } from "./dashboard/dashboard.module";
|
import { DashboardModule } from "./dashboard/dashboard.module";
|
||||||
import { TerminalModule } from "./terminal/terminal.module";
|
import { TerminalModule } from "./terminal/terminal.module";
|
||||||
|
import { PersonalitiesModule } from "./personalities/personalities.module";
|
||||||
|
import { WorkspacesModule } from "./workspaces/workspaces.module";
|
||||||
|
import { AdminModule } from "./admin/admin.module";
|
||||||
|
import { TeamsModule } from "./teams/teams.module";
|
||||||
|
import { ImportModule } from "./import/import.module";
|
||||||
|
import { ConversationArchiveModule } from "./conversation-archive/conversation-archive.module";
|
||||||
import { RlsContextInterceptor } from "./common/interceptors/rls-context.interceptor";
|
import { RlsContextInterceptor } from "./common/interceptors/rls-context.interceptor";
|
||||||
|
import { AgentConfigModule } from "./agent-config/agent-config.module";
|
||||||
|
|
||||||
@Module({
|
@Module({
|
||||||
imports: [
|
imports: [
|
||||||
@@ -95,16 +105,26 @@ import { RlsContextInterceptor } from "./common/interceptors/rls-context.interce
|
|||||||
BrainModule,
|
BrainModule,
|
||||||
CronModule,
|
CronModule,
|
||||||
AgentTasksModule,
|
AgentTasksModule,
|
||||||
|
FindingsModule,
|
||||||
|
AgentMemoryModule,
|
||||||
RunnerJobsModule,
|
RunnerJobsModule,
|
||||||
JobEventsModule,
|
JobEventsModule,
|
||||||
JobStepsModule,
|
JobStepsModule,
|
||||||
CoordinatorIntegrationModule,
|
CoordinatorIntegrationModule,
|
||||||
FederationModule,
|
FederationModule,
|
||||||
CredentialsModule,
|
CredentialsModule,
|
||||||
|
CryptoModule,
|
||||||
MosaicTelemetryModule,
|
MosaicTelemetryModule,
|
||||||
SpeechModule,
|
SpeechModule,
|
||||||
DashboardModule,
|
DashboardModule,
|
||||||
TerminalModule,
|
TerminalModule,
|
||||||
|
PersonalitiesModule,
|
||||||
|
WorkspacesModule,
|
||||||
|
AdminModule,
|
||||||
|
TeamsModule,
|
||||||
|
ImportModule,
|
||||||
|
ConversationArchiveModule,
|
||||||
|
AgentConfigModule,
|
||||||
],
|
],
|
||||||
controllers: [AppController, CsrfController],
|
controllers: [AppController, CsrfController],
|
||||||
providers: [
|
providers: [
|
||||||
|
|||||||
@@ -361,16 +361,13 @@ describe("AuthController", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
describe("getProfile", () => {
|
describe("getProfile", () => {
|
||||||
it("should return complete user profile with workspace fields", () => {
|
it("should return complete user profile with identity fields", () => {
|
||||||
const mockUser: AuthUser = {
|
const mockUser: AuthUser = {
|
||||||
id: "user-123",
|
id: "user-123",
|
||||||
email: "test@example.com",
|
email: "test@example.com",
|
||||||
name: "Test User",
|
name: "Test User",
|
||||||
image: "https://example.com/avatar.jpg",
|
image: "https://example.com/avatar.jpg",
|
||||||
emailVerified: true,
|
emailVerified: true,
|
||||||
workspaceId: "workspace-123",
|
|
||||||
currentWorkspaceId: "workspace-456",
|
|
||||||
workspaceRole: "admin",
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const result = controller.getProfile(mockUser);
|
const result = controller.getProfile(mockUser);
|
||||||
@@ -381,13 +378,10 @@ describe("AuthController", () => {
|
|||||||
name: mockUser.name,
|
name: mockUser.name,
|
||||||
image: mockUser.image,
|
image: mockUser.image,
|
||||||
emailVerified: mockUser.emailVerified,
|
emailVerified: mockUser.emailVerified,
|
||||||
workspaceId: mockUser.workspaceId,
|
|
||||||
currentWorkspaceId: mockUser.currentWorkspaceId,
|
|
||||||
workspaceRole: mockUser.workspaceRole,
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should return user profile with optional fields undefined", () => {
|
it("should return user profile with only required fields", () => {
|
||||||
const mockUser: AuthUser = {
|
const mockUser: AuthUser = {
|
||||||
id: "user-123",
|
id: "user-123",
|
||||||
email: "test@example.com",
|
email: "test@example.com",
|
||||||
@@ -400,12 +394,11 @@ describe("AuthController", () => {
|
|||||||
id: mockUser.id,
|
id: mockUser.id,
|
||||||
email: mockUser.email,
|
email: mockUser.email,
|
||||||
name: mockUser.name,
|
name: mockUser.name,
|
||||||
image: undefined,
|
|
||||||
emailVerified: undefined,
|
|
||||||
workspaceId: undefined,
|
|
||||||
currentWorkspaceId: undefined,
|
|
||||||
workspaceRole: undefined,
|
|
||||||
});
|
});
|
||||||
|
// Workspace fields are not included — served by GET /api/workspaces
|
||||||
|
expect(result).not.toHaveProperty("workspaceId");
|
||||||
|
expect(result).not.toHaveProperty("currentWorkspaceId");
|
||||||
|
expect(result).not.toHaveProperty("workspaceRole");
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -72,15 +72,10 @@ export class AuthController {
|
|||||||
if (user.emailVerified !== undefined) {
|
if (user.emailVerified !== undefined) {
|
||||||
profile.emailVerified = user.emailVerified;
|
profile.emailVerified = user.emailVerified;
|
||||||
}
|
}
|
||||||
if (user.workspaceId !== undefined) {
|
|
||||||
profile.workspaceId = user.workspaceId;
|
// Workspace context is served by GET /api/workspaces, not the auth profile.
|
||||||
}
|
// The deprecated workspaceId/currentWorkspaceId/workspaceRole fields on
|
||||||
if (user.currentWorkspaceId !== undefined) {
|
// AuthUser are never populated by BetterAuth and are omitted here.
|
||||||
profile.currentWorkspaceId = user.currentWorkspaceId;
|
|
||||||
}
|
|
||||||
if (user.workspaceRole !== undefined) {
|
|
||||||
profile.workspaceRole = user.workspaceRole;
|
|
||||||
}
|
|
||||||
|
|
||||||
return profile;
|
return profile;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,11 +3,14 @@ import { PrismaModule } from "../prisma/prisma.module";
|
|||||||
import { AuthService } from "./auth.service";
|
import { AuthService } from "./auth.service";
|
||||||
import { AuthController } from "./auth.controller";
|
import { AuthController } from "./auth.controller";
|
||||||
import { AuthGuard } from "./guards/auth.guard";
|
import { AuthGuard } from "./guards/auth.guard";
|
||||||
|
import { LocalAuthController } from "./local/local-auth.controller";
|
||||||
|
import { LocalAuthService } from "./local/local-auth.service";
|
||||||
|
import { LocalAuthEnabledGuard } from "./local/local-auth.guard";
|
||||||
|
|
||||||
@Module({
|
@Module({
|
||||||
imports: [PrismaModule],
|
imports: [PrismaModule],
|
||||||
controllers: [AuthController],
|
controllers: [AuthController, LocalAuthController],
|
||||||
providers: [AuthService, AuthGuard],
|
providers: [AuthService, AuthGuard, LocalAuthService, LocalAuthEnabledGuard],
|
||||||
exports: [AuthService, AuthGuard],
|
exports: [AuthService, AuthGuard],
|
||||||
})
|
})
|
||||||
export class AuthModule {}
|
export class AuthModule {}
|
||||||
|
|||||||
10
apps/api/src/auth/local/dto/local-login.dto.ts
Normal file
10
apps/api/src/auth/local/dto/local-login.dto.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import { IsEmail, IsString, MinLength } from "class-validator";
|
||||||
|
|
||||||
|
export class LocalLoginDto {
|
||||||
|
@IsEmail({}, { message: "email must be a valid email address" })
|
||||||
|
email!: string;
|
||||||
|
|
||||||
|
@IsString({ message: "password must be a string" })
|
||||||
|
@MinLength(1, { message: "password must not be empty" })
|
||||||
|
password!: string;
|
||||||
|
}
|
||||||
20
apps/api/src/auth/local/dto/local-setup.dto.ts
Normal file
20
apps/api/src/auth/local/dto/local-setup.dto.ts
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import { IsEmail, IsString, MinLength, MaxLength } from "class-validator";
|
||||||
|
|
||||||
|
export class LocalSetupDto {
|
||||||
|
@IsEmail({}, { message: "email must be a valid email address" })
|
||||||
|
email!: string;
|
||||||
|
|
||||||
|
@IsString({ message: "name must be a string" })
|
||||||
|
@MinLength(1, { message: "name must not be empty" })
|
||||||
|
@MaxLength(255, { message: "name must not exceed 255 characters" })
|
||||||
|
name!: string;
|
||||||
|
|
||||||
|
@IsString({ message: "password must be a string" })
|
||||||
|
@MinLength(12, { message: "password must be at least 12 characters" })
|
||||||
|
@MaxLength(128, { message: "password must not exceed 128 characters" })
|
||||||
|
password!: string;
|
||||||
|
|
||||||
|
@IsString({ message: "setupToken must be a string" })
|
||||||
|
@MinLength(1, { message: "setupToken must not be empty" })
|
||||||
|
setupToken!: string;
|
||||||
|
}
|
||||||
232
apps/api/src/auth/local/local-auth.controller.spec.ts
Normal file
232
apps/api/src/auth/local/local-auth.controller.spec.ts
Normal file
@@ -0,0 +1,232 @@
|
|||||||
|
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import {
|
||||||
|
NotFoundException,
|
||||||
|
ForbiddenException,
|
||||||
|
UnauthorizedException,
|
||||||
|
ConflictException,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { LocalAuthController } from "./local-auth.controller";
|
||||||
|
import { LocalAuthService } from "./local-auth.service";
|
||||||
|
import { LocalAuthEnabledGuard } from "./local-auth.guard";
|
||||||
|
|
||||||
|
describe("LocalAuthController", () => {
|
||||||
|
let controller: LocalAuthController;
|
||||||
|
let localAuthService: LocalAuthService;
|
||||||
|
|
||||||
|
const mockLocalAuthService = {
|
||||||
|
setup: vi.fn(),
|
||||||
|
login: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockRequest = {
|
||||||
|
headers: { "user-agent": "TestAgent/1.0" },
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
socket: { remoteAddress: "127.0.0.1" },
|
||||||
|
};
|
||||||
|
|
||||||
|
const originalEnv = {
|
||||||
|
ENABLE_LOCAL_AUTH: process.env.ENABLE_LOCAL_AUTH,
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
process.env.ENABLE_LOCAL_AUTH = "true";
|
||||||
|
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
controllers: [LocalAuthController],
|
||||||
|
providers: [
|
||||||
|
{
|
||||||
|
provide: LocalAuthService,
|
||||||
|
useValue: mockLocalAuthService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
})
|
||||||
|
.overrideGuard(LocalAuthEnabledGuard)
|
||||||
|
.useValue({ canActivate: () => true })
|
||||||
|
.compile();
|
||||||
|
|
||||||
|
controller = module.get<LocalAuthController>(LocalAuthController);
|
||||||
|
localAuthService = module.get<LocalAuthService>(LocalAuthService);
|
||||||
|
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks();
|
||||||
|
if (originalEnv.ENABLE_LOCAL_AUTH !== undefined) {
|
||||||
|
process.env.ENABLE_LOCAL_AUTH = originalEnv.ENABLE_LOCAL_AUTH;
|
||||||
|
} else {
|
||||||
|
delete process.env.ENABLE_LOCAL_AUTH;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("setup", () => {
|
||||||
|
const setupDto = {
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Break Glass Admin",
|
||||||
|
password: "securePassword123!",
|
||||||
|
setupToken: "valid-token-123",
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockSetupResult = {
|
||||||
|
user: {
|
||||||
|
id: "user-uuid-123",
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Break Glass Admin",
|
||||||
|
isLocalAuth: true,
|
||||||
|
createdAt: new Date("2026-02-28T00:00:00Z"),
|
||||||
|
},
|
||||||
|
session: {
|
||||||
|
token: "session-token-abc",
|
||||||
|
expiresAt: new Date("2026-03-07T00:00:00Z"),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
it("should create a break-glass user and return user data with session", async () => {
|
||||||
|
mockLocalAuthService.setup.mockResolvedValue(mockSetupResult);
|
||||||
|
|
||||||
|
const result = await controller.setup(setupDto, mockRequest as never);
|
||||||
|
|
||||||
|
expect(result).toEqual({
|
||||||
|
user: mockSetupResult.user,
|
||||||
|
session: mockSetupResult.session,
|
||||||
|
});
|
||||||
|
expect(mockLocalAuthService.setup).toHaveBeenCalledWith(
|
||||||
|
"admin@example.com",
|
||||||
|
"Break Glass Admin",
|
||||||
|
"securePassword123!",
|
||||||
|
"valid-token-123",
|
||||||
|
"127.0.0.1",
|
||||||
|
"TestAgent/1.0"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should extract client IP from x-forwarded-for header", async () => {
|
||||||
|
mockLocalAuthService.setup.mockResolvedValue(mockSetupResult);
|
||||||
|
const reqWithProxy = {
|
||||||
|
...mockRequest,
|
||||||
|
headers: {
|
||||||
|
...mockRequest.headers,
|
||||||
|
"x-forwarded-for": "203.0.113.50, 70.41.3.18",
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
await controller.setup(setupDto, reqWithProxy as never);
|
||||||
|
|
||||||
|
expect(mockLocalAuthService.setup).toHaveBeenCalledWith(
|
||||||
|
expect.any(String) as string,
|
||||||
|
expect.any(String) as string,
|
||||||
|
expect.any(String) as string,
|
||||||
|
expect.any(String) as string,
|
||||||
|
"203.0.113.50",
|
||||||
|
"TestAgent/1.0"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should propagate ForbiddenException from service", async () => {
|
||||||
|
mockLocalAuthService.setup.mockRejectedValue(new ForbiddenException("Invalid setup token"));
|
||||||
|
|
||||||
|
await expect(controller.setup(setupDto, mockRequest as never)).rejects.toThrow(
|
||||||
|
ForbiddenException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should propagate ConflictException from service", async () => {
|
||||||
|
mockLocalAuthService.setup.mockRejectedValue(
|
||||||
|
new ConflictException("A user with this email already exists")
|
||||||
|
);
|
||||||
|
|
||||||
|
await expect(controller.setup(setupDto, mockRequest as never)).rejects.toThrow(
|
||||||
|
ConflictException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("login", () => {
|
||||||
|
const loginDto = {
|
||||||
|
email: "admin@example.com",
|
||||||
|
password: "securePassword123!",
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockLoginResult = {
|
||||||
|
user: {
|
||||||
|
id: "user-uuid-123",
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Break Glass Admin",
|
||||||
|
},
|
||||||
|
session: {
|
||||||
|
token: "session-token-abc",
|
||||||
|
expiresAt: new Date("2026-03-07T00:00:00Z"),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
it("should authenticate and return user data with session", async () => {
|
||||||
|
mockLocalAuthService.login.mockResolvedValue(mockLoginResult);
|
||||||
|
|
||||||
|
const result = await controller.login(loginDto, mockRequest as never);
|
||||||
|
|
||||||
|
expect(result).toEqual({
|
||||||
|
user: mockLoginResult.user,
|
||||||
|
session: mockLoginResult.session,
|
||||||
|
});
|
||||||
|
expect(mockLocalAuthService.login).toHaveBeenCalledWith(
|
||||||
|
"admin@example.com",
|
||||||
|
"securePassword123!",
|
||||||
|
"127.0.0.1",
|
||||||
|
"TestAgent/1.0"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should propagate UnauthorizedException from service", async () => {
|
||||||
|
mockLocalAuthService.login.mockRejectedValue(
|
||||||
|
new UnauthorizedException("Invalid email or password")
|
||||||
|
);
|
||||||
|
|
||||||
|
await expect(controller.login(loginDto, mockRequest as never)).rejects.toThrow(
|
||||||
|
UnauthorizedException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("LocalAuthEnabledGuard", () => {
|
||||||
|
let guard: LocalAuthEnabledGuard;
|
||||||
|
|
||||||
|
const originalEnv = process.env.ENABLE_LOCAL_AUTH;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
guard = new LocalAuthEnabledGuard();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
if (originalEnv !== undefined) {
|
||||||
|
process.env.ENABLE_LOCAL_AUTH = originalEnv;
|
||||||
|
} else {
|
||||||
|
delete process.env.ENABLE_LOCAL_AUTH;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should allow access when ENABLE_LOCAL_AUTH is true", () => {
|
||||||
|
process.env.ENABLE_LOCAL_AUTH = "true";
|
||||||
|
|
||||||
|
expect(guard.canActivate()).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException when ENABLE_LOCAL_AUTH is not set", () => {
|
||||||
|
delete process.env.ENABLE_LOCAL_AUTH;
|
||||||
|
|
||||||
|
expect(() => guard.canActivate()).toThrow(NotFoundException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException when ENABLE_LOCAL_AUTH is false", () => {
|
||||||
|
process.env.ENABLE_LOCAL_AUTH = "false";
|
||||||
|
|
||||||
|
expect(() => guard.canActivate()).toThrow(NotFoundException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException when ENABLE_LOCAL_AUTH is empty", () => {
|
||||||
|
process.env.ENABLE_LOCAL_AUTH = "";
|
||||||
|
|
||||||
|
expect(() => guard.canActivate()).toThrow(NotFoundException);
|
||||||
|
});
|
||||||
|
});
|
||||||
81
apps/api/src/auth/local/local-auth.controller.ts
Normal file
81
apps/api/src/auth/local/local-auth.controller.ts
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
import {
|
||||||
|
Controller,
|
||||||
|
Post,
|
||||||
|
Body,
|
||||||
|
UseGuards,
|
||||||
|
Req,
|
||||||
|
Logger,
|
||||||
|
HttpCode,
|
||||||
|
HttpStatus,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { Throttle } from "@nestjs/throttler";
|
||||||
|
import type { Request as ExpressRequest } from "express";
|
||||||
|
import { SkipCsrf } from "../../common/decorators/skip-csrf.decorator";
|
||||||
|
import { LocalAuthService } from "./local-auth.service";
|
||||||
|
import { LocalAuthEnabledGuard } from "./local-auth.guard";
|
||||||
|
import { LocalLoginDto } from "./dto/local-login.dto";
|
||||||
|
import { LocalSetupDto } from "./dto/local-setup.dto";
|
||||||
|
|
||||||
|
@Controller("auth/local")
|
||||||
|
@UseGuards(LocalAuthEnabledGuard)
|
||||||
|
export class LocalAuthController {
|
||||||
|
private readonly logger = new Logger(LocalAuthController.name);
|
||||||
|
|
||||||
|
constructor(private readonly localAuthService: LocalAuthService) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* First-time break-glass user creation.
|
||||||
|
* Requires BREAKGLASS_SETUP_TOKEN from environment.
|
||||||
|
*/
|
||||||
|
@Post("setup")
|
||||||
|
@SkipCsrf()
|
||||||
|
@Throttle({ strict: { limit: 5, ttl: 60000 } })
|
||||||
|
async setup(@Body() dto: LocalSetupDto, @Req() req: ExpressRequest) {
|
||||||
|
const ipAddress = this.getClientIp(req);
|
||||||
|
const userAgent = req.headers["user-agent"];
|
||||||
|
|
||||||
|
this.logger.log(`Break-glass setup attempt from ${ipAddress}`);
|
||||||
|
|
||||||
|
const result = await this.localAuthService.setup(
|
||||||
|
dto.email,
|
||||||
|
dto.name,
|
||||||
|
dto.password,
|
||||||
|
dto.setupToken,
|
||||||
|
ipAddress,
|
||||||
|
userAgent
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
user: result.user,
|
||||||
|
session: result.session,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Break-glass login with email + password.
|
||||||
|
*/
|
||||||
|
@Post("login")
|
||||||
|
@SkipCsrf()
|
||||||
|
@HttpCode(HttpStatus.OK)
|
||||||
|
@Throttle({ strict: { limit: 10, ttl: 60000 } })
|
||||||
|
async login(@Body() dto: LocalLoginDto, @Req() req: ExpressRequest) {
|
||||||
|
const ipAddress = this.getClientIp(req);
|
||||||
|
const userAgent = req.headers["user-agent"];
|
||||||
|
|
||||||
|
const result = await this.localAuthService.login(dto.email, dto.password, ipAddress, userAgent);
|
||||||
|
|
||||||
|
return {
|
||||||
|
user: result.user,
|
||||||
|
session: result.session,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private getClientIp(req: ExpressRequest): string {
|
||||||
|
const forwardedFor = req.headers["x-forwarded-for"];
|
||||||
|
if (forwardedFor) {
|
||||||
|
const ips = Array.isArray(forwardedFor) ? forwardedFor[0] : forwardedFor;
|
||||||
|
return ips?.split(",")[0]?.trim() ?? "unknown";
|
||||||
|
}
|
||||||
|
return req.ip ?? req.socket.remoteAddress ?? "unknown";
|
||||||
|
}
|
||||||
|
}
|
||||||
15
apps/api/src/auth/local/local-auth.guard.ts
Normal file
15
apps/api/src/auth/local/local-auth.guard.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import { Injectable, CanActivate, NotFoundException } from "@nestjs/common";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Guard that checks if local authentication is enabled via ENABLE_LOCAL_AUTH env var.
|
||||||
|
* Returns 404 when disabled so endpoints are invisible to callers.
|
||||||
|
*/
|
||||||
|
@Injectable()
|
||||||
|
export class LocalAuthEnabledGuard implements CanActivate {
|
||||||
|
canActivate(): boolean {
|
||||||
|
if (process.env.ENABLE_LOCAL_AUTH !== "true") {
|
||||||
|
throw new NotFoundException();
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
389
apps/api/src/auth/local/local-auth.service.spec.ts
Normal file
389
apps/api/src/auth/local/local-auth.service.spec.ts
Normal file
@@ -0,0 +1,389 @@
|
|||||||
|
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import {
|
||||||
|
ConflictException,
|
||||||
|
ForbiddenException,
|
||||||
|
InternalServerErrorException,
|
||||||
|
UnauthorizedException,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { hash } from "bcryptjs";
|
||||||
|
import { LocalAuthService } from "./local-auth.service";
|
||||||
|
import { PrismaService } from "../../prisma/prisma.service";
|
||||||
|
|
||||||
|
describe("LocalAuthService", () => {
|
||||||
|
let service: LocalAuthService;
|
||||||
|
|
||||||
|
const mockTxSession = {
|
||||||
|
create: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockTxWorkspace = {
|
||||||
|
findFirst: vi.fn(),
|
||||||
|
create: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockTxWorkspaceMember = {
|
||||||
|
create: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockTxUser = {
|
||||||
|
create: vi.fn(),
|
||||||
|
findUnique: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockTx = {
|
||||||
|
user: mockTxUser,
|
||||||
|
workspace: mockTxWorkspace,
|
||||||
|
workspaceMember: mockTxWorkspaceMember,
|
||||||
|
session: mockTxSession,
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockPrismaService = {
|
||||||
|
user: {
|
||||||
|
findUnique: vi.fn(),
|
||||||
|
},
|
||||||
|
session: {
|
||||||
|
create: vi.fn(),
|
||||||
|
},
|
||||||
|
$transaction: vi
|
||||||
|
.fn()
|
||||||
|
.mockImplementation((fn: (tx: typeof mockTx) => Promise<unknown>) => fn(mockTx)),
|
||||||
|
};
|
||||||
|
|
||||||
|
const originalEnv = {
|
||||||
|
BREAKGLASS_SETUP_TOKEN: process.env.BREAKGLASS_SETUP_TOKEN,
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
LocalAuthService,
|
||||||
|
{
|
||||||
|
provide: PrismaService,
|
||||||
|
useValue: mockPrismaService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
service = module.get<LocalAuthService>(LocalAuthService);
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks();
|
||||||
|
if (originalEnv.BREAKGLASS_SETUP_TOKEN !== undefined) {
|
||||||
|
process.env.BREAKGLASS_SETUP_TOKEN = originalEnv.BREAKGLASS_SETUP_TOKEN;
|
||||||
|
} else {
|
||||||
|
delete process.env.BREAKGLASS_SETUP_TOKEN;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Covers LocalAuthService.setup: token validation, user creation inside the
// transaction, default-workspace OWNER assignment, and session issuance.
describe("setup", () => {
  // Arguments accepted by the happy path; individual tests override pieces.
  const validSetupArgs = {
    email: "admin@example.com",
    name: "Break Glass Admin",
    password: "securePassword123!",
    setupToken: "valid-token-123",
  };

  // Shape matches the `select` clause used by tx.user.create in the service.
  const mockCreatedUser = {
    id: "user-uuid-123",
    email: "admin@example.com",
    name: "Break Glass Admin",
    isLocalAuth: true,
    createdAt: new Date("2026-02-28T00:00:00Z"),
  };

  const mockWorkspace = {
    id: "workspace-uuid-123",
  };

  beforeEach(() => {
    // Happy-path defaults: valid env token, no existing user, workspace found.
    process.env.BREAKGLASS_SETUP_TOKEN = "valid-token-123";
    mockPrismaService.user.findUnique.mockResolvedValue(null);
    mockTxUser.create.mockResolvedValue(mockCreatedUser);
    mockTxWorkspace.findFirst.mockResolvedValue(mockWorkspace);
    mockTxWorkspaceMember.create.mockResolvedValue({});
    mockTxSession.create.mockResolvedValue({});
  });

  it("should create a local auth user with hashed password", async () => {
    const result = await service.setup(
      validSetupArgs.email,
      validSetupArgs.name,
      validSetupArgs.password,
      validSetupArgs.setupToken
    );

    expect(result.user).toEqual(mockCreatedUser);
    expect(result.session.token).toBeDefined();
    expect(result.session.token.length).toBeGreaterThan(0);
    expect(result.session.expiresAt).toBeInstanceOf(Date);
    expect(result.session.expiresAt.getTime()).toBeGreaterThan(Date.now());

    // The raw password must never be stored; only a hash of unspecified value.
    expect(mockTxUser.create).toHaveBeenCalledWith({
      data: expect.objectContaining({
        email: "admin@example.com",
        name: "Break Glass Admin",
        isLocalAuth: true,
        emailVerified: true,
        passwordHash: expect.any(String) as string,
      }),
      select: {
        id: true,
        email: true,
        name: true,
        isLocalAuth: true,
        createdAt: true,
      },
    });
  });

  it("should assign OWNER role on default workspace", async () => {
    await service.setup(
      validSetupArgs.email,
      validSetupArgs.name,
      validSetupArgs.password,
      validSetupArgs.setupToken
    );

    expect(mockTxWorkspaceMember.create).toHaveBeenCalledWith({
      data: {
        workspaceId: "workspace-uuid-123",
        userId: "user-uuid-123",
        role: "OWNER",
      },
    });
  });

  it("should create a new workspace if none exists", async () => {
    // No workspace in the database: the service must create a default one.
    mockTxWorkspace.findFirst.mockResolvedValue(null);
    mockTxWorkspace.create.mockResolvedValue({ id: "new-workspace-uuid" });

    await service.setup(
      validSetupArgs.email,
      validSetupArgs.name,
      validSetupArgs.password,
      validSetupArgs.setupToken
    );

    expect(mockTxWorkspace.create).toHaveBeenCalledWith({
      data: {
        name: "Default Workspace",
        ownerId: "user-uuid-123",
        settings: {},
      },
      select: { id: true },
    });
    expect(mockTxWorkspaceMember.create).toHaveBeenCalledWith({
      data: {
        workspaceId: "new-workspace-uuid",
        userId: "user-uuid-123",
        role: "OWNER",
      },
    });
  });

  it("should create a BetterAuth-compatible session", async () => {
    await service.setup(
      validSetupArgs.email,
      validSetupArgs.name,
      validSetupArgs.password,
      validSetupArgs.setupToken,
      "192.168.1.1",
      "TestAgent/1.0"
    );

    // ip/user-agent supplied by the caller must be persisted on the session.
    expect(mockTxSession.create).toHaveBeenCalledWith({
      data: {
        userId: "user-uuid-123",
        token: expect.any(String) as string,
        expiresAt: expect.any(Date) as Date,
        ipAddress: "192.168.1.1",
        userAgent: "TestAgent/1.0",
      },
    });
  });

  it("should reject when BREAKGLASS_SETUP_TOKEN is not set", async () => {
    delete process.env.BREAKGLASS_SETUP_TOKEN;

    await expect(
      service.setup(
        validSetupArgs.email,
        validSetupArgs.name,
        validSetupArgs.password,
        validSetupArgs.setupToken
      )
    ).rejects.toThrow(ForbiddenException);
  });

  it("should reject when BREAKGLASS_SETUP_TOKEN is empty", async () => {
    process.env.BREAKGLASS_SETUP_TOKEN = "";

    await expect(
      service.setup(
        validSetupArgs.email,
        validSetupArgs.name,
        validSetupArgs.password,
        validSetupArgs.setupToken
      )
    ).rejects.toThrow(ForbiddenException);
  });

  it("should reject when setup token does not match", async () => {
    await expect(
      service.setup(
        validSetupArgs.email,
        validSetupArgs.name,
        validSetupArgs.password,
        "wrong-token"
      )
    ).rejects.toThrow(ForbiddenException);
  });

  it("should reject when email already exists", async () => {
    mockPrismaService.user.findUnique.mockResolvedValue({
      id: "existing-user",
      email: "admin@example.com",
    });

    await expect(
      service.setup(
        validSetupArgs.email,
        validSetupArgs.name,
        validSetupArgs.password,
        validSetupArgs.setupToken
      )
    ).rejects.toThrow(ConflictException);
  });

  it("should return session token and expiry", async () => {
    const result = await service.setup(
      validSetupArgs.email,
      validSetupArgs.name,
      validSetupArgs.password,
      validSetupArgs.setupToken
    );

    expect(typeof result.session.token).toBe("string");
    expect(result.session.token.length).toBe(64); // 32 bytes hex
    expect(result.session.expiresAt).toBeInstanceOf(Date);
  });
});
|
||||||
|
|
||||||
|
// Covers LocalAuthService.login: password verification against a real bcrypt
// hash, deactivation checks, and error opacity (no email-existence oracle).
describe("login", () => {
  // NOTE(review): appears unused within this suite (tests generate real hashes
  // in beforeEach instead) — TODO confirm before removing.
  const validPasswordHash = "$2a$12$LJ3m4ys3Lz/YgP7xYz5k5uU6b5F6X1234567890abcdefghijkl";

  beforeEach(async () => {
    // Create a real bcrypt hash for testing
    const realHash = await hash("securePassword123!", 4); // Low rounds for test speed
    mockPrismaService.user.findUnique.mockResolvedValue({
      id: "user-uuid-123",
      email: "admin@example.com",
      name: "Break Glass Admin",
      isLocalAuth: true,
      passwordHash: realHash,
      deactivatedAt: null,
    });
    mockPrismaService.session.create.mockResolvedValue({});
  });

  it("should authenticate a valid local auth user", async () => {
    const result = await service.login("admin@example.com", "securePassword123!");

    expect(result.user).toEqual({
      id: "user-uuid-123",
      email: "admin@example.com",
      name: "Break Glass Admin",
    });
    expect(result.session.token).toBeDefined();
    expect(result.session.expiresAt).toBeInstanceOf(Date);
  });

  it("should create a session with ip and user agent", async () => {
    await service.login("admin@example.com", "securePassword123!", "10.0.0.1", "Mozilla/5.0");

    expect(mockPrismaService.session.create).toHaveBeenCalledWith({
      data: {
        userId: "user-uuid-123",
        token: expect.any(String) as string,
        expiresAt: expect.any(Date) as Date,
        ipAddress: "10.0.0.1",
        userAgent: "Mozilla/5.0",
      },
    });
  });

  it("should reject when user does not exist", async () => {
    mockPrismaService.user.findUnique.mockResolvedValue(null);

    await expect(service.login("nonexistent@example.com", "password123456")).rejects.toThrow(
      UnauthorizedException
    );
  });

  it("should reject when user is not a local auth user", async () => {
    // OIDC users (isLocalAuth: false) must not be able to password-login.
    mockPrismaService.user.findUnique.mockResolvedValue({
      id: "user-uuid-123",
      email: "admin@example.com",
      name: "OIDC User",
      isLocalAuth: false,
      passwordHash: null,
      deactivatedAt: null,
    });

    await expect(service.login("admin@example.com", "password123456")).rejects.toThrow(
      UnauthorizedException
    );
  });

  it("should reject when user is deactivated", async () => {
    const realHash = await hash("securePassword123!", 4);
    mockPrismaService.user.findUnique.mockResolvedValue({
      id: "user-uuid-123",
      email: "admin@example.com",
      name: "Deactivated User",
      isLocalAuth: true,
      passwordHash: realHash,
      deactivatedAt: new Date("2026-01-01"),
    });

    await expect(service.login("admin@example.com", "securePassword123!")).rejects.toThrow(
      new UnauthorizedException("Account has been deactivated")
    );
  });

  it("should reject when password is incorrect", async () => {
    await expect(service.login("admin@example.com", "wrongPassword123!")).rejects.toThrow(
      UnauthorizedException
    );
  });

  it("should throw InternalServerError when local auth user has no password hash", async () => {
    // Inconsistent data (local-auth flag without a hash) is a server error,
    // not a credentials error.
    mockPrismaService.user.findUnique.mockResolvedValue({
      id: "user-uuid-123",
      email: "admin@example.com",
      name: "Broken User",
      isLocalAuth: true,
      passwordHash: null,
      deactivatedAt: null,
    });

    await expect(service.login("admin@example.com", "securePassword123!")).rejects.toThrow(
      InternalServerErrorException
    );
  });

  it("should not reveal whether email exists in error messages", async () => {
    mockPrismaService.user.findUnique.mockResolvedValue(null);

    try {
      await service.login("nonexistent@example.com", "password123456");
    } catch (error) {
      expect(error).toBeInstanceOf(UnauthorizedException);
      expect((error as UnauthorizedException).message).toBe("Invalid email or password");
    }
  });
});
|
||||||
|
});
|
||||||
230
apps/api/src/auth/local/local-auth.service.ts
Normal file
230
apps/api/src/auth/local/local-auth.service.ts
Normal file
@@ -0,0 +1,230 @@
|
|||||||
|
import {
  Injectable,
  Logger,
  ForbiddenException,
  UnauthorizedException,
  ConflictException,
  InternalServerErrorException,
} from "@nestjs/common";
import { WorkspaceMemberRole } from "@prisma/client";
import { hash, compare } from "bcryptjs";
import { createHash, randomBytes, timingSafeEqual } from "crypto";
import { PrismaService } from "../../prisma/prisma.service";
|
||||||
|
|
||||||
|
/** bcrypt cost factor (2^12 rounds) used when hashing break-glass passwords. */
const BCRYPT_ROUNDS = 12;

/** Session expiry: 7 days (matches BetterAuth config in auth.config.ts) */
const SESSION_EXPIRY_MS = 7 * 24 * 60 * 60 * 1000;
|
||||||
|
|
||||||
|
/**
 * Result of a successful break-glass setup: the newly created user (shape
 * mirrors the `select` clause in setup()) plus a freshly issued session.
 */
interface SetupResult {
  user: {
    id: string;
    email: string;
    name: string;
    isLocalAuth: boolean;
    createdAt: Date;
  };
  session: {
    /** Opaque session token (32 random bytes, hex-encoded → 64 chars). */
    token: string;
    /** Absolute expiry timestamp (now + SESSION_EXPIRY_MS). */
    expiresAt: Date;
  };
}
|
||||||
|
|
||||||
|
/**
 * Result of a successful break-glass login: minimal user identity plus the
 * session issued for this login.
 */
interface LoginResult {
  user: {
    id: string;
    email: string;
    name: string;
  };
  session: {
    /** Opaque session token (32 random bytes, hex-encoded → 64 chars). */
    token: string;
    /** Absolute expiry timestamp (now + SESSION_EXPIRY_MS). */
    expiresAt: Date;
  };
}
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class LocalAuthService {
|
||||||
|
private readonly logger = new Logger(LocalAuthService.name);
|
||||||
|
|
||||||
|
constructor(private readonly prisma: PrismaService) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* First-time break-glass user creation.
|
||||||
|
* Validates the setup token, creates a local auth user with bcrypt-hashed password,
|
||||||
|
* and assigns OWNER role on the default workspace.
|
||||||
|
*/
|
||||||
|
async setup(
|
||||||
|
email: string,
|
||||||
|
name: string,
|
||||||
|
password: string,
|
||||||
|
setupToken: string,
|
||||||
|
ipAddress?: string,
|
||||||
|
userAgent?: string
|
||||||
|
): Promise<SetupResult> {
|
||||||
|
this.validateSetupToken(setupToken);
|
||||||
|
|
||||||
|
const existing = await this.prisma.user.findUnique({ where: { email } });
|
||||||
|
if (existing) {
|
||||||
|
throw new ConflictException("A user with this email already exists");
|
||||||
|
}
|
||||||
|
|
||||||
|
const passwordHash = await hash(password, BCRYPT_ROUNDS);
|
||||||
|
|
||||||
|
const result = await this.prisma.$transaction(async (tx) => {
|
||||||
|
const user = await tx.user.create({
|
||||||
|
data: {
|
||||||
|
email,
|
||||||
|
name,
|
||||||
|
isLocalAuth: true,
|
||||||
|
passwordHash,
|
||||||
|
emailVerified: true,
|
||||||
|
},
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
email: true,
|
||||||
|
name: true,
|
||||||
|
isLocalAuth: true,
|
||||||
|
createdAt: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Find or create a default workspace and assign OWNER role
|
||||||
|
await this.assignDefaultWorkspace(tx, user.id);
|
||||||
|
|
||||||
|
// Create a BetterAuth-compatible session
|
||||||
|
const session = await this.createSession(tx, user.id, ipAddress, userAgent);
|
||||||
|
|
||||||
|
return { user, session };
|
||||||
|
});
|
||||||
|
|
||||||
|
this.logger.log(`Break-glass user created: ${email}`);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Break-glass login: verify email + password against bcrypt hash.
|
||||||
|
* Only works for users with isLocalAuth=true.
|
||||||
|
*/
|
||||||
|
async login(
|
||||||
|
email: string,
|
||||||
|
password: string,
|
||||||
|
ipAddress?: string,
|
||||||
|
userAgent?: string
|
||||||
|
): Promise<LoginResult> {
|
||||||
|
const user = await this.prisma.user.findUnique({
|
||||||
|
where: { email },
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
email: true,
|
||||||
|
name: true,
|
||||||
|
isLocalAuth: true,
|
||||||
|
passwordHash: true,
|
||||||
|
deactivatedAt: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!user?.isLocalAuth) {
|
||||||
|
throw new UnauthorizedException("Invalid email or password");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (user.deactivatedAt) {
|
||||||
|
throw new UnauthorizedException("Account has been deactivated");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!user.passwordHash) {
|
||||||
|
this.logger.error(`Local auth user ${email} has no password hash`);
|
||||||
|
throw new InternalServerErrorException("Account configuration error");
|
||||||
|
}
|
||||||
|
|
||||||
|
const passwordValid = await compare(password, user.passwordHash);
|
||||||
|
if (!passwordValid) {
|
||||||
|
throw new UnauthorizedException("Invalid email or password");
|
||||||
|
}
|
||||||
|
|
||||||
|
const session = await this.createSession(this.prisma, user.id, ipAddress, userAgent);
|
||||||
|
|
||||||
|
this.logger.log(`Break-glass login: ${email}`);
|
||||||
|
return {
|
||||||
|
user: { id: user.id, email: user.email, name: user.name },
|
||||||
|
session,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate the setup token against the environment variable.
|
||||||
|
*/
|
||||||
|
private validateSetupToken(token: string): void {
|
||||||
|
const expectedToken = process.env.BREAKGLASS_SETUP_TOKEN;
|
||||||
|
|
||||||
|
if (!expectedToken || expectedToken.trim() === "") {
|
||||||
|
throw new ForbiddenException(
|
||||||
|
"Break-glass setup is not configured. Set BREAKGLASS_SETUP_TOKEN environment variable."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const tokenBuffer = Buffer.from(token);
|
||||||
|
const expectedBuffer = Buffer.from(expectedToken);
|
||||||
|
if (
|
||||||
|
tokenBuffer.length !== expectedBuffer.length ||
|
||||||
|
!timingSafeEqual(tokenBuffer, expectedBuffer)
|
||||||
|
) {
|
||||||
|
this.logger.warn("Invalid break-glass setup token attempt");
|
||||||
|
throw new ForbiddenException("Invalid setup token");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find the first workspace or create a default one, then assign OWNER role.
|
||||||
|
*/
|
||||||
|
private async assignDefaultWorkspace(
|
||||||
|
tx: Parameters<Parameters<PrismaService["$transaction"]>[0]>[0],
|
||||||
|
userId: string
|
||||||
|
): Promise<void> {
|
||||||
|
let workspace = await tx.workspace.findFirst({
|
||||||
|
orderBy: { createdAt: "asc" },
|
||||||
|
select: { id: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
workspace ??= await tx.workspace.create({
|
||||||
|
data: {
|
||||||
|
name: "Default Workspace",
|
||||||
|
ownerId: userId,
|
||||||
|
settings: {},
|
||||||
|
},
|
||||||
|
select: { id: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
await tx.workspaceMember.create({
|
||||||
|
data: {
|
||||||
|
workspaceId: workspace.id,
|
||||||
|
userId,
|
||||||
|
role: WorkspaceMemberRole.OWNER,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a BetterAuth-compatible session record.
|
||||||
|
*/
|
||||||
|
private async createSession(
|
||||||
|
tx: { session: { create: typeof PrismaService.prototype.session.create } },
|
||||||
|
userId: string,
|
||||||
|
ipAddress?: string,
|
||||||
|
userAgent?: string
|
||||||
|
): Promise<{ token: string; expiresAt: Date }> {
|
||||||
|
const token = randomBytes(32).toString("hex");
|
||||||
|
const expiresAt = new Date(Date.now() + SESSION_EXPIRY_MS);
|
||||||
|
|
||||||
|
await tx.session.create({
|
||||||
|
data: {
|
||||||
|
userId,
|
||||||
|
token,
|
||||||
|
expiresAt,
|
||||||
|
ipAddress: ipAddress ?? null,
|
||||||
|
userAgent: userAgent ?? null,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return { token, expiresAt };
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -110,10 +110,10 @@ export class WorkspaceGuard implements CanActivate {
|
|||||||
return paramWorkspaceId;
|
return paramWorkspaceId;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 3. Check request body
|
// 3. Check request body (body may be undefined for GET requests despite Express typings)
|
||||||
const bodyWorkspaceId = request.body.workspaceId;
|
const body = request.body as Record<string, unknown> | undefined;
|
||||||
if (typeof bodyWorkspaceId === "string") {
|
if (body && typeof body.workspaceId === "string") {
|
||||||
return bodyWorkspaceId;
|
return body.workspaceId;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 4. Check query string (backward compatibility for existing clients)
|
// 4. Check query string (backward compatibility for existing clients)
|
||||||
|
|||||||
@@ -270,7 +270,7 @@ describe("sanitizeForLogging", () => {
|
|||||||
const duration = Date.now() - start;
|
const duration = Date.now() - start;
|
||||||
|
|
||||||
expect(result.password).toBe("[REDACTED]");
|
expect(result.password).toBe("[REDACTED]");
|
||||||
expect(duration).toBeLessThan(100); // Should complete in under 100ms
|
expect(duration).toBeLessThan(500); // Should complete in under 500ms
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,69 @@
|
|||||||
|
import { Controller, Post, Get, Body, Param, Query, UseGuards } from "@nestjs/common";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||||
|
import { Workspace, RequirePermission, Permission } from "../common/decorators";
|
||||||
|
import { ConversationArchiveService } from "./conversation-archive.service";
|
||||||
|
import { IngestConversationDto, SearchConversationDto, ListConversationsDto } from "./dto";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Controller for conversation archive endpoints.
|
||||||
|
* All endpoints require workspace membership.
|
||||||
|
*/
|
||||||
|
/**
 * Controller for conversation archive endpoints.
 * All endpoints require workspace membership.
 *
 * Guard order matters: AuthGuard authenticates, WorkspaceGuard resolves the
 * workspace, PermissionGuard checks the per-route @RequirePermission level.
 */
@Controller("conversations")
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
export class ConversationArchiveController {
  constructor(private readonly service: ConversationArchiveService) {}

  /**
   * POST /api/conversations/ingest
   * Ingest a conversation session log and auto-embed for semantic search.
   * Requires: MEMBER or higher
   *
   * @returns The id of the newly stored archive record
   */
  @Post("ingest")
  @RequirePermission(Permission.WORKSPACE_MEMBER)
  async ingest(
    @Workspace() workspaceId: string,
    @Body() dto: IngestConversationDto
  ): Promise<{ id: string }> {
    return this.service.ingest(workspaceId, dto);
  }

  /**
   * POST /api/conversations/search
   * Vector similarity search across archived conversations.
   * Requires: Any workspace member
   */
  @Post("search")
  @RequirePermission(Permission.WORKSPACE_ANY)
  async search(
    @Workspace() workspaceId: string,
    @Body() dto: SearchConversationDto
  ): Promise<unknown> {
    return this.service.search(workspaceId, dto);
  }

  /**
   * GET /api/conversations
   * List conversation archives with filtering and pagination.
   * Requires: Any workspace member
   */
  @Get()
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findAll(
    @Workspace() workspaceId: string,
    @Query() query: ListConversationsDto
  ): Promise<unknown> {
    return this.service.findAll(workspaceId, query);
  }

  /**
   * GET /api/conversations/:id
   * Get a single conversation archive by ID (includes full messages).
   * Requires: Any workspace member
   */
  @Get(":id")
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findOne(@Workspace() workspaceId: string, @Param("id") id: string): Promise<unknown> {
    return this.service.findOne(workspaceId, id);
  }
}
|
||||||
@@ -0,0 +1,239 @@
|
|||||||
|
import { beforeAll, beforeEach, describe, expect, it, afterAll, vi } from "vitest";
|
||||||
|
import { randomUUID as uuid } from "crypto";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { ConflictException } from "@nestjs/common";
|
||||||
|
import { PrismaClient, Prisma } from "@prisma/client";
|
||||||
|
import { EMBEDDING_DIMENSION } from "@mosaic/shared";
|
||||||
|
import { ConversationArchiveService } from "./conversation-archive.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { EmbeddingService } from "../knowledge/services/embedding.service";
|
||||||
|
|
||||||
|
// DB integration tests run only when explicitly opted in AND a database URL
// is provided; otherwise the whole suite is skipped rather than failed.
const shouldRunDbIntegrationTests =
  process.env.RUN_DB_TESTS === "true" && Boolean(process.env.DATABASE_URL);
const describeFn = shouldRunDbIntegrationTests ? describe : describe.skip;
|
||||||
|
|
||||||
|
function vector(value: number): number[] {
|
||||||
|
return Array.from({ length: EMBEDDING_DIMENSION }, () => value);
|
||||||
|
}
|
||||||
|
|
||||||
|
function toVectorLiteral(input: number[]): string {
|
||||||
|
return `[${input.join(",")}]`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Integration suite: runs against a real Postgres (pgvector) database when
// RUN_DB_TESTS is enabled. Only the embedding service is mocked.
describeFn("ConversationArchiveService Integration", () => {
  let moduleRef: TestingModule;
  let prisma: PrismaClient;
  let service: ConversationArchiveService;
  let workspaceId: string;
  let ownerId: string;
  // Guards each test/hook against running when beforeAll failed part-way.
  let setupComplete = false;

  const embeddingServiceMock = {
    isConfigured: vi.fn(),
    generateEmbedding: vi.fn(),
  };

  beforeAll(async () => {
    prisma = new PrismaClient();
    await prisma.$connect();

    // Unique workspace + owner per run (Date.now suffix) to avoid collisions
    // with parallel or previous test runs.
    const workspace = await prisma.workspace.create({
      data: {
        name: `Conversation Archive Integration ${Date.now()}`,
        owner: {
          create: {
            email: `conversation-archive-integration-${Date.now()}@example.com`,
            name: "Conversation Archive Integration Owner",
          },
        },
      },
    });

    workspaceId = workspace.id;
    ownerId = workspace.ownerId;

    moduleRef = await Test.createTestingModule({
      providers: [
        ConversationArchiveService,
        {
          provide: PrismaService,
          useValue: prisma,
        },
        {
          provide: EmbeddingService,
          useValue: embeddingServiceMock,
        },
      ],
    }).compile();

    service = moduleRef.get<ConversationArchiveService>(ConversationArchiveService);
    setupComplete = true;
  });

  beforeEach(async () => {
    vi.clearAllMocks();
    // Default: embeddings disabled; individual tests opt in.
    embeddingServiceMock.isConfigured.mockReturnValue(false);

    if (!setupComplete) {
      return;
    }

    // Each test starts from an empty archive table for this workspace.
    await prisma.conversationArchive.deleteMany({ where: { workspaceId } });
  });

  afterAll(async () => {
    if (!prisma) {
      return;
    }

    // Delete in dependency order: archives -> workspace -> owner user.
    if (workspaceId) {
      await prisma.conversationArchive.deleteMany({ where: { workspaceId } });
      await prisma.workspace.deleteMany({ where: { id: workspaceId } });
    }
    if (ownerId) {
      await prisma.user.deleteMany({ where: { id: ownerId } });
    }

    if (moduleRef) {
      await moduleRef.close();
    }
    await prisma.$disconnect();
  });

  it("ingests a conversation log", async () => {
    if (!setupComplete) {
      return;
    }

    const sessionId = `session-${uuid()}`;

    const result = await service.ingest(workspaceId, {
      sessionId,
      agentId: "agent-conversation-ingest",
      messages: [
        { role: "user", content: "Can you summarize deployment issues?" },
        { role: "assistant", content: "Yes, three retries timed out in staging." },
      ],
      summary: "Deployment retry failures discussed",
      startedAt: "2026-02-28T21:00:00.000Z",
      endedAt: "2026-02-28T21:05:00.000Z",
      metadata: { source: "integration-test" },
    });

    expect(result.id).toBeDefined();

    // Verify persisted row directly via Prisma, bypassing the service.
    const stored = await prisma.conversationArchive.findUnique({
      where: {
        id: result.id,
      },
    });

    expect(stored).toBeTruthy();
    expect(stored?.workspaceId).toBe(workspaceId);
    expect(stored?.sessionId).toBe(sessionId);
    expect(stored?.messageCount).toBe(2);
    expect(stored?.summary).toBe("Deployment retry failures discussed");
  });

  it("rejects duplicate session ingest per workspace", async () => {
    if (!setupComplete) {
      return;
    }

    const sessionId = `session-${uuid()}`;
    const dto = {
      sessionId,
      agentId: "agent-conversation-duplicate",
      messages: [{ role: "user", content: "hello" }],
      summary: "simple conversation",
      startedAt: "2026-02-28T22:00:00.000Z",
    };

    await service.ingest(workspaceId, dto);

    // Second ingest of the same sessionId in the same workspace must conflict.
    await expect(service.ingest(workspaceId, dto)).rejects.toThrow(ConflictException);
  });

  it("rejects semantic search when embeddings are disabled", async () => {
    if (!setupComplete) {
      return;
    }

    embeddingServiceMock.isConfigured.mockReturnValue(false);

    await expect(
      service.search(workspaceId, {
        query: "deployment retries",
      })
    ).rejects.toThrow(ConflictException);
  });

  it("searches archived conversations by vector similarity", async () => {
    if (!setupComplete) {
      return;
    }

    // Two archives: one embedded near the query vector, one far away.
    const near = vector(0.02);
    const far = vector(0.85);

    const matching = await prisma.conversationArchive.create({
      data: {
        workspaceId,
        sessionId: `session-search-${uuid()}`,
        agentId: "agent-conversation-search-a",
        messages: [
          { role: "user", content: "What caused deployment retries?" },
          { role: "assistant", content: "A connection pool timeout." },
        ] as unknown as Prisma.InputJsonValue,
        messageCount: 2,
        summary: "Deployment retries caused by connection pool timeout",
        startedAt: new Date("2026-02-28T23:00:00.000Z"),
        metadata: { channel: "cli" } as Prisma.InputJsonValue,
      },
    });

    const nonMatching = await prisma.conversationArchive.create({
      data: {
        workspaceId,
        sessionId: `session-search-${uuid()}`,
        agentId: "agent-conversation-search-b",
        messages: [
          { role: "user", content: "How is billing configured?" },
        ] as unknown as Prisma.InputJsonValue,
        messageCount: 1,
        summary: "Billing and quotas conversation",
        startedAt: new Date("2026-02-28T23:10:00.000Z"),
        metadata: { channel: "cli" } as Prisma.InputJsonValue,
      },
    });

    // Embeddings are written via raw SQL because Prisma has no native
    // pgvector column type.
    await prisma.$executeRaw`
      UPDATE conversation_archives
      SET embedding = ${toVectorLiteral(near)}::vector(${EMBEDDING_DIMENSION})
      WHERE id = ${matching.id}::uuid
    `;
    await prisma.$executeRaw`
      UPDATE conversation_archives
      SET embedding = ${toVectorLiteral(far)}::vector(${EMBEDDING_DIMENSION})
      WHERE id = ${nonMatching.id}::uuid
    `;

    embeddingServiceMock.isConfigured.mockReturnValue(true);
    embeddingServiceMock.generateEmbedding.mockResolvedValue(near);

    const result = await service.search(workspaceId, {
      query: "deployment retries timeout",
      agentId: "agent-conversation-search-a",
      similarityThreshold: 0,
      limit: 10,
    });

    const rows = result.data as Array<{ id: string; agent_id: string; similarity: number }>;

    // agentId filter + similarity should select only the matching archive.
    expect(result.pagination.total).toBe(1);
    expect(rows).toHaveLength(1);
    expect(rows[0]?.id).toBe(matching.id);
    expect(rows[0]?.agent_id).toBe("agent-conversation-search-a");
    expect(rows[0]?.similarity).toBeGreaterThan(0);
  });
});
|
||||||
@@ -0,0 +1,14 @@
|
|||||||
|
import { Module } from "@nestjs/common";
|
||||||
|
import { PrismaModule } from "../prisma/prisma.module";
|
||||||
|
import { AuthModule } from "../auth/auth.module";
|
||||||
|
import { KnowledgeModule } from "../knowledge/knowledge.module";
|
||||||
|
import { ConversationArchiveService } from "./conversation-archive.service";
|
||||||
|
import { ConversationArchiveController } from "./conversation-archive.controller";
|
||||||
|
|
||||||
|
/**
 * Wires the conversation archive feature: PrismaModule for persistence,
 * AuthModule for route guards, KnowledgeModule for the embedding service
 * used in semantic search. The service is exported for reuse elsewhere.
 */
@Module({
  imports: [PrismaModule, AuthModule, KnowledgeModule],
  controllers: [ConversationArchiveController],
  providers: [ConversationArchiveService],
  exports: [ConversationArchiveService],
})
export class ConversationArchiveModule {}
|
||||||
@@ -0,0 +1,149 @@
|
|||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { ConflictException, NotFoundException } from "@nestjs/common";
|
||||||
|
import { ConversationArchiveService } from "./conversation-archive.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { EmbeddingService } from "../knowledge/services/embedding.service";
|
||||||
|
|
||||||
|
// Minimal Prisma stub exposing only the delegate methods and raw-query entry
// points that ConversationArchiveService actually calls.
const mockPrisma = {
  conversationArchive: {
    findUnique: vi.fn(),
    create: vi.fn(),
    count: vi.fn(),
    findMany: vi.fn(),
    findFirst: vi.fn(),
  },
  $queryRaw: vi.fn(),
  $executeRaw: vi.fn(),
};

// EmbeddingService stub; tests toggle isConfigured() to exercise both the
// embedding and the no-embedding code paths.
const mockEmbedding = {
  isConfigured: vi.fn(),
  generateEmbedding: vi.fn(),
};

describe("ConversationArchiveService", () => {
  let service: ConversationArchiveService;

  beforeEach(async () => {
    // Reset call history so expectations in one test cannot leak into another.
    vi.clearAllMocks();

    const module: TestingModule = await Test.createTestingModule({
      providers: [
        ConversationArchiveService,
        { provide: PrismaService, useValue: mockPrisma },
        { provide: EmbeddingService, useValue: mockEmbedding },
      ],
    }).compile();

    service = module.get<ConversationArchiveService>(ConversationArchiveService);
  });

  describe("ingest", () => {
    const workspaceId = "ws-1";
    const dto = {
      sessionId: "sess-abc",
      agentId: "agent-xyz",
      messages: [
        { role: "user", content: "Hello" },
        { role: "assistant", content: "Hi there!" },
      ],
      summary: "A greeting conversation",
      startedAt: "2026-02-28T10:00:00Z",
    };

    it("creates a conversation archive and returns id", async () => {
      // No duplicate session; embedding disabled so no raw UPDATE is issued.
      mockPrisma.conversationArchive.findUnique.mockResolvedValue(null);
      mockPrisma.conversationArchive.create.mockResolvedValue({ id: "conv-1" });
      mockEmbedding.isConfigured.mockReturnValue(false);

      const result = await service.ingest(workspaceId, dto);

      expect(result).toEqual({ id: "conv-1" });
      // messageCount must be derived from the messages array length (2 here).
      expect(mockPrisma.conversationArchive.create).toHaveBeenCalledWith(
        expect.objectContaining({
          data: expect.objectContaining({
            workspaceId,
            sessionId: dto.sessionId,
            agentId: dto.agentId,
            messageCount: 2,
          }),
        })
      );
    });

    it("throws ConflictException when session already exists", async () => {
      mockPrisma.conversationArchive.findUnique.mockResolvedValue({ id: "existing" });

      await expect(service.ingest(workspaceId, dto)).rejects.toThrow(ConflictException);
    });
  });

  describe("findAll", () => {
    const workspaceId = "ws-1";

    it("returns paginated list", async () => {
      mockPrisma.conversationArchive.count.mockResolvedValue(5);
      mockPrisma.conversationArchive.findMany.mockResolvedValue([
        { id: "conv-1", sessionId: "sess-1" },
      ]);

      const result = await service.findAll(workspaceId, { page: 1, limit: 10 });

      expect(result.pagination.total).toBe(5);
      expect(result.data).toHaveLength(1);
    });

    it("uses default pagination when not provided", async () => {
      mockPrisma.conversationArchive.count.mockResolvedValue(0);
      mockPrisma.conversationArchive.findMany.mockResolvedValue([]);

      const result = await service.findAll(workspaceId, {});

      // Service defaults: page 1, limit 20.
      expect(result.pagination.page).toBe(1);
      expect(result.pagination.limit).toBe(20);
    });
  });

  describe("findOne", () => {
    const workspaceId = "ws-1";

    it("returns record when found", async () => {
      const record = { id: "conv-1", workspaceId, sessionId: "sess-1" };
      mockPrisma.conversationArchive.findFirst.mockResolvedValue(record);

      const result = await service.findOne(workspaceId, "conv-1");

      expect(result).toEqual(record);
    });

    it("throws NotFoundException when record does not exist", async () => {
      mockPrisma.conversationArchive.findFirst.mockResolvedValue(null);

      await expect(service.findOne(workspaceId, "missing")).rejects.toThrow(NotFoundException);
    });
  });

  describe("search", () => {
    it("throws ConflictException when embedding is not configured", async () => {
      mockEmbedding.isConfigured.mockReturnValue(false);

      await expect(service.search("ws-1", { query: "test query" })).rejects.toThrow(
        ConflictException
      );
    });

    it("performs vector search when configured", async () => {
      mockEmbedding.isConfigured.mockReturnValue(true);
      mockEmbedding.generateEmbedding.mockResolvedValue(new Array(1536).fill(0.1));
      // First $queryRaw call returns matching rows, second the COUNT(*) total.
      mockPrisma.$queryRaw
        .mockResolvedValueOnce([{ id: "conv-1", similarity: 0.9 }])
        .mockResolvedValueOnce([{ count: BigInt(1) }]);

      const result = await service.search("ws-1", { query: "greetings" });

      expect(result.data).toHaveLength(1);
      expect(result.pagination.total).toBe(1);
    });
  });
});
|
||||||
@@ -0,0 +1,277 @@
|
|||||||
|
import { Injectable, Logger, NotFoundException, ConflictException } from "@nestjs/common";
|
||||||
|
import { Prisma } from "@prisma/client";
|
||||||
|
import { EMBEDDING_DIMENSION } from "@mosaic/shared";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { EmbeddingService } from "../knowledge/services/embedding.service";
|
||||||
|
import type { IngestConversationDto, SearchConversationDto, ListConversationsDto } from "./dto";
|
||||||
|
|
||||||
|
/**
 * Shape of a raw conversation archive row from $queryRaw vector search.
 * Columns are snake_case because raw SQL bypasses Prisma's field-name mapping.
 */
interface RawConversationResult {
  id: string;
  workspace_id: string;
  session_id: string;
  agent_id: string;
  messages: unknown; // stored JSON; element shape is not validated at this layer
  message_count: number;
  summary: string;
  started_at: Date;
  ended_at: Date | null;
  metadata: unknown;
  created_at: Date;
  updated_at: Date;
  similarity: number; // 1 - cosine distance, computed in the SELECT
}

/**
 * Paginated response wrapper shared by list and search results.
 */
export interface PaginatedConversations<T> {
  data: T[];
  pagination: {
    page: number;
    limit: number;
    total: number;
    totalPages: number;
  };
}
|
||||||
|
|
||||||
|
/**
 * Persists archived agent conversation sessions and provides semantic
 * search over them using pgvector cosine distance on stored embeddings.
 */
@Injectable()
export class ConversationArchiveService {
  private readonly logger = new Logger(ConversationArchiveService.name);
  // Minimum cosine similarity used when the caller does not supply one.
  private readonly defaultSimilarityThreshold = 0.5;

  constructor(
    private readonly prisma: PrismaService,
    private readonly embedding: EmbeddingService
  ) {}

  /**
   * Ingest a conversation session log.
   * Generates a vector embedding from the summary + message content and stores it alongside the record.
   *
   * @param workspaceId - Workspace the session belongs to.
   * @param dto - Session payload (sessionId, agentId, messages, summary, timestamps, metadata).
   * @returns The id of the newly created archive row.
   * @throws ConflictException if (workspaceId, sessionId) already exists.
   *
   * NOTE(review): the findUnique-then-create sequence can race under two
   * concurrent ingests of the same session — confirm a DB unique constraint
   * on (workspace_id, session_id) backs the `workspaceId_sessionId` lookup.
   */
  async ingest(workspaceId: string, dto: IngestConversationDto): Promise<{ id: string }> {
    // Verify no duplicate session in this workspace
    const existing = await this.prisma.conversationArchive.findUnique({
      where: { workspaceId_sessionId: { workspaceId, sessionId: dto.sessionId } },
      select: { id: true },
    });

    if (existing) {
      throw new ConflictException(
        `Conversation session '${dto.sessionId}' already exists in this workspace`
      );
    }

    const messageCount = dto.messages.length;

    // Create record first to get ID for embedding
    const record = await this.prisma.conversationArchive.create({
      data: {
        workspaceId,
        sessionId: dto.sessionId,
        agentId: dto.agentId,
        messages: dto.messages as unknown as Prisma.InputJsonValue,
        messageCount,
        summary: dto.summary,
        startedAt: new Date(dto.startedAt),
        endedAt: dto.endedAt ? new Date(dto.endedAt) : null,
        metadata: (dto.metadata ?? {}) as Prisma.InputJsonValue,
      },
      select: { id: true },
    });

    // Generate and store embedding asynchronously (non-blocking for ingest).
    // Failures are logged, not surfaced: the row simply stays without an
    // embedding and is excluded from search (embedding IS NOT NULL filter).
    if (this.embedding.isConfigured()) {
      const textForEmbedding = this.buildEmbeddingText(dto.summary, dto.messages);
      this.storeEmbedding(record.id, textForEmbedding).catch((err: unknown) => {
        this.logger.error(`Failed to store embedding for conversation ${record.id}`, err);
      });
    }

    this.logger.log(`Ingested conversation ${record.id} (session: ${dto.sessionId})`);
    return { id: record.id };
  }

  /**
   * Semantic vector search across conversation archives in a workspace.
   *
   * @throws ConflictException when the embedding provider is not configured.
   *
   * Note: results are not paginated — the response always reports page 1 and
   * returns at most `limit` rows ordered by ascending cosine distance.
   */
  async search(
    workspaceId: string,
    dto: SearchConversationDto
  ): Promise<PaginatedConversations<RawConversationResult>> {
    if (!this.embedding.isConfigured()) {
      throw new ConflictException("Semantic search requires OpenAI API key to be configured");
    }

    const limit = dto.limit ?? 20;
    const threshold = dto.similarityThreshold ?? this.defaultSimilarityThreshold;
    // pgvector's <=> operator yields cosine distance; similarity s maps to
    // distance 1 - s, so a similarity floor becomes a distance ceiling.
    const distanceThreshold = 1 - threshold;

    const queryEmbedding = await this.embedding.generateEmbedding(dto.query);
    const embeddingStr = `[${queryEmbedding.join(",")}]`;

    // Optional agent filter spliced in as a parameterized Prisma.sql fragment.
    const agentFilter = dto.agentId ? Prisma.sql`AND ca.agent_id = ${dto.agentId}` : Prisma.sql``;

    const rows = await this.prisma.$queryRaw<RawConversationResult[]>`
      SELECT
        ca.id,
        ca.workspace_id,
        ca.session_id,
        ca.agent_id,
        ca.messages,
        ca.message_count,
        ca.summary,
        ca.started_at,
        ca.ended_at,
        ca.metadata,
        ca.created_at,
        ca.updated_at,
        (1 - (ca.embedding <=> ${embeddingStr}::vector(${EMBEDDING_DIMENSION}))) AS similarity
      FROM conversation_archives ca
      WHERE ca.workspace_id = ${workspaceId}::uuid
        AND ca.embedding IS NOT NULL
        AND (ca.embedding <=> ${embeddingStr}::vector(${EMBEDDING_DIMENSION})) <= ${distanceThreshold}
        ${agentFilter}
      ORDER BY ca.embedding <=> ${embeddingStr}::vector(${EMBEDDING_DIMENSION})
      LIMIT ${limit}
    `;

    // Separate COUNT(*) with the same predicates so `total` reflects all
    // matches, not just the LIMIT-ed page.
    const countResult = await this.prisma.$queryRaw<[{ count: bigint }]>`
      SELECT COUNT(*) AS count
      FROM conversation_archives ca
      WHERE ca.workspace_id = ${workspaceId}::uuid
        AND ca.embedding IS NOT NULL
        AND (ca.embedding <=> ${embeddingStr}::vector(${EMBEDDING_DIMENSION})) <= ${distanceThreshold}
        ${agentFilter}
    `;

    // COUNT(*) arrives as bigint from Postgres; convert for JSON safety.
    const total = Number(countResult[0].count);

    return {
      data: rows,
      pagination: {
        page: 1,
        limit,
        total,
        totalPages: Math.ceil(total / limit),
      },
    };
  }

  /**
   * List conversation archives with filtering and pagination.
   * Filters: agentId (exact), startedAfter/startedBefore (inclusive range on startedAt).
   */
  async findAll(
    workspaceId: string,
    query: ListConversationsDto
  ): Promise<PaginatedConversations<object>> {
    const page = query.page ?? 1;
    const limit = query.limit ?? 20;
    const skip = (page - 1) * limit;

    const where: Prisma.ConversationArchiveWhereInput = {
      workspaceId,
      ...(query.agentId ? { agentId: query.agentId } : {}),
      ...(query.startedAfter || query.startedBefore
        ? {
            startedAt: {
              ...(query.startedAfter ? { gte: new Date(query.startedAfter) } : {}),
              ...(query.startedBefore ? { lte: new Date(query.startedBefore) } : {}),
            },
          }
        : {}),
    };

    // Count and page fetch share the same `where` and run in parallel.
    const [total, records] = await Promise.all([
      this.prisma.conversationArchive.count({ where }),
      this.prisma.conversationArchive.findMany({
        where,
        // `messages` is intentionally omitted from list results (bulky JSON);
        // use findOne to retrieve the full transcript.
        select: {
          id: true,
          workspaceId: true,
          sessionId: true,
          agentId: true,
          messageCount: true,
          summary: true,
          startedAt: true,
          endedAt: true,
          metadata: true,
          createdAt: true,
          updatedAt: true,
        },
        orderBy: { startedAt: "desc" },
        skip,
        take: limit,
      }),
    ]);

    return {
      data: records,
      pagination: {
        page,
        limit,
        total,
        totalPages: Math.ceil(total / limit),
      },
    };
  }

  /**
   * Get a single conversation archive by ID, scoped to the workspace.
   *
   * @throws NotFoundException when no row matches (id, workspaceId).
   */
  async findOne(workspaceId: string, id: string): Promise<object> {
    const record = await this.prisma.conversationArchive.findFirst({
      where: { id, workspaceId },
      select: {
        id: true,
        workspaceId: true,
        sessionId: true,
        agentId: true,
        messages: true,
        messageCount: true,
        summary: true,
        startedAt: true,
        endedAt: true,
        metadata: true,
        createdAt: true,
        updatedAt: true,
      },
    });

    if (!record) {
      throw new NotFoundException(`Conversation archive '${id}' not found`);
    }

    return record;
  }

  /**
   * Build text content for embedding from summary and messages:
   * summary first, then one "role: content" line per message.
   */
  private buildEmbeddingText(
    summary: string,
    messages: { role: string; content: string }[]
  ): string {
    const messageText = messages.map((m) => `${m.role}: ${m.content}`).join("\n");
    return `${summary}\n\n${messageText}`.trim();
  }

  /**
   * Generate embedding and store it on the conversation_archives row.
   * Runs as a detached follow-up to ingest(); see the .catch there.
   */
  private async storeEmbedding(id: string, text: string): Promise<void> {
    const vector = await this.embedding.generateEmbedding(text);
    const embeddingStr = `[${vector.join(",")}]`;

    await this.prisma.$executeRaw`
      UPDATE conversation_archives
      SET embedding = ${embeddingStr}::vector(${EMBEDDING_DIMENSION}),
          updated_at = NOW()
      WHERE id = ${id}::uuid
    `;

    this.logger.log(`Stored embedding for conversation ${id}`);
  }
}
|
||||||
3
apps/api/src/conversation-archive/dto/index.ts
Normal file
3
apps/api/src/conversation-archive/dto/index.ts
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
// Barrel exports for the conversation-archive DTOs.
export { IngestConversationDto, ConversationMessageDto } from "./ingest-conversation.dto";
export { SearchConversationDto } from "./search-conversation.dto";
export { ListConversationsDto } from "./list-conversations.dto";
|
||||||
@@ -0,0 +1,64 @@
|
|||||||
|
import {
|
||||||
|
IsString,
|
||||||
|
IsArray,
|
||||||
|
IsOptional,
|
||||||
|
IsDateString,
|
||||||
|
MinLength,
|
||||||
|
MaxLength,
|
||||||
|
IsObject,
|
||||||
|
ValidateNested,
|
||||||
|
ArrayMinSize,
|
||||||
|
} from "class-validator";
|
||||||
|
import { Type } from "class-transformer";
|
||||||
|
|
||||||
|
/**
 * Represents a single message in a conversation session.
 */
export class ConversationMessageDto {
  // Speaker role (free-form string; not restricted to a fixed set here).
  @IsString()
  role!: string;

  @IsString()
  @MinLength(1)
  content!: string;

  // Optional ISO-8601 timestamp of the message.
  @IsOptional()
  @IsDateString()
  timestamp?: string;
}

/**
 * DTO for ingesting a conversation session log.
 * (workspace scoping is supplied by the route, not by this payload)
 */
export class IngestConversationDto {
  // Caller-provided session identifier; uniqueness per workspace is
  // enforced by the service.
  @IsString()
  @MinLength(1)
  @MaxLength(500)
  sessionId!: string;

  @IsString()
  @MinLength(1)
  @MaxLength(500)
  agentId!: string;

  // At least one message; each element is validated as ConversationMessageDto.
  @IsArray()
  @ArrayMinSize(1)
  @ValidateNested({ each: true })
  @Type(() => ConversationMessageDto)
  messages!: ConversationMessageDto[];

  @IsString()
  @MinLength(1)
  summary!: string;

  // ISO-8601 session start time.
  @IsDateString()
  startedAt!: string;

  // ISO-8601 session end time; omitted for sessions without a recorded end.
  @IsOptional()
  @IsDateString()
  endedAt?: string;

  // Arbitrary caller-supplied metadata object.
  @IsOptional()
  @IsObject()
  metadata?: Record<string, unknown>;
}
|
||||||
@@ -0,0 +1,33 @@
|
|||||||
|
import { IsString, IsOptional, MaxLength, IsInt, Min, Max, IsDateString } from "class-validator";
|
||||||
|
import { Type } from "class-transformer";
|
||||||
|
|
||||||
|
/**
 * DTO for listing/filtering conversation archives.
 * All fields are optional; the service applies defaults (page 1, limit 20).
 */
export class ListConversationsDto {
  // Exact-match filter on the archiving agent.
  @IsOptional()
  @IsString()
  @MaxLength(500)
  agentId?: string;

  // Inclusive lower bound on startedAt (ISO-8601).
  @IsOptional()
  @IsDateString()
  startedAfter?: string;

  // Inclusive upper bound on startedAt (ISO-8601).
  @IsOptional()
  @IsDateString()
  startedBefore?: string;

  // 1-based page number; query-string values are coerced to Number.
  @IsOptional()
  @Type(() => Number)
  @IsInt()
  @Min(1)
  page?: number;

  // Page size, capped at 100.
  @IsOptional()
  @Type(() => Number)
  @IsInt()
  @Min(1)
  @Max(100)
  limit?: number;
}
|
||||||
@@ -0,0 +1,40 @@
|
|||||||
|
import {
|
||||||
|
IsString,
|
||||||
|
IsOptional,
|
||||||
|
MinLength,
|
||||||
|
MaxLength,
|
||||||
|
IsInt,
|
||||||
|
Min,
|
||||||
|
Max,
|
||||||
|
IsNumber,
|
||||||
|
} from "class-validator";
|
||||||
|
import { Type } from "class-transformer";
|
||||||
|
|
||||||
|
/**
 * DTO for semantic search across conversation archives.
 */
export class SearchConversationDto {
  // Natural-language query; embedded and compared by cosine similarity.
  @IsString()
  @MinLength(1)
  @MaxLength(1000)
  query!: string;

  // Optional exact-match filter on the archiving agent.
  @IsOptional()
  @IsString()
  @MaxLength(500)
  agentId?: string;

  // Maximum number of results, capped at 100.
  @IsOptional()
  @Type(() => Number)
  @IsInt()
  @Min(1)
  @Max(100)
  limit?: number;

  // Minimum cosine similarity in [0, 1]; service default applies when omitted.
  @IsOptional()
  @Type(() => Number)
  @IsNumber()
  @Min(0)
  @Max(1)
  similarityThreshold?: number;
}
|
||||||
@@ -245,7 +245,7 @@ describe("CoordinatorIntegrationController - Rate Limiting", () => {
|
|||||||
.set("X-API-Key", "test-coordinator-key");
|
.set("X-API-Key", "test-coordinator-key");
|
||||||
|
|
||||||
expect(response.status).toBe(HttpStatus.TOO_MANY_REQUESTS);
|
expect(response.status).toBe(HttpStatus.TOO_MANY_REQUESTS);
|
||||||
});
|
}, 30000);
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("Per-API-Key Rate Limiting", () => {
|
describe("Per-API-Key Rate Limiting", () => {
|
||||||
|
|||||||
10
apps/api/src/crypto/crypto.module.ts
Normal file
10
apps/api/src/crypto/crypto.module.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import { Module } from "@nestjs/common";
|
||||||
|
import { ConfigModule } from "@nestjs/config";
|
||||||
|
import { CryptoService } from "./crypto.service";
|
||||||
|
|
||||||
|
/**
 * Provides CryptoService (symmetric encryption / token generation) to the
 * rest of the application; ConfigModule supplies MOSAIC_SECRET_KEY.
 */
@Module({
  imports: [ConfigModule],
  providers: [CryptoService],
  exports: [CryptoService],
})
export class CryptoModule {}
|
||||||
71
apps/api/src/crypto/crypto.service.spec.ts
Normal file
71
apps/api/src/crypto/crypto.service.spec.ts
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
import { describe, it, expect, beforeEach } from "vitest";
|
||||||
|
import { ConfigService } from "@nestjs/config";
|
||||||
|
import { CryptoService } from "./crypto.service";
|
||||||
|
|
||||||
|
// Builds a ConfigService stub that only answers MOSAIC_SECRET_KEY lookups;
// every other key resolves to undefined.
function createConfigService(secret?: string): ConfigService {
  return {
    get: (key: string) => {
      if (key === "MOSAIC_SECRET_KEY") {
        return secret;
      }
      return undefined;
    },
  } as unknown as ConfigService;
}

describe("CryptoService", () => {
  let service: CryptoService;

  beforeEach(() => {
    // Secret must be at least 32 characters or the constructor throws.
    service = new CryptoService(createConfigService("this-is-a-test-secret-key-with-32+chars"));
  });

  it("encrypt -> decrypt roundtrip", () => {
    const plaintext = "my-secret-api-key";

    const encrypted = service.encrypt(plaintext);
    const decrypted = service.decrypt(encrypted);

    expect(encrypted.startsWith("enc:")).toBe(true);
    expect(decrypted).toBe(plaintext);
  });

  it("decrypt rejects tampered ciphertext", () => {
    const encrypted = service.encrypt("sensitive-token");
    const payload = encrypted.slice(4);
    const bytes = Buffer.from(payload, "base64");

    // Flip the last byte (inside the GCM auth tag) so verification fails.
    bytes[bytes.length - 1] = bytes[bytes.length - 1]! ^ 0xff;

    const tampered = `enc:${bytes.toString("base64")}`;

    expect(() => service.decrypt(tampered)).toThrow();
  });

  it("decrypt rejects non-encrypted string", () => {
    expect(() => service.decrypt("plain-text-value")).toThrow();
  });

  it("isEncrypted detects prefix correctly", () => {
    // Prefix match is case-sensitive.
    expect(service.isEncrypted("enc:abc")).toBe(true);
    expect(service.isEncrypted("ENC:abc")).toBe(false);
    expect(service.isEncrypted("plain-text")).toBe(false);
  });

  it("generateToken returns 64-char hex string", () => {
    const token = service.generateToken();

    expect(token).toMatch(/^[0-9a-f]{64}$/);
  });

  it("different plaintexts produce different ciphertexts (random IV)", () => {
    const encryptedA = service.encrypt("value-a");
    const encryptedB = service.encrypt("value-b");

    expect(encryptedA).not.toBe(encryptedB);
  });

  it("missing MOSAIC_SECRET_KEY throws on construction", () => {
    expect(() => new CryptoService(createConfigService(undefined))).toThrow();
  });
});
|
||||||
82
apps/api/src/crypto/crypto.service.ts
Normal file
82
apps/api/src/crypto/crypto.service.ts
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
import { Injectable } from "@nestjs/common";
|
||||||
|
import { ConfigService } from "@nestjs/config";
|
||||||
|
import { createCipheriv, createDecipheriv, hkdfSync, randomBytes } from "crypto";
|
||||||
|
|
||||||
|
// AES-256-GCM: authenticated encryption, so tampering is detected at decrypt.
const ALGORITHM = "aes-256-gcm";
// Marker prepended to every ciphertext so encrypted values are recognizable.
const ENCRYPTED_PREFIX = "enc:";
// 96-bit IV, the recommended size for GCM.
const IV_LENGTH = 12;
// GCM authentication tag size in bytes.
const AUTH_TAG_LENGTH = 16;
// 256-bit derived key for AES-256.
const DERIVED_KEY_LENGTH = 32;
// Fixed HKDF parameters; the salt carries a scheme version tag.
const HKDF_SALT = "mosaic.crypto.v1";
const HKDF_INFO = "mosaic-db-secret-encryption";
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class CryptoService {
|
||||||
|
private readonly key: Buffer;
|
||||||
|
|
||||||
|
constructor(private readonly config: ConfigService) {
|
||||||
|
const secret = this.config.get<string>("MOSAIC_SECRET_KEY");
|
||||||
|
|
||||||
|
if (!secret) {
|
||||||
|
throw new Error("MOSAIC_SECRET_KEY environment variable is required");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (secret.length < 32) {
|
||||||
|
throw new Error("MOSAIC_SECRET_KEY must be at least 32 characters");
|
||||||
|
}
|
||||||
|
|
||||||
|
this.key = Buffer.from(
|
||||||
|
hkdfSync(
|
||||||
|
"sha256",
|
||||||
|
Buffer.from(secret, "utf8"),
|
||||||
|
Buffer.from(HKDF_SALT, "utf8"),
|
||||||
|
Buffer.from(HKDF_INFO, "utf8"),
|
||||||
|
DERIVED_KEY_LENGTH
|
||||||
|
)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
encrypt(plaintext: string): string {
|
||||||
|
const iv = randomBytes(IV_LENGTH);
|
||||||
|
const cipher = createCipheriv(ALGORITHM, this.key, iv);
|
||||||
|
const ciphertext = Buffer.concat([cipher.update(plaintext, "utf8"), cipher.final()]);
|
||||||
|
const authTag = cipher.getAuthTag();
|
||||||
|
const payload = Buffer.concat([iv, ciphertext, authTag]).toString("base64");
|
||||||
|
|
||||||
|
return `${ENCRYPTED_PREFIX}${payload}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
decrypt(encrypted: string): string {
|
||||||
|
if (!this.isEncrypted(encrypted)) {
|
||||||
|
throw new Error("Value is not encrypted");
|
||||||
|
}
|
||||||
|
|
||||||
|
const payloadBase64 = encrypted.slice(ENCRYPTED_PREFIX.length);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const payload = Buffer.from(payloadBase64, "base64");
|
||||||
|
if (payload.length < IV_LENGTH + AUTH_TAG_LENGTH) {
|
||||||
|
throw new Error("Encrypted payload is too short");
|
||||||
|
}
|
||||||
|
|
||||||
|
const iv = payload.subarray(0, IV_LENGTH);
|
||||||
|
const authTag = payload.subarray(payload.length - AUTH_TAG_LENGTH);
|
||||||
|
const ciphertext = payload.subarray(IV_LENGTH, payload.length - AUTH_TAG_LENGTH);
|
||||||
|
|
||||||
|
const decipher = createDecipheriv(ALGORITHM, this.key, iv);
|
||||||
|
decipher.setAuthTag(authTag);
|
||||||
|
|
||||||
|
return Buffer.concat([decipher.update(ciphertext), decipher.final()]).toString("utf8");
|
||||||
|
} catch {
|
||||||
|
throw new Error("Failed to decrypt value");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
isEncrypted(value: string): boolean {
|
||||||
|
return value.startsWith(ENCRYPTED_PREFIX);
|
||||||
|
}
|
||||||
|
|
||||||
|
generateToken(): string {
|
||||||
|
return randomBytes(32).toString("hex");
|
||||||
|
}
|
||||||
|
}
|
||||||
33
apps/api/src/findings/dto/create-finding.dto.ts
Normal file
33
apps/api/src/findings/dto/create-finding.dto.ts
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
import { IsObject, IsOptional, IsString, IsUUID, MaxLength, MinLength } from "class-validator";
|
||||||
|
|
||||||
|
/**
 * DTO for creating a finding.
 */
export class CreateFindingDto {
  // Optional link to the task that produced this finding.
  @IsOptional()
  @IsUUID("4", { message: "taskId must be a valid UUID" })
  taskId?: string;

  // Identifier of the reporting agent (free-form string, not a UUID).
  @IsString({ message: "agentId must be a string" })
  @MinLength(1, { message: "agentId must not be empty" })
  @MaxLength(255, { message: "agentId must not exceed 255 characters" })
  agentId!: string;

  // Finding category (free-form string; no fixed enum enforced here).
  @IsString({ message: "type must be a string" })
  @MinLength(1, { message: "type must not be empty" })
  @MaxLength(100, { message: "type must not exceed 100 characters" })
  type!: string;

  @IsString({ message: "title must be a string" })
  @MinLength(1, { message: "title must not be empty" })
  @MaxLength(255, { message: "title must not exceed 255 characters" })
  title!: string;

  // Structured finding payload; shape is caller-defined.
  @IsObject({ message: "data must be an object" })
  data!: Record<string, unknown>;

  @IsString({ message: "summary must be a string" })
  @MinLength(1, { message: "summary must not be empty" })
  @MaxLength(20000, { message: "summary must not exceed 20000 characters" })
  summary!: string;
}
|
||||||
3
apps/api/src/findings/dto/index.ts
Normal file
3
apps/api/src/findings/dto/index.ts
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
// Barrel exports for the findings DTOs.
export { CreateFindingDto } from "./create-finding.dto";
export { QueryFindingsDto } from "./query-findings.dto";
export { SearchFindingsDto } from "./search-findings.dto";
|
||||||
32
apps/api/src/findings/dto/query-findings.dto.ts
Normal file
32
apps/api/src/findings/dto/query-findings.dto.ts
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
import { Type } from "class-transformer";
|
||||||
|
import { IsInt, IsOptional, IsString, IsUUID, Max, Min } from "class-validator";
|
||||||
|
|
||||||
|
/**
 * DTO for querying findings with filters and pagination.
 */
export class QueryFindingsDto {
  // 1-based page number; query-string values are coerced to Number.
  @IsOptional()
  @Type(() => Number)
  @IsInt({ message: "page must be an integer" })
  @Min(1, { message: "page must be at least 1" })
  page?: number;

  // Page size, capped at 100.
  @IsOptional()
  @Type(() => Number)
  @IsInt({ message: "limit must be an integer" })
  @Min(1, { message: "limit must be at least 1" })
  @Max(100, { message: "limit must not exceed 100" })
  limit?: number;

  // Exact-match filter on the reporting agent.
  @IsOptional()
  @IsString({ message: "agentId must be a string" })
  agentId?: string;

  // Exact-match filter on the finding category.
  @IsOptional()
  @IsString({ message: "type must be a string" })
  type?: string;

  // Filter findings linked to a specific task.
  @IsOptional()
  @IsUUID("4", { message: "taskId must be a valid UUID" })
  taskId?: string;
}
|
||||||
52
apps/api/src/findings/dto/search-findings.dto.ts
Normal file
52
apps/api/src/findings/dto/search-findings.dto.ts
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
import { Type } from "class-transformer";
|
||||||
|
import {
|
||||||
|
IsInt,
|
||||||
|
IsNumber,
|
||||||
|
IsOptional,
|
||||||
|
IsString,
|
||||||
|
IsUUID,
|
||||||
|
Max,
|
||||||
|
MaxLength,
|
||||||
|
Min,
|
||||||
|
} from "class-validator";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DTO for finding semantic similarity search
|
||||||
|
*/
|
||||||
|
export class SearchFindingsDto {
|
||||||
|
@IsString({ message: "query must be a string" })
|
||||||
|
@MaxLength(1000, { message: "query must not exceed 1000 characters" })
|
||||||
|
query!: string;
|
||||||
|
|
||||||
|
@IsOptional()
|
||||||
|
@Type(() => Number)
|
||||||
|
@IsInt({ message: "page must be an integer" })
|
||||||
|
@Min(1, { message: "page must be at least 1" })
|
||||||
|
page?: number;
|
||||||
|
|
||||||
|
@IsOptional()
|
||||||
|
@Type(() => Number)
|
||||||
|
@IsInt({ message: "limit must be an integer" })
|
||||||
|
@Min(1, { message: "limit must be at least 1" })
|
||||||
|
@Max(100, { message: "limit must not exceed 100" })
|
||||||
|
limit?: number;
|
||||||
|
|
||||||
|
@IsOptional()
|
||||||
|
@Type(() => Number)
|
||||||
|
@IsNumber({}, { message: "similarityThreshold must be a number" })
|
||||||
|
@Min(0, { message: "similarityThreshold must be at least 0" })
|
||||||
|
@Max(1, { message: "similarityThreshold must not exceed 1" })
|
||||||
|
similarityThreshold?: number;
|
||||||
|
|
||||||
|
@IsOptional()
|
||||||
|
@IsString({ message: "agentId must be a string" })
|
||||||
|
agentId?: string;
|
||||||
|
|
||||||
|
@IsOptional()
|
||||||
|
@IsString({ message: "type must be a string" })
|
||||||
|
type?: string;
|
||||||
|
|
||||||
|
@IsOptional()
|
||||||
|
@IsUUID("4", { message: "taskId must be a valid UUID" })
|
||||||
|
taskId?: string;
|
||||||
|
}
|
||||||
195
apps/api/src/findings/findings.controller.spec.ts
Normal file
195
apps/api/src/findings/findings.controller.spec.ts
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
import { FindingsController } from "./findings.controller";
|
||||||
|
import { FindingsService } from "./findings.service";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||||
|
import { CreateFindingDto, QueryFindingsDto, SearchFindingsDto } from "./dto";
|
||||||
|
|
||||||
|
describe("FindingsController", () => {
  let controller: FindingsController;
  let service: FindingsService;

  // Service is fully mocked: these tests only verify controller -> service wiring
  // (argument order and pass-through of return values), not business logic.
  const mockFindingsService = {
    create: vi.fn(),
    findAll: vi.fn(),
    findOne: vi.fn(),
    search: vi.fn(),
    remove: vi.fn(),
  };

  // All three class-level guards are stubbed to always allow the request,
  // so route handlers can be invoked directly.
  const mockAuthGuard = {
    canActivate: vi.fn(() => true),
  };

  const mockWorkspaceGuard = {
    canActivate: vi.fn(() => true),
  };

  const mockPermissionGuard = {
    canActivate: vi.fn(() => true),
  };

  const workspaceId = "550e8400-e29b-41d4-a716-446655440001";
  const findingId = "550e8400-e29b-41d4-a716-446655440002";

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      controllers: [FindingsController],
      providers: [
        {
          provide: FindingsService,
          useValue: mockFindingsService,
        },
      ],
    })
      .overrideGuard(AuthGuard)
      .useValue(mockAuthGuard)
      .overrideGuard(WorkspaceGuard)
      .useValue(mockWorkspaceGuard)
      .overrideGuard(PermissionGuard)
      .useValue(mockPermissionGuard)
      .compile();

    controller = module.get<FindingsController>(FindingsController);
    service = module.get<FindingsService>(FindingsService);

    vi.clearAllMocks();
  });

  it("should be defined", () => {
    expect(controller).toBeDefined();
  });

  describe("create", () => {
    it("should create a finding", async () => {
      const createDto: CreateFindingDto = {
        agentId: "research-agent",
        type: "security",
        title: "SQL injection risk",
        data: { severity: "high" },
        summary: "Potential SQL injection in search endpoint.",
      };

      const createdFinding = {
        id: findingId,
        workspaceId,
        taskId: null,
        ...createDto,
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      mockFindingsService.create.mockResolvedValue(createdFinding);

      const result = await controller.create(createDto, workspaceId);

      expect(result).toEqual(createdFinding);
      // workspaceId must be the first argument to the service.
      expect(service.create).toHaveBeenCalledWith(workspaceId, createDto);
    });
  });

  describe("findAll", () => {
    it("should return paginated findings", async () => {
      const query: QueryFindingsDto = {
        page: 1,
        limit: 10,
        type: "security",
      };

      const response = {
        data: [],
        meta: {
          total: 0,
          page: 1,
          limit: 10,
          totalPages: 0,
        },
      };

      mockFindingsService.findAll.mockResolvedValue(response);

      const result = await controller.findAll(query, workspaceId);

      expect(result).toEqual(response);
      expect(service.findAll).toHaveBeenCalledWith(workspaceId, query);
    });
  });

  describe("findOne", () => {
    it("should return a finding", async () => {
      const finding = {
        id: findingId,
        workspaceId,
        taskId: null,
        agentId: "research-agent",
        type: "security",
        title: "SQL injection risk",
        data: { severity: "high" },
        summary: "Potential SQL injection in search endpoint.",
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      mockFindingsService.findOne.mockResolvedValue(finding);

      const result = await controller.findOne(findingId, workspaceId);

      expect(result).toEqual(finding);
      // Note: unlike create/findAll/search, findOne takes (id, workspaceId).
      expect(service.findOne).toHaveBeenCalledWith(findingId, workspaceId);
    });
  });

  describe("search", () => {
    it("should perform semantic search", async () => {
      const searchDto: SearchFindingsDto = {
        query: "sql injection",
        limit: 5,
      };

      const response = {
        data: [
          {
            id: findingId,
            workspaceId,
            taskId: null,
            agentId: "research-agent",
            type: "security",
            title: "SQL injection risk",
            data: { severity: "high" },
            summary: "Potential SQL injection in search endpoint.",
            createdAt: new Date(),
            updatedAt: new Date(),
            score: 0.91,
          },
        ],
        meta: {
          total: 1,
          page: 1,
          limit: 5,
          totalPages: 1,
        },
        query: "sql injection",
      };

      mockFindingsService.search.mockResolvedValue(response);

      const result = await controller.search(searchDto, workspaceId);

      expect(result).toEqual(response);
      expect(service.search).toHaveBeenCalledWith(workspaceId, searchDto);
    });
  });

  describe("remove", () => {
    it("should delete a finding", async () => {
      const response = { message: "Finding deleted successfully" };
      mockFindingsService.remove.mockResolvedValue(response);

      const result = await controller.remove(findingId, workspaceId);

      expect(result).toEqual(response);
      expect(service.remove).toHaveBeenCalledWith(findingId, workspaceId);
    });
  });
});
|
||||||
81
apps/api/src/findings/findings.controller.ts
Normal file
81
apps/api/src/findings/findings.controller.ts
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
import { Body, Controller, Delete, Get, Param, Post, Query, UseGuards } from "@nestjs/common";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||||
|
import { Workspace, Permission, RequirePermission } from "../common/decorators";
|
||||||
|
import { CreateFindingDto, QueryFindingsDto, SearchFindingsDto } from "./dto";
|
||||||
|
import {
|
||||||
|
FindingsService,
|
||||||
|
FindingsSearchResponse,
|
||||||
|
PaginatedFindingsResponse,
|
||||||
|
} from "./findings.service";
|
||||||
|
|
||||||
|
/**
 * Controller for findings endpoints
 * All endpoints require authentication and workspace context
 * (class-level guards: AuthGuard, WorkspaceGuard, PermissionGuard).
 * The `@Workspace()` decorator injects the current workspace id into handlers;
 * per-route `@RequirePermission` sets the permission checked by PermissionGuard.
 */
@Controller("findings")
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
export class FindingsController {
  constructor(private readonly findingsService: FindingsService) {}

  /**
   * POST /api/findings
   * Create a new finding and embed its summary
   * Requires: MEMBER role or higher
   */
  @Post()
  @RequirePermission(Permission.WORKSPACE_MEMBER)
  async create(@Body() createFindingDto: CreateFindingDto, @Workspace() workspaceId: string) {
    return this.findingsService.create(workspaceId, createFindingDto);
  }

  /**
   * GET /api/findings
   * Get paginated findings with optional filters
   * Requires: Any workspace member
   */
  @Get()
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findAll(
    @Query() query: QueryFindingsDto,
    @Workspace() workspaceId: string
  ): Promise<PaginatedFindingsResponse> {
    return this.findingsService.findAll(workspaceId, query);
  }

  /**
   * GET /api/findings/:id
   * Get a single finding by ID
   * Requires: Any workspace member
   */
  @Get(":id")
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findOne(@Param("id") id: string, @Workspace() workspaceId: string) {
    return this.findingsService.findOne(id, workspaceId);
  }

  /**
   * POST /api/findings/search
   * Semantic search findings by vector similarity
   * (POST rather than GET so the query text travels in the body)
   * Requires: Any workspace member
   */
  @Post("search")
  @RequirePermission(Permission.WORKSPACE_ANY)
  async search(
    @Body() searchDto: SearchFindingsDto,
    @Workspace() workspaceId: string
  ): Promise<FindingsSearchResponse> {
    return this.findingsService.search(workspaceId, searchDto);
  }

  /**
   * DELETE /api/findings/:id
   * Delete a finding
   * Requires: ADMIN role or higher
   */
  @Delete(":id")
  @RequirePermission(Permission.WORKSPACE_ADMIN)
  async remove(@Param("id") id: string, @Workspace() workspaceId: string) {
    return this.findingsService.remove(id, workspaceId);
  }
}
|
||||||
226
apps/api/src/findings/findings.integration.spec.ts
Normal file
226
apps/api/src/findings/findings.integration.spec.ts
Normal file
@@ -0,0 +1,226 @@
|
|||||||
|
import { beforeAll, beforeEach, describe, expect, it, afterAll, vi } from "vitest";
|
||||||
|
import { randomUUID as uuid } from "crypto";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { BadRequestException, NotFoundException } from "@nestjs/common";
|
||||||
|
import { PrismaClient, Prisma } from "@prisma/client";
|
||||||
|
import { FindingsService } from "./findings.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { EmbeddingService } from "../knowledge/services/embedding.service";
|
||||||
|
|
||||||
|
// Opt-in gate: this suite talks to a real Postgres database, so it only runs
// when RUN_DB_TESTS=true AND a DATABASE_URL is provided; otherwise the whole
// describe block is skipped via describe.skip.
const shouldRunDbIntegrationTests =
  process.env.RUN_DB_TESTS === "true" && Boolean(process.env.DATABASE_URL);
const describeFn = shouldRunDbIntegrationTests ? describe : describe.skip;
|
||||||
|
|
||||||
|
const EMBEDDING_DIMENSION = 1536;
|
||||||
|
|
||||||
|
function vector(value: number): number[] {
|
||||||
|
return Array.from({ length: EMBEDDING_DIMENSION }, () => value);
|
||||||
|
}
|
||||||
|
|
||||||
|
function toVectorLiteral(input: number[]): string {
|
||||||
|
return `[${input.join(",")}]`;
|
||||||
|
}
|
||||||
|
|
||||||
|
describeFn("FindingsService Integration", () => {
  let moduleRef: TestingModule;
  let prisma: PrismaClient;
  let service: FindingsService;
  let workspaceId: string;
  let ownerId: string;
  // Guards the its: if beforeAll failed partway, tests bail out instead of
  // throwing on undefined fixtures.
  let setupComplete = false;

  // Only the embedding backend is mocked; Prisma runs against the real DB.
  const embeddingServiceMock = {
    isConfigured: vi.fn(),
    generateEmbedding: vi.fn(),
  };

  beforeAll(async () => {
    prisma = new PrismaClient();
    await prisma.$connect();

    // Fresh workspace + owner per run; Date.now() keeps names/emails unique.
    const workspace = await prisma.workspace.create({
      data: {
        name: `Findings Integration ${Date.now()}`,
        owner: {
          create: {
            email: `findings-integration-${Date.now()}@example.com`,
            name: "Findings Integration Owner",
          },
        },
      },
    });

    workspaceId = workspace.id;
    ownerId = workspace.ownerId;

    moduleRef = await Test.createTestingModule({
      providers: [
        FindingsService,
        {
          provide: PrismaService,
          useValue: prisma,
        },
        {
          provide: EmbeddingService,
          useValue: embeddingServiceMock,
        },
      ],
    }).compile();

    service = moduleRef.get<FindingsService>(FindingsService);
    setupComplete = true;
  });

  beforeEach(() => {
    vi.clearAllMocks();
    // Default: embeddings disabled; individual tests opt in.
    embeddingServiceMock.isConfigured.mockReturnValue(false);
  });

  afterAll(async () => {
    if (!prisma) {
      return;
    }

    // Delete in FK order: findings -> workspace -> owner.
    if (workspaceId) {
      await prisma.finding.deleteMany({ where: { workspaceId } });
      await prisma.workspace.deleteMany({ where: { id: workspaceId } });
    }
    if (ownerId) {
      await prisma.user.deleteMany({ where: { id: ownerId } });
    }

    if (moduleRef) {
      await moduleRef.close();
    }
    await prisma.$disconnect();
  });

  it("creates, lists, fetches, and deletes findings", async () => {
    if (!setupComplete) {
      return;
    }

    const created = await service.create(workspaceId, {
      agentId: "agent-findings-crud",
      type: "security",
      title: "Unescaped SQL fragment",
      data: { severity: "high" },
      summary: "Potential injection risk in dynamic query path.",
    });

    expect(created.id).toBeDefined();
    expect(created.workspaceId).toBe(workspaceId);
    expect(created.taskId).toBeNull();

    const listed = await service.findAll(workspaceId, {
      page: 1,
      limit: 10,
      agentId: "agent-findings-crud",
    });

    expect(listed.meta.total).toBeGreaterThanOrEqual(1);
    expect(listed.data.some((row) => row.id === created.id)).toBe(true);

    const found = await service.findOne(created.id, workspaceId);
    expect(found.id).toBe(created.id);
    expect(found.title).toBe("Unescaped SQL fragment");

    // Cross-workspace isolation: a random workspace id must not see the finding.
    await expect(service.findOne(created.id, uuid())).rejects.toThrow(NotFoundException);

    await expect(service.remove(created.id, workspaceId)).resolves.toEqual({
      message: "Finding deleted successfully",
    });

    await expect(service.findOne(created.id, workspaceId)).rejects.toThrow(NotFoundException);
  });

  it("rejects create when taskId does not exist in workspace", async () => {
    if (!setupComplete) {
      return;
    }

    await expect(
      service.create(workspaceId, {
        taskId: uuid(),
        agentId: "agent-findings-missing-task",
        type: "bug",
        title: "Invalid task id",
        data: { source: "integration-test" },
        summary: "Should fail when task relation is not found.",
      })
    ).rejects.toThrow(NotFoundException);
  });

  it("rejects vector search when embeddings are disabled", async () => {
    if (!setupComplete) {
      return;
    }

    embeddingServiceMock.isConfigured.mockReturnValue(false);

    await expect(
      service.search(workspaceId, {
        query: "security issue",
      })
    ).rejects.toThrow(BadRequestException);
  });

  it("searches findings by vector similarity with filters", async () => {
    if (!setupComplete) {
      return;
    }

    // "near" matches the mocked query embedding; "far" should fall below threshold
    // or be excluded by the agentId filter.
    const near = vector(0.01);
    const far = vector(0.9);

    const matchedFinding = await prisma.finding.create({
      data: {
        workspaceId,
        agentId: "agent-findings-search-a",
        type: "incident",
        title: "Authentication bypass",
        data: { score: 0.9 } as Prisma.InputJsonValue,
        summary: "Bypass risk found in login checks.",
      },
    });

    const otherFinding = await prisma.finding.create({
      data: {
        workspaceId,
        agentId: "agent-findings-search-b",
        type: "incident",
        title: "Retry timeout",
        data: { score: 0.2 } as Prisma.InputJsonValue,
        summary: "Timeout issue in downstream retries.",
      },
    });

    // Embeddings are written via raw SQL because the vector column is not part
    // of the Prisma model surface used by the service.
    await prisma.$executeRaw`
      UPDATE findings
      SET embedding = ${toVectorLiteral(near)}::vector(1536)
      WHERE id = ${matchedFinding.id}::uuid
    `;
    await prisma.$executeRaw`
      UPDATE findings
      SET embedding = ${toVectorLiteral(far)}::vector(1536)
      WHERE id = ${otherFinding.id}::uuid
    `;

    embeddingServiceMock.isConfigured.mockReturnValue(true);
    embeddingServiceMock.generateEmbedding.mockResolvedValue(near);

    const result = await service.search(workspaceId, {
      query: "authentication bypass risk",
      agentId: "agent-findings-search-a",
      limit: 10,
      similarityThreshold: 0,
    });

    expect(result.query).toBe("authentication bypass risk");
    expect(result.meta.total).toBe(1);
    expect(result.data).toHaveLength(1);
    expect(result.data[0]?.id).toBe(matchedFinding.id);
    expect(result.data[0]?.agentId).toBe("agent-findings-search-a");
    expect(result.data.find((row) => row.id === otherFinding.id)).toBeUndefined();
  });
});
|
||||||
14
apps/api/src/findings/findings.module.ts
Normal file
14
apps/api/src/findings/findings.module.ts
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
import { Module } from "@nestjs/common";
|
||||||
|
import { PrismaModule } from "../prisma/prisma.module";
|
||||||
|
import { AuthModule } from "../auth/auth.module";
|
||||||
|
import { KnowledgeModule } from "../knowledge/knowledge.module";
|
||||||
|
import { FindingsController } from "./findings.controller";
|
||||||
|
import { FindingsService } from "./findings.service";
|
||||||
|
|
||||||
|
/**
 * Findings feature module.
 * - PrismaModule: database access for the service.
 * - AuthModule: dependencies of the controller's AuthGuard.
 * - KnowledgeModule: provides EmbeddingService, used to embed finding summaries.
 * FindingsService is exported so other modules can record findings directly.
 */
@Module({
  imports: [PrismaModule, AuthModule, KnowledgeModule],
  controllers: [FindingsController],
  providers: [FindingsService],
  exports: [FindingsService],
})
export class FindingsModule {}
|
||||||
300
apps/api/src/findings/findings.service.spec.ts
Normal file
300
apps/api/src/findings/findings.service.spec.ts
Normal file
@@ -0,0 +1,300 @@
|
|||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
import { BadRequestException, NotFoundException } from "@nestjs/common";
|
||||||
|
import { FindingsService } from "./findings.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { EmbeddingService } from "../knowledge/services/embedding.service";
|
||||||
|
|
||||||
|
describe("FindingsService", () => {
  let service: FindingsService;
  let prisma: PrismaService;
  let embeddingService: EmbeddingService;

  const mockWorkspaceId = "550e8400-e29b-41d4-a716-446655440001";
  const mockFindingId = "550e8400-e29b-41d4-a716-446655440002";
  const mockTaskId = "550e8400-e29b-41d4-a716-446655440003";

  // Prisma is mocked at the delegate level; $queryRaw/$executeRaw cover the
  // raw-SQL vector paths (embedding writes and similarity search).
  const mockPrismaService = {
    finding: {
      create: vi.fn(),
      findMany: vi.fn(),
      findUnique: vi.fn(),
      count: vi.fn(),
      delete: vi.fn(),
    },
    agentTask: {
      findUnique: vi.fn(),
    },
    $queryRaw: vi.fn(),
    $executeRaw: vi.fn(),
  };

  const mockEmbeddingService = {
    isConfigured: vi.fn(),
    generateEmbedding: vi.fn(),
  };

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        FindingsService,
        {
          provide: PrismaService,
          useValue: mockPrismaService,
        },
        {
          provide: EmbeddingService,
          useValue: mockEmbeddingService,
        },
      ],
    }).compile();

    service = module.get<FindingsService>(FindingsService);
    prisma = module.get<PrismaService>(PrismaService);
    embeddingService = module.get<EmbeddingService>(EmbeddingService);

    vi.clearAllMocks();
  });

  it("should be defined", () => {
    expect(service).toBeDefined();
  });

  describe("create", () => {
    it("should create a finding and store embedding when configured", async () => {
      const createDto = {
        taskId: mockTaskId,
        agentId: "research-agent",
        type: "security",
        title: "SQL injection risk",
        data: { severity: "high" },
        summary: "Potential SQL injection in search endpoint.",
      };

      const createdFinding = {
        id: mockFindingId,
        workspaceId: mockWorkspaceId,
        ...createDto,
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      // taskId present -> service validates the task exists in this workspace.
      mockPrismaService.agentTask.findUnique.mockResolvedValue({
        id: mockTaskId,
        workspaceId: mockWorkspaceId,
      });
      mockPrismaService.finding.create.mockResolvedValue(createdFinding);
      mockPrismaService.finding.findUnique.mockResolvedValue(createdFinding);
      mockEmbeddingService.isConfigured.mockReturnValue(true);
      mockEmbeddingService.generateEmbedding.mockResolvedValue([0.1, 0.2, 0.3]);
      mockPrismaService.$executeRaw.mockResolvedValue(1);

      const result = await service.create(mockWorkspaceId, createDto);

      expect(result).toEqual(createdFinding);
      expect(prisma.finding.create).toHaveBeenCalledWith({
        data: expect.objectContaining({
          workspaceId: mockWorkspaceId,
          taskId: mockTaskId,
          agentId: "research-agent",
          type: "security",
          title: "SQL injection risk",
        }),
        select: expect.any(Object),
      });
      // Embedding source text is the finding summary; the vector is written
      // via raw SQL ($executeRaw), not through the Prisma model API.
      expect(embeddingService.generateEmbedding).toHaveBeenCalledWith(createDto.summary);
      expect(prisma.$executeRaw).toHaveBeenCalled();
    });

    it("should create a finding without embedding when not configured", async () => {
      const createDto = {
        agentId: "research-agent",
        type: "security",
        title: "SQL injection risk",
        data: { severity: "high" },
        summary: "Potential SQL injection in search endpoint.",
      };

      const createdFinding = {
        id: mockFindingId,
        workspaceId: mockWorkspaceId,
        taskId: null,
        ...createDto,
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      mockPrismaService.finding.create.mockResolvedValue(createdFinding);
      mockEmbeddingService.isConfigured.mockReturnValue(false);

      const result = await service.create(mockWorkspaceId, createDto);

      expect(result).toEqual(createdFinding);
      // No embedding backend -> create still succeeds, but no vector work happens.
      expect(embeddingService.generateEmbedding).not.toHaveBeenCalled();
      expect(prisma.$executeRaw).not.toHaveBeenCalled();
    });
  });

  describe("findAll", () => {
    it("should return paginated findings with filters", async () => {
      const findings = [
        {
          id: mockFindingId,
          workspaceId: mockWorkspaceId,
          taskId: null,
          agentId: "research-agent",
          type: "security",
          title: "SQL injection risk",
          data: { severity: "high" },
          summary: "Potential SQL injection in search endpoint.",
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      ];

      mockPrismaService.finding.findMany.mockResolvedValue(findings);
      mockPrismaService.finding.count.mockResolvedValue(1);

      const result = await service.findAll(mockWorkspaceId, {
        page: 1,
        limit: 10,
        type: "security",
        agentId: "research-agent",
      });

      expect(result).toEqual({
        data: findings,
        meta: {
          total: 1,
          page: 1,
          limit: 10,
          totalPages: 1,
        },
      });
      // workspaceId is always part of the where clause alongside the filters.
      expect(prisma.finding.findMany).toHaveBeenCalledWith(
        expect.objectContaining({
          where: {
            workspaceId: mockWorkspaceId,
            type: "security",
            agentId: "research-agent",
          },
        })
      );
    });
  });

  describe("findOne", () => {
    it("should return a finding", async () => {
      const finding = {
        id: mockFindingId,
        workspaceId: mockWorkspaceId,
        taskId: null,
        agentId: "research-agent",
        type: "security",
        title: "SQL injection risk",
        data: { severity: "high" },
        summary: "Potential SQL injection in search endpoint.",
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      mockPrismaService.finding.findUnique.mockResolvedValue(finding);

      const result = await service.findOne(mockFindingId, mockWorkspaceId);

      expect(result).toEqual(finding);
      expect(prisma.finding.findUnique).toHaveBeenCalledWith({
        where: {
          id: mockFindingId,
          workspaceId: mockWorkspaceId,
        },
        select: expect.any(Object),
      });
    });

    it("should throw when finding does not exist", async () => {
      mockPrismaService.finding.findUnique.mockResolvedValue(null);

      await expect(service.findOne(mockFindingId, mockWorkspaceId)).rejects.toThrow(
        NotFoundException
      );
    });
  });

  describe("search", () => {
    it("should throw BadRequestException when embeddings are not configured", async () => {
      mockEmbeddingService.isConfigured.mockReturnValue(false);

      await expect(
        service.search(mockWorkspaceId, {
          query: "sql injection",
        })
      ).rejects.toThrow(BadRequestException);
    });

    it("should return similarity-ranked search results", async () => {
      mockEmbeddingService.isConfigured.mockReturnValue(true);
      mockEmbeddingService.generateEmbedding.mockResolvedValue([0.1, 0.2, 0.3]);
      // First raw query returns rows (snake_case columns, as from raw SQL);
      // second returns the total count as a bigint, mirroring Postgres COUNT().
      mockPrismaService.$queryRaw
        .mockResolvedValueOnce([
          {
            id: mockFindingId,
            workspace_id: mockWorkspaceId,
            task_id: null,
            agent_id: "research-agent",
            type: "security",
            title: "SQL injection risk",
            data: { severity: "high" },
            summary: "Potential SQL injection in search endpoint.",
            created_at: new Date(),
            updated_at: new Date(),
            score: 0.91,
          },
        ])
        .mockResolvedValueOnce([{ count: BigInt(1) }]);

      const result = await service.search(mockWorkspaceId, {
        query: "sql injection",
        page: 1,
        limit: 5,
        similarityThreshold: 0.5,
      });

      expect(result.query).toBe("sql injection");
      expect(result.data).toHaveLength(1);
      expect(result.data[0].score).toBe(0.91);
      expect(result.meta.total).toBe(1);
      expect(prisma.$queryRaw).toHaveBeenCalledTimes(2);
    });
  });

  describe("remove", () => {
    it("should delete a finding", async () => {
      mockPrismaService.finding.findUnique.mockResolvedValue({
        id: mockFindingId,
        workspaceId: mockWorkspaceId,
      });
      mockPrismaService.finding.delete.mockResolvedValue({
        id: mockFindingId,
      });

      const result = await service.remove(mockFindingId, mockWorkspaceId);

      expect(result).toEqual({ message: "Finding deleted successfully" });
      expect(prisma.finding.delete).toHaveBeenCalledWith({
        where: {
          id: mockFindingId,
          workspaceId: mockWorkspaceId,
        },
      });
    });

    it("should throw when finding does not exist", async () => {
      mockPrismaService.finding.findUnique.mockResolvedValue(null);

      await expect(service.remove(mockFindingId, mockWorkspaceId)).rejects.toThrow(
        NotFoundException
      );
    });
  });
});
|
||||||
337
apps/api/src/findings/findings.service.ts
Normal file
337
apps/api/src/findings/findings.service.ts
Normal file
@@ -0,0 +1,337 @@
|
|||||||
|
import { BadRequestException, Injectable, Logger, NotFoundException } from "@nestjs/common";
import { Prisma } from "@prisma/client";
import { PrismaService } from "../prisma/prisma.service";
import { EmbeddingService } from "../knowledge/services/embedding.service";
import type { CreateFindingDto, QueryFindingsDto, SearchFindingsDto } from "./dto";

// Column selection shared by every finding query. Note it does not include the
// `embedding` vector column (written/read only via raw SQL elsewhere), so large
// vectors are never part of API responses.
const findingSelect = {
  id: true,
  workspaceId: true,
  taskId: true,
  agentId: true,
  type: true,
  title: true,
  data: true,
  summary: true,
  createdAt: true,
  updatedAt: true,
} satisfies Prisma.FindingSelect;

// Row shape produced by any Prisma query that uses `findingSelect`.
type FindingRecord = Prisma.FindingGetPayload<{ select: typeof findingSelect }>;

// Raw row shape returned by the similarity-search SQL query: snake_case column
// names (unmapped by Prisma) plus the computed similarity `score`.
interface RawFindingSearchResult {
  id: string;
  workspace_id: string;
  task_id: string | null;
  agent_id: string;
  type: string;
  title: string;
  data: Prisma.JsonValue;
  summary: string;
  created_at: Date;
  updated_at: Date;
  score: number;
}

// A finding enriched with its similarity score for search responses.
export interface FindingSearchResult extends FindingRecord {
  score: number;
}

// Standard pagination envelope shared by list and search responses.
interface PaginatedMeta {
  total: number;
  page: number;
  limit: number;
  totalPages: number;
}

// Response body of GET /api/findings.
export interface PaginatedFindingsResponse {
  data: FindingRecord[];
  meta: PaginatedMeta;
}

// Response body of POST /api/findings/search; echoes the query text and the
// similarity threshold that was applied (request value or service default).
export interface FindingsSearchResponse {
  data: FindingSearchResult[];
  meta: PaginatedMeta;
  query: string;
  similarityThreshold: number;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Service for managing structured findings with vector search support
|
||||||
|
*/
|
||||||
|
@Injectable()
|
||||||
|
export class FindingsService {
|
||||||
|
private readonly logger = new Logger(FindingsService.name);
|
||||||
|
private readonly defaultSimilarityThreshold: number;
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
private readonly prisma: PrismaService,
|
||||||
|
private readonly embeddingService: EmbeddingService
|
||||||
|
) {
|
||||||
|
const parsedThreshold = Number.parseFloat(process.env.FINDINGS_SIMILARITY_THRESHOLD ?? "0.5");
|
||||||
|
|
||||||
|
this.defaultSimilarityThreshold =
|
||||||
|
Number.isFinite(parsedThreshold) && parsedThreshold >= 0 && parsedThreshold <= 1
|
||||||
|
? parsedThreshold
|
||||||
|
: 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a finding and generate its embedding from the summary when available
|
||||||
|
*/
|
||||||
|
async create(workspaceId: string, createFindingDto: CreateFindingDto): Promise<FindingRecord> {
|
||||||
|
if (createFindingDto.taskId) {
|
||||||
|
const task = await this.prisma.agentTask.findUnique({
|
||||||
|
where: {
|
||||||
|
id: createFindingDto.taskId,
|
||||||
|
workspaceId,
|
||||||
|
},
|
||||||
|
select: { id: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!task) {
|
||||||
|
throw new NotFoundException(`Agent task with ID ${createFindingDto.taskId} not found`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const createInput: Prisma.FindingUncheckedCreateInput = {
|
||||||
|
workspaceId,
|
||||||
|
agentId: createFindingDto.agentId,
|
||||||
|
type: createFindingDto.type,
|
||||||
|
title: createFindingDto.title,
|
||||||
|
data: createFindingDto.data as Prisma.InputJsonValue,
|
||||||
|
summary: createFindingDto.summary,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (createFindingDto.taskId) {
|
||||||
|
createInput.taskId = createFindingDto.taskId;
|
||||||
|
}
|
||||||
|
|
||||||
|
const finding = await this.prisma.finding.create({
|
||||||
|
data: createInput,
|
||||||
|
select: findingSelect,
|
||||||
|
});
|
||||||
|
|
||||||
|
await this.generateAndStoreEmbedding(finding.id, workspaceId, finding.summary);
|
||||||
|
|
||||||
|
if (this.embeddingService.isConfigured()) {
|
||||||
|
return this.findOne(finding.id, workspaceId);
|
||||||
|
}
|
||||||
|
|
||||||
|
return finding;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get paginated findings with optional filters
|
||||||
|
*/
|
||||||
|
async findAll(workspaceId: string, query: QueryFindingsDto): Promise<PaginatedFindingsResponse> {
|
||||||
|
const page = query.page ?? 1;
|
||||||
|
const limit = query.limit ?? 50;
|
||||||
|
const skip = (page - 1) * limit;
|
||||||
|
|
||||||
|
const where: Prisma.FindingWhereInput = {
|
||||||
|
workspaceId,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (query.agentId) {
|
||||||
|
where.agentId = query.agentId;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (query.type) {
|
||||||
|
where.type = query.type;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (query.taskId) {
|
||||||
|
where.taskId = query.taskId;
|
||||||
|
}
|
||||||
|
|
||||||
|
const [data, total] = await Promise.all([
|
||||||
|
this.prisma.finding.findMany({
|
||||||
|
where,
|
||||||
|
select: findingSelect,
|
||||||
|
orderBy: {
|
||||||
|
createdAt: "desc",
|
||||||
|
},
|
||||||
|
skip,
|
||||||
|
take: limit,
|
||||||
|
}),
|
||||||
|
this.prisma.finding.count({ where }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
data,
|
||||||
|
meta: {
|
||||||
|
total,
|
||||||
|
page,
|
||||||
|
limit,
|
||||||
|
totalPages: Math.ceil(total / limit),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a single finding by ID
|
||||||
|
*/
|
||||||
|
async findOne(id: string, workspaceId: string): Promise<FindingRecord> {
|
||||||
|
const finding = await this.prisma.finding.findUnique({
|
||||||
|
where: {
|
||||||
|
id,
|
||||||
|
workspaceId,
|
||||||
|
},
|
||||||
|
select: findingSelect,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!finding) {
|
||||||
|
throw new NotFoundException(`Finding with ID ${id} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return finding;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Semantic search findings using vector similarity
|
||||||
|
*/
|
||||||
|
async search(workspaceId: string, searchDto: SearchFindingsDto): Promise<FindingsSearchResponse> {
|
||||||
|
if (!this.embeddingService.isConfigured()) {
|
||||||
|
throw new BadRequestException(
|
||||||
|
"Finding vector search requires OPENAI_API_KEY to be configured"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const page = searchDto.page ?? 1;
|
||||||
|
const limit = searchDto.limit ?? 20;
|
||||||
|
const offset = (page - 1) * limit;
|
||||||
|
const similarityThreshold = searchDto.similarityThreshold ?? this.defaultSimilarityThreshold;
|
||||||
|
const distanceThreshold = 1 - similarityThreshold;
|
||||||
|
|
||||||
|
const queryEmbedding = await this.embeddingService.generateEmbedding(searchDto.query);
|
||||||
|
const embeddingString = `[${queryEmbedding.join(",")}]`;
|
||||||
|
|
||||||
|
const agentFilter = searchDto.agentId
|
||||||
|
? Prisma.sql`AND f.agent_id = ${searchDto.agentId}`
|
||||||
|
: Prisma.sql``;
|
||||||
|
const typeFilter = searchDto.type ? Prisma.sql`AND f.type = ${searchDto.type}` : Prisma.sql``;
|
||||||
|
const taskFilter = searchDto.taskId
|
||||||
|
? Prisma.sql`AND f.task_id = ${searchDto.taskId}::uuid`
|
||||||
|
: Prisma.sql``;
|
||||||
|
|
||||||
|
const searchResults = await this.prisma.$queryRaw<RawFindingSearchResult[]>`
|
||||||
|
SELECT
|
||||||
|
f.id,
|
||||||
|
f.workspace_id,
|
||||||
|
f.task_id,
|
||||||
|
f.agent_id,
|
||||||
|
f.type,
|
||||||
|
f.title,
|
||||||
|
f.data,
|
||||||
|
f.summary,
|
||||||
|
f.created_at,
|
||||||
|
f.updated_at,
|
||||||
|
(1 - (f.embedding <=> ${embeddingString}::vector)) AS score
|
||||||
|
FROM findings f
|
||||||
|
WHERE f.workspace_id = ${workspaceId}::uuid
|
||||||
|
AND f.embedding IS NOT NULL
|
||||||
|
${agentFilter}
|
||||||
|
${typeFilter}
|
||||||
|
${taskFilter}
|
||||||
|
AND (f.embedding <=> ${embeddingString}::vector) <= ${distanceThreshold}
|
||||||
|
ORDER BY f.embedding <=> ${embeddingString}::vector
|
||||||
|
LIMIT ${limit}
|
||||||
|
OFFSET ${offset}
|
||||||
|
`;
|
||||||
|
|
||||||
|
const countResult = await this.prisma.$queryRaw<[{ count: bigint }]>`
|
||||||
|
SELECT COUNT(*) as count
|
||||||
|
FROM findings f
|
||||||
|
WHERE f.workspace_id = ${workspaceId}::uuid
|
||||||
|
AND f.embedding IS NOT NULL
|
||||||
|
${agentFilter}
|
||||||
|
${typeFilter}
|
||||||
|
${taskFilter}
|
||||||
|
AND (f.embedding <=> ${embeddingString}::vector) <= ${distanceThreshold}
|
||||||
|
`;
|
||||||
|
|
||||||
|
const total = Number(countResult[0].count);
|
||||||
|
|
||||||
|
const data: FindingSearchResult[] = searchResults.map((row) => ({
|
||||||
|
id: row.id,
|
||||||
|
workspaceId: row.workspace_id,
|
||||||
|
taskId: row.task_id,
|
||||||
|
agentId: row.agent_id,
|
||||||
|
type: row.type,
|
||||||
|
title: row.title,
|
||||||
|
data: row.data,
|
||||||
|
summary: row.summary,
|
||||||
|
createdAt: row.created_at,
|
||||||
|
updatedAt: row.updated_at,
|
||||||
|
score: row.score,
|
||||||
|
}));
|
||||||
|
|
||||||
|
return {
|
||||||
|
data,
|
||||||
|
meta: {
|
||||||
|
total,
|
||||||
|
page,
|
||||||
|
limit,
|
||||||
|
totalPages: Math.ceil(total / limit),
|
||||||
|
},
|
||||||
|
query: searchDto.query,
|
||||||
|
similarityThreshold,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete a finding
|
||||||
|
*/
|
||||||
|
async remove(id: string, workspaceId: string): Promise<{ message: string }> {
|
||||||
|
const existingFinding = await this.prisma.finding.findUnique({
|
||||||
|
where: {
|
||||||
|
id,
|
||||||
|
workspaceId,
|
||||||
|
},
|
||||||
|
select: { id: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!existingFinding) {
|
||||||
|
throw new NotFoundException(`Finding with ID ${id} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.prisma.finding.delete({
|
||||||
|
where: {
|
||||||
|
id,
|
||||||
|
workspaceId,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return { message: "Finding deleted successfully" };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate and persist embedding for a finding summary
|
||||||
|
*/
|
||||||
|
private async generateAndStoreEmbedding(
|
||||||
|
findingId: string,
|
||||||
|
workspaceId: string,
|
||||||
|
summary: string
|
||||||
|
): Promise<void> {
|
||||||
|
if (!this.embeddingService.isConfigured()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const embedding = await this.embeddingService.generateEmbedding(summary);
|
||||||
|
const embeddingString = `[${embedding.join(",")}]`;
|
||||||
|
|
||||||
|
await this.prisma.$executeRaw`
|
||||||
|
UPDATE findings
|
||||||
|
SET embedding = ${embeddingString}::vector,
|
||||||
|
updated_at = NOW()
|
||||||
|
WHERE id = ${findingId}::uuid
|
||||||
|
AND workspace_id = ${workspaceId}::uuid
|
||||||
|
`;
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
|
this.logger.warn(`Failed to generate embedding for finding ${findingId}: ${message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
89
apps/api/src/import/dto/import-project.dto.ts
Normal file
89
apps/api/src/import/dto/import-project.dto.ts
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
import { IsNumber, IsOptional, IsString, MaxLength, MinLength } from "class-validator";
|
||||||
|
|
||||||
|
/**
 * DTO for a single jarvis-brain project record.
 * This matches the project object shape consumed by scripts/migrate-brain.ts.
 *
 * All fields except `id` and `name` are optional and nullable because
 * jarvis-brain records are loosely structured.
 */
export class ImportProjectDto {
  /** jarvis-brain project identifier (used as the dedupe key on import). */
  @IsString({ message: "id must be a string" })
  @MinLength(1, { message: "id must not be empty" })
  @MaxLength(255, { message: "id must not exceed 255 characters" })
  id!: string;

  /** Human-readable project name. */
  @IsString({ message: "name must be a string" })
  @MinLength(1, { message: "name must not be empty" })
  @MaxLength(255, { message: "name must not exceed 255 characters" })
  name!: string;

  @IsOptional()
  @IsString({ message: "description must be a string" })
  description?: string | null;

  /** jarvis-brain domain name (free text, not an ID). */
  @IsOptional()
  @IsString({ message: "domain must be a string" })
  domain?: string | null;

  /** jarvis-brain status string; mapped to ProjectStatus during import. */
  @IsOptional()
  @IsString({ message: "status must be a string" })
  status?: string | null;

  // jarvis-brain project priority can be a number, string, or null
  @IsOptional()
  priority?: number | string | null;

  @IsOptional()
  @IsNumber({}, { message: "progress must be a number" })
  progress?: number | null;

  @IsOptional()
  @IsString({ message: "repo must be a string" })
  repo?: string | null;

  @IsOptional()
  @IsString({ message: "branch must be a string" })
  branch?: string | null;

  @IsOptional()
  @IsString({ message: "current_milestone must be a string" })
  current_milestone?: string | null;

  @IsOptional()
  @IsString({ message: "next_milestone must be a string" })
  next_milestone?: string | null;

  @IsOptional()
  @IsString({ message: "blocker must be a string" })
  blocker?: string | null;

  @IsOptional()
  @IsString({ message: "owner must be a string" })
  owner?: string | null;

  @IsOptional()
  @IsString({ message: "docs_path must be a string" })
  docs_path?: string | null;

  /** Creation timestamp as a string (format set by jarvis-brain). */
  @IsOptional()
  @IsString({ message: "created must be a string" })
  created?: string | null;

  /** Last-update timestamp as a string (format set by jarvis-brain). */
  @IsOptional()
  @IsString({ message: "updated must be a string" })
  updated?: string | null;

  @IsOptional()
  @IsString({ message: "target_date must be a string" })
  target_date?: string | null;

  @IsOptional()
  @IsString({ message: "notes must be a string" })
  notes?: string | null;

  @IsOptional()
  @IsString({ message: "notes_nontechnical must be a string" })
  notes_nontechnical?: string | null;

  /** Optional parent project reference (jarvis-brain ID). */
  @IsOptional()
  @IsString({ message: "parent must be a string" })
  parent?: string | null;
}
|
||||||
5
apps/api/src/import/dto/import-response.dto.ts
Normal file
5
apps/api/src/import/dto/import-response.dto.ts
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
/**
 * Summary returned by every import endpoint.
 */
export interface ImportResponseDto {
  /** Number of records newly created. */
  imported: number;
  /** Number of records not created (already present or failed). */
  skipped: number;
  /** Human-readable problems encountered; import continues past them. */
  errors: string[];
}
|
||||||
76
apps/api/src/import/dto/import-task.dto.ts
Normal file
76
apps/api/src/import/dto/import-task.dto.ts
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
import { IsArray, IsNumber, IsOptional, IsString, MaxLength, MinLength } from "class-validator";
|
||||||
|
|
||||||
|
/**
 * DTO for a single jarvis-brain task record.
 * This matches the task object shape consumed by scripts/migrate-brain.ts.
 *
 * All fields except `id` and `title` are optional and nullable because
 * jarvis-brain records are loosely structured.
 */
export class ImportTaskDto {
  /** jarvis-brain task identifier (used as the dedupe key on import). */
  @IsString({ message: "id must be a string" })
  @MinLength(1, { message: "id must not be empty" })
  @MaxLength(255, { message: "id must not exceed 255 characters" })
  id!: string;

  /** Human-readable task title. */
  @IsString({ message: "title must be a string" })
  @MinLength(1, { message: "title must not be empty" })
  @MaxLength(255, { message: "title must not exceed 255 characters" })
  title!: string;

  /** jarvis-brain domain name (free text, not an ID). */
  @IsOptional()
  @IsString({ message: "domain must be a string" })
  domain?: string | null;

  /** jarvis-brain project ID this task belongs to, if any. */
  @IsOptional()
  @IsString({ message: "project must be a string" })
  project?: string | null;

  /** IDs of related tasks. */
  @IsOptional()
  @IsArray({ message: "related must be an array" })
  @IsString({ each: true, message: "related items must be strings" })
  related?: string[];

  /** jarvis-brain priority string; mapped to TaskPriority during import. */
  @IsOptional()
  @IsString({ message: "priority must be a string" })
  priority?: string | null;

  /** jarvis-brain status string; mapped to TaskStatus during import. */
  @IsOptional()
  @IsString({ message: "status must be a string" })
  status?: string | null;

  @IsOptional()
  @IsNumber({}, { message: "progress must be a number" })
  progress?: number | null;

  /** Due date as a string (format set by jarvis-brain). */
  @IsOptional()
  @IsString({ message: "due must be a string" })
  due?: string | null;

  /** IDs of tasks this task blocks. */
  @IsOptional()
  @IsArray({ message: "blocks must be an array" })
  @IsString({ each: true, message: "blocks items must be strings" })
  blocks?: string[];

  /** IDs of tasks blocking this task. */
  @IsOptional()
  @IsArray({ message: "blocked_by must be an array" })
  @IsString({ each: true, message: "blocked_by items must be strings" })
  blocked_by?: string[];

  @IsOptional()
  @IsString({ message: "assignee must be a string" })
  assignee?: string | null;

  /** Creation timestamp as a string (format set by jarvis-brain). */
  @IsOptional()
  @IsString({ message: "created must be a string" })
  created?: string | null;

  /** Last-update timestamp as a string (format set by jarvis-brain). */
  @IsOptional()
  @IsString({ message: "updated must be a string" })
  updated?: string | null;

  @IsOptional()
  @IsString({ message: "notes must be a string" })
  notes?: string | null;

  @IsOptional()
  @IsString({ message: "notes_nontechnical must be a string" })
  notes_nontechnical?: string | null;
}
|
||||||
3
apps/api/src/import/dto/index.ts
Normal file
3
apps/api/src/import/dto/index.ts
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
export { ImportTaskDto } from "./import-task.dto";
|
||||||
|
export { ImportProjectDto } from "./import-project.dto";
|
||||||
|
export type { ImportResponseDto } from "./import-response.dto";
|
||||||
33
apps/api/src/import/import.controller.ts
Normal file
33
apps/api/src/import/import.controller.ts
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
import { Body, Controller, ParseArrayPipe, Post, UseGuards } from "@nestjs/common";
|
||||||
|
import type { AuthUser } from "@mosaic/shared";
|
||||||
|
import { CurrentUser } from "../auth/decorators/current-user.decorator";
|
||||||
|
import { AdminGuard } from "../auth/guards/admin.guard";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { Workspace } from "../common/decorators";
|
||||||
|
import { WorkspaceGuard } from "../common/guards";
|
||||||
|
import { ImportProjectDto, type ImportResponseDto, ImportTaskDto } from "./dto";
|
||||||
|
import { ImportService } from "./import.service";
|
||||||
|
|
||||||
|
@Controller("import")
|
||||||
|
@UseGuards(AuthGuard, WorkspaceGuard, AdminGuard)
|
||||||
|
export class ImportController {
|
||||||
|
constructor(private readonly importService: ImportService) {}
|
||||||
|
|
||||||
|
@Post("tasks")
|
||||||
|
async importTasks(
|
||||||
|
@Body(new ParseArrayPipe({ items: ImportTaskDto })) taskPayload: ImportTaskDto[],
|
||||||
|
@Workspace() workspaceId: string,
|
||||||
|
@CurrentUser() user: AuthUser
|
||||||
|
): Promise<ImportResponseDto> {
|
||||||
|
return this.importService.importTasks(workspaceId, user.id, taskPayload);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Post("projects")
|
||||||
|
async importProjects(
|
||||||
|
@Body(new ParseArrayPipe({ items: ImportProjectDto })) projectPayload: ImportProjectDto[],
|
||||||
|
@Workspace() workspaceId: string,
|
||||||
|
@CurrentUser() user: AuthUser
|
||||||
|
): Promise<ImportResponseDto> {
|
||||||
|
return this.importService.importProjects(workspaceId, user.id, projectPayload);
|
||||||
|
}
|
||||||
|
}
|
||||||
13
apps/api/src/import/import.module.ts
Normal file
13
apps/api/src/import/import.module.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
import { Module } from "@nestjs/common";
|
||||||
|
import { AuthModule } from "../auth/auth.module";
|
||||||
|
import { PrismaModule } from "../prisma/prisma.module";
|
||||||
|
import { ImportController } from "./import.controller";
|
||||||
|
import { ImportService } from "./import.service";
|
||||||
|
|
||||||
|
/**
 * Module wiring for the jarvis-brain import feature.
 *
 * Pulls in PrismaModule for database access and AuthModule for the guards
 * used by ImportController; exports ImportService for reuse elsewhere.
 */
@Module({
  imports: [PrismaModule, AuthModule],
  controllers: [ImportController],
  providers: [ImportService],
  exports: [ImportService],
})
export class ImportModule {}
|
||||||
251
apps/api/src/import/import.service.spec.ts
Normal file
251
apps/api/src/import/import.service.spec.ts
Normal file
@@ -0,0 +1,251 @@
|
|||||||
|
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { ProjectStatus, TaskPriority, TaskStatus } from "@prisma/client";
|
||||||
|
import { ImportService } from "./import.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
|
||||||
|
// Unit tests for ImportService. PrismaService is fully mocked, and
// withWorkspaceContext is stubbed to run its callback against the same
// mock object, so no database is touched.
describe("ImportService", () => {
  let service: ImportService;

  // Mock of the subset of PrismaService the import flows use.
  const mockPrismaService = {
    withWorkspaceContext: vi.fn(),
    domain: {
      findUnique: vi.fn(),
      create: vi.fn(),
    },
    project: {
      findFirst: vi.fn(),
      create: vi.fn(),
    },
    task: {
      findFirst: vi.fn(),
      create: vi.fn(),
    },
  };

  const workspaceId = "550e8400-e29b-41d4-a716-446655440001";
  const userId = "550e8400-e29b-41d4-a716-446655440002";

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        ImportService,
        {
          provide: PrismaService,
          useValue: mockPrismaService,
        },
      ],
    }).compile();

    service = module.get<ImportService>(ImportService);
    vi.clearAllMocks();

    // Pass the mock itself through as the transaction client so per-model
    // mocks above are the ones the service calls inside the context.
    mockPrismaService.withWorkspaceContext.mockImplementation(
      async (_userId: string, _workspaceId: string, fn: (client: unknown) => Promise<unknown>) => {
        return fn(mockPrismaService);
      }
    );
  });

  it("should be defined", () => {
    expect(service).toBeDefined();
  });

  describe("importTasks", () => {
    it("maps status/priority/domain and imports a task", async () => {
      // No existing task/domain/project -> domain gets created, task imported.
      mockPrismaService.task.findFirst.mockResolvedValue(null);
      mockPrismaService.domain.findUnique.mockResolvedValue(null);
      mockPrismaService.domain.create.mockResolvedValue({ id: "domain-id" })
      mockPrismaService.project.findFirst.mockResolvedValue(null);
      mockPrismaService.task.create.mockResolvedValue({ id: "task-id" });

      const result = await service.importTasks(workspaceId, userId, [
        {
          id: "task-1",
          title: "Import me",
          domain: "Platform Ops",
          status: "in-progress",
          priority: "critical",
          project: null,
          related: [],
          blocks: [],
          blocked_by: [],
          progress: 42,
          due: "2026-03-15",
          created: "2026-03-01T10:00:00.000Z",
          updated: "2026-03-05T12:00:00.000Z",
          assignee: null,
          notes: "notes",
          notes_nontechnical: "non technical",
        },
      ]);

      expect(result).toEqual({ imported: 1, skipped: 0, errors: [] });
      // "critical" maps to HIGH, "in-progress" to IN_PROGRESS.
      expect(mockPrismaService.task.create).toHaveBeenCalledWith(
        expect.objectContaining({
          data: expect.objectContaining({
            title: "Import me",
            status: TaskStatus.IN_PROGRESS,
            priority: TaskPriority.HIGH,
            domainId: "domain-id",
          }),
        })
      );
    });

    it("skips existing task by brainId", async () => {
      mockPrismaService.task.findFirst.mockResolvedValue({ id: "existing-task-id" });

      const result = await service.importTasks(workspaceId, userId, [
        {
          id: "task-1",
          title: "Existing",
          domain: null,
          status: "pending",
          priority: "medium",
          project: null,
          related: [],
          blocks: [],
          blocked_by: [],
          progress: null,
          due: null,
          created: null,
          updated: null,
          assignee: null,
          notes: null,
          notes_nontechnical: null,
        },
      ]);

      expect(result.imported).toBe(0);
      expect(result.skipped).toBe(1);
      expect(mockPrismaService.task.create).not.toHaveBeenCalled();
    });

    it("collects mapping/missing-project errors while importing", async () => {
      mockPrismaService.task.findFirst.mockResolvedValue(null);
      mockPrismaService.project.findFirst.mockResolvedValue(null);
      mockPrismaService.task.create.mockResolvedValue({ id: "task-id" });

      const result = await service.importTasks(workspaceId, userId, [
        {
          id: "task-1",
          title: "Needs project",
          domain: null,
          status: "mystery-status",
          priority: "mystery-priority",
          project: "brain-project-1",
          related: [],
          blocks: [],
          blocked_by: [],
          progress: null,
          due: null,
          created: null,
          updated: null,
          assignee: null,
          notes: null,
          notes_nontechnical: null,
        },
      ]);

      // Mapping problems are reported but do not block the import itself.
      expect(result.imported).toBe(1);
      expect(result.errors).toEqual(
        expect.arrayContaining([
          expect.stringContaining('Unknown task status "mystery-status"'),
          expect.stringContaining('Unknown task priority "mystery-priority"'),
          expect.stringContaining('referenced project "brain-project-1" not found'),
        ])
      );
      // Unknown values fall back to the defaults; missing project -> null.
      expect(mockPrismaService.task.create).toHaveBeenCalledWith(
        expect.objectContaining({
          data: expect.objectContaining({
            status: TaskStatus.NOT_STARTED,
            priority: TaskPriority.MEDIUM,
            projectId: null,
          }),
        })
      );
    });
  });

  describe("importProjects", () => {
    it("maps status/domain and imports a project", async () => {
      mockPrismaService.project.findFirst.mockResolvedValue(null);
      mockPrismaService.domain.findUnique.mockResolvedValue(null);
      mockPrismaService.domain.create.mockResolvedValue({ id: "domain-id" });
      mockPrismaService.project.create.mockResolvedValue({ id: "project-id" });

      const result = await service.importProjects(workspaceId, userId, [
        {
          id: "project-1",
          name: "Project One",
          description: "desc",
          domain: "Backend",
          status: "in-progress",
          priority: "high",
          progress: 50,
          repo: "git@example.com/repo",
          branch: "main",
          current_milestone: "MS21",
          next_milestone: "MS22",
          blocker: null,
          owner: "owner",
          docs_path: "docs/PRD.md",
          created: "2026-03-01",
          updated: "2026-03-05",
          target_date: "2026-04-01",
          notes: "notes",
          notes_nontechnical: "non tech",
          parent: null,
        },
      ]);

      expect(result).toEqual({ imported: 1, skipped: 0, errors: [] });
      // "in-progress" maps to ACTIVE for projects.
      expect(mockPrismaService.project.create).toHaveBeenCalledWith(
        expect.objectContaining({
          data: expect.objectContaining({
            name: "Project One",
            status: ProjectStatus.ACTIVE,
            domainId: "domain-id",
          }),
        })
      );
    });

    it("captures create failures as errors", async () => {
      mockPrismaService.project.findFirst.mockResolvedValue(null);
      mockPrismaService.project.create.mockRejectedValue(new Error("db failed"));

      const result = await service.importProjects(workspaceId, userId, [
        {
          id: "project-1",
          name: "Project One",
          description: null,
          domain: null,
          status: "planning",
          priority: null,
          progress: null,
          repo: null,
          branch: null,
          current_milestone: null,
          next_milestone: null,
          blocker: null,
          owner: null,
          docs_path: null,
          created: null,
          updated: null,
          target_date: null,
          notes: null,
          notes_nontechnical: null,
          parent: null,
        },
      ]);

      // A failed create is counted as skipped and surfaced in errors.
      expect(result.imported).toBe(0);
      expect(result.skipped).toBe(1);
      expect(result.errors).toEqual([
        expect.stringContaining("project project-1: failed to import: db failed"),
      ]);
    });
  });
});
|
||||||
496
apps/api/src/import/import.service.ts
Normal file
496
apps/api/src/import/import.service.ts
Normal file
@@ -0,0 +1,496 @@
|
|||||||
|
import { Injectable } from "@nestjs/common";
|
||||||
|
import { Prisma, PrismaClient, ProjectStatus, TaskPriority, TaskStatus } from "@prisma/client";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import type { ImportProjectDto, ImportResponseDto, ImportTaskDto } from "./dto";
|
||||||
|
|
||||||
|
interface TaskStatusMapping {
|
||||||
|
status: TaskStatus;
|
||||||
|
issue: string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TaskPriorityMapping {
|
||||||
|
priority: TaskPriority;
|
||||||
|
issue: string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ProjectStatusMapping {
|
||||||
|
status: ProjectStatus;
|
||||||
|
issue: string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class ImportService {
|
||||||
|
constructor(private readonly prisma: PrismaService) {}
|
||||||
|
|
||||||
|
async importTasks(
|
||||||
|
workspaceId: string,
|
||||||
|
userId: string,
|
||||||
|
taskPayload: ImportTaskDto[]
|
||||||
|
): Promise<ImportResponseDto> {
|
||||||
|
const errors: string[] = [];
|
||||||
|
let imported = 0;
|
||||||
|
let skipped = 0;
|
||||||
|
|
||||||
|
const importTimestamp = new Date().toISOString();
|
||||||
|
const seenBrainTaskIds = new Set<string>();
|
||||||
|
const domainIdBySlug = new Map<string, string>();
|
||||||
|
const projectIdByBrainId = new Map<string, string | null>();
|
||||||
|
|
||||||
|
await this.prisma.withWorkspaceContext(userId, workspaceId, async (tx: PrismaClient) => {
|
||||||
|
for (const [index, task] of taskPayload.entries()) {
|
||||||
|
const brainId = task.id.trim();
|
||||||
|
|
||||||
|
if (seenBrainTaskIds.has(brainId)) {
|
||||||
|
skipped += 1;
|
||||||
|
errors.push(`task ${brainId}: duplicate item in request body`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
seenBrainTaskIds.add(brainId);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const existingTask = await tx.task.findFirst({
|
||||||
|
where: {
|
||||||
|
workspaceId,
|
||||||
|
metadata: {
|
||||||
|
path: ["brainId"],
|
||||||
|
equals: brainId,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
select: { id: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (existingTask) {
|
||||||
|
skipped += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const mappedStatus = this.mapTaskStatus(task.status ?? null);
|
||||||
|
if (mappedStatus.issue) {
|
||||||
|
errors.push(`task ${brainId}: ${mappedStatus.issue}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const mappedPriority = this.mapTaskPriority(task.priority ?? null);
|
||||||
|
if (mappedPriority.issue) {
|
||||||
|
errors.push(`task ${brainId}: ${mappedPriority.issue}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const projectBrainId = task.project?.trim() ? task.project.trim() : null;
|
||||||
|
const projectId = await this.resolveProjectId(
|
||||||
|
tx,
|
||||||
|
workspaceId,
|
||||||
|
projectBrainId,
|
||||||
|
projectIdByBrainId,
|
||||||
|
brainId,
|
||||||
|
errors
|
||||||
|
);
|
||||||
|
|
||||||
|
const domainId = await this.resolveDomainId(
|
||||||
|
tx,
|
||||||
|
workspaceId,
|
||||||
|
task.domain ?? null,
|
||||||
|
importTimestamp,
|
||||||
|
domainIdBySlug
|
||||||
|
);
|
||||||
|
|
||||||
|
const createdAt =
|
||||||
|
this.normalizeDate(task.created ?? null, `task ${brainId}.created`, errors) ??
|
||||||
|
new Date();
|
||||||
|
const updatedAt =
|
||||||
|
this.normalizeDate(task.updated ?? null, `task ${brainId}.updated`, errors) ??
|
||||||
|
createdAt;
|
||||||
|
const dueDate = this.normalizeDate(task.due ?? null, `task ${brainId}.due`, errors);
|
||||||
|
const completedAt = mappedStatus.status === TaskStatus.COMPLETED ? updatedAt : null;
|
||||||
|
|
||||||
|
const metadata = this.asJsonValue({
|
||||||
|
source: "jarvis-brain",
|
||||||
|
brainId,
|
||||||
|
brainDomain: task.domain ?? null,
|
||||||
|
brainProjectId: projectBrainId,
|
||||||
|
rawStatus: task.status ?? null,
|
||||||
|
rawPriority: task.priority ?? null,
|
||||||
|
related: task.related ?? [],
|
||||||
|
blocks: task.blocks ?? [],
|
||||||
|
blockedBy: task.blocked_by ?? [],
|
||||||
|
assignee: task.assignee ?? null,
|
||||||
|
progress: task.progress ?? null,
|
||||||
|
notes: task.notes ?? null,
|
||||||
|
notesNonTechnical: task.notes_nontechnical ?? null,
|
||||||
|
importedAt: importTimestamp,
|
||||||
|
});
|
||||||
|
|
||||||
|
await tx.task.create({
|
||||||
|
data: {
|
||||||
|
workspaceId,
|
||||||
|
title: task.title,
|
||||||
|
description: task.notes ?? null,
|
||||||
|
status: mappedStatus.status,
|
||||||
|
priority: mappedPriority.priority,
|
||||||
|
dueDate,
|
||||||
|
creatorId: userId,
|
||||||
|
projectId,
|
||||||
|
domainId,
|
||||||
|
metadata,
|
||||||
|
createdAt,
|
||||||
|
updatedAt,
|
||||||
|
completedAt,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
imported += 1;
|
||||||
|
} catch (error) {
|
||||||
|
skipped += 1;
|
||||||
|
errors.push(
|
||||||
|
`task ${brainId || `index-${String(index)}`}: failed to import: ${this.getErrorMessage(error)}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
imported,
|
||||||
|
skipped,
|
||||||
|
errors,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async importProjects(
|
||||||
|
workspaceId: string,
|
||||||
|
userId: string,
|
||||||
|
projectPayload: ImportProjectDto[]
|
||||||
|
): Promise<ImportResponseDto> {
|
||||||
|
const errors: string[] = [];
|
||||||
|
let imported = 0;
|
||||||
|
let skipped = 0;
|
||||||
|
|
||||||
|
const importTimestamp = new Date().toISOString();
|
||||||
|
const seenBrainProjectIds = new Set<string>();
|
||||||
|
const domainIdBySlug = new Map<string, string>();
|
||||||
|
|
||||||
|
await this.prisma.withWorkspaceContext(userId, workspaceId, async (tx: PrismaClient) => {
|
||||||
|
for (const [index, project] of projectPayload.entries()) {
|
||||||
|
const brainId = project.id.trim();
|
||||||
|
|
||||||
|
if (seenBrainProjectIds.has(brainId)) {
|
||||||
|
skipped += 1;
|
||||||
|
errors.push(`project ${brainId}: duplicate item in request body`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
seenBrainProjectIds.add(brainId);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const existingProject = await tx.project.findFirst({
|
||||||
|
where: {
|
||||||
|
workspaceId,
|
||||||
|
metadata: {
|
||||||
|
path: ["brainId"],
|
||||||
|
equals: brainId,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
select: { id: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (existingProject) {
|
||||||
|
skipped += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const mappedStatus = this.mapProjectStatus(project.status ?? null);
|
||||||
|
if (mappedStatus.issue) {
|
||||||
|
errors.push(`project ${brainId}: ${mappedStatus.issue}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const domainId = await this.resolveDomainId(
|
||||||
|
tx,
|
||||||
|
workspaceId,
|
||||||
|
project.domain ?? null,
|
||||||
|
importTimestamp,
|
||||||
|
domainIdBySlug
|
||||||
|
);
|
||||||
|
|
||||||
|
const createdAt =
|
||||||
|
this.normalizeDate(project.created ?? null, `project ${brainId}.created`, errors) ??
|
||||||
|
new Date();
|
||||||
|
const updatedAt =
|
||||||
|
this.normalizeDate(project.updated ?? null, `project ${brainId}.updated`, errors) ??
|
||||||
|
createdAt;
|
||||||
|
const startDate = this.normalizeDate(
|
||||||
|
project.created ?? null,
|
||||||
|
`project ${brainId}.startDate`,
|
||||||
|
errors
|
||||||
|
);
|
||||||
|
const endDate = this.normalizeDate(
|
||||||
|
project.target_date ?? null,
|
||||||
|
`project ${brainId}.target_date`,
|
||||||
|
errors
|
||||||
|
);
|
||||||
|
|
||||||
|
const metadata = this.asJsonValue({
|
||||||
|
source: "jarvis-brain",
|
||||||
|
brainId,
|
||||||
|
brainDomain: project.domain ?? null,
|
||||||
|
rawStatus: project.status ?? null,
|
||||||
|
rawPriority: project.priority ?? null,
|
||||||
|
progress: project.progress ?? null,
|
||||||
|
repo: project.repo ?? null,
|
||||||
|
branch: project.branch ?? null,
|
||||||
|
currentMilestone: project.current_milestone ?? null,
|
||||||
|
nextMilestone: project.next_milestone ?? null,
|
||||||
|
blocker: project.blocker ?? null,
|
||||||
|
owner: project.owner ?? null,
|
||||||
|
docsPath: project.docs_path ?? null,
|
||||||
|
targetDate: project.target_date ?? null,
|
||||||
|
notes: project.notes ?? null,
|
||||||
|
notesNonTechnical: project.notes_nontechnical ?? null,
|
||||||
|
parent: project.parent ?? null,
|
||||||
|
importedAt: importTimestamp,
|
||||||
|
});
|
||||||
|
|
||||||
|
await tx.project.create({
|
||||||
|
data: {
|
||||||
|
workspaceId,
|
||||||
|
name: project.name,
|
||||||
|
description: project.description ?? null,
|
||||||
|
status: mappedStatus.status,
|
||||||
|
startDate,
|
||||||
|
endDate,
|
||||||
|
creatorId: userId,
|
||||||
|
domainId,
|
||||||
|
metadata,
|
||||||
|
createdAt,
|
||||||
|
updatedAt,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
imported += 1;
|
||||||
|
} catch (error) {
|
||||||
|
skipped += 1;
|
||||||
|
errors.push(
|
||||||
|
`project ${brainId || `index-${String(index)}`}: failed to import: ${this.getErrorMessage(error)}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
imported,
|
||||||
|
skipped,
|
||||||
|
errors,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private async resolveProjectId(
|
||||||
|
tx: PrismaClient,
|
||||||
|
workspaceId: string,
|
||||||
|
projectBrainId: string | null,
|
||||||
|
projectIdByBrainId: Map<string, string | null>,
|
||||||
|
taskBrainId: string,
|
||||||
|
errors: string[]
|
||||||
|
): Promise<string | null> {
|
||||||
|
if (!projectBrainId) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (projectIdByBrainId.has(projectBrainId)) {
|
||||||
|
return projectIdByBrainId.get(projectBrainId) ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const existingProject = await tx.project.findFirst({
|
||||||
|
where: {
|
||||||
|
workspaceId,
|
||||||
|
metadata: {
|
||||||
|
path: ["brainId"],
|
||||||
|
equals: projectBrainId,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
select: { id: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!existingProject) {
|
||||||
|
projectIdByBrainId.set(projectBrainId, null);
|
||||||
|
errors.push(`task ${taskBrainId}: referenced project "${projectBrainId}" not found`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
projectIdByBrainId.set(projectBrainId, existingProject.id);
|
||||||
|
return existingProject.id;
|
||||||
|
}
|
||||||
|
|
||||||
|
private async resolveDomainId(
|
||||||
|
tx: PrismaClient,
|
||||||
|
workspaceId: string,
|
||||||
|
rawDomain: string | null,
|
||||||
|
importTimestamp: string,
|
||||||
|
domainIdBySlug: Map<string, string>
|
||||||
|
): Promise<string | null> {
|
||||||
|
const domainSlug = this.normalizeDomain(rawDomain);
|
||||||
|
if (!domainSlug) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const cachedId = domainIdBySlug.get(domainSlug);
|
||||||
|
if (cachedId) {
|
||||||
|
return cachedId;
|
||||||
|
}
|
||||||
|
|
||||||
|
const existingDomain = await tx.domain.findUnique({
|
||||||
|
where: {
|
||||||
|
workspaceId_slug: {
|
||||||
|
workspaceId,
|
||||||
|
slug: domainSlug,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
select: { id: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (existingDomain) {
|
||||||
|
domainIdBySlug.set(domainSlug, existingDomain.id);
|
||||||
|
return existingDomain.id;
|
||||||
|
}
|
||||||
|
|
||||||
|
const trimmedDomainName = rawDomain?.trim();
|
||||||
|
const domainName =
|
||||||
|
trimmedDomainName && trimmedDomainName.length > 0 ? trimmedDomainName : domainSlug;
|
||||||
|
const createdDomain = await tx.domain.create({
|
||||||
|
data: {
|
||||||
|
workspaceId,
|
||||||
|
slug: domainSlug,
|
||||||
|
name: domainName,
|
||||||
|
metadata: this.asJsonValue({
|
||||||
|
source: "jarvis-brain",
|
||||||
|
brainId: domainName,
|
||||||
|
sourceValues: [domainName],
|
||||||
|
importedAt: importTimestamp,
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
select: { id: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
domainIdBySlug.set(domainSlug, createdDomain.id);
|
||||||
|
return createdDomain.id;
|
||||||
|
}
|
||||||
|
|
||||||
|
private normalizeKey(value: string | null | undefined): string {
|
||||||
|
return value?.trim().toLowerCase() ?? "";
|
||||||
|
}
|
||||||
|
|
||||||
|
private mapTaskStatus(rawStatus: string | null): TaskStatusMapping {
|
||||||
|
const statusKey = this.normalizeKey(rawStatus);
|
||||||
|
|
||||||
|
switch (statusKey) {
|
||||||
|
case "done":
|
||||||
|
return { status: TaskStatus.COMPLETED, issue: null };
|
||||||
|
case "in-progress":
|
||||||
|
return { status: TaskStatus.IN_PROGRESS, issue: null };
|
||||||
|
case "backlog":
|
||||||
|
case "pending":
|
||||||
|
case "scheduled":
|
||||||
|
case "not-started":
|
||||||
|
case "planned":
|
||||||
|
return { status: TaskStatus.NOT_STARTED, issue: null };
|
||||||
|
case "blocked":
|
||||||
|
case "on-hold":
|
||||||
|
return { status: TaskStatus.PAUSED, issue: null };
|
||||||
|
case "cancelled":
|
||||||
|
return { status: TaskStatus.ARCHIVED, issue: null };
|
||||||
|
default:
|
||||||
|
return {
|
||||||
|
status: TaskStatus.NOT_STARTED,
|
||||||
|
issue: `Unknown task status "${rawStatus ?? "null"}" mapped to NOT_STARTED`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private mapTaskPriority(rawPriority: string | null): TaskPriorityMapping {
|
||||||
|
const priorityKey = this.normalizeKey(rawPriority);
|
||||||
|
|
||||||
|
switch (priorityKey) {
|
||||||
|
case "critical":
|
||||||
|
case "high":
|
||||||
|
return { priority: TaskPriority.HIGH, issue: null };
|
||||||
|
case "medium":
|
||||||
|
return { priority: TaskPriority.MEDIUM, issue: null };
|
||||||
|
case "low":
|
||||||
|
return { priority: TaskPriority.LOW, issue: null };
|
||||||
|
default:
|
||||||
|
return {
|
||||||
|
priority: TaskPriority.MEDIUM,
|
||||||
|
issue: `Unknown task priority "${rawPriority ?? "null"}" mapped to MEDIUM`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private mapProjectStatus(rawStatus: string | null): ProjectStatusMapping {
|
||||||
|
const statusKey = this.normalizeKey(rawStatus);
|
||||||
|
|
||||||
|
switch (statusKey) {
|
||||||
|
case "active":
|
||||||
|
case "in-progress":
|
||||||
|
return { status: ProjectStatus.ACTIVE, issue: null };
|
||||||
|
case "backlog":
|
||||||
|
case "planning":
|
||||||
|
return { status: ProjectStatus.PLANNING, issue: null };
|
||||||
|
case "paused":
|
||||||
|
case "blocked":
|
||||||
|
return { status: ProjectStatus.PAUSED, issue: null };
|
||||||
|
case "archived":
|
||||||
|
case "maintenance":
|
||||||
|
return { status: ProjectStatus.ARCHIVED, issue: null };
|
||||||
|
default:
|
||||||
|
return {
|
||||||
|
status: ProjectStatus.PLANNING,
|
||||||
|
issue: `Unknown project status "${rawStatus ?? "null"}" mapped to PLANNING`,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private normalizeDomain(rawDomain: string | null | undefined): string | null {
|
||||||
|
if (!rawDomain) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const trimmed = rawDomain.trim();
|
||||||
|
if (trimmed.length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const slug = trimmed
|
||||||
|
.toLowerCase()
|
||||||
|
.replace(/[^a-z0-9]+/g, "-")
|
||||||
|
.replace(/^-+|-+$/g, "");
|
||||||
|
|
||||||
|
return slug.length > 0 ? slug : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
private normalizeDate(rawValue: string | null, context: string, errors: string[]): Date | null {
|
||||||
|
if (!rawValue) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const trimmed = rawValue.trim();
|
||||||
|
if (trimmed.length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const value = /^\d{4}-\d{2}-\d{2}$/.test(trimmed) ? `${trimmed}T00:00:00.000Z` : trimmed;
|
||||||
|
const parsedDate = new Date(value);
|
||||||
|
|
||||||
|
if (Number.isNaN(parsedDate.getTime())) {
|
||||||
|
errors.push(`${context}: invalid date "${rawValue}"`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return parsedDate;
|
||||||
|
}
|
||||||
|
|
||||||
|
private asJsonValue(value: Record<string, unknown>): Prisma.InputJsonValue {
|
||||||
|
return value as Prisma.InputJsonValue;
|
||||||
|
}
|
||||||
|
|
||||||
|
private getErrorMessage(error: unknown): string {
|
||||||
|
if (error instanceof Error) {
|
||||||
|
return error.message;
|
||||||
|
}
|
||||||
|
|
||||||
|
return String(error);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
import { IsOptional, IsEnum, IsString, IsInt, Min, Max } from "class-validator";
|
import { IsOptional, IsEnum, IsString, IsInt, IsIn, Min, Max } from "class-validator";
|
||||||
import { Type } from "class-transformer";
|
import { Type } from "class-transformer";
|
||||||
import { EntryStatus } from "@prisma/client";
|
import { EntryStatus, Visibility } from "@prisma/client";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* DTO for querying knowledge entries (list endpoint)
|
* DTO for querying knowledge entries (list endpoint)
|
||||||
@@ -10,10 +10,28 @@ export class EntryQueryDto {
|
|||||||
@IsEnum(EntryStatus, { message: "status must be a valid EntryStatus" })
|
@IsEnum(EntryStatus, { message: "status must be a valid EntryStatus" })
|
||||||
status?: EntryStatus;
|
status?: EntryStatus;
|
||||||
|
|
||||||
|
@IsOptional()
|
||||||
|
@IsEnum(Visibility, { message: "visibility must be a valid Visibility" })
|
||||||
|
visibility?: Visibility;
|
||||||
|
|
||||||
@IsOptional()
|
@IsOptional()
|
||||||
@IsString({ message: "tag must be a string" })
|
@IsString({ message: "tag must be a string" })
|
||||||
tag?: string;
|
tag?: string;
|
||||||
|
|
||||||
|
@IsOptional()
|
||||||
|
@IsString({ message: "search must be a string" })
|
||||||
|
search?: string;
|
||||||
|
|
||||||
|
@IsOptional()
|
||||||
|
@IsIn(["updatedAt", "createdAt", "title"], {
|
||||||
|
message: "sortBy must be updatedAt, createdAt, or title",
|
||||||
|
})
|
||||||
|
sortBy?: "updatedAt" | "createdAt" | "title";
|
||||||
|
|
||||||
|
@IsOptional()
|
||||||
|
@IsIn(["asc", "desc"], { message: "sortOrder must be asc or desc" })
|
||||||
|
sortOrder?: "asc" | "desc";
|
||||||
|
|
||||||
@IsOptional()
|
@IsOptional()
|
||||||
@Type(() => Number)
|
@Type(() => Number)
|
||||||
@IsInt({ message: "page must be an integer" })
|
@IsInt({ message: "page must be an integer" })
|
||||||
|
|||||||
@@ -48,6 +48,10 @@ export class KnowledgeService {
|
|||||||
where.status = query.status;
|
where.status = query.status;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (query.visibility) {
|
||||||
|
where.visibility = query.visibility;
|
||||||
|
}
|
||||||
|
|
||||||
if (query.tag) {
|
if (query.tag) {
|
||||||
where.tags = {
|
where.tags = {
|
||||||
some: {
|
some: {
|
||||||
@@ -58,6 +62,20 @@ export class KnowledgeService {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (query.search) {
|
||||||
|
where.OR = [
|
||||||
|
{ title: { contains: query.search, mode: "insensitive" } },
|
||||||
|
{ content: { contains: query.search, mode: "insensitive" } },
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build orderBy
|
||||||
|
const sortField = query.sortBy ?? "updatedAt";
|
||||||
|
const sortDirection = query.sortOrder ?? "desc";
|
||||||
|
const orderBy: Prisma.KnowledgeEntryOrderByWithRelationInput = {
|
||||||
|
[sortField]: sortDirection,
|
||||||
|
};
|
||||||
|
|
||||||
// Get total count
|
// Get total count
|
||||||
const total = await this.prisma.knowledgeEntry.count({ where });
|
const total = await this.prisma.knowledgeEntry.count({ where });
|
||||||
|
|
||||||
@@ -71,9 +89,7 @@ export class KnowledgeService {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
orderBy: {
|
orderBy,
|
||||||
updatedAt: "desc",
|
|
||||||
},
|
|
||||||
skip,
|
skip,
|
||||||
take: limit,
|
take: limit,
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,59 +1,38 @@
|
|||||||
import {
|
import { FormalityLevel } from "@prisma/client";
|
||||||
IsString,
|
import { IsString, IsEnum, IsOptional, IsBoolean, MinLength, MaxLength } from "class-validator";
|
||||||
IsOptional,
|
|
||||||
IsBoolean,
|
|
||||||
IsNumber,
|
|
||||||
IsInt,
|
|
||||||
IsUUID,
|
|
||||||
MinLength,
|
|
||||||
MaxLength,
|
|
||||||
Min,
|
|
||||||
Max,
|
|
||||||
} from "class-validator";
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* DTO for creating a new personality/assistant configuration
|
* DTO for creating a new personality
|
||||||
|
* Field names match the frontend API contract from @mosaic/shared Personality type.
|
||||||
*/
|
*/
|
||||||
export class CreatePersonalityDto {
|
export class CreatePersonalityDto {
|
||||||
@IsString()
|
@IsString({ message: "name must be a string" })
|
||||||
@MinLength(1)
|
@MinLength(1, { message: "name must not be empty" })
|
||||||
@MaxLength(100)
|
@MaxLength(255, { message: "name must not exceed 255 characters" })
|
||||||
name!: string; // unique identifier slug
|
name!: string;
|
||||||
|
|
||||||
@IsString()
|
|
||||||
@MinLength(1)
|
|
||||||
@MaxLength(200)
|
|
||||||
displayName!: string; // human-readable name
|
|
||||||
|
|
||||||
@IsOptional()
|
@IsOptional()
|
||||||
@IsString()
|
@IsString({ message: "description must be a string" })
|
||||||
@MaxLength(1000)
|
@MaxLength(2000, { message: "description must not exceed 2000 characters" })
|
||||||
description?: string;
|
description?: string;
|
||||||
|
|
||||||
@IsString()
|
@IsString({ message: "tone must be a string" })
|
||||||
@MinLength(10)
|
@MinLength(1, { message: "tone must not be empty" })
|
||||||
systemPrompt!: string;
|
@MaxLength(100, { message: "tone must not exceed 100 characters" })
|
||||||
|
tone!: string;
|
||||||
|
|
||||||
|
@IsEnum(FormalityLevel, { message: "formalityLevel must be a valid FormalityLevel" })
|
||||||
|
formalityLevel!: FormalityLevel;
|
||||||
|
|
||||||
|
@IsString({ message: "systemPromptTemplate must be a string" })
|
||||||
|
@MinLength(1, { message: "systemPromptTemplate must not be empty" })
|
||||||
|
systemPromptTemplate!: string;
|
||||||
|
|
||||||
@IsOptional()
|
@IsOptional()
|
||||||
@IsNumber()
|
@IsBoolean({ message: "isDefault must be a boolean" })
|
||||||
@Min(0)
|
|
||||||
@Max(2)
|
|
||||||
temperature?: number; // null = use provider default
|
|
||||||
|
|
||||||
@IsOptional()
|
|
||||||
@IsInt()
|
|
||||||
@Min(1)
|
|
||||||
maxTokens?: number; // null = use provider default
|
|
||||||
|
|
||||||
@IsOptional()
|
|
||||||
@IsUUID("4")
|
|
||||||
llmProviderInstanceId?: string; // FK to LlmProviderInstance
|
|
||||||
|
|
||||||
@IsOptional()
|
|
||||||
@IsBoolean()
|
|
||||||
isDefault?: boolean;
|
isDefault?: boolean;
|
||||||
|
|
||||||
@IsOptional()
|
@IsOptional()
|
||||||
@IsBoolean()
|
@IsBoolean({ message: "isActive must be a boolean" })
|
||||||
isEnabled?: boolean;
|
isActive?: boolean;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,2 +1,3 @@
|
|||||||
export * from "./create-personality.dto";
|
export * from "./create-personality.dto";
|
||||||
export * from "./update-personality.dto";
|
export * from "./update-personality.dto";
|
||||||
|
export * from "./personality-query.dto";
|
||||||
|
|||||||
12
apps/api/src/personalities/dto/personality-query.dto.ts
Normal file
12
apps/api/src/personalities/dto/personality-query.dto.ts
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
import { IsBoolean, IsOptional } from "class-validator";
|
||||||
|
import { Transform } from "class-transformer";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DTO for querying/filtering personalities
|
||||||
|
*/
|
||||||
|
export class PersonalityQueryDto {
|
||||||
|
@IsOptional()
|
||||||
|
@IsBoolean({ message: "isActive must be a boolean" })
|
||||||
|
@Transform(({ value }) => value === "true" || value === true)
|
||||||
|
isActive?: boolean;
|
||||||
|
}
|
||||||
@@ -1,62 +1,42 @@
|
|||||||
import {
|
import { FormalityLevel } from "@prisma/client";
|
||||||
IsString,
|
import { IsString, IsEnum, IsOptional, IsBoolean, MinLength, MaxLength } from "class-validator";
|
||||||
IsOptional,
|
|
||||||
IsBoolean,
|
|
||||||
IsNumber,
|
|
||||||
IsInt,
|
|
||||||
IsUUID,
|
|
||||||
MinLength,
|
|
||||||
MaxLength,
|
|
||||||
Min,
|
|
||||||
Max,
|
|
||||||
} from "class-validator";
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* DTO for updating an existing personality/assistant configuration
|
* DTO for updating an existing personality
|
||||||
|
* All fields are optional; only provided fields are updated.
|
||||||
*/
|
*/
|
||||||
export class UpdatePersonalityDto {
|
export class UpdatePersonalityDto {
|
||||||
@IsOptional()
|
@IsOptional()
|
||||||
@IsString()
|
@IsString({ message: "name must be a string" })
|
||||||
@MinLength(1)
|
@MinLength(1, { message: "name must not be empty" })
|
||||||
@MaxLength(100)
|
@MaxLength(255, { message: "name must not exceed 255 characters" })
|
||||||
name?: string; // unique identifier slug
|
name?: string;
|
||||||
|
|
||||||
@IsOptional()
|
@IsOptional()
|
||||||
@IsString()
|
@IsString({ message: "description must be a string" })
|
||||||
@MinLength(1)
|
@MaxLength(2000, { message: "description must not exceed 2000 characters" })
|
||||||
@MaxLength(200)
|
|
||||||
displayName?: string; // human-readable name
|
|
||||||
|
|
||||||
@IsOptional()
|
|
||||||
@IsString()
|
|
||||||
@MaxLength(1000)
|
|
||||||
description?: string;
|
description?: string;
|
||||||
|
|
||||||
@IsOptional()
|
@IsOptional()
|
||||||
@IsString()
|
@IsString({ message: "tone must be a string" })
|
||||||
@MinLength(10)
|
@MinLength(1, { message: "tone must not be empty" })
|
||||||
systemPrompt?: string;
|
@MaxLength(100, { message: "tone must not exceed 100 characters" })
|
||||||
|
tone?: string;
|
||||||
|
|
||||||
@IsOptional()
|
@IsOptional()
|
||||||
@IsNumber()
|
@IsEnum(FormalityLevel, { message: "formalityLevel must be a valid FormalityLevel" })
|
||||||
@Min(0)
|
formalityLevel?: FormalityLevel;
|
||||||
@Max(2)
|
|
||||||
temperature?: number; // null = use provider default
|
|
||||||
|
|
||||||
@IsOptional()
|
@IsOptional()
|
||||||
@IsInt()
|
@IsString({ message: "systemPromptTemplate must be a string" })
|
||||||
@Min(1)
|
@MinLength(1, { message: "systemPromptTemplate must not be empty" })
|
||||||
maxTokens?: number; // null = use provider default
|
systemPromptTemplate?: string;
|
||||||
|
|
||||||
@IsOptional()
|
@IsOptional()
|
||||||
@IsUUID("4")
|
@IsBoolean({ message: "isDefault must be a boolean" })
|
||||||
llmProviderInstanceId?: string; // FK to LlmProviderInstance
|
|
||||||
|
|
||||||
@IsOptional()
|
|
||||||
@IsBoolean()
|
|
||||||
isDefault?: boolean;
|
isDefault?: boolean;
|
||||||
|
|
||||||
@IsOptional()
|
@IsOptional()
|
||||||
@IsBoolean()
|
@IsBoolean({ message: "isActive must be a boolean" })
|
||||||
isEnabled?: boolean;
|
isActive?: boolean;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,20 +1,24 @@
|
|||||||
import type { Personality as PrismaPersonality } from "@prisma/client";
|
import type { FormalityLevel } from "@prisma/client";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Personality entity representing an assistant configuration
|
* Personality response entity
|
||||||
|
* Maps Prisma Personality fields to the frontend API contract.
|
||||||
|
*
|
||||||
|
* Field mapping (Prisma -> API):
|
||||||
|
* systemPrompt -> systemPromptTemplate
|
||||||
|
* isEnabled -> isActive
|
||||||
|
* (tone, formalityLevel are identical in both)
|
||||||
*/
|
*/
|
||||||
export class Personality implements PrismaPersonality {
|
export interface PersonalityResponse {
|
||||||
id!: string;
|
id: string;
|
||||||
workspaceId!: string;
|
workspaceId: string;
|
||||||
name!: string; // unique identifier slug
|
name: string;
|
||||||
displayName!: string; // human-readable name
|
description: string | null;
|
||||||
description!: string | null;
|
tone: string;
|
||||||
systemPrompt!: string;
|
formalityLevel: FormalityLevel;
|
||||||
temperature!: number | null; // null = use provider default
|
systemPromptTemplate: string;
|
||||||
maxTokens!: number | null; // null = use provider default
|
isDefault: boolean;
|
||||||
llmProviderInstanceId!: string | null; // FK to LlmProviderInstance
|
isActive: boolean;
|
||||||
isDefault!: boolean;
|
createdAt: Date;
|
||||||
isEnabled!: boolean;
|
updatedAt: Date;
|
||||||
createdAt!: Date;
|
|
||||||
updatedAt!: Date;
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,36 +2,32 @@ import { describe, it, expect, beforeEach, vi } from "vitest";
|
|||||||
import { Test, TestingModule } from "@nestjs/testing";
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
import { PersonalitiesController } from "./personalities.controller";
|
import { PersonalitiesController } from "./personalities.controller";
|
||||||
import { PersonalitiesService } from "./personalities.service";
|
import { PersonalitiesService } from "./personalities.service";
|
||||||
import { CreatePersonalityDto, UpdatePersonalityDto } from "./dto";
|
import type { CreatePersonalityDto } from "./dto/create-personality.dto";
|
||||||
|
import type { UpdatePersonalityDto } from "./dto/update-personality.dto";
|
||||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||||
|
import { FormalityLevel } from "@prisma/client";
|
||||||
|
|
||||||
describe("PersonalitiesController", () => {
|
describe("PersonalitiesController", () => {
|
||||||
let controller: PersonalitiesController;
|
let controller: PersonalitiesController;
|
||||||
let service: PersonalitiesService;
|
let service: PersonalitiesService;
|
||||||
|
|
||||||
const mockWorkspaceId = "workspace-123";
|
const mockWorkspaceId = "workspace-123";
|
||||||
const mockUserId = "user-123";
|
|
||||||
const mockPersonalityId = "personality-123";
|
const mockPersonalityId = "personality-123";
|
||||||
|
|
||||||
|
/** API response shape (frontend field names) */
|
||||||
const mockPersonality = {
|
const mockPersonality = {
|
||||||
id: mockPersonalityId,
|
id: mockPersonalityId,
|
||||||
workspaceId: mockWorkspaceId,
|
workspaceId: mockWorkspaceId,
|
||||||
name: "professional-assistant",
|
name: "professional-assistant",
|
||||||
displayName: "Professional Assistant",
|
|
||||||
description: "A professional communication assistant",
|
description: "A professional communication assistant",
|
||||||
systemPrompt: "You are a professional assistant who helps with tasks.",
|
tone: "professional",
|
||||||
temperature: 0.7,
|
formalityLevel: FormalityLevel.FORMAL,
|
||||||
maxTokens: 2000,
|
systemPromptTemplate: "You are a professional assistant who helps with tasks.",
|
||||||
llmProviderInstanceId: "provider-123",
|
|
||||||
isDefault: true,
|
isDefault: true,
|
||||||
isEnabled: true,
|
isActive: true,
|
||||||
createdAt: new Date(),
|
createdAt: new Date("2026-01-01"),
|
||||||
updatedAt: new Date(),
|
updatedAt: new Date("2026-01-01"),
|
||||||
};
|
|
||||||
|
|
||||||
const mockRequest = {
|
|
||||||
user: { id: mockUserId },
|
|
||||||
workspaceId: mockWorkspaceId,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const mockPersonalitiesService = {
|
const mockPersonalitiesService = {
|
||||||
@@ -57,46 +53,43 @@ describe("PersonalitiesController", () => {
|
|||||||
})
|
})
|
||||||
.overrideGuard(AuthGuard)
|
.overrideGuard(AuthGuard)
|
||||||
.useValue({ canActivate: () => true })
|
.useValue({ canActivate: () => true })
|
||||||
|
.overrideGuard(WorkspaceGuard)
|
||||||
|
.useValue({
|
||||||
|
canActivate: (ctx: {
|
||||||
|
switchToHttp: () => { getRequest: () => { workspaceId: string } };
|
||||||
|
}) => {
|
||||||
|
const req = ctx.switchToHttp().getRequest();
|
||||||
|
req.workspaceId = mockWorkspaceId;
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.overrideGuard(PermissionGuard)
|
||||||
|
.useValue({ canActivate: () => true })
|
||||||
.compile();
|
.compile();
|
||||||
|
|
||||||
controller = module.get<PersonalitiesController>(PersonalitiesController);
|
controller = module.get<PersonalitiesController>(PersonalitiesController);
|
||||||
service = module.get<PersonalitiesService>(PersonalitiesService);
|
service = module.get<PersonalitiesService>(PersonalitiesService);
|
||||||
|
|
||||||
// Reset mocks
|
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("findAll", () => {
|
describe("findAll", () => {
|
||||||
it("should return all personalities", async () => {
|
it("should return success response with personalities list", async () => {
|
||||||
const mockPersonalities = [mockPersonality];
|
const mockList = [mockPersonality];
|
||||||
mockPersonalitiesService.findAll.mockResolvedValue(mockPersonalities);
|
mockPersonalitiesService.findAll.mockResolvedValue(mockList);
|
||||||
|
|
||||||
const result = await controller.findAll(mockRequest);
|
const result = await controller.findAll(mockWorkspaceId, {});
|
||||||
|
|
||||||
expect(result).toEqual(mockPersonalities);
|
expect(result).toEqual({ success: true, data: mockList });
|
||||||
expect(service.findAll).toHaveBeenCalledWith(mockWorkspaceId);
|
expect(service.findAll).toHaveBeenCalledWith(mockWorkspaceId, {});
|
||||||
});
|
});
|
||||||
});
|
|
||||||
|
|
||||||
describe("findOne", () => {
|
it("should pass isActive query filter to service", async () => {
|
||||||
it("should return a personality by id", async () => {
|
mockPersonalitiesService.findAll.mockResolvedValue([mockPersonality]);
|
||||||
mockPersonalitiesService.findOne.mockResolvedValue(mockPersonality);
|
|
||||||
|
|
||||||
const result = await controller.findOne(mockRequest, mockPersonalityId);
|
await controller.findAll(mockWorkspaceId, { isActive: true });
|
||||||
|
|
||||||
expect(result).toEqual(mockPersonality);
|
expect(service.findAll).toHaveBeenCalledWith(mockWorkspaceId, { isActive: true });
|
||||||
expect(service.findOne).toHaveBeenCalledWith(mockWorkspaceId, mockPersonalityId);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("findByName", () => {
|
|
||||||
it("should return a personality by name", async () => {
|
|
||||||
mockPersonalitiesService.findByName.mockResolvedValue(mockPersonality);
|
|
||||||
|
|
||||||
const result = await controller.findByName(mockRequest, "professional-assistant");
|
|
||||||
|
|
||||||
expect(result).toEqual(mockPersonality);
|
|
||||||
expect(service.findByName).toHaveBeenCalledWith(mockWorkspaceId, "professional-assistant");
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -104,32 +97,40 @@ describe("PersonalitiesController", () => {
|
|||||||
it("should return the default personality", async () => {
|
it("should return the default personality", async () => {
|
||||||
mockPersonalitiesService.findDefault.mockResolvedValue(mockPersonality);
|
mockPersonalitiesService.findDefault.mockResolvedValue(mockPersonality);
|
||||||
|
|
||||||
const result = await controller.findDefault(mockRequest);
|
const result = await controller.findDefault(mockWorkspaceId);
|
||||||
|
|
||||||
expect(result).toEqual(mockPersonality);
|
expect(result).toEqual(mockPersonality);
|
||||||
expect(service.findDefault).toHaveBeenCalledWith(mockWorkspaceId);
|
expect(service.findDefault).toHaveBeenCalledWith(mockWorkspaceId);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe("findOne", () => {
|
||||||
|
it("should return a personality by id", async () => {
|
||||||
|
mockPersonalitiesService.findOne.mockResolvedValue(mockPersonality);
|
||||||
|
|
||||||
|
const result = await controller.findOne(mockWorkspaceId, mockPersonalityId);
|
||||||
|
|
||||||
|
expect(result).toEqual(mockPersonality);
|
||||||
|
expect(service.findOne).toHaveBeenCalledWith(mockWorkspaceId, mockPersonalityId);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
describe("create", () => {
|
describe("create", () => {
|
||||||
it("should create a new personality", async () => {
|
it("should create a new personality", async () => {
|
||||||
const createDto: CreatePersonalityDto = {
|
const createDto: CreatePersonalityDto = {
|
||||||
name: "casual-helper",
|
name: "casual-helper",
|
||||||
displayName: "Casual Helper",
|
|
||||||
description: "A casual helper",
|
description: "A casual helper",
|
||||||
systemPrompt: "You are a casual assistant.",
|
tone: "casual",
|
||||||
temperature: 0.8,
|
formalityLevel: FormalityLevel.CASUAL,
|
||||||
maxTokens: 1500,
|
systemPromptTemplate: "You are a casual assistant.",
|
||||||
};
|
};
|
||||||
|
|
||||||
mockPersonalitiesService.create.mockResolvedValue({
|
const created = { ...mockPersonality, ...createDto, isActive: true, isDefault: false };
|
||||||
...mockPersonality,
|
mockPersonalitiesService.create.mockResolvedValue(created);
|
||||||
...createDto,
|
|
||||||
});
|
|
||||||
|
|
||||||
const result = await controller.create(mockRequest, createDto);
|
const result = await controller.create(mockWorkspaceId, createDto);
|
||||||
|
|
||||||
expect(result).toMatchObject(createDto);
|
expect(result).toMatchObject({ name: createDto.name, tone: createDto.tone });
|
||||||
expect(service.create).toHaveBeenCalledWith(mockWorkspaceId, createDto);
|
expect(service.create).toHaveBeenCalledWith(mockWorkspaceId, createDto);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -138,15 +139,13 @@ describe("PersonalitiesController", () => {
|
|||||||
it("should update a personality", async () => {
|
it("should update a personality", async () => {
|
||||||
const updateDto: UpdatePersonalityDto = {
|
const updateDto: UpdatePersonalityDto = {
|
||||||
description: "Updated description",
|
description: "Updated description",
|
||||||
temperature: 0.9,
|
tone: "enthusiastic",
|
||||||
};
|
};
|
||||||
|
|
||||||
mockPersonalitiesService.update.mockResolvedValue({
|
const updated = { ...mockPersonality, ...updateDto };
|
||||||
...mockPersonality,
|
mockPersonalitiesService.update.mockResolvedValue(updated);
|
||||||
...updateDto,
|
|
||||||
});
|
|
||||||
|
|
||||||
const result = await controller.update(mockRequest, mockPersonalityId, updateDto);
|
const result = await controller.update(mockWorkspaceId, mockPersonalityId, updateDto);
|
||||||
|
|
||||||
expect(result).toMatchObject(updateDto);
|
expect(result).toMatchObject(updateDto);
|
||||||
expect(service.update).toHaveBeenCalledWith(mockWorkspaceId, mockPersonalityId, updateDto);
|
expect(service.update).toHaveBeenCalledWith(mockWorkspaceId, mockPersonalityId, updateDto);
|
||||||
@@ -157,7 +156,7 @@ describe("PersonalitiesController", () => {
|
|||||||
it("should delete a personality", async () => {
|
it("should delete a personality", async () => {
|
||||||
mockPersonalitiesService.delete.mockResolvedValue(undefined);
|
mockPersonalitiesService.delete.mockResolvedValue(undefined);
|
||||||
|
|
||||||
await controller.delete(mockRequest, mockPersonalityId);
|
await controller.delete(mockWorkspaceId, mockPersonalityId);
|
||||||
|
|
||||||
expect(service.delete).toHaveBeenCalledWith(mockWorkspaceId, mockPersonalityId);
|
expect(service.delete).toHaveBeenCalledWith(mockWorkspaceId, mockPersonalityId);
|
||||||
});
|
});
|
||||||
@@ -165,12 +164,10 @@ describe("PersonalitiesController", () => {
|
|||||||
|
|
||||||
describe("setDefault", () => {
|
describe("setDefault", () => {
|
||||||
it("should set a personality as default", async () => {
|
it("should set a personality as default", async () => {
|
||||||
mockPersonalitiesService.setDefault.mockResolvedValue({
|
const updated = { ...mockPersonality, isDefault: true };
|
||||||
...mockPersonality,
|
mockPersonalitiesService.setDefault.mockResolvedValue(updated);
|
||||||
isDefault: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
const result = await controller.setDefault(mockRequest, mockPersonalityId);
|
const result = await controller.setDefault(mockWorkspaceId, mockPersonalityId);
|
||||||
|
|
||||||
expect(result).toMatchObject({ isDefault: true });
|
expect(result).toMatchObject({ isDefault: true });
|
||||||
expect(service.setDefault).toHaveBeenCalledWith(mockWorkspaceId, mockPersonalityId);
|
expect(service.setDefault).toHaveBeenCalledWith(mockWorkspaceId, mockPersonalityId);
|
||||||
|
|||||||
@@ -6,105 +6,122 @@ import {
|
|||||||
Delete,
|
Delete,
|
||||||
Body,
|
Body,
|
||||||
Param,
|
Param,
|
||||||
|
Query,
|
||||||
UseGuards,
|
UseGuards,
|
||||||
Req,
|
|
||||||
HttpCode,
|
HttpCode,
|
||||||
HttpStatus,
|
HttpStatus,
|
||||||
} from "@nestjs/common";
|
} from "@nestjs/common";
|
||||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||||
|
import { Workspace, Permission, RequirePermission } from "../common/decorators";
|
||||||
import { PersonalitiesService } from "./personalities.service";
|
import { PersonalitiesService } from "./personalities.service";
|
||||||
import { CreatePersonalityDto, UpdatePersonalityDto } from "./dto";
|
import { CreatePersonalityDto } from "./dto/create-personality.dto";
|
||||||
import { Personality } from "./entities/personality.entity";
|
import { UpdatePersonalityDto } from "./dto/update-personality.dto";
|
||||||
|
import { PersonalityQueryDto } from "./dto/personality-query.dto";
|
||||||
interface AuthenticatedRequest {
|
import type { PersonalityResponse } from "./entities/personality.entity";
|
||||||
user: { id: string };
|
|
||||||
workspaceId: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Controller for managing personality/assistant configurations
|
* Controller for personality CRUD endpoints.
|
||||||
|
* Route: /api/personalities
|
||||||
|
*
|
||||||
|
* Guards applied in order:
|
||||||
|
* 1. AuthGuard - verifies the user is authenticated
|
||||||
|
* 2. WorkspaceGuard - validates workspace access
|
||||||
|
* 3. PermissionGuard - checks role-based permissions
|
||||||
*/
|
*/
|
||||||
@Controller("personality")
|
@Controller("personalities")
|
||||||
@UseGuards(AuthGuard)
|
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
|
||||||
export class PersonalitiesController {
|
export class PersonalitiesController {
|
||||||
constructor(private readonly personalitiesService: PersonalitiesService) {}
|
constructor(private readonly personalitiesService: PersonalitiesService) {}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* List all personalities for the workspace
|
* GET /api/personalities
|
||||||
|
* List all personalities for the workspace.
|
||||||
|
* Supports ?isActive=true|false filter.
|
||||||
*/
|
*/
|
||||||
@Get()
|
@Get()
|
||||||
async findAll(@Req() req: AuthenticatedRequest): Promise<Personality[]> {
|
@RequirePermission(Permission.WORKSPACE_ANY)
|
||||||
return this.personalitiesService.findAll(req.workspaceId);
|
async findAll(
|
||||||
|
@Workspace() workspaceId: string,
|
||||||
|
@Query() query: PersonalityQueryDto
|
||||||
|
): Promise<{ success: true; data: PersonalityResponse[] }> {
|
||||||
|
const data = await this.personalitiesService.findAll(workspaceId, query);
|
||||||
|
return { success: true, data };
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the default personality for the workspace
|
* GET /api/personalities/default
|
||||||
|
* Get the default personality for the workspace.
|
||||||
|
* Must be declared before :id to avoid route conflicts.
|
||||||
*/
|
*/
|
||||||
@Get("default")
|
@Get("default")
|
||||||
async findDefault(@Req() req: AuthenticatedRequest): Promise<Personality> {
|
@RequirePermission(Permission.WORKSPACE_ANY)
|
||||||
return this.personalitiesService.findDefault(req.workspaceId);
|
async findDefault(@Workspace() workspaceId: string): Promise<PersonalityResponse> {
|
||||||
|
return this.personalitiesService.findDefault(workspaceId);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get a personality by its unique name
|
* GET /api/personalities/:id
|
||||||
*/
|
* Get a single personality by ID.
|
||||||
@Get("by-name/:name")
|
|
||||||
async findByName(
|
|
||||||
@Req() req: AuthenticatedRequest,
|
|
||||||
@Param("name") name: string
|
|
||||||
): Promise<Personality> {
|
|
||||||
return this.personalitiesService.findByName(req.workspaceId, name);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get a personality by ID
|
|
||||||
*/
|
*/
|
||||||
@Get(":id")
|
@Get(":id")
|
||||||
async findOne(@Req() req: AuthenticatedRequest, @Param("id") id: string): Promise<Personality> {
|
@RequirePermission(Permission.WORKSPACE_ANY)
|
||||||
return this.personalitiesService.findOne(req.workspaceId, id);
|
async findOne(
|
||||||
|
@Workspace() workspaceId: string,
|
||||||
|
@Param("id") id: string
|
||||||
|
): Promise<PersonalityResponse> {
|
||||||
|
return this.personalitiesService.findOne(workspaceId, id);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create a new personality
|
* POST /api/personalities
|
||||||
|
* Create a new personality.
|
||||||
*/
|
*/
|
||||||
@Post()
|
@Post()
|
||||||
@HttpCode(HttpStatus.CREATED)
|
@HttpCode(HttpStatus.CREATED)
|
||||||
|
@RequirePermission(Permission.WORKSPACE_MEMBER)
|
||||||
async create(
|
async create(
|
||||||
@Req() req: AuthenticatedRequest,
|
@Workspace() workspaceId: string,
|
||||||
@Body() dto: CreatePersonalityDto
|
@Body() dto: CreatePersonalityDto
|
||||||
): Promise<Personality> {
|
): Promise<PersonalityResponse> {
|
||||||
return this.personalitiesService.create(req.workspaceId, dto);
|
return this.personalitiesService.create(workspaceId, dto);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Update a personality
|
* PATCH /api/personalities/:id
|
||||||
|
* Update an existing personality.
|
||||||
*/
|
*/
|
||||||
@Patch(":id")
|
@Patch(":id")
|
||||||
|
@RequirePermission(Permission.WORKSPACE_MEMBER)
|
||||||
async update(
|
async update(
|
||||||
@Req() req: AuthenticatedRequest,
|
@Workspace() workspaceId: string,
|
||||||
@Param("id") id: string,
|
@Param("id") id: string,
|
||||||
@Body() dto: UpdatePersonalityDto
|
@Body() dto: UpdatePersonalityDto
|
||||||
): Promise<Personality> {
|
): Promise<PersonalityResponse> {
|
||||||
return this.personalitiesService.update(req.workspaceId, id, dto);
|
return this.personalitiesService.update(workspaceId, id, dto);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Delete a personality
|
* DELETE /api/personalities/:id
|
||||||
|
* Delete a personality.
|
||||||
*/
|
*/
|
||||||
@Delete(":id")
|
@Delete(":id")
|
||||||
@HttpCode(HttpStatus.NO_CONTENT)
|
@HttpCode(HttpStatus.NO_CONTENT)
|
||||||
async delete(@Req() req: AuthenticatedRequest, @Param("id") id: string): Promise<void> {
|
@RequirePermission(Permission.WORKSPACE_MEMBER)
|
||||||
return this.personalitiesService.delete(req.workspaceId, id);
|
async delete(@Workspace() workspaceId: string, @Param("id") id: string): Promise<void> {
|
||||||
|
return this.personalitiesService.delete(workspaceId, id);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Set a personality as the default
|
* POST /api/personalities/:id/set-default
|
||||||
|
* Convenience endpoint to set a personality as the default.
|
||||||
*/
|
*/
|
||||||
@Post(":id/set-default")
|
@Post(":id/set-default")
|
||||||
|
@RequirePermission(Permission.WORKSPACE_MEMBER)
|
||||||
async setDefault(
|
async setDefault(
|
||||||
@Req() req: AuthenticatedRequest,
|
@Workspace() workspaceId: string,
|
||||||
@Param("id") id: string
|
@Param("id") id: string
|
||||||
): Promise<Personality> {
|
): Promise<PersonalityResponse> {
|
||||||
return this.personalitiesService.setDefault(req.workspaceId, id);
|
return this.personalitiesService.setDefault(workspaceId, id);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,8 +2,10 @@ import { describe, it, expect, beforeEach, vi } from "vitest";
|
|||||||
import { Test, TestingModule } from "@nestjs/testing";
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
import { PersonalitiesService } from "./personalities.service";
|
import { PersonalitiesService } from "./personalities.service";
|
||||||
import { PrismaService } from "../prisma/prisma.service";
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
import { CreatePersonalityDto, UpdatePersonalityDto } from "./dto";
|
import type { CreatePersonalityDto } from "./dto/create-personality.dto";
|
||||||
|
import type { UpdatePersonalityDto } from "./dto/update-personality.dto";
|
||||||
import { NotFoundException, ConflictException } from "@nestjs/common";
|
import { NotFoundException, ConflictException } from "@nestjs/common";
|
||||||
|
import { FormalityLevel } from "@prisma/client";
|
||||||
|
|
||||||
describe("PersonalitiesService", () => {
|
describe("PersonalitiesService", () => {
|
||||||
let service: PersonalitiesService;
|
let service: PersonalitiesService;
|
||||||
@@ -11,22 +13,39 @@ describe("PersonalitiesService", () => {
|
|||||||
|
|
||||||
const mockWorkspaceId = "workspace-123";
|
const mockWorkspaceId = "workspace-123";
|
||||||
const mockPersonalityId = "personality-123";
|
const mockPersonalityId = "personality-123";
|
||||||
const mockProviderId = "provider-123";
|
|
||||||
|
|
||||||
const mockPersonality = {
|
/** Raw Prisma record shape (uses Prisma field names) */
|
||||||
|
const mockPrismaRecord = {
|
||||||
id: mockPersonalityId,
|
id: mockPersonalityId,
|
||||||
workspaceId: mockWorkspaceId,
|
workspaceId: mockWorkspaceId,
|
||||||
name: "professional-assistant",
|
name: "professional-assistant",
|
||||||
displayName: "Professional Assistant",
|
displayName: "Professional Assistant",
|
||||||
description: "A professional communication assistant",
|
description: "A professional communication assistant",
|
||||||
|
tone: "professional",
|
||||||
|
formalityLevel: FormalityLevel.FORMAL,
|
||||||
systemPrompt: "You are a professional assistant who helps with tasks.",
|
systemPrompt: "You are a professional assistant who helps with tasks.",
|
||||||
temperature: 0.7,
|
temperature: 0.7,
|
||||||
maxTokens: 2000,
|
maxTokens: 2000,
|
||||||
llmProviderInstanceId: mockProviderId,
|
llmProviderInstanceId: "provider-123",
|
||||||
isDefault: true,
|
isDefault: true,
|
||||||
isEnabled: true,
|
isEnabled: true,
|
||||||
createdAt: new Date(),
|
createdAt: new Date("2026-01-01"),
|
||||||
updatedAt: new Date(),
|
updatedAt: new Date("2026-01-01"),
|
||||||
|
};
|
||||||
|
|
||||||
|
/** Expected API response shape (uses frontend field names) */
|
||||||
|
const mockResponse = {
|
||||||
|
id: mockPersonalityId,
|
||||||
|
workspaceId: mockWorkspaceId,
|
||||||
|
name: "professional-assistant",
|
||||||
|
description: "A professional communication assistant",
|
||||||
|
tone: "professional",
|
||||||
|
formalityLevel: FormalityLevel.FORMAL,
|
||||||
|
systemPromptTemplate: "You are a professional assistant who helps with tasks.",
|
||||||
|
isDefault: true,
|
||||||
|
isActive: true,
|
||||||
|
createdAt: new Date("2026-01-01"),
|
||||||
|
updatedAt: new Date("2026-01-01"),
|
||||||
};
|
};
|
||||||
|
|
||||||
const mockPrismaService = {
|
const mockPrismaService = {
|
||||||
@@ -37,9 +56,7 @@ describe("PersonalitiesService", () => {
|
|||||||
create: vi.fn(),
|
create: vi.fn(),
|
||||||
update: vi.fn(),
|
update: vi.fn(),
|
||||||
delete: vi.fn(),
|
delete: vi.fn(),
|
||||||
count: vi.fn(),
|
|
||||||
},
|
},
|
||||||
$transaction: vi.fn((callback) => callback(mockPrismaService)),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
@@ -56,44 +73,54 @@ describe("PersonalitiesService", () => {
|
|||||||
service = module.get<PersonalitiesService>(PersonalitiesService);
|
service = module.get<PersonalitiesService>(PersonalitiesService);
|
||||||
prisma = module.get<PrismaService>(PrismaService);
|
prisma = module.get<PrismaService>(PrismaService);
|
||||||
|
|
||||||
// Reset mocks
|
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("create", () => {
|
describe("create", () => {
|
||||||
const createDto: CreatePersonalityDto = {
|
const createDto: CreatePersonalityDto = {
|
||||||
name: "casual-helper",
|
name: "casual-helper",
|
||||||
displayName: "Casual Helper",
|
|
||||||
description: "A casual communication helper",
|
description: "A casual communication helper",
|
||||||
systemPrompt: "You are a casual assistant.",
|
tone: "casual",
|
||||||
temperature: 0.8,
|
formalityLevel: FormalityLevel.CASUAL,
|
||||||
maxTokens: 1500,
|
systemPromptTemplate: "You are a casual assistant.",
|
||||||
llmProviderInstanceId: mockProviderId,
|
isDefault: false,
|
||||||
|
isActive: true,
|
||||||
};
|
};
|
||||||
|
|
||||||
it("should create a new personality", async () => {
|
const createdRecord = {
|
||||||
|
...mockPrismaRecord,
|
||||||
|
name: createDto.name,
|
||||||
|
description: createDto.description,
|
||||||
|
tone: createDto.tone,
|
||||||
|
formalityLevel: createDto.formalityLevel,
|
||||||
|
systemPrompt: createDto.systemPromptTemplate,
|
||||||
|
isDefault: false,
|
||||||
|
isEnabled: true,
|
||||||
|
id: "new-personality-id",
|
||||||
|
};
|
||||||
|
|
||||||
|
it("should create a new personality and return API response shape", async () => {
|
||||||
mockPrismaService.personality.findFirst.mockResolvedValue(null);
|
mockPrismaService.personality.findFirst.mockResolvedValue(null);
|
||||||
mockPrismaService.personality.create.mockResolvedValue({
|
mockPrismaService.personality.create.mockResolvedValue(createdRecord);
|
||||||
...mockPersonality,
|
|
||||||
...createDto,
|
|
||||||
id: "new-personality-id",
|
|
||||||
isDefault: false,
|
|
||||||
isEnabled: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
const result = await service.create(mockWorkspaceId, createDto);
|
const result = await service.create(mockWorkspaceId, createDto);
|
||||||
|
|
||||||
expect(result).toMatchObject(createDto);
|
expect(result.name).toBe(createDto.name);
|
||||||
|
expect(result.tone).toBe(createDto.tone);
|
||||||
|
expect(result.formalityLevel).toBe(createDto.formalityLevel);
|
||||||
|
expect(result.systemPromptTemplate).toBe(createDto.systemPromptTemplate);
|
||||||
|
expect(result.isActive).toBe(true);
|
||||||
|
expect(result.isDefault).toBe(false);
|
||||||
|
|
||||||
expect(prisma.personality.create).toHaveBeenCalledWith({
|
expect(prisma.personality.create).toHaveBeenCalledWith({
|
||||||
data: {
|
data: {
|
||||||
workspaceId: mockWorkspaceId,
|
workspaceId: mockWorkspaceId,
|
||||||
name: createDto.name,
|
name: createDto.name,
|
||||||
displayName: createDto.displayName,
|
displayName: createDto.name,
|
||||||
description: createDto.description ?? null,
|
description: createDto.description ?? null,
|
||||||
systemPrompt: createDto.systemPrompt,
|
tone: createDto.tone,
|
||||||
temperature: createDto.temperature ?? null,
|
formalityLevel: createDto.formalityLevel,
|
||||||
maxTokens: createDto.maxTokens ?? null,
|
systemPrompt: createDto.systemPromptTemplate,
|
||||||
llmProviderInstanceId: createDto.llmProviderInstanceId ?? null,
|
|
||||||
isDefault: false,
|
isDefault: false,
|
||||||
isEnabled: true,
|
isEnabled: true,
|
||||||
},
|
},
|
||||||
@@ -101,68 +128,73 @@ describe("PersonalitiesService", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it("should throw ConflictException when name already exists", async () => {
|
it("should throw ConflictException when name already exists", async () => {
|
||||||
mockPrismaService.personality.findFirst.mockResolvedValue(mockPersonality);
|
mockPrismaService.personality.findFirst.mockResolvedValue(mockPrismaRecord);
|
||||||
|
|
||||||
await expect(service.create(mockWorkspaceId, createDto)).rejects.toThrow(ConflictException);
|
await expect(service.create(mockWorkspaceId, createDto)).rejects.toThrow(ConflictException);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should unset other defaults when creating a new default personality", async () => {
|
it("should unset other defaults when creating a new default personality", async () => {
|
||||||
const createDefaultDto = { ...createDto, isDefault: true };
|
const createDefaultDto: CreatePersonalityDto = { ...createDto, isDefault: true };
|
||||||
// First call to findFirst checks for name conflict (should be null)
|
const otherDefault = { ...mockPrismaRecord, id: "other-id" };
|
||||||
// Second call to findFirst finds the existing default personality
|
|
||||||
mockPrismaService.personality.findFirst
|
mockPrismaService.personality.findFirst
|
||||||
.mockResolvedValueOnce(null) // No name conflict
|
.mockResolvedValueOnce(null) // name conflict check
|
||||||
.mockResolvedValueOnce(mockPersonality); // Existing default
|
.mockResolvedValueOnce(otherDefault); // existing default lookup
|
||||||
mockPrismaService.personality.update.mockResolvedValue({
|
mockPrismaService.personality.update.mockResolvedValue({ ...otherDefault, isDefault: false });
|
||||||
...mockPersonality,
|
|
||||||
isDefault: false,
|
|
||||||
});
|
|
||||||
mockPrismaService.personality.create.mockResolvedValue({
|
mockPrismaService.personality.create.mockResolvedValue({
|
||||||
...mockPersonality,
|
...createdRecord,
|
||||||
...createDefaultDto,
|
isDefault: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
await service.create(mockWorkspaceId, createDefaultDto);
|
await service.create(mockWorkspaceId, createDefaultDto);
|
||||||
|
|
||||||
expect(prisma.personality.update).toHaveBeenCalledWith({
|
expect(prisma.personality.update).toHaveBeenCalledWith({
|
||||||
where: { id: mockPersonalityId },
|
where: { id: "other-id" },
|
||||||
data: { isDefault: false },
|
data: { isDefault: false },
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("findAll", () => {
|
describe("findAll", () => {
|
||||||
it("should return all personalities for a workspace", async () => {
|
it("should return mapped response list for a workspace", async () => {
|
||||||
const mockPersonalities = [mockPersonality];
|
mockPrismaService.personality.findMany.mockResolvedValue([mockPrismaRecord]);
|
||||||
mockPrismaService.personality.findMany.mockResolvedValue(mockPersonalities);
|
|
||||||
|
|
||||||
const result = await service.findAll(mockWorkspaceId);
|
const result = await service.findAll(mockWorkspaceId);
|
||||||
|
|
||||||
expect(result).toEqual(mockPersonalities);
|
expect(result).toHaveLength(1);
|
||||||
|
expect(result[0]).toEqual(mockResponse);
|
||||||
expect(prisma.personality.findMany).toHaveBeenCalledWith({
|
expect(prisma.personality.findMany).toHaveBeenCalledWith({
|
||||||
where: { workspaceId: mockWorkspaceId },
|
where: { workspaceId: mockWorkspaceId },
|
||||||
orderBy: [{ isDefault: "desc" }, { name: "asc" }],
|
orderBy: [{ isDefault: "desc" }, { name: "asc" }],
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should filter by isActive when provided", async () => {
|
||||||
|
mockPrismaService.personality.findMany.mockResolvedValue([mockPrismaRecord]);
|
||||||
|
|
||||||
|
await service.findAll(mockWorkspaceId, { isActive: true });
|
||||||
|
|
||||||
|
expect(prisma.personality.findMany).toHaveBeenCalledWith({
|
||||||
|
where: { workspaceId: mockWorkspaceId, isEnabled: true },
|
||||||
|
orderBy: [{ isDefault: "desc" }, { name: "asc" }],
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("findOne", () => {
|
describe("findOne", () => {
|
||||||
it("should return a personality by id", async () => {
|
it("should return a mapped personality response by id", async () => {
|
||||||
mockPrismaService.personality.findUnique.mockResolvedValue(mockPersonality);
|
mockPrismaService.personality.findFirst.mockResolvedValue(mockPrismaRecord);
|
||||||
|
|
||||||
const result = await service.findOne(mockWorkspaceId, mockPersonalityId);
|
const result = await service.findOne(mockWorkspaceId, mockPersonalityId);
|
||||||
|
|
||||||
expect(result).toEqual(mockPersonality);
|
expect(result).toEqual(mockResponse);
|
||||||
expect(prisma.personality.findUnique).toHaveBeenCalledWith({
|
expect(prisma.personality.findFirst).toHaveBeenCalledWith({
|
||||||
where: {
|
where: { id: mockPersonalityId, workspaceId: mockWorkspaceId },
|
||||||
id: mockPersonalityId,
|
|
||||||
workspaceId: mockWorkspaceId,
|
|
||||||
},
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw NotFoundException when personality not found", async () => {
|
it("should throw NotFoundException when personality not found", async () => {
|
||||||
mockPrismaService.personality.findUnique.mockResolvedValue(null);
|
mockPrismaService.personality.findFirst.mockResolvedValue(null);
|
||||||
|
|
||||||
await expect(service.findOne(mockWorkspaceId, mockPersonalityId)).rejects.toThrow(
|
await expect(service.findOne(mockWorkspaceId, mockPersonalityId)).rejects.toThrow(
|
||||||
NotFoundException
|
NotFoundException
|
||||||
@@ -171,17 +203,14 @@ describe("PersonalitiesService", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
describe("findByName", () => {
|
describe("findByName", () => {
|
||||||
it("should return a personality by name", async () => {
|
it("should return a mapped personality response by name", async () => {
|
||||||
mockPrismaService.personality.findFirst.mockResolvedValue(mockPersonality);
|
mockPrismaService.personality.findFirst.mockResolvedValue(mockPrismaRecord);
|
||||||
|
|
||||||
const result = await service.findByName(mockWorkspaceId, "professional-assistant");
|
const result = await service.findByName(mockWorkspaceId, "professional-assistant");
|
||||||
|
|
||||||
expect(result).toEqual(mockPersonality);
|
expect(result).toEqual(mockResponse);
|
||||||
expect(prisma.personality.findFirst).toHaveBeenCalledWith({
|
expect(prisma.personality.findFirst).toHaveBeenCalledWith({
|
||||||
where: {
|
where: { workspaceId: mockWorkspaceId, name: "professional-assistant" },
|
||||||
workspaceId: mockWorkspaceId,
|
|
||||||
name: "professional-assistant",
|
|
||||||
},
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -196,11 +225,11 @@ describe("PersonalitiesService", () => {
|
|||||||
|
|
||||||
describe("findDefault", () => {
|
describe("findDefault", () => {
|
||||||
it("should return the default personality", async () => {
|
it("should return the default personality", async () => {
|
||||||
mockPrismaService.personality.findFirst.mockResolvedValue(mockPersonality);
|
mockPrismaService.personality.findFirst.mockResolvedValue(mockPrismaRecord);
|
||||||
|
|
||||||
const result = await service.findDefault(mockWorkspaceId);
|
const result = await service.findDefault(mockWorkspaceId);
|
||||||
|
|
||||||
expect(result).toEqual(mockPersonality);
|
expect(result).toEqual(mockResponse);
|
||||||
expect(prisma.personality.findFirst).toHaveBeenCalledWith({
|
expect(prisma.personality.findFirst).toHaveBeenCalledWith({
|
||||||
where: { workspaceId: mockWorkspaceId, isDefault: true, isEnabled: true },
|
where: { workspaceId: mockWorkspaceId, isDefault: true, isEnabled: true },
|
||||||
});
|
});
|
||||||
@@ -216,41 +245,45 @@ describe("PersonalitiesService", () => {
|
|||||||
describe("update", () => {
|
describe("update", () => {
|
||||||
const updateDto: UpdatePersonalityDto = {
|
const updateDto: UpdatePersonalityDto = {
|
||||||
description: "Updated description",
|
description: "Updated description",
|
||||||
temperature: 0.9,
|
tone: "formal",
|
||||||
|
isActive: false,
|
||||||
};
|
};
|
||||||
|
|
||||||
it("should update a personality", async () => {
|
it("should update a personality and return mapped response", async () => {
|
||||||
mockPrismaService.personality.findUnique.mockResolvedValue(mockPersonality);
|
const updatedRecord = {
|
||||||
mockPrismaService.personality.findFirst.mockResolvedValue(null);
|
...mockPrismaRecord,
|
||||||
mockPrismaService.personality.update.mockResolvedValue({
|
description: updateDto.description,
|
||||||
...mockPersonality,
|
tone: updateDto.tone,
|
||||||
...updateDto,
|
isEnabled: false,
|
||||||
});
|
};
|
||||||
|
|
||||||
|
mockPrismaService.personality.findFirst
|
||||||
|
.mockResolvedValueOnce(mockPrismaRecord) // findOne check
|
||||||
|
.mockResolvedValueOnce(null); // name conflict check (no dto.name here)
|
||||||
|
mockPrismaService.personality.update.mockResolvedValue(updatedRecord);
|
||||||
|
|
||||||
const result = await service.update(mockWorkspaceId, mockPersonalityId, updateDto);
|
const result = await service.update(mockWorkspaceId, mockPersonalityId, updateDto);
|
||||||
|
|
||||||
expect(result).toMatchObject(updateDto);
|
expect(result.description).toBe(updateDto.description);
|
||||||
expect(prisma.personality.update).toHaveBeenCalledWith({
|
expect(result.tone).toBe(updateDto.tone);
|
||||||
where: { id: mockPersonalityId },
|
expect(result.isActive).toBe(false);
|
||||||
data: updateDto,
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw NotFoundException when personality not found", async () => {
|
it("should throw NotFoundException when personality not found", async () => {
|
||||||
mockPrismaService.personality.findUnique.mockResolvedValue(null);
|
mockPrismaService.personality.findFirst.mockResolvedValue(null);
|
||||||
|
|
||||||
await expect(service.update(mockWorkspaceId, mockPersonalityId, updateDto)).rejects.toThrow(
|
await expect(service.update(mockWorkspaceId, mockPersonalityId, updateDto)).rejects.toThrow(
|
||||||
NotFoundException
|
NotFoundException
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw ConflictException when updating to existing name", async () => {
|
it("should throw ConflictException when updating to an existing name", async () => {
|
||||||
const updateNameDto = { name: "existing-name" };
|
const updateNameDto: UpdatePersonalityDto = { name: "existing-name" };
|
||||||
mockPrismaService.personality.findUnique.mockResolvedValue(mockPersonality);
|
const conflictRecord = { ...mockPrismaRecord, id: "different-id" };
|
||||||
mockPrismaService.personality.findFirst.mockResolvedValue({
|
|
||||||
...mockPersonality,
|
mockPrismaService.personality.findFirst
|
||||||
id: "different-id",
|
.mockResolvedValueOnce(mockPrismaRecord) // findOne check
|
||||||
});
|
.mockResolvedValueOnce(conflictRecord); // name conflict
|
||||||
|
|
||||||
await expect(
|
await expect(
|
||||||
service.update(mockWorkspaceId, mockPersonalityId, updateNameDto)
|
service.update(mockWorkspaceId, mockPersonalityId, updateNameDto)
|
||||||
@@ -258,14 +291,16 @@ describe("PersonalitiesService", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it("should unset other defaults when setting as default", async () => {
|
it("should unset other defaults when setting as default", async () => {
|
||||||
const updateDefaultDto = { isDefault: true };
|
const updateDefaultDto: UpdatePersonalityDto = { isDefault: true };
|
||||||
const otherPersonality = { ...mockPersonality, id: "other-id", isDefault: true };
|
const otherPersonality = { ...mockPrismaRecord, id: "other-id", isDefault: true };
|
||||||
|
const updatedRecord = { ...mockPrismaRecord, isDefault: true };
|
||||||
|
|
||||||
mockPrismaService.personality.findUnique.mockResolvedValue(mockPersonality);
|
mockPrismaService.personality.findFirst
|
||||||
mockPrismaService.personality.findFirst.mockResolvedValue(otherPersonality); // Existing default from unsetOtherDefaults
|
.mockResolvedValueOnce(mockPrismaRecord) // findOne check
|
||||||
|
.mockResolvedValueOnce(otherPersonality); // unsetOtherDefaults lookup
|
||||||
mockPrismaService.personality.update
|
mockPrismaService.personality.update
|
||||||
.mockResolvedValueOnce({ ...otherPersonality, isDefault: false }) // Unset old default
|
.mockResolvedValueOnce({ ...otherPersonality, isDefault: false })
|
||||||
.mockResolvedValueOnce({ ...mockPersonality, isDefault: true }); // Set new default
|
.mockResolvedValueOnce(updatedRecord);
|
||||||
|
|
||||||
await service.update(mockWorkspaceId, mockPersonalityId, updateDefaultDto);
|
await service.update(mockWorkspaceId, mockPersonalityId, updateDefaultDto);
|
||||||
|
|
||||||
@@ -273,16 +308,12 @@ describe("PersonalitiesService", () => {
|
|||||||
where: { id: "other-id" },
|
where: { id: "other-id" },
|
||||||
data: { isDefault: false },
|
data: { isDefault: false },
|
||||||
});
|
});
|
||||||
expect(prisma.personality.update).toHaveBeenNthCalledWith(2, {
|
|
||||||
where: { id: mockPersonalityId },
|
|
||||||
data: updateDefaultDto,
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("delete", () => {
|
describe("delete", () => {
|
||||||
it("should delete a personality", async () => {
|
it("should delete a personality", async () => {
|
||||||
mockPrismaService.personality.findUnique.mockResolvedValue(mockPersonality);
|
mockPrismaService.personality.findFirst.mockResolvedValue(mockPrismaRecord);
|
||||||
mockPrismaService.personality.delete.mockResolvedValue(undefined);
|
mockPrismaService.personality.delete.mockResolvedValue(undefined);
|
||||||
|
|
||||||
await service.delete(mockWorkspaceId, mockPersonalityId);
|
await service.delete(mockWorkspaceId, mockPersonalityId);
|
||||||
@@ -293,7 +324,7 @@ describe("PersonalitiesService", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it("should throw NotFoundException when personality not found", async () => {
|
it("should throw NotFoundException when personality not found", async () => {
|
||||||
mockPrismaService.personality.findUnique.mockResolvedValue(null);
|
mockPrismaService.personality.findFirst.mockResolvedValue(null);
|
||||||
|
|
||||||
await expect(service.delete(mockWorkspaceId, mockPersonalityId)).rejects.toThrow(
|
await expect(service.delete(mockWorkspaceId, mockPersonalityId)).rejects.toThrow(
|
||||||
NotFoundException
|
NotFoundException
|
||||||
@@ -303,30 +334,27 @@ describe("PersonalitiesService", () => {
|
|||||||
|
|
||||||
describe("setDefault", () => {
|
describe("setDefault", () => {
|
||||||
it("should set a personality as default", async () => {
|
it("should set a personality as default", async () => {
|
||||||
const otherPersonality = { ...mockPersonality, id: "other-id", isDefault: true };
|
const otherPersonality = { ...mockPrismaRecord, id: "other-id", isDefault: true };
|
||||||
const updatedPersonality = { ...mockPersonality, isDefault: true };
|
const updatedRecord = { ...mockPrismaRecord, isDefault: true };
|
||||||
|
|
||||||
mockPrismaService.personality.findUnique.mockResolvedValue(mockPersonality);
|
mockPrismaService.personality.findFirst
|
||||||
mockPrismaService.personality.findFirst.mockResolvedValue(otherPersonality);
|
.mockResolvedValueOnce(mockPrismaRecord) // findOne check
|
||||||
|
.mockResolvedValueOnce(otherPersonality); // unsetOtherDefaults lookup
|
||||||
mockPrismaService.personality.update
|
mockPrismaService.personality.update
|
||||||
.mockResolvedValueOnce({ ...otherPersonality, isDefault: false }) // Unset old default
|
.mockResolvedValueOnce({ ...otherPersonality, isDefault: false })
|
||||||
.mockResolvedValueOnce(updatedPersonality); // Set new default
|
.mockResolvedValueOnce(updatedRecord);
|
||||||
|
|
||||||
const result = await service.setDefault(mockWorkspaceId, mockPersonalityId);
|
const result = await service.setDefault(mockWorkspaceId, mockPersonalityId);
|
||||||
|
|
||||||
expect(result).toMatchObject({ isDefault: true });
|
expect(result.isDefault).toBe(true);
|
||||||
expect(prisma.personality.update).toHaveBeenNthCalledWith(1, {
|
expect(prisma.personality.update).toHaveBeenCalledWith({
|
||||||
where: { id: "other-id" },
|
|
||||||
data: { isDefault: false },
|
|
||||||
});
|
|
||||||
expect(prisma.personality.update).toHaveBeenNthCalledWith(2, {
|
|
||||||
where: { id: mockPersonalityId },
|
where: { id: mockPersonalityId },
|
||||||
data: { isDefault: true },
|
data: { isDefault: true },
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw NotFoundException when personality not found", async () => {
|
it("should throw NotFoundException when personality not found", async () => {
|
||||||
mockPrismaService.personality.findUnique.mockResolvedValue(null);
|
mockPrismaService.personality.findFirst.mockResolvedValue(null);
|
||||||
|
|
||||||
await expect(service.setDefault(mockWorkspaceId, mockPersonalityId)).rejects.toThrow(
|
await expect(service.setDefault(mockWorkspaceId, mockPersonalityId)).rejects.toThrow(
|
||||||
NotFoundException
|
NotFoundException
|
||||||
|
|||||||
@@ -1,10 +1,17 @@
|
|||||||
import { Injectable, NotFoundException, ConflictException, Logger } from "@nestjs/common";
|
import { Injectable, NotFoundException, ConflictException, Logger } from "@nestjs/common";
|
||||||
|
import type { FormalityLevel, Personality } from "@prisma/client";
|
||||||
import { PrismaService } from "../prisma/prisma.service";
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
import { CreatePersonalityDto, UpdatePersonalityDto } from "./dto";
|
import type { CreatePersonalityDto } from "./dto/create-personality.dto";
|
||||||
import { Personality } from "./entities/personality.entity";
|
import type { UpdatePersonalityDto } from "./dto/update-personality.dto";
|
||||||
|
import type { PersonalityQueryDto } from "./dto/personality-query.dto";
|
||||||
|
import type { PersonalityResponse } from "./entities/personality.entity";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Service for managing personality/assistant configurations
|
* Service for managing personality/assistant configurations.
|
||||||
|
*
|
||||||
|
* Field mapping:
|
||||||
|
* Prisma `systemPrompt` <-> API/frontend `systemPromptTemplate`
|
||||||
|
* Prisma `isEnabled` <-> API/frontend `isActive`
|
||||||
*/
|
*/
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class PersonalitiesService {
|
export class PersonalitiesService {
|
||||||
@@ -12,11 +19,30 @@ export class PersonalitiesService {
|
|||||||
|
|
||||||
constructor(private readonly prisma: PrismaService) {}
|
constructor(private readonly prisma: PrismaService) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Map a Prisma Personality record to the API response shape.
|
||||||
|
*/
|
||||||
|
private toResponse(personality: Personality): PersonalityResponse {
|
||||||
|
return {
|
||||||
|
id: personality.id,
|
||||||
|
workspaceId: personality.workspaceId,
|
||||||
|
name: personality.name,
|
||||||
|
description: personality.description,
|
||||||
|
tone: personality.tone,
|
||||||
|
formalityLevel: personality.formalityLevel,
|
||||||
|
systemPromptTemplate: personality.systemPrompt,
|
||||||
|
isDefault: personality.isDefault,
|
||||||
|
isActive: personality.isEnabled,
|
||||||
|
createdAt: personality.createdAt,
|
||||||
|
updatedAt: personality.updatedAt,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create a new personality
|
* Create a new personality
|
||||||
*/
|
*/
|
||||||
async create(workspaceId: string, dto: CreatePersonalityDto): Promise<Personality> {
|
async create(workspaceId: string, dto: CreatePersonalityDto): Promise<PersonalityResponse> {
|
||||||
// Check for duplicate name
|
// Check for duplicate name within workspace
|
||||||
const existing = await this.prisma.personality.findFirst({
|
const existing = await this.prisma.personality.findFirst({
|
||||||
where: { workspaceId, name: dto.name },
|
where: { workspaceId, name: dto.name },
|
||||||
});
|
});
|
||||||
@@ -25,7 +51,7 @@ export class PersonalitiesService {
|
|||||||
throw new ConflictException(`Personality with name "${dto.name}" already exists`);
|
throw new ConflictException(`Personality with name "${dto.name}" already exists`);
|
||||||
}
|
}
|
||||||
|
|
||||||
// If creating a default personality, unset other defaults
|
// If creating as default, unset other defaults first
|
||||||
if (dto.isDefault) {
|
if (dto.isDefault) {
|
||||||
await this.unsetOtherDefaults(workspaceId);
|
await this.unsetOtherDefaults(workspaceId);
|
||||||
}
|
}
|
||||||
@@ -34,36 +60,43 @@ export class PersonalitiesService {
|
|||||||
data: {
|
data: {
|
||||||
workspaceId,
|
workspaceId,
|
||||||
name: dto.name,
|
name: dto.name,
|
||||||
displayName: dto.displayName,
|
displayName: dto.name, // use name as displayName since frontend doesn't send displayName separately
|
||||||
description: dto.description ?? null,
|
description: dto.description ?? null,
|
||||||
systemPrompt: dto.systemPrompt,
|
tone: dto.tone,
|
||||||
temperature: dto.temperature ?? null,
|
formalityLevel: dto.formalityLevel,
|
||||||
maxTokens: dto.maxTokens ?? null,
|
systemPrompt: dto.systemPromptTemplate,
|
||||||
llmProviderInstanceId: dto.llmProviderInstanceId ?? null,
|
|
||||||
isDefault: dto.isDefault ?? false,
|
isDefault: dto.isDefault ?? false,
|
||||||
isEnabled: dto.isEnabled ?? true,
|
isEnabled: dto.isActive ?? true,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
this.logger.log(`Created personality ${personality.id} for workspace ${workspaceId}`);
|
this.logger.log(`Created personality ${personality.id} for workspace ${workspaceId}`);
|
||||||
return personality;
|
return this.toResponse(personality);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Find all personalities for a workspace
|
* Find all personalities for a workspace with optional active filter
|
||||||
*/
|
*/
|
||||||
async findAll(workspaceId: string): Promise<Personality[]> {
|
async findAll(workspaceId: string, query?: PersonalityQueryDto): Promise<PersonalityResponse[]> {
|
||||||
return this.prisma.personality.findMany({
|
const where: { workspaceId: string; isEnabled?: boolean } = { workspaceId };
|
||||||
where: { workspaceId },
|
|
||||||
|
if (query?.isActive !== undefined) {
|
||||||
|
where.isEnabled = query.isActive;
|
||||||
|
}
|
||||||
|
|
||||||
|
const personalities = await this.prisma.personality.findMany({
|
||||||
|
where,
|
||||||
orderBy: [{ isDefault: "desc" }, { name: "asc" }],
|
orderBy: [{ isDefault: "desc" }, { name: "asc" }],
|
||||||
});
|
});
|
||||||
|
|
||||||
|
return personalities.map((p) => this.toResponse(p));
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Find a specific personality by ID
|
* Find a specific personality by ID
|
||||||
*/
|
*/
|
||||||
async findOne(workspaceId: string, id: string): Promise<Personality> {
|
async findOne(workspaceId: string, id: string): Promise<PersonalityResponse> {
|
||||||
const personality = await this.prisma.personality.findUnique({
|
const personality = await this.prisma.personality.findFirst({
|
||||||
where: { id, workspaceId },
|
where: { id, workspaceId },
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -71,13 +104,13 @@ export class PersonalitiesService {
|
|||||||
throw new NotFoundException(`Personality with ID ${id} not found`);
|
throw new NotFoundException(`Personality with ID ${id} not found`);
|
||||||
}
|
}
|
||||||
|
|
||||||
return personality;
|
return this.toResponse(personality);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Find a personality by name
|
* Find a personality by name slug
|
||||||
*/
|
*/
|
||||||
async findByName(workspaceId: string, name: string): Promise<Personality> {
|
async findByName(workspaceId: string, name: string): Promise<PersonalityResponse> {
|
||||||
const personality = await this.prisma.personality.findFirst({
|
const personality = await this.prisma.personality.findFirst({
|
||||||
where: { workspaceId, name },
|
where: { workspaceId, name },
|
||||||
});
|
});
|
||||||
@@ -86,13 +119,13 @@ export class PersonalitiesService {
|
|||||||
throw new NotFoundException(`Personality with name "${name}" not found`);
|
throw new NotFoundException(`Personality with name "${name}" not found`);
|
||||||
}
|
}
|
||||||
|
|
||||||
return personality;
|
return this.toResponse(personality);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Find the default personality for a workspace
|
* Find the default (and enabled) personality for a workspace
|
||||||
*/
|
*/
|
||||||
async findDefault(workspaceId: string): Promise<Personality> {
|
async findDefault(workspaceId: string): Promise<PersonalityResponse> {
|
||||||
const personality = await this.prisma.personality.findFirst({
|
const personality = await this.prisma.personality.findFirst({
|
||||||
where: { workspaceId, isDefault: true, isEnabled: true },
|
where: { workspaceId, isDefault: true, isEnabled: true },
|
||||||
});
|
});
|
||||||
@@ -101,14 +134,18 @@ export class PersonalitiesService {
|
|||||||
throw new NotFoundException(`No default personality found for workspace ${workspaceId}`);
|
throw new NotFoundException(`No default personality found for workspace ${workspaceId}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
return personality;
|
return this.toResponse(personality);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Update an existing personality
|
* Update an existing personality
|
||||||
*/
|
*/
|
||||||
async update(workspaceId: string, id: string, dto: UpdatePersonalityDto): Promise<Personality> {
|
async update(
|
||||||
// Check existence
|
workspaceId: string,
|
||||||
|
id: string,
|
||||||
|
dto: UpdatePersonalityDto
|
||||||
|
): Promise<PersonalityResponse> {
|
||||||
|
// Verify existence
|
||||||
await this.findOne(workspaceId, id);
|
await this.findOne(workspaceId, id);
|
||||||
|
|
||||||
// Check for duplicate name if updating name
|
// Check for duplicate name if updating name
|
||||||
@@ -127,20 +164,43 @@ export class PersonalitiesService {
|
|||||||
await this.unsetOtherDefaults(workspaceId, id);
|
await this.unsetOtherDefaults(workspaceId, id);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Build update data with field mapping
|
||||||
|
const updateData: {
|
||||||
|
name?: string;
|
||||||
|
displayName?: string;
|
||||||
|
description?: string;
|
||||||
|
tone?: string;
|
||||||
|
formalityLevel?: FormalityLevel;
|
||||||
|
systemPrompt?: string;
|
||||||
|
isDefault?: boolean;
|
||||||
|
isEnabled?: boolean;
|
||||||
|
} = {};
|
||||||
|
|
||||||
|
if (dto.name !== undefined) {
|
||||||
|
updateData.name = dto.name;
|
||||||
|
updateData.displayName = dto.name;
|
||||||
|
}
|
||||||
|
if (dto.description !== undefined) updateData.description = dto.description;
|
||||||
|
if (dto.tone !== undefined) updateData.tone = dto.tone;
|
||||||
|
if (dto.formalityLevel !== undefined) updateData.formalityLevel = dto.formalityLevel;
|
||||||
|
if (dto.systemPromptTemplate !== undefined) updateData.systemPrompt = dto.systemPromptTemplate;
|
||||||
|
if (dto.isDefault !== undefined) updateData.isDefault = dto.isDefault;
|
||||||
|
if (dto.isActive !== undefined) updateData.isEnabled = dto.isActive;
|
||||||
|
|
||||||
const personality = await this.prisma.personality.update({
|
const personality = await this.prisma.personality.update({
|
||||||
where: { id },
|
where: { id },
|
||||||
data: dto,
|
data: updateData,
|
||||||
});
|
});
|
||||||
|
|
||||||
this.logger.log(`Updated personality ${id} for workspace ${workspaceId}`);
|
this.logger.log(`Updated personality ${id} for workspace ${workspaceId}`);
|
||||||
return personality;
|
return this.toResponse(personality);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Delete a personality
|
* Delete a personality
|
||||||
*/
|
*/
|
||||||
async delete(workspaceId: string, id: string): Promise<void> {
|
async delete(workspaceId: string, id: string): Promise<void> {
|
||||||
// Check existence
|
// Verify existence
|
||||||
await this.findOne(workspaceId, id);
|
await this.findOne(workspaceId, id);
|
||||||
|
|
||||||
await this.prisma.personality.delete({
|
await this.prisma.personality.delete({
|
||||||
@@ -151,23 +211,22 @@ export class PersonalitiesService {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Set a personality as the default
|
* Set a personality as the default (convenience endpoint)
|
||||||
*/
|
*/
|
||||||
async setDefault(workspaceId: string, id: string): Promise<Personality> {
|
async setDefault(workspaceId: string, id: string): Promise<PersonalityResponse> {
|
||||||
// Check existence
|
// Verify existence
|
||||||
await this.findOne(workspaceId, id);
|
await this.findOne(workspaceId, id);
|
||||||
|
|
||||||
// Unset other defaults
|
// Unset other defaults
|
||||||
await this.unsetOtherDefaults(workspaceId, id);
|
await this.unsetOtherDefaults(workspaceId, id);
|
||||||
|
|
||||||
// Set this one as default
|
|
||||||
const personality = await this.prisma.personality.update({
|
const personality = await this.prisma.personality.update({
|
||||||
where: { id },
|
where: { id },
|
||||||
data: { isDefault: true },
|
data: { isDefault: true },
|
||||||
});
|
});
|
||||||
|
|
||||||
this.logger.log(`Set personality ${id} as default for workspace ${workspaceId}`);
|
this.logger.log(`Set personality ${id} as default for workspace ${workspaceId}`);
|
||||||
return personality;
|
return this.toResponse(personality);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -178,7 +237,7 @@ export class PersonalitiesService {
|
|||||||
where: {
|
where: {
|
||||||
workspaceId,
|
workspaceId,
|
||||||
isDefault: true,
|
isDefault: true,
|
||||||
...(excludeId && { id: { not: excludeId } }),
|
...(excludeId !== undefined && { id: { not: excludeId } }),
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -140,8 +140,11 @@ export class PrismaService extends PrismaClient implements OnModuleInit, OnModul
|
|||||||
workspaceId: string,
|
workspaceId: string,
|
||||||
client: PrismaClient = this
|
client: PrismaClient = this
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
await client.$executeRaw`SET LOCAL app.current_user_id = ${userId}`;
|
// Use set_config() instead of SET LOCAL so values are safely parameterized.
|
||||||
await client.$executeRaw`SET LOCAL app.current_workspace_id = ${workspaceId}`;
|
// SET LOCAL with Prisma's tagged template produces invalid SQL (bind parameter $1
|
||||||
|
// is not supported in SET statements by PostgreSQL).
|
||||||
|
await client.$executeRaw`SELECT set_config('app.current_user_id', ${userId}, true)`;
|
||||||
|
await client.$executeRaw`SELECT set_config('app.current_workspace_id', ${workspaceId}, true)`;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -151,8 +154,8 @@ export class PrismaService extends PrismaClient implements OnModuleInit, OnModul
|
|||||||
* @param client - Optional Prisma client (uses 'this' if not provided)
|
* @param client - Optional Prisma client (uses 'this' if not provided)
|
||||||
*/
|
*/
|
||||||
async clearWorkspaceContext(client: PrismaClient = this): Promise<void> {
|
async clearWorkspaceContext(client: PrismaClient = this): Promise<void> {
|
||||||
await client.$executeRaw`SET LOCAL app.current_user_id = NULL`;
|
await client.$executeRaw`SELECT set_config('app.current_user_id', '', true)`;
|
||||||
await client.$executeRaw`SET LOCAL app.current_workspace_id = NULL`;
|
await client.$executeRaw`SELECT set_config('app.current_workspace_id', '', true)`;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -50,6 +50,12 @@ export class CreateTaskDto {
|
|||||||
@IsUUID("4", { message: "parentId must be a valid UUID" })
|
@IsUUID("4", { message: "parentId must be a valid UUID" })
|
||||||
parentId?: string;
|
parentId?: string;
|
||||||
|
|
||||||
|
@IsOptional()
|
||||||
|
@IsString({ message: "assignedAgent must be a string" })
|
||||||
|
@MinLength(1, { message: "assignedAgent must not be empty" })
|
||||||
|
@MaxLength(255, { message: "assignedAgent must not exceed 255 characters" })
|
||||||
|
assignedAgent?: string;
|
||||||
|
|
||||||
@IsOptional()
|
@IsOptional()
|
||||||
@IsInt({ message: "sortOrder must be an integer" })
|
@IsInt({ message: "sortOrder must be an integer" })
|
||||||
@Min(0, { message: "sortOrder must be at least 0" })
|
@Min(0, { message: "sortOrder must be at least 0" })
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user