Compare commits
2 Commits
feat/ms22-...chore/task

| Author | SHA1 | Date |
|---|---|---|
| | f9588d2ce1 | |
| | 294446043a | |
@@ -34,9 +34,3 @@ CVE-2026-26996 # HIGH: minimatch DoS via specially crafted glob patterns (needs
# OpenBao 2.5.0 compiled with Go 1.25.6, fix needs Go >= 1.25.7.
# Cannot build OpenBao from source (large project). Waiting for upstream release.
CVE-2025-68121 # CRITICAL: crypto/tls session resumption

# === multer CVEs (upstream via @nestjs/platform-express) ===
# multer <2.1.0 — waiting on NestJS to update their dependency
# These are DoS vulnerabilities in file upload handling
GHSA-xf7r-hgr6-v32p # HIGH: DoS via incomplete cleanup
GHSA-v52c-386h-88mc # HIGH: DoS via resource exhaustion
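The suppressions above are consumed by the `security-trivy-*` steps added below via `--ignorefile`. A suppression can be sanity-checked locally with the same invocation the pipeline uses (a sketch; assumes trivy is installed, you are authenticated to git.mosaicstack.dev, and a `latest` image has been pushed):

```sh
# Re-run the pipeline's container scan locally with the repo's ignore file.
trivy image \
  --exit-code 1 \
  --severity HIGH,CRITICAL \
  --ignore-unfixed \
  --ignorefile .trivyignore \
  git.mosaicstack.dev/mosaic/stack-api:latest

# Exit 0: every remaining HIGH/CRITICAL finding is suppressed or has no fix yet.
# Exit 1: a new unsuppressed finding surfaced, same condition that fails CI.
```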
232 .woodpecker/api.yml (Normal file)
@@ -0,0 +1,232 @@
# API Pipeline - Mosaic Stack
# Quality gates, build, and Docker publish for @mosaic/api
#
# Triggers on: apps/api/**, packages/**, root configs
# Security chain: source audit + Trivy container scan

when:
  - event: [push, pull_request, manual]
    path:
      include:
        - "apps/api/**"
        - "packages/**"
        - "pnpm-lock.yaml"
        - "pnpm-workspace.yaml"
        - "turbo.json"
        - "package.json"
        - ".woodpecker/api.yml"
        - ".trivyignore"

variables:
  - &node_image "node:24-alpine"
  - &install_deps |
    corepack enable
    pnpm install --frozen-lockfile
  - &use_deps |
    corepack enable
  - &turbo_env
    TURBO_API:
      from_secret: turbo_api
    TURBO_TOKEN:
      from_secret: turbo_token
    TURBO_TEAM:
      from_secret: turbo_team
  - &kaniko_setup |
    mkdir -p /kaniko/.docker
    echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json

services:
  postgres:
    image: postgres:17.7-alpine3.22
    environment:
      POSTGRES_DB: test_db
      POSTGRES_USER: test_user
      POSTGRES_PASSWORD: test_password

steps:
  # === Quality Gates ===

  install:
    image: *node_image
    commands:
      - *install_deps

  security-audit:
    image: *node_image
    commands:
      - *use_deps
      - pnpm audit --audit-level=high
    depends_on:
      - install

  prisma-generate:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
    commands:
      - *use_deps
      - pnpm --filter "@mosaic/api" prisma:generate
    depends_on:
      - install

  lint:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm turbo lint --filter=@mosaic/api
    depends_on:
      - prisma-generate

  typecheck:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm turbo typecheck --filter=@mosaic/api
    depends_on:
      - prisma-generate

  prisma-migrate:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
    commands:
      - *use_deps
      - pnpm --filter "@mosaic/api" prisma migrate deploy
    depends_on:
      - prisma-generate

  test:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
      ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
    commands:
      - *use_deps
      - pnpm --filter "@mosaic/api" exec vitest run --exclude 'src/auth/auth-rls.integration.spec.ts' --exclude 'src/credentials/user-credential.model.spec.ts' --exclude 'src/job-events/job-events.performance.spec.ts' --exclude 'src/knowledge/services/fulltext-search.spec.ts' --exclude 'src/mosaic-telemetry/mosaic-telemetry.module.spec.ts'
    depends_on:
      - prisma-migrate

  # === Build ===

  build:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      NODE_ENV: "production"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm turbo build --filter=@mosaic/api
    depends_on:
      - lint
      - typecheck
      - test
      - security-audit

  # === Docker Build & Push ===

  docker-build-api:
    image: gcr.io/kaniko-project/executor:debug
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
    commands:
      - *kaniko_setup
      - |
        DESTINATIONS=""
        if [ -n "$CI_COMMIT_TAG" ]; then
          DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:$CI_COMMIT_TAG"
        elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
          DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:latest"
        fi
        /kaniko/executor --context . --dockerfile apps/api/Dockerfile --snapshot-mode=redo $DESTINATIONS
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - build

  # === Container Security Scan ===

  security-trivy-api:
    image: aquasec/trivy:latest
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
    commands:
      - |
        if [ -n "$$CI_COMMIT_TAG" ]; then
          SCAN_TAG="$$CI_COMMIT_TAG"
        elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
          SCAN_TAG="latest"
        else
          SCAN_TAG="latest"
        fi
        mkdir -p ~/.docker
        echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
        trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
          --ignorefile .trivyignore \
          git.mosaicstack.dev/mosaic/stack-api:$$SCAN_TAG
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - docker-build-api

  # === Package Linking ===

  link-packages:
    image: alpine:3
    environment:
      GITEA_TOKEN:
        from_secret: gitea_token
    commands:
      - apk add --no-cache curl
      - sleep 10
      - |
        set -e
        link_package() {
          PKG="$$1"
          echo "Linking $$PKG..."
          for attempt in 1 2 3; do
            STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
              -H "Authorization: token $$GITEA_TOKEN" \
              "https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
            if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
              echo " Linked $$PKG"
              return 0
            elif [ "$$STATUS" = "400" ]; then
              echo " $$PKG already linked"
              return 0
            elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
              echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
              sleep 5
            else
              echo " FAILED: $$PKG status $$STATUS"
              cat /tmp/link-response.txt
              return 1
            fi
          done
        }
        link_package "stack-api"
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - security-trivy-api
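A note on the `$$` in the trivy and link steps above: Woodpecker substitutes `${...}` and CI variable references before commands reach the shell, so `$$` escapes to a single `$` and defers expansion to the step's own shell at runtime. After un-escaping, the scan-tag logic the container actually executes looks like this (a sketch; note the `elif` and `else` branches both resolve to `latest`, so the net effect is tag-or-latest):

```sh
# What security-trivy-api runs once Woodpecker turns each $$ into $:
if [ -n "$CI_COMMIT_TAG" ]; then
  SCAN_TAG="$CI_COMMIT_TAG"   # tag builds scan the tagged image
else
  SCAN_TAG="latest"           # main pushes and manual runs scan :latest
fi
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
  --ignorefile .trivyignore \
  "git.mosaicstack.dev/mosaic/stack-api:$SCAN_TAG"
```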
@@ -1,337 +0,0 @@
# Unified CI Pipeline - Mosaic Stack
# Single install, parallel quality gates, sequential deploy
#
# Replaces: api.yml, orchestrator.yml, web.yml
# Keeps: coordinator.yml (Python), infra.yml (separate concerns)
#
# Flow:
#   install → security-audit
#   → prisma-generate → lint + typecheck (parallel)
#   → prisma-migrate → test
#   → build (after all gates pass)
#   → docker builds (main only, parallel)
#   → trivy scans (main only, parallel)
#   → package linking (main only)

when:
  - event: [push, pull_request, manual]
    path:
      include:
        - "apps/api/**"
        - "apps/orchestrator/**"
        - "apps/web/**"
        - "packages/**"
        - "pnpm-lock.yaml"
        - "pnpm-workspace.yaml"
        - "turbo.json"
        - "package.json"
        - ".woodpecker/ci.yml"
        - ".trivyignore"

variables:
  - &node_image "node:24-alpine"
  - &install_deps |
    corepack enable
    pnpm install --frozen-lockfile
  - &use_deps |
    corepack enable
  - &turbo_env
    TURBO_API:
      from_secret: turbo_api
    TURBO_TOKEN:
      from_secret: turbo_token
    TURBO_TEAM:
      from_secret: turbo_team
  - &kaniko_setup |
    mkdir -p /kaniko/.docker
    echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json

services:
  postgres:
    image: postgres:17.7-alpine3.22
    environment:
      POSTGRES_DB: test_db
      POSTGRES_USER: test_user
      POSTGRES_PASSWORD: test_password

steps:
  # ─── Install (once) ─────────────────────────────────────────
  install:
    image: *node_image
    commands:
      - *install_deps

  # ─── Security Audit (once) ──────────────────────────────────
  security-audit:
    image: *node_image
    commands:
      - *use_deps
      - pnpm audit --audit-level=high
    depends_on:
      - install

  # ─── Prisma Generate ────────────────────────────────────────
  prisma-generate:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
    commands:
      - *use_deps
      - pnpm --filter "@mosaic/api" prisma:generate
    depends_on:
      - install

  # ─── Lint (all packages) ────────────────────────────────────
  lint:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm turbo lint
    depends_on:
      - prisma-generate

  # ─── Typecheck (all packages, parallel with lint) ───────────
  typecheck:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm turbo typecheck
    depends_on:
      - prisma-generate

  # ─── Prisma Migrate (test DB) ──────────────────────────────
  prisma-migrate:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
    commands:
      - *use_deps
      - pnpm --filter "@mosaic/api" prisma migrate deploy
    depends_on:
      - prisma-generate

  # ─── Test (all packages) ───────────────────────────────────
  test:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
      ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm --filter "@mosaic/api" exec vitest run --exclude 'src/auth/auth-rls.integration.spec.ts' --exclude 'src/credentials/user-credential.model.spec.ts' --exclude 'src/job-events/job-events.performance.spec.ts' --exclude 'src/knowledge/services/fulltext-search.spec.ts' --exclude 'src/mosaic-telemetry/mosaic-telemetry.module.spec.ts'
      - pnpm turbo test --filter=@mosaic/orchestrator --filter=@mosaic/web
    depends_on:
      - prisma-migrate

  # ─── Build (all packages) ──────────────────────────────────
  build:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      NODE_ENV: "production"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm turbo build
    depends_on:
      - lint
      - typecheck
      - test
      - security-audit

  # ─── Docker Builds (main only, parallel) ───────────────────

  docker-build-api:
    image: gcr.io/kaniko-project/executor:debug
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
    commands:
      - *kaniko_setup
      - |
        DESTINATIONS=""
        if [ -n "$CI_COMMIT_TAG" ]; then
          DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:$CI_COMMIT_TAG"
        elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
          DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:latest"
        fi
        /kaniko/executor --context . --dockerfile apps/api/Dockerfile --snapshot-mode=redo $DESTINATIONS
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - build

  docker-build-orchestrator:
    image: gcr.io/kaniko-project/executor:debug
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
    commands:
      - *kaniko_setup
      - |
        DESTINATIONS=""
        if [ -n "$CI_COMMIT_TAG" ]; then
          DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:$CI_COMMIT_TAG"
        elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
          DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:latest"
        fi
        /kaniko/executor --context . --dockerfile apps/orchestrator/Dockerfile --snapshot-mode=redo $DESTINATIONS
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - build

  docker-build-web:
    image: gcr.io/kaniko-project/executor:debug
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
    commands:
      - *kaniko_setup
      - |
        DESTINATIONS=""
        if [ -n "$CI_COMMIT_TAG" ]; then
          DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:$CI_COMMIT_TAG"
        elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
          DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:latest"
        fi
        /kaniko/executor --context . --dockerfile apps/web/Dockerfile --snapshot-mode=redo --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - build

  # ─── Container Security Scans (main only) ──────────────────

  security-trivy-api:
    image: aquasec/trivy:latest
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
    commands:
      - |
        if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
        mkdir -p ~/.docker
        echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
        trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-api:$$SCAN_TAG
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - docker-build-api

  security-trivy-orchestrator:
    image: aquasec/trivy:latest
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
    commands:
      - |
        if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
        mkdir -p ~/.docker
        echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
        trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-orchestrator:$$SCAN_TAG
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - docker-build-orchestrator

  security-trivy-web:
    image: aquasec/trivy:latest
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
    commands:
      - |
        if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
        mkdir -p ~/.docker
        echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
        trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-web:$$SCAN_TAG
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - docker-build-web

  # ─── Package Linking (main only, once) ─────────────────────

  link-packages:
    image: alpine:3
    environment:
      GITEA_TOKEN:
        from_secret: gitea_token
    commands:
      - apk add --no-cache curl
      - sleep 10
      - |
        set -e
        link_package() {
          PKG="$$1"
          echo "Linking $$PKG..."
          for attempt in 1 2 3; do
            STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
              -H "Authorization: token $$GITEA_TOKEN" \
              "https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
            if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
              echo " Linked $$PKG"
              return 0
            elif [ "$$STATUS" = "400" ]; then
              echo " $$PKG already linked"
              return 0
            elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
              echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
              sleep 5
            else
              echo " FAILED: $$PKG status $$STATUS"
              cat /tmp/link-response.txt
              return 1
            fi
          done
        }
        link_package "stack-api"
        link_package "stack-orchestrator"
        link_package "stack-web"
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - security-trivy-api
      - security-trivy-orchestrator
      - security-trivy-web
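The `link_package` helper above (repeated in each per-app pipeline) is plain POSIX shell, so its retry and status handling can be exercised outside CI by stubbing `curl`. A self-contained sketch, with `$$` rewritten to `$` since Woodpecker's escaping does not apply locally; the stub and counter file are hypothetical test scaffolding:

```sh
#!/bin/sh
# Dry-run the pipeline's retry loop: the stubbed curl answers 404 on the first
# call (package not indexed yet) and 201 afterwards, mimicking the race the
# loop is designed to absorb. A file-based counter is used because the $(...)
# substitution runs the stub in a subshell.
set -e
rm -f /tmp/link-stub-count
curl() {
  n=$(cat /tmp/link-stub-count 2>/dev/null || echo 0)
  echo $((n + 1)) > /tmp/link-stub-count
  : > /tmp/link-response.txt            # empty body; real step cats it on failure
  if [ "$n" -eq 0 ]; then printf '404'; else printf '201'; fi
}
sleep() { :; }                          # skip the real 5s backoff in the demo
GITEA_TOKEN="dummy-token"

link_package() {
  PKG="$1"
  echo "Linking $PKG..."
  for attempt in 1 2 3; do
    STATUS=$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
      -H "Authorization: token $GITEA_TOKEN" \
      "https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$PKG/-/link/stack")
    if [ "$STATUS" = "201" ] || [ "$STATUS" = "204" ]; then
      echo " Linked $PKG"
      return 0
    elif [ "$STATUS" = "400" ]; then
      echo " $PKG already linked"
      return 0
    elif [ "$STATUS" = "404" ] && [ $attempt -lt 3 ]; then
      echo " $PKG not found yet, retrying in 5s (attempt $attempt/3)..."
      sleep 5
    else
      echo " FAILED: $PKG status $STATUS"
      cat /tmp/link-response.txt
      return 1
    fi
  done
}

link_package "stack-api"   # prints one retry, then " Linked stack-api"
```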
202 .woodpecker/orchestrator.yml (Normal file)
@@ -0,0 +1,202 @@
# Orchestrator Pipeline - Mosaic Stack
# Quality gates, build, and Docker publish for @mosaic/orchestrator
#
# Triggers on: apps/orchestrator/**, packages/**, root configs
# Security chain: source audit + Trivy container scan

when:
  - event: [push, pull_request, manual]
    path:
      include:
        - "apps/orchestrator/**"
        - "packages/**"
        - "pnpm-lock.yaml"
        - "pnpm-workspace.yaml"
        - "turbo.json"
        - "package.json"
        - ".woodpecker/orchestrator.yml"
        - ".trivyignore"

variables:
  - &node_image "node:24-alpine"
  - &install_deps |
    corepack enable
    pnpm install --frozen-lockfile
  - &use_deps |
    corepack enable
  - &turbo_env
    TURBO_API:
      from_secret: turbo_api
    TURBO_TOKEN:
      from_secret: turbo_token
    TURBO_TEAM:
      from_secret: turbo_team
  - &kaniko_setup |
    mkdir -p /kaniko/.docker
    echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json

steps:
  # === Quality Gates ===

  install:
    image: *node_image
    commands:
      - *install_deps

  security-audit:
    image: *node_image
    commands:
      - *use_deps
      - pnpm audit --audit-level=high
    depends_on:
      - install

  lint:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm turbo lint --filter=@mosaic/orchestrator
    depends_on:
      - install

  typecheck:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm turbo typecheck --filter=@mosaic/orchestrator
    depends_on:
      - install

  test:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm turbo test --filter=@mosaic/orchestrator
    depends_on:
      - install

  # === Build ===

  build:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      NODE_ENV: "production"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm turbo build --filter=@mosaic/orchestrator
    depends_on:
      - lint
      - typecheck
      - test
      - security-audit

  # === Docker Build & Push ===

  docker-build-orchestrator:
    image: gcr.io/kaniko-project/executor:debug
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
    commands:
      - *kaniko_setup
      - |
        DESTINATIONS=""
        if [ -n "$CI_COMMIT_TAG" ]; then
          DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:$CI_COMMIT_TAG"
        elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
          DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:latest"
        fi
        /kaniko/executor --context . --dockerfile apps/orchestrator/Dockerfile --snapshot-mode=redo $DESTINATIONS
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - build

  # === Container Security Scan ===

  security-trivy-orchestrator:
    image: aquasec/trivy:latest
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
    commands:
      - |
        if [ -n "$$CI_COMMIT_TAG" ]; then
          SCAN_TAG="$$CI_COMMIT_TAG"
        elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
          SCAN_TAG="latest"
        else
          SCAN_TAG="latest"
        fi
        mkdir -p ~/.docker
        echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
        trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
          --ignorefile .trivyignore \
          git.mosaicstack.dev/mosaic/stack-orchestrator:$$SCAN_TAG
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - docker-build-orchestrator

  # === Package Linking ===

  link-packages:
    image: alpine:3
    environment:
      GITEA_TOKEN:
        from_secret: gitea_token
    commands:
      - apk add --no-cache curl
      - sleep 10
      - |
        set -e
        link_package() {
          PKG="$$1"
          echo "Linking $$PKG..."
          for attempt in 1 2 3; do
            STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
              -H "Authorization: token $$GITEA_TOKEN" \
              "https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
            if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
              echo " Linked $$PKG"
              return 0
            elif [ "$$STATUS" = "400" ]; then
              echo " $$PKG already linked"
              return 0
            elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
              echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
              sleep 5
            else
              echo " FAILED: $$PKG status $$STATUS"
              cat /tmp/link-response.txt
              return 1
            fi
          done
        }
        link_package "stack-orchestrator"
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - security-trivy-orchestrator
202 .woodpecker/web.yml (Normal file)
@@ -0,0 +1,202 @@
# Web Pipeline - Mosaic Stack
# Quality gates, build, and Docker publish for @mosaic/web
#
# Triggers on: apps/web/**, packages/**, root configs
# Security chain: source audit + Trivy container scan

when:
  - event: [push, pull_request, manual]
    path:
      include:
        - "apps/web/**"
        - "packages/**"
        - "pnpm-lock.yaml"
        - "pnpm-workspace.yaml"
        - "turbo.json"
        - "package.json"
        - ".woodpecker/web.yml"
        - ".trivyignore"

variables:
  - &node_image "node:24-alpine"
  - &install_deps |
    corepack enable
    pnpm install --frozen-lockfile
  - &use_deps |
    corepack enable
  - &turbo_env
    TURBO_API:
      from_secret: turbo_api
    TURBO_TOKEN:
      from_secret: turbo_token
    TURBO_TEAM:
      from_secret: turbo_team
  - &kaniko_setup |
    mkdir -p /kaniko/.docker
    echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json

steps:
  # === Quality Gates ===

  install:
    image: *node_image
    commands:
      - *install_deps

  security-audit:
    image: *node_image
    commands:
      - *use_deps
      - pnpm audit --audit-level=high
    depends_on:
      - install

  lint:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm turbo lint --filter=@mosaic/web
    depends_on:
      - install

  typecheck:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm turbo typecheck --filter=@mosaic/web
    depends_on:
      - install

  test:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm turbo test --filter=@mosaic/web
    depends_on:
      - install

  # === Build ===

  build:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      NODE_ENV: "production"
      <<: *turbo_env
    commands:
      - *use_deps
      - pnpm turbo build --filter=@mosaic/web
    depends_on:
      - lint
      - typecheck
      - test
      - security-audit

  # === Docker Build & Push ===

  docker-build-web:
    image: gcr.io/kaniko-project/executor:debug
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
    commands:
      - *kaniko_setup
      - |
        DESTINATIONS=""
        if [ -n "$CI_COMMIT_TAG" ]; then
          DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:$CI_COMMIT_TAG"
        elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
          DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:latest"
        fi
        /kaniko/executor --context . --dockerfile apps/web/Dockerfile --snapshot-mode=redo --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - build

  # === Container Security Scan ===

  security-trivy-web:
    image: aquasec/trivy:latest
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
    commands:
      - |
        if [ -n "$$CI_COMMIT_TAG" ]; then
          SCAN_TAG="$$CI_COMMIT_TAG"
        elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
          SCAN_TAG="latest"
        else
          SCAN_TAG="latest"
        fi
        mkdir -p ~/.docker
        echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
        trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
          --ignorefile .trivyignore \
          git.mosaicstack.dev/mosaic/stack-web:$$SCAN_TAG
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - docker-build-web

  # === Package Linking ===

  link-packages:
    image: alpine:3
    environment:
      GITEA_TOKEN:
        from_secret: gitea_token
    commands:
      - apk add --no-cache curl
      - sleep 10
      - |
        set -e
        link_package() {
          PKG="$$1"
          echo "Linking $$PKG..."
          for attempt in 1 2 3; do
            STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
              -H "Authorization: token $$GITEA_TOKEN" \
              "https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
            if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
              echo " Linked $$PKG"
              return 0
            elif [ "$$STATUS" = "400" ]; then
              echo " $$PKG already linked"
              return 0
            elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
              echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
              sleep 5
            else
              echo " FAILED: $$PKG status $$STATUS"
              cat /tmp/link-response.txt
              return 1
            fi
          done
        }
        link_package "stack-web"
    when:
      - branch: [main]
        event: [push, manual, tag]
    depends_on:
      - security-trivy-web
@@ -1,24 +0,0 @@
-- CreateTable
CREATE TABLE "agent_memories" (
    "id" UUID NOT NULL,
    "workspace_id" UUID NOT NULL,
    "agent_id" TEXT NOT NULL,
    "key" TEXT NOT NULL,
    "value" JSONB NOT NULL,
    "created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updated_at" TIMESTAMPTZ NOT NULL,

    CONSTRAINT "agent_memories_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "agent_memories_workspace_id_agent_id_key_key" ON "agent_memories"("workspace_id", "agent_id", "key");

-- CreateIndex
CREATE INDEX "agent_memories_workspace_id_idx" ON "agent_memories"("workspace_id");

-- CreateIndex
CREATE INDEX "agent_memories_agent_id_idx" ON "agent_memories"("agent_id");

-- AddForeignKey
ALTER TABLE "agent_memories" ADD CONSTRAINT "agent_memories_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@@ -1,33 +0,0 @@
-- CreateTable
CREATE TABLE "conversation_archives" (
    "id" UUID NOT NULL,
    "workspace_id" UUID NOT NULL,
    "session_id" TEXT NOT NULL,
    "agent_id" TEXT NOT NULL,
    "messages" JSONB NOT NULL,
    "message_count" INTEGER NOT NULL,
    "summary" TEXT NOT NULL,
    "embedding" vector(1536),
    "started_at" TIMESTAMPTZ NOT NULL,
    "ended_at" TIMESTAMPTZ,
    "metadata" JSONB NOT NULL DEFAULT '{}',
    "created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updated_at" TIMESTAMPTZ NOT NULL,

    CONSTRAINT "conversation_archives_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "conversation_archives_workspace_id_session_id_key" ON "conversation_archives"("workspace_id", "session_id");

-- CreateIndex
CREATE INDEX "conversation_archives_workspace_id_idx" ON "conversation_archives"("workspace_id");

-- CreateIndex
CREATE INDEX "conversation_archives_agent_id_idx" ON "conversation_archives"("agent_id");

-- CreateIndex
CREATE INDEX "conversation_archives_started_at_idx" ON "conversation_archives"("started_at");

-- AddForeignKey
ALTER TABLE "conversation_archives" ADD CONSTRAINT "conversation_archives_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
@@ -1,37 +0,0 @@
-- CreateTable
CREATE TABLE "findings" (
    "id" UUID NOT NULL,
    "workspace_id" UUID NOT NULL,
    "task_id" UUID,
    "agent_id" TEXT NOT NULL,
    "type" TEXT NOT NULL,
    "title" TEXT NOT NULL,
    "data" JSONB NOT NULL,
    "summary" TEXT NOT NULL,
    "embedding" vector(1536),
    "created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updated_at" TIMESTAMPTZ NOT NULL,

    CONSTRAINT "findings_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "findings_id_workspace_id_key" ON "findings"("id", "workspace_id");

-- CreateIndex
CREATE INDEX "findings_workspace_id_idx" ON "findings"("workspace_id");

-- CreateIndex
CREATE INDEX "findings_agent_id_idx" ON "findings"("agent_id");

-- CreateIndex
CREATE INDEX "findings_type_idx" ON "findings"("type");

-- CreateIndex
CREATE INDEX "findings_task_id_idx" ON "findings"("task_id");

-- AddForeignKey
ALTER TABLE "findings" ADD CONSTRAINT "findings_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "findings" ADD CONSTRAINT "findings_task_id_fkey" FOREIGN KEY ("task_id") REFERENCES "agent_tasks"("id") ON DELETE SET NULL ON UPDATE CASCADE;
@@ -298,8 +298,6 @@ model Workspace {
  agents               Agent[]
  agentSessions        AgentSession[]
  agentTasks           AgentTask[]
  findings             Finding[]
  agentMemories        AgentMemory[]
  userLayouts          UserLayout[]
  knowledgeEntries     KnowledgeEntry[]
  knowledgeTags        KnowledgeTag[]
@@ -314,7 +312,6 @@ model Workspace {
  llmUsageLogs         LlmUsageLog[]
  userCredentials      UserCredential[]
  terminalSessions     TerminalSession[]
  conversationArchives ConversationArchive[]

  @@index([ownerId])
  @@map("workspaces")
@@ -692,7 +689,6 @@ model AgentTask {
  createdBy   User        @relation("AgentTaskCreator", fields: [createdById], references: [id], onDelete: Cascade)
  createdById String      @map("created_by_id") @db.Uuid
  runnerJobs  RunnerJob[]
  findings    Finding[]

  @@unique([id, workspaceId])
  @@index([workspaceId])
@@ -702,33 +698,6 @@ model AgentTask {
  @@map("agent_tasks")
}

model Finding {
  id          String  @id @default(uuid()) @db.Uuid
  workspaceId String  @map("workspace_id") @db.Uuid
  taskId      String? @map("task_id") @db.Uuid

  agentId String @map("agent_id")
  type    String
  title   String
  data    Json
  summary String @db.Text
  // Note: vector dimension (1536) must match EMBEDDING_DIMENSION constant in @mosaic/shared
  embedding Unsupported("vector(1536)")?

  createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
  updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz

  workspace Workspace  @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
  task      AgentTask? @relation(fields: [taskId], references: [id], onDelete: SetNull)

  @@unique([id, workspaceId])
  @@index([workspaceId])
  @@index([agentId])
  @@index([type])
  @@index([taskId])
  @@map("findings")
}

model AgentSession {
  id          String @id @default(uuid()) @db.Uuid
  workspaceId String @map("workspace_id") @db.Uuid
@@ -766,23 +735,6 @@ model AgentSession {
  @@map("agent_sessions")
}

model AgentMemory {
  id          String   @id @default(uuid()) @db.Uuid
  workspaceId String   @map("workspace_id") @db.Uuid
  agentId     String   @map("agent_id")
  key         String
  value       Json
  createdAt   DateTime @default(now()) @map("created_at") @db.Timestamptz
  updatedAt   DateTime @updatedAt @map("updated_at") @db.Timestamptz

  workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)

  @@unique([workspaceId, agentId, key])
  @@index([workspaceId])
  @@index([agentId])
  @@map("agent_memories")
}

model WidgetDefinition {
  id String @id @default(uuid()) @db.Uuid

@@ -1594,33 +1546,3 @@ model TerminalSession {
  @@index([workspaceId, status])
  @@map("terminal_sessions")
}

// ============================================
// CONVERSATION ARCHIVE MODULE
// ============================================

model ConversationArchive {
  id           String    @id @default(uuid()) @db.Uuid
  workspaceId  String    @map("workspace_id") @db.Uuid
  sessionId    String    @map("session_id")
  agentId      String    @map("agent_id")
  messages     Json
  messageCount Int       @map("message_count")
  summary      String    @db.Text
  // Note: vector dimension (1536) must match EMBEDDING_DIMENSION constant in @mosaic/shared
  embedding    Unsupported("vector(1536)")?
  startedAt    DateTime  @map("started_at") @db.Timestamptz
  endedAt      DateTime? @map("ended_at") @db.Timestamptz
  metadata     Json      @default("{}")
  createdAt    DateTime  @default(now()) @map("created_at") @db.Timestamptz
  updatedAt    DateTime  @updatedAt @map("updated_at") @db.Timestamptz

  // Relations
  workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)

  @@unique([workspaceId, sessionId])
  @@index([workspaceId])
  @@index([agentId])
  @@index([startedAt])
  @@map("conversation_archives")
}
@@ -1,102 +0,0 @@
import { Test, TestingModule } from "@nestjs/testing";
import { AgentMemoryController } from "./agent-memory.controller";
import { AgentMemoryService } from "./agent-memory.service";
import { AuthGuard } from "../auth/guards/auth.guard";
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
import { describe, it, expect, beforeEach, vi } from "vitest";

describe("AgentMemoryController", () => {
  let controller: AgentMemoryController;

  const mockAgentMemoryService = {
    upsert: vi.fn(),
    findAll: vi.fn(),
    findOne: vi.fn(),
    remove: vi.fn(),
  };

  const mockGuard = { canActivate: vi.fn(() => true) };

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      controllers: [AgentMemoryController],
      providers: [
        {
          provide: AgentMemoryService,
          useValue: mockAgentMemoryService,
        },
      ],
    })
      .overrideGuard(AuthGuard)
      .useValue(mockGuard)
      .overrideGuard(WorkspaceGuard)
      .useValue(mockGuard)
      .overrideGuard(PermissionGuard)
      .useValue(mockGuard)
      .compile();

    controller = module.get<AgentMemoryController>(AgentMemoryController);

    vi.clearAllMocks();
  });

  const workspaceId = "workspace-1";
  const agentId = "agent-1";
  const key = "context";

  describe("upsert", () => {
    it("should upsert a memory entry", async () => {
      const dto = { value: { foo: "bar" } };
      const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: dto.value };

      mockAgentMemoryService.upsert.mockResolvedValue(mockEntry);

      const result = await controller.upsert(agentId, key, dto, workspaceId);

      expect(mockAgentMemoryService.upsert).toHaveBeenCalledWith(workspaceId, agentId, key, dto);
      expect(result).toEqual(mockEntry);
    });
  });

  describe("findAll", () => {
    it("should list all memory entries for an agent", async () => {
      const mockEntries = [
        { id: "mem-1", key: "a", value: 1 },
        { id: "mem-2", key: "b", value: 2 },
      ];

      mockAgentMemoryService.findAll.mockResolvedValue(mockEntries);

      const result = await controller.findAll(agentId, workspaceId);

      expect(mockAgentMemoryService.findAll).toHaveBeenCalledWith(workspaceId, agentId);
      expect(result).toEqual(mockEntries);
    });
  });

  describe("findOne", () => {
    it("should get a single memory entry", async () => {
      const mockEntry = { id: "mem-1", key, value: "v" };

      mockAgentMemoryService.findOne.mockResolvedValue(mockEntry);

      const result = await controller.findOne(agentId, key, workspaceId);

      expect(mockAgentMemoryService.findOne).toHaveBeenCalledWith(workspaceId, agentId, key);
      expect(result).toEqual(mockEntry);
    });
  });

  describe("remove", () => {
    it("should delete a memory entry", async () => {
      const mockResponse = { message: "Memory entry deleted successfully" };

      mockAgentMemoryService.remove.mockResolvedValue(mockResponse);

      const result = await controller.remove(agentId, key, workspaceId);

      expect(mockAgentMemoryService.remove).toHaveBeenCalledWith(workspaceId, agentId, key);
      expect(result).toEqual(mockResponse);
    });
  });
});
@@ -1,89 +0,0 @@
import {
  Controller,
  Get,
  Put,
  Delete,
  Body,
  Param,
  UseGuards,
  HttpCode,
  HttpStatus,
} from "@nestjs/common";
import { AgentMemoryService } from "./agent-memory.service";
import { UpsertAgentMemoryDto } from "./dto";
import { AuthGuard } from "../auth/guards/auth.guard";
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
import { Workspace, Permission, RequirePermission } from "../common/decorators";

/**
 * Controller for per-agent key/value memory endpoints.
 * All endpoints require authentication and workspace context.
 *
 * Guards are applied in order:
 * 1. AuthGuard - Verifies user authentication
 * 2. WorkspaceGuard - Validates workspace access
 * 3. PermissionGuard - Checks role-based permissions
 */
@Controller("agents/:agentId/memory")
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
export class AgentMemoryController {
  constructor(private readonly agentMemoryService: AgentMemoryService) {}

  /**
   * PUT /api/agents/:agentId/memory/:key
   * Upsert a memory entry for an agent
   * Requires: MEMBER role or higher
   */
  @Put(":key")
  @RequirePermission(Permission.WORKSPACE_MEMBER)
  async upsert(
    @Param("agentId") agentId: string,
    @Param("key") key: string,
    @Body() dto: UpsertAgentMemoryDto,
    @Workspace() workspaceId: string
  ) {
    return this.agentMemoryService.upsert(workspaceId, agentId, key, dto);
  }

  /**
   * GET /api/agents/:agentId/memory
   * List all memory entries for an agent
   * Requires: Any workspace member (including GUEST)
   */
  @Get()
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findAll(@Param("agentId") agentId: string, @Workspace() workspaceId: string) {
    return this.agentMemoryService.findAll(workspaceId, agentId);
  }

  /**
   * GET /api/agents/:agentId/memory/:key
   * Get a single memory entry by key
   * Requires: Any workspace member (including GUEST)
   */
  @Get(":key")
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findOne(
    @Param("agentId") agentId: string,
    @Param("key") key: string,
    @Workspace() workspaceId: string
  ) {
    return this.agentMemoryService.findOne(workspaceId, agentId, key);
  }

  /**
   * DELETE /api/agents/:agentId/memory/:key
   * Remove a memory entry
   * Requires: MEMBER role or higher
   */
  @Delete(":key")
  @HttpCode(HttpStatus.OK)
  @RequirePermission(Permission.WORKSPACE_MEMBER)
  async remove(
    @Param("agentId") agentId: string,
    @Param("key") key: string,
    @Workspace() workspaceId: string
  ) {
    return this.agentMemoryService.remove(workspaceId, agentId, key);
  }
}
@@ -1,13 +0,0 @@
import { Module } from "@nestjs/common";
import { AgentMemoryController } from "./agent-memory.controller";
import { AgentMemoryService } from "./agent-memory.service";
import { PrismaModule } from "../prisma/prisma.module";
import { AuthModule } from "../auth/auth.module";

@Module({
  imports: [PrismaModule, AuthModule],
  controllers: [AgentMemoryController],
  providers: [AgentMemoryService],
  exports: [AgentMemoryService],
})
export class AgentMemoryModule {}
@@ -1,126 +0,0 @@
import { Test, TestingModule } from "@nestjs/testing";
import { AgentMemoryService } from "./agent-memory.service";
import { PrismaService } from "../prisma/prisma.service";
import { NotFoundException } from "@nestjs/common";
import { describe, it, expect, beforeEach, vi } from "vitest";

describe("AgentMemoryService", () => {
  let service: AgentMemoryService;

  const mockPrismaService = {
    agentMemory: {
      upsert: vi.fn(),
      findMany: vi.fn(),
      findUnique: vi.fn(),
      delete: vi.fn(),
    },
  };

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        AgentMemoryService,
        {
          provide: PrismaService,
          useValue: mockPrismaService,
        },
      ],
    }).compile();

    service = module.get<AgentMemoryService>(AgentMemoryService);

    vi.clearAllMocks();
  });

  const workspaceId = "workspace-1";
  const agentId = "agent-1";
  const key = "session-context";

  describe("upsert", () => {
    it("should upsert a memory entry", async () => {
      const dto = { value: { data: "some context" } };
      const mockEntry = {
        id: "mem-1",
        workspaceId,
        agentId,
        key,
        value: dto.value,
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      mockPrismaService.agentMemory.upsert.mockResolvedValue(mockEntry);

      const result = await service.upsert(workspaceId, agentId, key, dto);

      expect(mockPrismaService.agentMemory.upsert).toHaveBeenCalledWith({
        where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
        create: { workspaceId, agentId, key, value: dto.value },
        update: { value: dto.value },
      });
      expect(result).toEqual(mockEntry);
    });
  });

  describe("findAll", () => {
    it("should return all memory entries for an agent", async () => {
      const mockEntries = [
        { id: "mem-1", key: "a", value: 1 },
        { id: "mem-2", key: "b", value: 2 },
      ];

      mockPrismaService.agentMemory.findMany.mockResolvedValue(mockEntries);

      const result = await service.findAll(workspaceId, agentId);

      expect(mockPrismaService.agentMemory.findMany).toHaveBeenCalledWith({
        where: { workspaceId, agentId },
        orderBy: { key: "asc" },
      });
      expect(result).toEqual(mockEntries);
    });
  });

  describe("findOne", () => {
    it("should return a memory entry by key", async () => {
      const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: "ctx" };

      mockPrismaService.agentMemory.findUnique.mockResolvedValue(mockEntry);

      const result = await service.findOne(workspaceId, agentId, key);

      expect(mockPrismaService.agentMemory.findUnique).toHaveBeenCalledWith({
        where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
      });
      expect(result).toEqual(mockEntry);
    });

    it("should throw NotFoundException when key not found", async () => {
      mockPrismaService.agentMemory.findUnique.mockResolvedValue(null);

      await expect(service.findOne(workspaceId, agentId, key)).rejects.toThrow(NotFoundException);
    });
  });

  describe("remove", () => {
    it("should delete a memory entry", async () => {
      const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: "x" };

      mockPrismaService.agentMemory.findUnique.mockResolvedValue(mockEntry);
      mockPrismaService.agentMemory.delete.mockResolvedValue(mockEntry);

      const result = await service.remove(workspaceId, agentId, key);

      expect(mockPrismaService.agentMemory.delete).toHaveBeenCalledWith({
        where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
      });
      expect(result).toEqual({ message: "Memory entry deleted successfully" });
    });

    it("should throw NotFoundException when key not found", async () => {
      mockPrismaService.agentMemory.findUnique.mockResolvedValue(null);

      await expect(service.remove(workspaceId, agentId, key)).rejects.toThrow(NotFoundException);
    });
  });
});
@@ -1,79 +0,0 @@
import { Injectable, NotFoundException } from "@nestjs/common";
import { PrismaService } from "../prisma/prisma.service";
import { Prisma } from "@prisma/client";
import type { UpsertAgentMemoryDto } from "./dto";

@Injectable()
export class AgentMemoryService {
  constructor(private readonly prisma: PrismaService) {}

  /**
   * Upsert a memory entry for an agent.
   */
  async upsert(workspaceId: string, agentId: string, key: string, dto: UpsertAgentMemoryDto) {
    return this.prisma.agentMemory.upsert({
      where: {
        workspaceId_agentId_key: { workspaceId, agentId, key },
      },
      create: {
        workspaceId,
        agentId,
        key,
        value: dto.value as Prisma.InputJsonValue,
      },
      update: {
        value: dto.value as Prisma.InputJsonValue,
      },
    });
  }

  /**
   * List all memory entries for an agent in a workspace.
   */
  async findAll(workspaceId: string, agentId: string) {
    return this.prisma.agentMemory.findMany({
      where: { workspaceId, agentId },
      orderBy: { key: "asc" },
    });
  }

  /**
   * Get a single memory entry by key.
   */
  async findOne(workspaceId: string, agentId: string, key: string) {
    const entry = await this.prisma.agentMemory.findUnique({
      where: {
        workspaceId_agentId_key: { workspaceId, agentId, key },
      },
    });

    if (!entry) {
      throw new NotFoundException(`Memory key "${key}" not found for agent "${agentId}"`);
    }

    return entry;
  }

  /**
   * Delete a memory entry by key.
   */
  async remove(workspaceId: string, agentId: string, key: string) {
    const entry = await this.prisma.agentMemory.findUnique({
      where: {
        workspaceId_agentId_key: { workspaceId, agentId, key },
      },
    });

    if (!entry) {
      throw new NotFoundException(`Memory key "${key}" not found for agent "${agentId}"`);
    }

    await this.prisma.agentMemory.delete({
      where: {
        workspaceId_agentId_key: { workspaceId, agentId, key },
      },
    });

    return { message: "Memory entry deleted successfully" };
  }
}
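For context on the surface being removed: the controller and service above exposed a small key/value API per agent. A sketch of how a client would have exercised it, assuming the `/api` global prefix from the route comments; the host, credential header, and workspace-selection header are hypothetical, since neither AuthGuard's scheme nor the @Workspace() source appears in this diff:

```sh
BASE="https://api.mosaicstack.dev/api"                 # hypothetical host
AUTH="Authorization: Bearer <token>"                   # placeholder credential
WS="x-workspace-id: <workspace-uuid>"                  # placeholder workspace context

# Upsert a memory entry (MEMBER role or higher)
curl -X PUT "$BASE/agents/agent-1/memory/context" \
  -H "$AUTH" -H "$WS" -H "Content-Type: application/json" \
  -d '{"value": {"foo": "bar"}}'

# List all entries for the agent (any workspace member)
curl -H "$AUTH" -H "$WS" "$BASE/agents/agent-1/memory"

# Fetch one key, then delete it (delete requires MEMBER or higher)
curl -H "$AUTH" -H "$WS" "$BASE/agents/agent-1/memory/context"
curl -X DELETE -H "$AUTH" -H "$WS" "$BASE/agents/agent-1/memory/context"
```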
@@ -1 +0,0 @@
export * from "./upsert-agent-memory.dto";
@@ -1,10 +0,0 @@
import { IsNotEmpty } from "class-validator";

/**
 * DTO for upserting an agent memory entry.
 * The value accepts any JSON-serializable data.
 */
export class UpsertAgentMemoryDto {
  @IsNotEmpty({ message: "value must not be empty" })
  value!: unknown;
}
@@ -27,8 +27,6 @@ import { LlmUsageModule } from "./llm-usage/llm-usage.module";
import { BrainModule } from "./brain/brain.module";
import { CronModule } from "./cron/cron.module";
import { AgentTasksModule } from "./agent-tasks/agent-tasks.module";
import { FindingsModule } from "./findings/findings.module";
import { AgentMemoryModule } from "./agent-memory/agent-memory.module";
import { ValkeyModule } from "./valkey/valkey.module";
import { BullMqModule } from "./bullmq/bullmq.module";
import { StitcherModule } from "./stitcher/stitcher.module";
@@ -48,7 +46,6 @@ import { WorkspacesModule } from "./workspaces/workspaces.module";
import { AdminModule } from "./admin/admin.module";
import { TeamsModule } from "./teams/teams.module";
import { ImportModule } from "./import/import.module";
import { ConversationArchiveModule } from "./conversation-archive/conversation-archive.module";
import { RlsContextInterceptor } from "./common/interceptors/rls-context.interceptor";

@Module({
@@ -103,8 +100,6 @@ import { RlsContextInterceptor } from "./common/interceptors/rls-context.interce
    BrainModule,
    CronModule,
    AgentTasksModule,
    FindingsModule,
    AgentMemoryModule,
    RunnerJobsModule,
    JobEventsModule,
    JobStepsModule,
@@ -120,7 +115,6 @@ import { RlsContextInterceptor } from "./common/interceptors/rls-context.interce
    AdminModule,
    TeamsModule,
    ImportModule,
    ConversationArchiveModule,
  ],
  controllers: [AppController, CsrfController],
  providers: [
@@ -1,69 +0,0 @@
import { Controller, Post, Get, Body, Param, Query, UseGuards } from "@nestjs/common";
import { AuthGuard } from "../auth/guards/auth.guard";
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
import { Workspace, RequirePermission, Permission } from "../common/decorators";
import { ConversationArchiveService } from "./conversation-archive.service";
import { IngestConversationDto, SearchConversationDto, ListConversationsDto } from "./dto";

/**
 * Controller for conversation archive endpoints.
 * All endpoints require workspace membership.
 */
@Controller("conversations")
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
export class ConversationArchiveController {
  constructor(private readonly service: ConversationArchiveService) {}

  /**
   * POST /api/conversations/ingest
   * Ingest a conversation session log and auto-embed for semantic search.
   * Requires: MEMBER or higher
   */
  @Post("ingest")
  @RequirePermission(Permission.WORKSPACE_MEMBER)
  async ingest(
    @Workspace() workspaceId: string,
    @Body() dto: IngestConversationDto
  ): Promise<{ id: string }> {
    return this.service.ingest(workspaceId, dto);
  }

  /**
   * POST /api/conversations/search
   * Vector similarity search across archived conversations.
   * Requires: Any workspace member
   */
  @Post("search")
  @RequirePermission(Permission.WORKSPACE_ANY)
  async search(
    @Workspace() workspaceId: string,
    @Body() dto: SearchConversationDto
  ): Promise<unknown> {
    return this.service.search(workspaceId, dto);
  }

  /**
   * GET /api/conversations
   * List conversation archives with filtering and pagination.
   * Requires: Any workspace member
   */
  @Get()
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findAll(
    @Workspace() workspaceId: string,
    @Query() query: ListConversationsDto
  ): Promise<unknown> {
    return this.service.findAll(workspaceId, query);
  }

  /**
   * GET /api/conversations/:id
   * Get a single conversation archive by ID (includes full messages).
   * Requires: Any workspace member
   */
  @Get(":id")
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findOne(@Workspace() workspaceId: string, @Param("id") id: string): Promise<unknown> {
    return this.service.findOne(workspaceId, id);
  }
}
@@ -1,14 +0,0 @@
import { Module } from "@nestjs/common";
import { PrismaModule } from "../prisma/prisma.module";
import { AuthModule } from "../auth/auth.module";
import { KnowledgeModule } from "../knowledge/knowledge.module";
import { ConversationArchiveService } from "./conversation-archive.service";
import { ConversationArchiveController } from "./conversation-archive.controller";

@Module({
  imports: [PrismaModule, AuthModule, KnowledgeModule],
  controllers: [ConversationArchiveController],
  providers: [ConversationArchiveService],
  exports: [ConversationArchiveService],
})
export class ConversationArchiveModule {}
@@ -1,149 +0,0 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import { Test, TestingModule } from "@nestjs/testing";
import { ConflictException, NotFoundException } from "@nestjs/common";
import { ConversationArchiveService } from "./conversation-archive.service";
import { PrismaService } from "../prisma/prisma.service";
import { EmbeddingService } from "../knowledge/services/embedding.service";

const mockPrisma = {
  conversationArchive: {
    findUnique: vi.fn(),
    create: vi.fn(),
    count: vi.fn(),
    findMany: vi.fn(),
    findFirst: vi.fn(),
  },
  $queryRaw: vi.fn(),
  $executeRaw: vi.fn(),
};

const mockEmbedding = {
  isConfigured: vi.fn(),
  generateEmbedding: vi.fn(),
};

describe("ConversationArchiveService", () => {
  let service: ConversationArchiveService;

  beforeEach(async () => {
    vi.clearAllMocks();

    const module: TestingModule = await Test.createTestingModule({
      providers: [
        ConversationArchiveService,
        { provide: PrismaService, useValue: mockPrisma },
        { provide: EmbeddingService, useValue: mockEmbedding },
      ],
    }).compile();

    service = module.get<ConversationArchiveService>(ConversationArchiveService);
  });

  describe("ingest", () => {
    const workspaceId = "ws-1";
    const dto = {
      sessionId: "sess-abc",
      agentId: "agent-xyz",
      messages: [
        { role: "user", content: "Hello" },
        { role: "assistant", content: "Hi there!" },
      ],
      summary: "A greeting conversation",
      startedAt: "2026-02-28T10:00:00Z",
    };

    it("creates a conversation archive and returns id", async () => {
      mockPrisma.conversationArchive.findUnique.mockResolvedValue(null);
      mockPrisma.conversationArchive.create.mockResolvedValue({ id: "conv-1" });
      mockEmbedding.isConfigured.mockReturnValue(false);

      const result = await service.ingest(workspaceId, dto);

      expect(result).toEqual({ id: "conv-1" });
      expect(mockPrisma.conversationArchive.create).toHaveBeenCalledWith(
        expect.objectContaining({
          data: expect.objectContaining({
            workspaceId,
            sessionId: dto.sessionId,
            agentId: dto.agentId,
            messageCount: 2,
          }),
        })
      );
    });

    it("throws ConflictException when session already exists", async () => {
      mockPrisma.conversationArchive.findUnique.mockResolvedValue({ id: "existing" });

      await expect(service.ingest(workspaceId, dto)).rejects.toThrow(ConflictException);
    });
  });

  describe("findAll", () => {
    const workspaceId = "ws-1";

    it("returns paginated list", async () => {
      mockPrisma.conversationArchive.count.mockResolvedValue(5);
      mockPrisma.conversationArchive.findMany.mockResolvedValue([
        { id: "conv-1", sessionId: "sess-1" },
      ]);

      const result = await service.findAll(workspaceId, { page: 1, limit: 10 });

      expect(result.pagination.total).toBe(5);
      expect(result.data).toHaveLength(1);
    });

    it("uses default pagination when not provided", async () => {
      mockPrisma.conversationArchive.count.mockResolvedValue(0);
      mockPrisma.conversationArchive.findMany.mockResolvedValue([]);

      const result = await service.findAll(workspaceId, {});

      expect(result.pagination.page).toBe(1);
      expect(result.pagination.limit).toBe(20);
    });
  });

  describe("findOne", () => {
    const workspaceId = "ws-1";

    it("returns record when found", async () => {
      const record = { id: "conv-1", workspaceId, sessionId: "sess-1" };
      mockPrisma.conversationArchive.findFirst.mockResolvedValue(record);

      const result = await service.findOne(workspaceId, "conv-1");

      expect(result).toEqual(record);
    });

    it("throws NotFoundException when record does not exist", async () => {
      mockPrisma.conversationArchive.findFirst.mockResolvedValue(null);

      await expect(service.findOne(workspaceId, "missing")).rejects.toThrow(NotFoundException);
    });
  });

  describe("search", () => {
    it("throws ConflictException when embedding is not configured", async () => {
      mockEmbedding.isConfigured.mockReturnValue(false);

      await expect(service.search("ws-1", { query: "test query" })).rejects.toThrow(
        ConflictException
      );
    });

    it("performs vector search when configured", async () => {
      mockEmbedding.isConfigured.mockReturnValue(true);
      mockEmbedding.generateEmbedding.mockResolvedValue(new Array(1536).fill(0.1));
      mockPrisma.$queryRaw
        .mockResolvedValueOnce([{ id: "conv-1", similarity: 0.9 }])
        .mockResolvedValueOnce([{ count: BigInt(1) }]);

      const result = await service.search("ws-1", { query: "greetings" });

      expect(result.data).toHaveLength(1);
      expect(result.pagination.total).toBe(1);
    });
  });
});
@@ -1,277 +0,0 @@
import { Injectable, Logger, NotFoundException, ConflictException } from "@nestjs/common";
import { Prisma } from "@prisma/client";
import { EMBEDDING_DIMENSION } from "@mosaic/shared";
import { PrismaService } from "../prisma/prisma.service";
import { EmbeddingService } from "../knowledge/services/embedding.service";
import type { IngestConversationDto, SearchConversationDto, ListConversationsDto } from "./dto";

/**
 * Shape of a raw conversation archive row from $queryRaw vector search
 */
interface RawConversationResult {
  id: string;
  workspace_id: string;
  session_id: string;
  agent_id: string;
  messages: unknown;
  message_count: number;
  summary: string;
  started_at: Date;
  ended_at: Date | null;
  metadata: unknown;
  created_at: Date;
  updated_at: Date;
  similarity: number;
}

/**
 * Paginated response wrapper
 */
export interface PaginatedConversations<T> {
  data: T[];
  pagination: {
    page: number;
    limit: number;
    total: number;
    totalPages: number;
  };
}

@Injectable()
export class ConversationArchiveService {
  private readonly logger = new Logger(ConversationArchiveService.name);
  private readonly defaultSimilarityThreshold = 0.5;

  constructor(
    private readonly prisma: PrismaService,
    private readonly embedding: EmbeddingService
  ) {}

  /**
   * Ingest a conversation session log.
   * Generates a vector embedding from the summary + message content and stores it alongside the record.
   */
  async ingest(workspaceId: string, dto: IngestConversationDto): Promise<{ id: string }> {
    // Verify no duplicate session in this workspace
    const existing = await this.prisma.conversationArchive.findUnique({
      where: { workspaceId_sessionId: { workspaceId, sessionId: dto.sessionId } },
      select: { id: true },
    });

    if (existing) {
      throw new ConflictException(
        `Conversation session '${dto.sessionId}' already exists in this workspace`
      );
    }

    const messageCount = dto.messages.length;

    // Create record first to get ID for embedding
    const record = await this.prisma.conversationArchive.create({
      data: {
        workspaceId,
        sessionId: dto.sessionId,
        agentId: dto.agentId,
        messages: dto.messages as unknown as Prisma.InputJsonValue,
        messageCount,
        summary: dto.summary,
        startedAt: new Date(dto.startedAt),
        endedAt: dto.endedAt ? new Date(dto.endedAt) : null,
        metadata: (dto.metadata ?? {}) as Prisma.InputJsonValue,
      },
      select: { id: true },
    });

    // Generate and store embedding asynchronously (non-blocking for ingest)
    if (this.embedding.isConfigured()) {
      const textForEmbedding = this.buildEmbeddingText(dto.summary, dto.messages);
      this.storeEmbedding(record.id, textForEmbedding).catch((err: unknown) => {
        this.logger.error(`Failed to store embedding for conversation ${record.id}`, err);
      });
    }

    this.logger.log(`Ingested conversation ${record.id} (session: ${dto.sessionId})`);
    return { id: record.id };
  }

  /**
   * Semantic vector search across conversation archives in a workspace.
   */
  async search(
    workspaceId: string,
    dto: SearchConversationDto
  ): Promise<PaginatedConversations<RawConversationResult>> {
    if (!this.embedding.isConfigured()) {
      throw new ConflictException("Semantic search requires OpenAI API key to be configured");
    }

    const limit = dto.limit ?? 20;
    const threshold = dto.similarityThreshold ?? this.defaultSimilarityThreshold;
    const distanceThreshold = 1 - threshold;

    const queryEmbedding = await this.embedding.generateEmbedding(dto.query);
    const embeddingStr = `[${queryEmbedding.join(",")}]`;

    const agentFilter = dto.agentId ? Prisma.sql`AND ca.agent_id = ${dto.agentId}` : Prisma.sql``;

    const rows = await this.prisma.$queryRaw<RawConversationResult[]>`
      SELECT
        ca.id,
        ca.workspace_id,
        ca.session_id,
        ca.agent_id,
        ca.messages,
        ca.message_count,
        ca.summary,
        ca.started_at,
        ca.ended_at,
        ca.metadata,
        ca.created_at,
        ca.updated_at,
        (1 - (ca.embedding <=> ${embeddingStr}::vector(${EMBEDDING_DIMENSION}))) AS similarity
      FROM conversation_archives ca
      WHERE ca.workspace_id = ${workspaceId}::uuid
        AND ca.embedding IS NOT NULL
        AND (ca.embedding <=> ${embeddingStr}::vector(${EMBEDDING_DIMENSION})) <= ${distanceThreshold}
        ${agentFilter}
      ORDER BY ca.embedding <=> ${embeddingStr}::vector(${EMBEDDING_DIMENSION})
      LIMIT ${limit}
    `;

    const countResult = await this.prisma.$queryRaw<[{ count: bigint }]>`
      SELECT COUNT(*) AS count
      FROM conversation_archives ca
      WHERE ca.workspace_id = ${workspaceId}::uuid
        AND ca.embedding IS NOT NULL
        AND (ca.embedding <=> ${embeddingStr}::vector(${EMBEDDING_DIMENSION})) <= ${distanceThreshold}
        ${agentFilter}
    `;

    const total = Number(countResult[0].count);

    return {
      data: rows,
      pagination: {
        page: 1,
        limit,
        total,
        totalPages: Math.ceil(total / limit),
      },
    };
  }

  /**
   * List conversation archives with filtering and pagination.
   */
  async findAll(
    workspaceId: string,
    query: ListConversationsDto
  ): Promise<PaginatedConversations<object>> {
    const page = query.page ?? 1;
    const limit = query.limit ?? 20;
    const skip = (page - 1) * limit;

    const where: Prisma.ConversationArchiveWhereInput = {
      workspaceId,
      ...(query.agentId ? { agentId: query.agentId } : {}),
      ...(query.startedAfter || query.startedBefore
        ? {
            startedAt: {
              ...(query.startedAfter ? { gte: new Date(query.startedAfter) } : {}),
              ...(query.startedBefore ? { lte: new Date(query.startedBefore) } : {}),
            },
          }
        : {}),
    };

    const [total, records] = await Promise.all([
      this.prisma.conversationArchive.count({ where }),
      this.prisma.conversationArchive.findMany({
        where,
        select: {
          id: true,
          workspaceId: true,
          sessionId: true,
          agentId: true,
          messageCount: true,
          summary: true,
          startedAt: true,
          endedAt: true,
          metadata: true,
          createdAt: true,
          updatedAt: true,
        },
        orderBy: { startedAt: "desc" },
        skip,
        take: limit,
      }),
    ]);

    return {
      data: records,
      pagination: {
        page,
        limit,
        total,
        totalPages: Math.ceil(total / limit),
      },
    };
  }

  /**
   * Get a single conversation archive by ID.
   */
  async findOne(workspaceId: string, id: string): Promise<object> {
    const record = await this.prisma.conversationArchive.findFirst({
      where: { id, workspaceId },
      select: {
        id: true,
        workspaceId: true,
        sessionId: true,
        agentId: true,
        messages: true,
        messageCount: true,
        summary: true,
        startedAt: true,
        endedAt: true,
        metadata: true,
        createdAt: true,
        updatedAt: true,
      },
    });

    if (!record) {
      throw new NotFoundException(`Conversation archive '${id}' not found`);
    }

    return record;
  }

  /**
   * Build text content for embedding from summary and messages.
   */
  private buildEmbeddingText(
    summary: string,
    messages: { role: string; content: string }[]
  ): string {
    const messageText = messages.map((m) => `${m.role}: ${m.content}`).join("\n");
    return `${summary}\n\n${messageText}`.trim();
  }

  /**
   * Generate embedding and store it on the conversation_archives row.
   */
  private async storeEmbedding(id: string, text: string): Promise<void> {
    const vector = await this.embedding.generateEmbedding(text);
    const embeddingStr = `[${vector.join(",")}]`;

    await this.prisma.$executeRaw`
      UPDATE conversation_archives
      SET embedding = ${embeddingStr}::vector(${EMBEDDING_DIMENSION}),
          updated_at = NOW()
      WHERE id = ${id}::uuid
    `;

    this.logger.log(`Stored embedding for conversation ${id}`);
  }
}
@@ -1,3 +0,0 @@
export { IngestConversationDto, ConversationMessageDto } from "./ingest-conversation.dto";
export { SearchConversationDto } from "./search-conversation.dto";
export { ListConversationsDto } from "./list-conversations.dto";
@@ -1,64 +0,0 @@
import {
  IsString,
  IsArray,
  IsOptional,
  IsDateString,
  MinLength,
  MaxLength,
  IsObject,
  ValidateNested,
  ArrayMinSize,
} from "class-validator";
import { Type } from "class-transformer";

/**
 * Represents a single message in a conversation session
 */
export class ConversationMessageDto {
  @IsString()
  role!: string;

  @IsString()
  @MinLength(1)
  content!: string;

  @IsOptional()
  @IsDateString()
  timestamp?: string;
}

/**
 * DTO for ingesting a conversation session log
 */
export class IngestConversationDto {
  @IsString()
  @MinLength(1)
  @MaxLength(500)
  sessionId!: string;

  @IsString()
  @MinLength(1)
  @MaxLength(500)
  agentId!: string;

  @IsArray()
  @ArrayMinSize(1)
  @ValidateNested({ each: true })
  @Type(() => ConversationMessageDto)
  messages!: ConversationMessageDto[];

  @IsString()
  @MinLength(1)
  summary!: string;

  @IsDateString()
  startedAt!: string;

  @IsOptional()
  @IsDateString()
  endedAt?: string;

  @IsOptional()
  @IsObject()
  metadata?: Record<string, unknown>;
}
@@ -1,33 +0,0 @@
import { IsString, IsOptional, MaxLength, IsInt, Min, Max, IsDateString } from "class-validator";
import { Type } from "class-transformer";

/**
 * DTO for listing/filtering conversation archives
 */
export class ListConversationsDto {
  @IsOptional()
  @IsString()
  @MaxLength(500)
  agentId?: string;

  @IsOptional()
  @IsDateString()
  startedAfter?: string;

  @IsOptional()
  @IsDateString()
  startedBefore?: string;

  @IsOptional()
  @Type(() => Number)
  @IsInt()
  @Min(1)
  page?: number;

  @IsOptional()
  @Type(() => Number)
  @IsInt()
  @Min(1)
  @Max(100)
  limit?: number;
}
@@ -1,40 +0,0 @@
import {
  IsString,
  IsOptional,
  MinLength,
  MaxLength,
  IsInt,
  Min,
  Max,
  IsNumber,
} from "class-validator";
import { Type } from "class-transformer";

/**
 * DTO for semantic search across conversation archives
 */
export class SearchConversationDto {
  @IsString()
  @MinLength(1)
  @MaxLength(1000)
  query!: string;

  @IsOptional()
  @IsString()
  @MaxLength(500)
  agentId?: string;

  @IsOptional()
  @Type(() => Number)
  @IsInt()
  @Min(1)
  @Max(100)
  limit?: number;

  @IsOptional()
  @Type(() => Number)
  @IsNumber()
  @Min(0)
  @Max(1)
  similarityThreshold?: number;
}
@@ -1,33 +0,0 @@
import { IsObject, IsOptional, IsString, IsUUID, MaxLength, MinLength } from "class-validator";

/**
 * DTO for creating a finding
 */
export class CreateFindingDto {
  @IsOptional()
  @IsUUID("4", { message: "taskId must be a valid UUID" })
  taskId?: string;

  @IsString({ message: "agentId must be a string" })
  @MinLength(1, { message: "agentId must not be empty" })
  @MaxLength(255, { message: "agentId must not exceed 255 characters" })
  agentId!: string;

  @IsString({ message: "type must be a string" })
  @MinLength(1, { message: "type must not be empty" })
  @MaxLength(100, { message: "type must not exceed 100 characters" })
  type!: string;

  @IsString({ message: "title must be a string" })
  @MinLength(1, { message: "title must not be empty" })
  @MaxLength(255, { message: "title must not exceed 255 characters" })
  title!: string;

  @IsObject({ message: "data must be an object" })
  data!: Record<string, unknown>;

  @IsString({ message: "summary must be a string" })
  @MinLength(1, { message: "summary must not be empty" })
  @MaxLength(20000, { message: "summary must not exceed 20000 characters" })
  summary!: string;
}
@@ -1,3 +0,0 @@
export { CreateFindingDto } from "./create-finding.dto";
export { QueryFindingsDto } from "./query-findings.dto";
export { SearchFindingsDto } from "./search-findings.dto";
@@ -1,32 +0,0 @@
import { Type } from "class-transformer";
import { IsInt, IsOptional, IsString, IsUUID, Max, Min } from "class-validator";

/**
 * DTO for querying findings with filters and pagination
 */
export class QueryFindingsDto {
  @IsOptional()
  @Type(() => Number)
  @IsInt({ message: "page must be an integer" })
  @Min(1, { message: "page must be at least 1" })
  page?: number;

  @IsOptional()
  @Type(() => Number)
  @IsInt({ message: "limit must be an integer" })
  @Min(1, { message: "limit must be at least 1" })
  @Max(100, { message: "limit must not exceed 100" })
  limit?: number;

  @IsOptional()
  @IsString({ message: "agentId must be a string" })
  agentId?: string;

  @IsOptional()
  @IsString({ message: "type must be a string" })
  type?: string;

  @IsOptional()
  @IsUUID("4", { message: "taskId must be a valid UUID" })
  taskId?: string;
}
@@ -1,52 +0,0 @@
import { Type } from "class-transformer";
import {
  IsInt,
  IsNumber,
  IsOptional,
  IsString,
  IsUUID,
  Max,
  MaxLength,
  Min,
} from "class-validator";

/**
 * DTO for finding semantic similarity search
 */
export class SearchFindingsDto {
  @IsString({ message: "query must be a string" })
  @MaxLength(1000, { message: "query must not exceed 1000 characters" })
  query!: string;

  @IsOptional()
  @Type(() => Number)
  @IsInt({ message: "page must be an integer" })
  @Min(1, { message: "page must be at least 1" })
  page?: number;

  @IsOptional()
  @Type(() => Number)
  @IsInt({ message: "limit must be an integer" })
  @Min(1, { message: "limit must be at least 1" })
  @Max(100, { message: "limit must not exceed 100" })
  limit?: number;

  @IsOptional()
  @Type(() => Number)
  @IsNumber({}, { message: "similarityThreshold must be a number" })
  @Min(0, { message: "similarityThreshold must be at least 0" })
  @Max(1, { message: "similarityThreshold must not exceed 1" })
  similarityThreshold?: number;

  @IsOptional()
  @IsString({ message: "agentId must be a string" })
  agentId?: string;

  @IsOptional()
  @IsString({ message: "type must be a string" })
  type?: string;

  @IsOptional()
  @IsUUID("4", { message: "taskId must be a valid UUID" })
  taskId?: string;
}
@@ -1,195 +0,0 @@
import { Test, TestingModule } from "@nestjs/testing";
import { describe, it, expect, beforeEach, vi } from "vitest";
import { FindingsController } from "./findings.controller";
import { FindingsService } from "./findings.service";
import { AuthGuard } from "../auth/guards/auth.guard";
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
import { CreateFindingDto, QueryFindingsDto, SearchFindingsDto } from "./dto";

describe("FindingsController", () => {
  let controller: FindingsController;
  let service: FindingsService;

  const mockFindingsService = {
    create: vi.fn(),
    findAll: vi.fn(),
    findOne: vi.fn(),
    search: vi.fn(),
    remove: vi.fn(),
  };

  const mockAuthGuard = {
    canActivate: vi.fn(() => true),
  };

  const mockWorkspaceGuard = {
    canActivate: vi.fn(() => true),
  };

  const mockPermissionGuard = {
    canActivate: vi.fn(() => true),
  };

  const workspaceId = "550e8400-e29b-41d4-a716-446655440001";
  const findingId = "550e8400-e29b-41d4-a716-446655440002";

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      controllers: [FindingsController],
      providers: [
        {
          provide: FindingsService,
          useValue: mockFindingsService,
        },
      ],
    })
      .overrideGuard(AuthGuard)
      .useValue(mockAuthGuard)
      .overrideGuard(WorkspaceGuard)
      .useValue(mockWorkspaceGuard)
      .overrideGuard(PermissionGuard)
      .useValue(mockPermissionGuard)
      .compile();

    controller = module.get<FindingsController>(FindingsController);
    service = module.get<FindingsService>(FindingsService);

    vi.clearAllMocks();
  });

  it("should be defined", () => {
    expect(controller).toBeDefined();
  });

  describe("create", () => {
    it("should create a finding", async () => {
      const createDto: CreateFindingDto = {
        agentId: "research-agent",
        type: "security",
        title: "SQL injection risk",
        data: { severity: "high" },
        summary: "Potential SQL injection in search endpoint.",
      };

      const createdFinding = {
        id: findingId,
        workspaceId,
        taskId: null,
        ...createDto,
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      mockFindingsService.create.mockResolvedValue(createdFinding);

      const result = await controller.create(createDto, workspaceId);

      expect(result).toEqual(createdFinding);
      expect(service.create).toHaveBeenCalledWith(workspaceId, createDto);
    });
  });

  describe("findAll", () => {
    it("should return paginated findings", async () => {
      const query: QueryFindingsDto = {
        page: 1,
        limit: 10,
        type: "security",
      };

      const response = {
        data: [],
        meta: {
          total: 0,
          page: 1,
          limit: 10,
          totalPages: 0,
        },
      };

      mockFindingsService.findAll.mockResolvedValue(response);

      const result = await controller.findAll(query, workspaceId);

      expect(result).toEqual(response);
      expect(service.findAll).toHaveBeenCalledWith(workspaceId, query);
    });
  });

  describe("findOne", () => {
    it("should return a finding", async () => {
      const finding = {
        id: findingId,
        workspaceId,
        taskId: null,
        agentId: "research-agent",
        type: "security",
        title: "SQL injection risk",
        data: { severity: "high" },
        summary: "Potential SQL injection in search endpoint.",
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      mockFindingsService.findOne.mockResolvedValue(finding);

      const result = await controller.findOne(findingId, workspaceId);

      expect(result).toEqual(finding);
      expect(service.findOne).toHaveBeenCalledWith(findingId, workspaceId);
    });
  });

  describe("search", () => {
    it("should perform semantic search", async () => {
      const searchDto: SearchFindingsDto = {
        query: "sql injection",
        limit: 5,
      };

      const response = {
        data: [
          {
            id: findingId,
            workspaceId,
            taskId: null,
            agentId: "research-agent",
            type: "security",
            title: "SQL injection risk",
            data: { severity: "high" },
            summary: "Potential SQL injection in search endpoint.",
            createdAt: new Date(),
            updatedAt: new Date(),
            score: 0.91,
          },
        ],
        meta: {
          total: 1,
          page: 1,
          limit: 5,
          totalPages: 1,
        },
        query: "sql injection",
      };

      mockFindingsService.search.mockResolvedValue(response);

      const result = await controller.search(searchDto, workspaceId);

      expect(result).toEqual(response);
      expect(service.search).toHaveBeenCalledWith(workspaceId, searchDto);
    });
  });

  describe("remove", () => {
    it("should delete a finding", async () => {
      const response = { message: "Finding deleted successfully" };
      mockFindingsService.remove.mockResolvedValue(response);

      const result = await controller.remove(findingId, workspaceId);

      expect(result).toEqual(response);
      expect(service.remove).toHaveBeenCalledWith(findingId, workspaceId);
    });
  });
});
@@ -1,81 +0,0 @@
import { Body, Controller, Delete, Get, Param, Post, Query, UseGuards } from "@nestjs/common";
import { AuthGuard } from "../auth/guards/auth.guard";
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
import { Workspace, Permission, RequirePermission } from "../common/decorators";
import { CreateFindingDto, QueryFindingsDto, SearchFindingsDto } from "./dto";
import {
  FindingsService,
  FindingsSearchResponse,
  PaginatedFindingsResponse,
} from "./findings.service";

/**
 * Controller for findings endpoints
 * All endpoints require authentication and workspace context
 */
@Controller("findings")
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
export class FindingsController {
  constructor(private readonly findingsService: FindingsService) {}

  /**
   * POST /api/findings
   * Create a new finding and embed its summary
   * Requires: MEMBER role or higher
   */
  @Post()
  @RequirePermission(Permission.WORKSPACE_MEMBER)
  async create(@Body() createFindingDto: CreateFindingDto, @Workspace() workspaceId: string) {
    return this.findingsService.create(workspaceId, createFindingDto);
  }

  /**
   * GET /api/findings
   * Get paginated findings with optional filters
   * Requires: Any workspace member
   */
  @Get()
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findAll(
    @Query() query: QueryFindingsDto,
    @Workspace() workspaceId: string
  ): Promise<PaginatedFindingsResponse> {
    return this.findingsService.findAll(workspaceId, query);
  }

  /**
   * GET /api/findings/:id
   * Get a single finding by ID
   * Requires: Any workspace member
   */
  @Get(":id")
  @RequirePermission(Permission.WORKSPACE_ANY)
  async findOne(@Param("id") id: string, @Workspace() workspaceId: string) {
    return this.findingsService.findOne(id, workspaceId);
  }

  /**
   * POST /api/findings/search
   * Semantic search findings by vector similarity
   * Requires: Any workspace member
   */
  @Post("search")
  @RequirePermission(Permission.WORKSPACE_ANY)
  async search(
    @Body() searchDto: SearchFindingsDto,
    @Workspace() workspaceId: string
  ): Promise<FindingsSearchResponse> {
    return this.findingsService.search(workspaceId, searchDto);
  }

  /**
   * DELETE /api/findings/:id
   * Delete a finding
   * Requires: ADMIN role or higher
   */
  @Delete(":id")
  @RequirePermission(Permission.WORKSPACE_ADMIN)
  async remove(@Param("id") id: string, @Workspace() workspaceId: string) {
    return this.findingsService.remove(id, workspaceId);
  }
}
@@ -1,14 +0,0 @@
import { Module } from "@nestjs/common";
import { PrismaModule } from "../prisma/prisma.module";
import { AuthModule } from "../auth/auth.module";
import { KnowledgeModule } from "../knowledge/knowledge.module";
import { FindingsController } from "./findings.controller";
import { FindingsService } from "./findings.service";

@Module({
  imports: [PrismaModule, AuthModule, KnowledgeModule],
  controllers: [FindingsController],
  providers: [FindingsService],
  exports: [FindingsService],
})
export class FindingsModule {}
@@ -1,300 +0,0 @@
import { Test, TestingModule } from "@nestjs/testing";
import { describe, it, expect, beforeEach, vi } from "vitest";
import { BadRequestException, NotFoundException } from "@nestjs/common";
import { FindingsService } from "./findings.service";
import { PrismaService } from "../prisma/prisma.service";
import { EmbeddingService } from "../knowledge/services/embedding.service";

describe("FindingsService", () => {
  let service: FindingsService;
  let prisma: PrismaService;
  let embeddingService: EmbeddingService;

  const mockWorkspaceId = "550e8400-e29b-41d4-a716-446655440001";
  const mockFindingId = "550e8400-e29b-41d4-a716-446655440002";
  const mockTaskId = "550e8400-e29b-41d4-a716-446655440003";

  const mockPrismaService = {
    finding: {
      create: vi.fn(),
      findMany: vi.fn(),
      findUnique: vi.fn(),
      count: vi.fn(),
      delete: vi.fn(),
    },
    agentTask: {
      findUnique: vi.fn(),
    },
    $queryRaw: vi.fn(),
    $executeRaw: vi.fn(),
  };

  const mockEmbeddingService = {
    isConfigured: vi.fn(),
    generateEmbedding: vi.fn(),
  };

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        FindingsService,
        {
          provide: PrismaService,
          useValue: mockPrismaService,
        },
        {
          provide: EmbeddingService,
          useValue: mockEmbeddingService,
        },
      ],
    }).compile();

    service = module.get<FindingsService>(FindingsService);
    prisma = module.get<PrismaService>(PrismaService);
    embeddingService = module.get<EmbeddingService>(EmbeddingService);

    vi.clearAllMocks();
  });

  it("should be defined", () => {
    expect(service).toBeDefined();
  });

  describe("create", () => {
    it("should create a finding and store embedding when configured", async () => {
      const createDto = {
        taskId: mockTaskId,
        agentId: "research-agent",
        type: "security",
        title: "SQL injection risk",
        data: { severity: "high" },
        summary: "Potential SQL injection in search endpoint.",
      };

      const createdFinding = {
        id: mockFindingId,
        workspaceId: mockWorkspaceId,
        ...createDto,
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      mockPrismaService.agentTask.findUnique.mockResolvedValue({
        id: mockTaskId,
        workspaceId: mockWorkspaceId,
      });
      mockPrismaService.finding.create.mockResolvedValue(createdFinding);
      mockPrismaService.finding.findUnique.mockResolvedValue(createdFinding);
      mockEmbeddingService.isConfigured.mockReturnValue(true);
      mockEmbeddingService.generateEmbedding.mockResolvedValue([0.1, 0.2, 0.3]);
      mockPrismaService.$executeRaw.mockResolvedValue(1);

      const result = await service.create(mockWorkspaceId, createDto);

      expect(result).toEqual(createdFinding);
      expect(prisma.finding.create).toHaveBeenCalledWith({
        data: expect.objectContaining({
          workspaceId: mockWorkspaceId,
          taskId: mockTaskId,
          agentId: "research-agent",
          type: "security",
          title: "SQL injection risk",
        }),
        select: expect.any(Object),
      });
      expect(embeddingService.generateEmbedding).toHaveBeenCalledWith(createDto.summary);
      expect(prisma.$executeRaw).toHaveBeenCalled();
    });

    it("should create a finding without embedding when not configured", async () => {
      const createDto = {
        agentId: "research-agent",
        type: "security",
        title: "SQL injection risk",
        data: { severity: "high" },
        summary: "Potential SQL injection in search endpoint.",
      };

      const createdFinding = {
        id: mockFindingId,
        workspaceId: mockWorkspaceId,
        taskId: null,
        ...createDto,
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      mockPrismaService.finding.create.mockResolvedValue(createdFinding);
      mockEmbeddingService.isConfigured.mockReturnValue(false);

      const result = await service.create(mockWorkspaceId, createDto);

      expect(result).toEqual(createdFinding);
      expect(embeddingService.generateEmbedding).not.toHaveBeenCalled();
      expect(prisma.$executeRaw).not.toHaveBeenCalled();
    });
  });

  describe("findAll", () => {
    it("should return paginated findings with filters", async () => {
      const findings = [
        {
          id: mockFindingId,
          workspaceId: mockWorkspaceId,
          taskId: null,
          agentId: "research-agent",
          type: "security",
          title: "SQL injection risk",
          data: { severity: "high" },
          summary: "Potential SQL injection in search endpoint.",
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      ];

      mockPrismaService.finding.findMany.mockResolvedValue(findings);
      mockPrismaService.finding.count.mockResolvedValue(1);

      const result = await service.findAll(mockWorkspaceId, {
        page: 1,
        limit: 10,
        type: "security",
        agentId: "research-agent",
      });

      expect(result).toEqual({
        data: findings,
        meta: {
          total: 1,
          page: 1,
          limit: 10,
          totalPages: 1,
        },
      });
      expect(prisma.finding.findMany).toHaveBeenCalledWith(
        expect.objectContaining({
          where: {
            workspaceId: mockWorkspaceId,
            type: "security",
            agentId: "research-agent",
          },
        })
      );
    });
  });

  describe("findOne", () => {
    it("should return a finding", async () => {
      const finding = {
        id: mockFindingId,
        workspaceId: mockWorkspaceId,
        taskId: null,
        agentId: "research-agent",
        type: "security",
        title: "SQL injection risk",
        data: { severity: "high" },
        summary: "Potential SQL injection in search endpoint.",
        createdAt: new Date(),
        updatedAt: new Date(),
      };

      mockPrismaService.finding.findUnique.mockResolvedValue(finding);

      const result = await service.findOne(mockFindingId, mockWorkspaceId);

      expect(result).toEqual(finding);
      expect(prisma.finding.findUnique).toHaveBeenCalledWith({
        where: {
          id: mockFindingId,
          workspaceId: mockWorkspaceId,
        },
        select: expect.any(Object),
      });
    });

    it("should throw when finding does not exist", async () => {
      mockPrismaService.finding.findUnique.mockResolvedValue(null);

      await expect(service.findOne(mockFindingId, mockWorkspaceId)).rejects.toThrow(
        NotFoundException
      );
    });
  });

  describe("search", () => {
    it("should throw BadRequestException when embeddings are not configured", async () => {
      mockEmbeddingService.isConfigured.mockReturnValue(false);

      await expect(
        service.search(mockWorkspaceId, {
          query: "sql injection",
        })
      ).rejects.toThrow(BadRequestException);
    });

    it("should return similarity-ranked search results", async () => {
      mockEmbeddingService.isConfigured.mockReturnValue(true);
      mockEmbeddingService.generateEmbedding.mockResolvedValue([0.1, 0.2, 0.3]);
      mockPrismaService.$queryRaw
        .mockResolvedValueOnce([
          {
            id: mockFindingId,
            workspace_id: mockWorkspaceId,
            task_id: null,
            agent_id: "research-agent",
            type: "security",
            title: "SQL injection risk",
            data: { severity: "high" },
            summary: "Potential SQL injection in search endpoint.",
            created_at: new Date(),
            updated_at: new Date(),
            score: 0.91,
          },
        ])
        .mockResolvedValueOnce([{ count: BigInt(1) }]);

      const result = await service.search(mockWorkspaceId, {
        query: "sql injection",
        page: 1,
        limit: 5,
        similarityThreshold: 0.5,
      });

      expect(result.query).toBe("sql injection");
      expect(result.data).toHaveLength(1);
      expect(result.data[0].score).toBe(0.91);
      expect(result.meta.total).toBe(1);
      expect(prisma.$queryRaw).toHaveBeenCalledTimes(2);
    });
  });

  describe("remove", () => {
    it("should delete a finding", async () => {
      mockPrismaService.finding.findUnique.mockResolvedValue({
        id: mockFindingId,
        workspaceId: mockWorkspaceId,
      });
      mockPrismaService.finding.delete.mockResolvedValue({
        id: mockFindingId,
      });

      const result = await service.remove(mockFindingId, mockWorkspaceId);

      expect(result).toEqual({ message: "Finding deleted successfully" });
      expect(prisma.finding.delete).toHaveBeenCalledWith({
        where: {
          id: mockFindingId,
          workspaceId: mockWorkspaceId,
        },
      });
    });

    it("should throw when finding does not exist", async () => {
      mockPrismaService.finding.findUnique.mockResolvedValue(null);

      await expect(service.remove(mockFindingId, mockWorkspaceId)).rejects.toThrow(
        NotFoundException
      );
    });
  });
});
@@ -1,337 +0,0 @@
import { BadRequestException, Injectable, Logger, NotFoundException } from "@nestjs/common";
import { Prisma } from "@prisma/client";
import { PrismaService } from "../prisma/prisma.service";
import { EmbeddingService } from "../knowledge/services/embedding.service";
import type { CreateFindingDto, QueryFindingsDto, SearchFindingsDto } from "./dto";

const findingSelect = {
  id: true,
  workspaceId: true,
  taskId: true,
  agentId: true,
  type: true,
  title: true,
  data: true,
  summary: true,
  createdAt: true,
  updatedAt: true,
} satisfies Prisma.FindingSelect;

type FindingRecord = Prisma.FindingGetPayload<{ select: typeof findingSelect }>;

interface RawFindingSearchResult {
  id: string;
  workspace_id: string;
  task_id: string | null;
  agent_id: string;
  type: string;
  title: string;
  data: Prisma.JsonValue;
  summary: string;
  created_at: Date;
  updated_at: Date;
  score: number;
}

export interface FindingSearchResult extends FindingRecord {
  score: number;
}

interface PaginatedMeta {
  total: number;
  page: number;
  limit: number;
  totalPages: number;
}

export interface PaginatedFindingsResponse {
  data: FindingRecord[];
  meta: PaginatedMeta;
}

export interface FindingsSearchResponse {
  data: FindingSearchResult[];
  meta: PaginatedMeta;
  query: string;
  similarityThreshold: number;
}

/**
 * Service for managing structured findings with vector search support
 */
@Injectable()
export class FindingsService {
  private readonly logger = new Logger(FindingsService.name);
  private readonly defaultSimilarityThreshold: number;

  constructor(
    private readonly prisma: PrismaService,
    private readonly embeddingService: EmbeddingService
  ) {
    const parsedThreshold = Number.parseFloat(process.env.FINDINGS_SIMILARITY_THRESHOLD ?? "0.5");

    this.defaultSimilarityThreshold =
      Number.isFinite(parsedThreshold) && parsedThreshold >= 0 && parsedThreshold <= 1
        ? parsedThreshold
        : 0.5;
  }

  /**
   * Create a finding and generate its embedding from the summary when available
   */
  async create(workspaceId: string, createFindingDto: CreateFindingDto): Promise<FindingRecord> {
    if (createFindingDto.taskId) {
      const task = await this.prisma.agentTask.findUnique({
        where: {
          id: createFindingDto.taskId,
          workspaceId,
        },
        select: { id: true },
      });

      if (!task) {
        throw new NotFoundException(`Agent task with ID ${createFindingDto.taskId} not found`);
      }
    }

    const createInput: Prisma.FindingUncheckedCreateInput = {
      workspaceId,
      agentId: createFindingDto.agentId,
      type: createFindingDto.type,
      title: createFindingDto.title,
      data: createFindingDto.data as Prisma.InputJsonValue,
      summary: createFindingDto.summary,
    };

    if (createFindingDto.taskId) {
      createInput.taskId = createFindingDto.taskId;
    }

    const finding = await this.prisma.finding.create({
      data: createInput,
      select: findingSelect,
    });

    await this.generateAndStoreEmbedding(finding.id, workspaceId, finding.summary);

    if (this.embeddingService.isConfigured()) {
      return this.findOne(finding.id, workspaceId);
    }

    return finding;
  }

  /**
   * Get paginated findings with optional filters
   */
  async findAll(workspaceId: string, query: QueryFindingsDto): Promise<PaginatedFindingsResponse> {
    const page = query.page ?? 1;
    const limit = query.limit ?? 50;
    const skip = (page - 1) * limit;

    const where: Prisma.FindingWhereInput = {
      workspaceId,
    };

    if (query.agentId) {
      where.agentId = query.agentId;
    }

    if (query.type) {
      where.type = query.type;
    }

    if (query.taskId) {
      where.taskId = query.taskId;
    }

    const [data, total] = await Promise.all([
      this.prisma.finding.findMany({
        where,
        select: findingSelect,
        orderBy: {
          createdAt: "desc",
        },
        skip,
        take: limit,
      }),
      this.prisma.finding.count({ where }),
    ]);

    return {
      data,
      meta: {
        total,
        page,
        limit,
        totalPages: Math.ceil(total / limit),
      },
    };
  }

  /**
   * Get a single finding by ID
   */
  async findOne(id: string, workspaceId: string): Promise<FindingRecord> {
    const finding = await this.prisma.finding.findUnique({
      where: {
        id,
        workspaceId,
      },
      select: findingSelect,
    });

    if (!finding) {
      throw new NotFoundException(`Finding with ID ${id} not found`);
    }

    return finding;
  }

  /**
   * Semantic search findings using vector similarity
   */
  async search(workspaceId: string, searchDto: SearchFindingsDto): Promise<FindingsSearchResponse> {
    if (!this.embeddingService.isConfigured()) {
      throw new BadRequestException(
        "Finding vector search requires OPENAI_API_KEY to be configured"
      );
    }

    const page = searchDto.page ?? 1;
    const limit = searchDto.limit ?? 20;
    const offset = (page - 1) * limit;
    const similarityThreshold = searchDto.similarityThreshold ?? this.defaultSimilarityThreshold;
    const distanceThreshold = 1 - similarityThreshold;

    const queryEmbedding = await this.embeddingService.generateEmbedding(searchDto.query);
    const embeddingString = `[${queryEmbedding.join(",")}]`;

    const agentFilter = searchDto.agentId
      ? Prisma.sql`AND f.agent_id = ${searchDto.agentId}`
      : Prisma.sql``;
    const typeFilter = searchDto.type ? Prisma.sql`AND f.type = ${searchDto.type}` : Prisma.sql``;
    const taskFilter = searchDto.taskId
      ? Prisma.sql`AND f.task_id = ${searchDto.taskId}::uuid`
      : Prisma.sql``;

    const searchResults = await this.prisma.$queryRaw<RawFindingSearchResult[]>`
      SELECT
        f.id,
        f.workspace_id,
        f.task_id,
        f.agent_id,
        f.type,
        f.title,
        f.data,
        f.summary,
        f.created_at,
        f.updated_at,
        (1 - (f.embedding <=> ${embeddingString}::vector)) AS score
      FROM findings f
      WHERE f.workspace_id = ${workspaceId}::uuid
        AND f.embedding IS NOT NULL
        ${agentFilter}
        ${typeFilter}
        ${taskFilter}
        AND (f.embedding <=> ${embeddingString}::vector) <= ${distanceThreshold}
      ORDER BY f.embedding <=> ${embeddingString}::vector
      LIMIT ${limit}
      OFFSET ${offset}
    `;

    const countResult = await this.prisma.$queryRaw<[{ count: bigint }]>`
      SELECT COUNT(*) as count
      FROM findings f
      WHERE f.workspace_id = ${workspaceId}::uuid
        AND f.embedding IS NOT NULL
        ${agentFilter}
        ${typeFilter}
        ${taskFilter}
        AND (f.embedding <=> ${embeddingString}::vector) <= ${distanceThreshold}
    `;

    const total = Number(countResult[0].count);

    const data: FindingSearchResult[] = searchResults.map((row) => ({
      id: row.id,
      workspaceId: row.workspace_id,
      taskId: row.task_id,
      agentId: row.agent_id,
      type: row.type,
      title: row.title,
      data: row.data,
      summary: row.summary,
      createdAt: row.created_at,
      updatedAt: row.updated_at,
      score: row.score,
    }));

    return {
      data,
      meta: {
        total,
        page,
        limit,
        totalPages: Math.ceil(total / limit),
      },
      query: searchDto.query,
      similarityThreshold,
    };
  }

  /**
   * Delete a finding
   */
  async remove(id: string, workspaceId: string): Promise<{ message: string }> {
    const existingFinding = await this.prisma.finding.findUnique({
      where: {
        id,
        workspaceId,
      },
      select: { id: true },
    });

    if (!existingFinding) {
      throw new NotFoundException(`Finding with ID ${id} not found`);
    }

    await this.prisma.finding.delete({
      where: {
        id,
        workspaceId,
      },
    });

    return { message: "Finding deleted successfully" };
  }

  /**
   * Generate and persist embedding for a finding summary
   */
  private async generateAndStoreEmbedding(
    findingId: string,
    workspaceId: string,
    summary: string
  ): Promise<void> {
    if (!this.embeddingService.isConfigured()) {
      return;
    }

    try {
      const embedding = await this.embeddingService.generateEmbedding(summary);
      const embeddingString = `[${embedding.join(",")}]`;

      await this.prisma.$executeRaw`
        UPDATE findings
        SET embedding = ${embeddingString}::vector,
            updated_at = NOW()
        WHERE id = ${findingId}::uuid
          AND workspace_id = ${workspaceId}::uuid
      `;
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      this.logger.warn(`Failed to generate embedding for finding ${findingId}: ${message}`);
    }
  }
}
@@ -52,22 +52,3 @@
| **Total** | **31** | **15** | **~371K** | **~175K** |

Remaining estimate: ~143K tokens (Codex budget).

## MS22 — Fleet Evolution (Phase 0: Knowledge Layer)

| id | status | milestone | description | issue | repo | branch | depends_on | blocks | agent | started_at | completed_at | estimate | used | notes |
| --------------- | ----------- | ------------ | ------------------------------------------------------------ | -------- | ----- | ------------------------------ | --------------------------------------------------------- | ------------- | ------------ | ---------- | ------------ | -------- | ---- | --------------------------------------------- |
| MS22-PLAN-001 | done | p0-knowledge | PRD + mission bootstrap + TASKS.md | TASKS:P0 | stack | feat/ms22-knowledge-schema | — | MS22-DB-001 | orchestrator | 2026-02-28 | 2026-02-28 | 10K | 8K | PRD-MS22.md, mission fleet-evolution-20260228 |
| MS22-DB-001 | done | p0-knowledge | Findings module (pgvector, CRUD, similarity search) | TASKS:P0 | api | feat/ms22-findings | MS22-PLAN-001 | — | codex | 2026-02-28 | 2026-02-28 | 20K | ~22K | PR #585 merged, CI green |
| MS22-API-001 | done | p0-knowledge | Findings API endpoints | TASKS:P0 | api | feat/ms22-findings | MS22-DB-001 | — | codex | 2026-02-28 | 2026-02-28 | — | — | Combined with DB-001 |
| MS22-DB-002 | done | p0-knowledge | AgentMemory module (key/value store, upsert) | TASKS:P0 | api | feat/ms22-agent-memory | MS22-DB-001 | — | codex | 2026-02-28 | 2026-02-28 | 15K | ~16K | PR #586 merged, CI green |
| MS22-API-002 | done | p0-knowledge | AgentMemory API endpoints | TASKS:P0 | api | feat/ms22-agent-memory | MS22-DB-002 | — | codex | 2026-02-28 | 2026-02-28 | — | — | Combined with DB-002 |
| MS22-DB-004 | done | p0-knowledge | ConversationArchive module (pgvector, ingest, search) | TASKS:P0 | api | feat/ms22-conversation-archive | MS22-DB-001 | — | codex | 2026-02-28 | 2026-02-28 | 20K | ~18K | PR #587 merged, CI green |
| MS22-API-004 | done | p0-knowledge | ConversationArchive API endpoints | TASKS:P0 | api | feat/ms22-conversation-archive | MS22-DB-004 | — | codex | 2026-02-28 | 2026-02-28 | — | — | Combined with DB-004 |
| MS22-API-005 | done | p0-knowledge | EmbeddingService (reuse existing KnowledgeModule) | TASKS:P0 | api | — | — | — | orchestrator | 2026-02-28 | 2026-02-28 | 0 | 0 | Already existed; no work needed |
| MS22-DB-003 | not-started | p0-knowledge | Task model: add assigned_agent field + migration | TASKS:P0 | api | feat/ms22-task-agent | MS22-DB-001 | MS22-API-003 | — | — | — | 8K | — | Small schema + migration only |
| MS22-API-003 | not-started | p0-knowledge | Task API: expose assigned_agent in CRUD | TASKS:P0 | api | feat/ms22-task-agent | MS22-DB-003 | MS22-TEST-001 | — | — | — | 8K | — | Extend existing TaskModule |
| MS22-TEST-001 | not-started | p0-knowledge | Integration tests: Findings + AgentMemory + ConvArchive | TASKS:P0 | api | test/ms22-integration | MS22-API-001,MS22-API-002,MS22-API-004 | MS22-VER-P0 | — | — | — | 20K | — | E2E with live postgres |
| MS22-SKILL-001 | not-started | p0-knowledge | OpenClaw mosaic skill (agents read/write findings/memory) | TASKS:P0 | stack | feat/ms22-openclaw-skill | MS22-API-001,MS22-API-002 | MS22-VER-P0 | — | — | — | 15K | — | Skill in ~/.agents/skills/mosaic/ |
| MS22-INGEST-001 | not-started | p0-knowledge | Session log ingestion pipeline (OpenClaw logs → ConvArchive) | TASKS:P0 | stack | feat/ms22-ingest | MS22-API-004 | MS22-VER-P0 | — | — | — | 20K | — | Script to batch-ingest existing logs |
| MS22-VER-P0 | not-started | p0-knowledge | Phase 0 verification: all modules deployed + smoke tested | TASKS:P0 | stack | — | MS22-TEST-001,MS22-SKILL-001,MS22-INGEST-001,MS22-API-003 | — | — | — | — | 5K | — | |

@@ -1,64 +0,0 @@
# MS22 Agent Memory Module

## Objective

Add per-agent key/value store: AgentMemory model + NestJS module with CRUD endpoints.

## Issues

- MS22-DB-002: Add AgentMemory schema model
- MS22-API-002: Add agent-memory NestJS module

## Plan

1. AgentMemory model → schema.prisma (after AgentSession, line 736)
2. Add `agentMemories AgentMemory[]` relation to Workspace model
3. Create apps/api/src/agent-memory/ with service, controller, DTOs, specs
4. Register in app.module.ts
5. Migrate: `prisma migrate dev --name ms22_agent_memory`
6. lint + build
7. Commit

## Endpoints

- PUT /api/agents/:agentId/memory/:key (upsert)
- GET /api/agents/:agentId/memory (list all)
- GET /api/agents/:agentId/memory/:key (get one)
- DELETE /api/agents/:agentId/memory/:key (remove)
|
||||
|
||||
## Auth
|
||||
|
||||
- @UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
|
||||
- @Workspace() decorator for workspaceId
|
||||
- Permission.WORKSPACE_MEMBER for write ops
|
||||
- Permission.WORKSPACE_ANY for read ops
|
||||
|
||||
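
A minimal controller sketch tying the endpoint and auth lists together. The guards and decorators are the ones this plan names; the `@Permissions()` decorator, the DTO shape, and the import paths are assumptions:

```ts
// agent-memory.controller.ts: illustrative sketch, not the merged implementation.
import { Body, Controller, Get, Param, Put, UseGuards } from "@nestjs/common";
import { AgentMemoryService } from "./agent-memory.service";
import { AuthGuard, WorkspaceGuard, PermissionGuard } from "../auth/guards"; // assumed path
import { Permission, Permissions, Workspace } from "../auth/decorators"; // assumed path
import { UpsertAgentMemoryDto } from "./dto/upsert-agent-memory.dto"; // assumed DTO: { value: unknown }

@Controller("agents/:agentId/memory")
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard) // guard chain from the Auth section
export class AgentMemoryController {
  constructor(private readonly memory: AgentMemoryService) {}

  // PUT /api/agents/:agentId/memory/:key (upsert): a write op, so WORKSPACE_MEMBER.
  @Put(":key")
  @Permissions(Permission.WORKSPACE_MEMBER) // hypothetical decorator for the plan's permission levels
  put(
    @Workspace() workspaceId: string,
    @Param("agentId") agentId: string,
    @Param("key") key: string,
    @Body() dto: UpsertAgentMemoryDto,
  ) {
    return this.memory.upsert(workspaceId, agentId, key, dto.value);
  }

  // GET /api/agents/:agentId/memory (list all): a read op, so WORKSPACE_ANY.
  @Get()
  @Permissions(Permission.WORKSPACE_ANY)
  list(@Workspace() workspaceId: string, @Param("agentId") agentId: string) {
    return this.memory.list(workspaceId, agentId);
  }
}
```
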
## Schema

```prisma
model AgentMemory {
  id          String   @id @default(uuid()) @db.Uuid
  workspaceId String   @map("workspace_id") @db.Uuid
  agentId     String   @map("agent_id")
  key         String
  value       Json
  createdAt   DateTime @default(now()) @map("created_at") @db.Timestamptz
  updatedAt   DateTime @updatedAt @map("updated_at") @db.Timestamptz

  workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)

  @@unique([workspaceId, agentId, key])
  @@index([workspaceId])
  @@index([agentId])
  @@map("agent_memories")
}
```
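
The `@@unique([workspaceId, agentId, key])` constraint is what makes the PUT endpoint a true upsert. A minimal service sketch, assuming Prisma's default compound-unique input name (`workspaceId_agentId_key`) and a project PrismaService wrapper at an assumed path:

```ts
// agent-memory.service.ts: illustrative sketch under the assumptions above.
import { Injectable } from "@nestjs/common";
import { Prisma } from "@prisma/client";
import { PrismaService } from "../prisma/prisma.service"; // assumed path

@Injectable()
export class AgentMemoryService {
  constructor(private readonly prisma: PrismaService) {}

  // Create the entry if absent, otherwise overwrite its value; the compound
  // unique key guarantees at most one row per (workspace, agent, key).
  upsert(workspaceId: string, agentId: string, key: string, value: Prisma.InputJsonValue) {
    return this.prisma.agentMemory.upsert({
      where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
      create: { workspaceId, agentId, key, value },
      update: { value },
    });
  }

  list(workspaceId: string, agentId: string) {
    return this.prisma.agentMemory.findMany({ where: { workspaceId, agentId } });
  }
}
```
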
## Progress

- [ ] Schema
- [ ] Module files
- [ ] app.module.ts
- [ ] Migration
- [ ] lint/build
- [ ] Commit
@@ -1,48 +0,0 @@
# MS22 — Conversation Archive Module

## Objective

Implement ConversationArchive module: ingest OpenClaw session logs, store with vector embeddings for semantic search.

## Deliverables

1. ConversationArchive Prisma model
2. NestJS module at apps/api/src/conversation-archive/
3. Endpoints: ingest, search, list, get-by-id
4. Register in app.module.ts
5. Migrate, lint, build, commit

## Plan

- Add model to schema.prisma (end of file)
- Add relation to Workspace model
- Create module structure: dto/, service, controller, spec, module
- Use EmbeddingService from knowledge module (import KnowledgeModule or just PrismaModule + embed inline)
- Follow pattern: AuthGuard + WorkspaceGuard + PermissionGuard
- Endpoint prefix: conversations (maps to /api/conversations)
- Vector search: $queryRaw with <=> operator (cosine distance); see the sketch after this list
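
For the vector-search bullet, a sketch of the raw query. Table and column names (conversation_archives, embedding, workspace_id) are assumptions following this milestone's snake_case mapping conventions:

```ts
// Cosine-distance search over pgvector via $queryRaw, per the plan above.
import { Prisma, PrismaClient } from "@prisma/client";

export async function searchConversations(
  prisma: PrismaClient,
  workspaceId: string,
  queryEmbedding: number[],
  limit = 10,
) {
  // pgvector accepts a bracketed text literal cast to ::vector.
  const vector = `[${queryEmbedding.join(",")}]`;
  // <=> is pgvector's cosine-distance operator; 1 - distance gives similarity.
  return prisma.$queryRaw<Array<{ id: string; title: string; similarity: number }>>(Prisma.sql`
    SELECT id, title, 1 - (embedding <=> ${vector}::vector) AS similarity
    FROM conversation_archives
    WHERE workspace_id = ${workspaceId}::uuid
    ORDER BY embedding <=> ${vector}::vector
    LIMIT ${limit}
  `);
}
```
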
## Assumptions

- ASSUMPTION: Embedding is stored inline on ConversationArchive (not a separate table) — simpler and sufficient for this use case, matches MemoryEmbedding pattern
- ASSUMPTION: Import KnowledgeModule to reuse EmbeddingService (it exports it)
- ASSUMPTION: messageCount computed server-side from messages array length on ingest (sketched below)
- ASSUMPTION: Permission level WORKSPACE_MEMBER for ingest/search, WORKSPACE_ANY for list/get
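
The ingest path pairs the second and third assumptions: embed once per conversation, compute messageCount server-side. A sketch; EmbeddingService's method signature is an assumption, since the plan only states that KnowledgeModule exports the service:

```ts
// Ingest-side sketch under the stated assumptions.
interface IngestDto {
  sessionId: string;
  title: string;
  messages: Array<{ role: "user" | "assistant"; content: string }>;
  agentId?: string;
}

export async function buildArchiveRow(
  dto: IngestDto,
  embed: (text: string) => Promise<number[]>, // stand-in for the assumed EmbeddingService call
) {
  // Embed the whole conversation as one document for semantic search.
  const text = dto.messages.map((m) => `${m.role}: ${m.content}`).join("\n");
  const embedding = await embed(text);
  return {
    ...dto,
    messageCount: dto.messages.length, // computed server-side, per the assumption above
    embedding,
  };
}
```
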
## Progress

- [ ] Schema model
- [ ] Migration
- [ ] DTOs
- [ ] Service
- [ ] Controller
- [ ] Spec
- [ ] Module
- [ ] app.module.ts registration
- [ ] Lint + build
- [ ] Commit

## Risks

- EmbeddingService is exported from knowledge.module — need to import KnowledgeModule
- Migration requires live DB (may need --skip-generate flag if no DB access)
@@ -35,8 +35,7 @@
    "docker:ps": "docker compose ps",
    "docker:build": "docker compose build",
    "docker:restart": "docker compose restart",
    "prepare": "husky || true",
    "ingest:sessions": "tsx scripts/ingest-openclaw-sessions.ts"
    "prepare": "husky || true"
  },
  "devDependencies": {
    "@typescript-eslint/eslint-plugin": "^8.26.0",
@@ -75,8 +74,7 @@
      "tough-cookie": ">=4.1.3",
      "undici": ">=6.23.0",
      "rollup": ">=4.59.0",
      "serialize-javascript": ">=7.0.3",
      "multer": ">=2.1.0"
      "serialize-javascript": ">=7.0.3"
    }
  }
}
21
pnpm-lock.yaml
generated
@@ -17,7 +17,6 @@ overrides:
  undici: '>=6.23.0'
  rollup: '>=4.59.0'
  serialize-javascript: '>=7.0.3'
  multer: '>=2.1.0'

importers:

@@ -1604,7 +1603,6 @@ packages:

  '@mosaicstack/telemetry-client@0.1.1':
    resolution: {integrity: sha512-1udg6p4cs8rhQgQ2pKCfi7EpRlJieRRhA5CIqthRQ6HQZLgQ0wH+632jEulov3rlHSM1iplIQ+AAe5DWrvSkEA==, tarball: https://git.mosaicstack.dev/api/packages/mosaic/npm/%40mosaicstack%2Ftelemetry-client/-/0.1.1/telemetry-client-0.1.1.tgz}
    engines: {node: '>=18'}

  '@mrleebo/prisma-ast@0.13.1':
    resolution: {integrity: sha512-XyroGQXcHrZdvmrGJvsA9KNeOOgGMg1Vg9OlheUsBOSKznLMDl+YChxbkboRHvtFYJEMRYmlV3uoo/njCw05iw==}

@@ -5807,6 +5805,10 @@ packages:

  mkdirp-classic@0.5.3:
    resolution: {integrity: sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==}

  mkdirp@0.5.6:
    resolution: {integrity: sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==}
    hasBin: true

  mkdirp@3.0.1:
    resolution: {integrity: sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==}
    engines: {node: '>=10'}

@@ -5835,8 +5837,8 @@

  msgpackr@1.11.5:
    resolution: {integrity: sha512-UjkUHN0yqp9RWKy0Lplhh+wlpdt9oQBYgULZOiFhV3VclSF1JnSQWZ5r9gORQlNYaUKQoR8itv7g7z1xDDuACA==}

  multer@2.1.0:
    resolution: {integrity: sha512-TBm6j41rxNohqawsxlsWsNNh/VdV4QFXcBvRcPhXaA05EZ79z0qJ2bQFpync6JBoHTeNY5Q1JpG7AlTjdlfAEA==}
  multer@2.0.2:
    resolution: {integrity: sha512-u7f2xaZ/UG8oLXHvtF/oWTRvT44p9ecwBBqTwgJVq0+4BW1g8OW01TyMEGWBHbyMOYVHXslaut7qEQ1meATXgw==}
    engines: {node: '>= 10.16.0'}

  mute-stream@2.0.0:

@@ -8840,7 +8842,7 @@ snapshots:

      '@nestjs/core': 11.1.12(@nestjs/common@11.1.12(class-transformer@0.5.1)(class-validator@0.14.3)(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/platform-express@11.1.12)(@nestjs/websockets@11.1.12)(reflect-metadata@0.2.2)(rxjs@7.8.2)
      cors: 2.8.5
      express: 5.2.1
      multer: 2.1.0
      multer: 2.0.2
      path-to-regexp: 8.3.0
      tslib: 2.8.1
    transitivePeerDependencies:

@@ -13389,6 +13391,10 @@ snapshots:

  mkdirp-classic@0.5.3: {}

  mkdirp@0.5.6:
    dependencies:
      minimist: 1.2.8

  mkdirp@3.0.1: {}

  mlly@1.8.0:

@@ -13430,12 +13436,15 @@

    optionalDependencies:
      msgpackr-extract: 3.0.3

  multer@2.1.0:
  multer@2.0.2:
    dependencies:
      append-field: 1.0.0
      busboy: 1.6.0
      concat-stream: 2.0.0
      mkdirp: 0.5.6
      object-assign: 4.1.1
      type-is: 1.6.18
      xtend: 4.0.2

  mute-stream@2.0.0: {}
@@ -1,621 +0,0 @@
import { createReadStream, constants as fsConstants } from "node:fs";
import { access, readdir, stat } from "node:fs/promises";
import { homedir } from "node:os";
import * as path from "node:path";
import * as process from "node:process";
import { createInterface } from "node:readline";

const DEFAULT_ENDPOINT = "https://mosaic-api.woltje.com/conversation-archive/ingest";

type IngestRole = "user" | "assistant";

interface IngestMessage {
  role: IngestRole;
  content: string;
  timestamp?: string;
}

interface IngestPayload {
  sessionId: string;
  workspaceId: string;
  title: string;
  messages: IngestMessage[];
  agentId?: string;
}

interface CliOptions {
  workspaceId: string;
  agentId?: string;
  since?: Date;
  sessionsDir?: string;
  endpoint: string;
}

interface ParsedSession {
  sessionId: string;
  title: string;
  messages: IngestMessage[];
  startedAt?: string;
  endedAt?: string;
  parseErrors: number;
  inferredAgentId?: string;
}

interface SendResult {
  ok: boolean;
  status: number;
  body: string;
}

interface IngestSummary {
  discovered: number;
  processed: number;
  ingested: number;
  skippedSince: number;
  skippedEmpty: number;
  skippedDuplicate: number;
  failed: number;
}

function printUsage(): void {
  console.log(
    [
      "Usage:",
      "  pnpm ingest:sessions --workspace-id <id> [--agent-id <id>] [--since <ISO date>] [--sessions-dir <path>] [--endpoint <url>]",
      "",
      "Required:",
      "  --workspace-id   Target Mosaic workspace ID",
      "",
      "Optional:",
      "  --agent-id       Agent ID to include in each ingest payload",
      "  --since          Skip sessions before this date/time (ISO8601 or YYYY-MM-DD)",
      "  --sessions-dir   Override session directory path",
      `  --endpoint       Ingest endpoint (default: ${DEFAULT_ENDPOINT})`,
    ].join("\n")
  );
}

function expandHomePath(inputPath: string): string {
  if (inputPath === "~") {
    return homedir();
  }
  if (inputPath.startsWith("~/")) {
    return path.join(homedir(), inputPath.slice(2));
  }
  return inputPath;
}

function parseSinceDate(rawDate: string): Date {
  const parsed = new Date(rawDate);
  if (Number.isNaN(parsed.getTime())) {
    throw new Error(`Invalid --since date: "${rawDate}". Use ISO8601 or YYYY-MM-DD.`);
  }
  return parsed;
}

function parseCliArgs(args: string[]): CliOptions {
  let workspaceId: string | null = null;
  let agentId: string | undefined;
  let since: Date | undefined;
  let sessionsDir: string | undefined;
  let endpoint = DEFAULT_ENDPOINT;

  for (let index = 0; index < args.length; index += 1) {
    const arg = args[index];

    if (arg === "--help" || arg === "-h") {
      printUsage();
      process.exit(0);
    }

    if (arg.startsWith("--workspace-id=")) {
      workspaceId = arg.slice("--workspace-id=".length);
      continue;
    }
    if (arg === "--workspace-id") {
      const value = args[index + 1];
      if (!value) {
        throw new Error("Missing value for --workspace-id");
      }
      workspaceId = value;
      index += 1;
      continue;
    }

    if (arg.startsWith("--agent-id=")) {
      agentId = arg.slice("--agent-id=".length);
      continue;
    }
    if (arg === "--agent-id") {
      const value = args[index + 1];
      if (!value) {
        throw new Error("Missing value for --agent-id");
      }
      agentId = value;
      index += 1;
      continue;
    }

    if (arg.startsWith("--since=")) {
      since = parseSinceDate(arg.slice("--since=".length));
      continue;
    }
    if (arg === "--since") {
      const value = args[index + 1];
      if (!value) {
        throw new Error("Missing value for --since");
      }
      since = parseSinceDate(value);
      index += 1;
      continue;
    }

    if (arg.startsWith("--sessions-dir=")) {
      sessionsDir = arg.slice("--sessions-dir=".length);
      continue;
    }
    if (arg === "--sessions-dir") {
      const value = args[index + 1];
      if (!value) {
        throw new Error("Missing value for --sessions-dir");
      }
      sessionsDir = value;
      index += 1;
      continue;
    }

    if (arg.startsWith("--endpoint=")) {
      endpoint = arg.slice("--endpoint=".length);
      continue;
    }
    if (arg === "--endpoint") {
      const value = args[index + 1];
      if (!value) {
        throw new Error("Missing value for --endpoint");
      }
      endpoint = value;
      index += 1;
      continue;
    }

    throw new Error(`Unknown flag: ${arg}`);
  }

  if (!workspaceId || workspaceId.trim().length === 0) {
    throw new Error("--workspace-id is required");
  }

  return {
    workspaceId: workspaceId.trim(),
    agentId: agentId?.trim(),
    since,
    sessionsDir: sessionsDir ? path.resolve(expandHomePath(sessionsDir)) : undefined,
    endpoint,
  };
}

function isRecord(value: unknown): value is Record<string, unknown> {
  return typeof value === "object" && value !== null;
}

function asString(value: unknown): string | null {
  return typeof value === "string" ? value : null;
}

function normalizeIsoTimestamp(value: unknown): string | null {
  if (typeof value === "string") {
    const parsed = new Date(value);
    if (!Number.isNaN(parsed.getTime())) {
      return parsed.toISOString();
    }
    return null;
  }

  if (typeof value === "number" && Number.isFinite(value)) {
    // Heuristic: values at or above 1e12 are already epoch milliseconds; smaller ones are epoch seconds.
    const millis = value >= 1_000_000_000_000 ? value : value * 1000;
    const parsed = new Date(millis);
    if (!Number.isNaN(parsed.getTime())) {
      return parsed.toISOString();
    }
  }

  return null;
}

function truncate(value: string, maxLength: number): string {
  if (value.length <= maxLength) {
    return value;
  }
  return `${value.slice(0, maxLength - 3)}...`;
}

function deriveTitle(content: string, fallbackSessionId: string): string {
  const firstLine = content
    .split(/\r?\n/u)
    .map((line) => line.trim())
    .find((line) => line.length > 0);

  if (!firstLine) {
    return `OpenClaw session ${fallbackSessionId}`;
  }

  const normalized = firstLine.replace(/\s+/gu, " ").trim();
  return truncate(normalized, 140);
}

function extractTextContent(content: unknown): string {
  if (typeof content === "string") {
    return content.trim();
  }

  if (Array.isArray(content)) {
    const parts: string[] = [];
    for (const item of content) {
      if (typeof item === "string") {
        const trimmed = item.trim();
        if (trimmed.length > 0) {
          parts.push(trimmed);
        }
        continue;
      }
      if (!isRecord(item)) {
        continue;
      }
      const itemType = asString(item.type);
      if (itemType !== null && itemType !== "text") {
        continue;
      }
      const textValue = asString(item.text);
      if (textValue && textValue.trim().length > 0) {
        parts.push(textValue.trim());
      }
    }
    return parts.join("\n\n").trim();
  }

  if (isRecord(content)) {
    const textValue = asString(content.text);
    if (textValue) {
      return textValue.trim();
    }
  }

  return "";
}
function inferAgentIdFromPath(filePath: string): string | null {
  const pathParts = filePath.split(path.sep);
  const agentsIndex = pathParts.lastIndexOf("agents");
  if (agentsIndex < 0) {
    return null;
  }
  const candidate = pathParts[agentsIndex + 1];
  return candidate && candidate.trim().length > 0 ? candidate : null;
}

async function parseSessionFile(filePath: string): Promise<ParsedSession> {
  const fallbackSessionId = path.basename(filePath, path.extname(filePath));
  const inferredAgentId = inferAgentIdFromPath(filePath) ?? undefined;

  let sessionId = fallbackSessionId;
  let title: string | null = null;
  let startedAt: string | undefined;
  let endedAt: string | undefined;
  let parseErrors = 0;
  const messages: IngestMessage[] = [];

  const readStream = createReadStream(filePath, { encoding: "utf8" });
  const lineReader = createInterface({
    input: readStream,
    crlfDelay: Number.POSITIVE_INFINITY,
  });

  for await (const rawLine of lineReader) {
    const line = rawLine.trim();
    if (line.length === 0) {
      continue;
    }

    let parsedLine: unknown;
    try {
      parsedLine = JSON.parse(line) as unknown;
    } catch {
      parseErrors += 1;
      continue;
    }

    if (!isRecord(parsedLine)) {
      parseErrors += 1;
      continue;
    }

    const eventType = asString(parsedLine.type);
    if (eventType === "session") {
      const rawSessionId = asString(parsedLine.id);
      if (rawSessionId && rawSessionId.trim().length > 0) {
        sessionId = rawSessionId;
      }

      const sessionTimestamp = normalizeIsoTimestamp(parsedLine.timestamp);
      if (sessionTimestamp) {
        startedAt ??= sessionTimestamp;
      }
      continue;
    }

    if (eventType !== "message") {
      continue;
    }

    const messageRecord = parsedLine.message;
    if (!isRecord(messageRecord)) {
      continue;
    }

    const role = asString(messageRecord.role);
    if (role !== "user" && role !== "assistant") {
      continue;
    }

    const content = extractTextContent(messageRecord.content);
    if (content.length === 0) {
      continue;
    }

    const timestamp =
      normalizeIsoTimestamp(messageRecord.timestamp) ?? normalizeIsoTimestamp(parsedLine.timestamp);

    const message: IngestMessage = {
      role,
      content,
      timestamp: timestamp ?? undefined,
    };
    messages.push(message);

    if (!title && role === "user") {
      title = deriveTitle(content, sessionId);
    }
    if (!startedAt && timestamp) {
      startedAt = timestamp;
    }
    if (timestamp) {
      endedAt = timestamp;
    }
  }

  return {
    sessionId,
    title: title ?? `OpenClaw session ${sessionId}`,
    messages,
    startedAt,
    endedAt,
    parseErrors,
    inferredAgentId,
  };
}

async function pathExists(candidatePath: string): Promise<boolean> {
  try {
    await access(candidatePath, fsConstants.F_OK);
    return true;
  } catch {
    return false;
  }
}

async function discoverSessionDirectories(overrideDir?: string): Promise<string[]> {
  if (overrideDir) {
    if (!(await pathExists(overrideDir))) {
      throw new Error(`Provided --sessions-dir does not exist: ${overrideDir}`);
    }
    return [overrideDir];
  }

  const defaultDir = path.join(homedir(), ".openclaw", "sessions");
  if (await pathExists(defaultDir)) {
    return [defaultDir];
  }

  const agentsRoot = path.join(homedir(), ".openclaw", "agents");
  if (!(await pathExists(agentsRoot))) {
    return [];
  }

  const agentEntries = await readdir(agentsRoot, { withFileTypes: true });
  const directories: string[] = [];
  for (const entry of agentEntries) {
    if (!entry.isDirectory()) {
      continue;
    }
    const sessionsDir = path.join(agentsRoot, entry.name, "sessions");
    if (await pathExists(sessionsDir)) {
      directories.push(sessionsDir);
    }
  }

  return directories.sort((left, right) => left.localeCompare(right));
}

async function discoverSessionFiles(overrideDir?: string): Promise<string[]> {
  const directories = await discoverSessionDirectories(overrideDir);
  const files: string[] = [];

  for (const directory of directories) {
    const entries = await readdir(directory, { withFileTypes: true });
    for (const entry of entries) {
      if (!entry.isFile() || !entry.name.endsWith(".jsonl")) {
        continue;
      }
      files.push(path.join(directory, entry.name));
    }
  }

  return files.sort((left, right) => left.localeCompare(right));
}

async function resolveSessionTimestamp(session: ParsedSession, filePath: string): Promise<Date> {
  const sessionTimestamp = session.startedAt ?? session.endedAt;
  if (sessionTimestamp) {
    const parsed = new Date(sessionTimestamp);
    if (!Number.isNaN(parsed.getTime())) {
      return parsed;
    }
  }

  const fileStat = await stat(filePath);
  return fileStat.mtime;
}

function buildPayload(
  options: CliOptions,
  session: ParsedSession,
  fallbackAgentId: string | undefined
): IngestPayload {
  const payload: IngestPayload = {
    sessionId: session.sessionId,
    workspaceId: options.workspaceId,
    title: session.title,
    messages: session.messages,
  };

  const selectedAgentId = options.agentId ?? fallbackAgentId;
  if (selectedAgentId && selectedAgentId.trim().length > 0) {
    payload.agentId = selectedAgentId.trim();
  }

  return payload;
}

async function sendIngestRequest(
  endpoint: string,
  token: string,
  payload: IngestPayload
): Promise<SendResult> {
  const response = await fetch(endpoint, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${token}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify(payload),
  });

  const body = await response.text();
  return {
    ok: response.ok,
    status: response.status,
    body,
  };
}

function summarizeFailureBody(body: string): string {
  const compact = body.replace(/\s+/gu, " ").trim();
  if (compact.length === 0) {
    return "(empty response body)";
  }
  return truncate(compact, 220);
}
async function main(): Promise<void> {
  const options = parseCliArgs(process.argv.slice(2));
  const token = process.env.MOSAIC_API_TOKEN;
  if (!token || token.trim().length === 0) {
    throw new Error("MOSAIC_API_TOKEN environment variable is required.");
  }

  const sessionFiles = await discoverSessionFiles(options.sessionsDir);
  if (sessionFiles.length === 0) {
    console.log("No OpenClaw session files found.");
    return;
  }

  console.log(`Discovered ${sessionFiles.length} session file(s).`);
  if (options.since) {
    console.log(`Applying --since filter at ${options.since.toISOString()}.`);
  }

  const summary: IngestSummary = {
    discovered: sessionFiles.length,
    processed: 0,
    ingested: 0,
    skippedSince: 0,
    skippedEmpty: 0,
    skippedDuplicate: 0,
    failed: 0,
  };

  for (const [index, filePath] of sessionFiles.entries()) {
    const position = `${index + 1}/${sessionFiles.length}`;
    const parsedSession = await parseSessionFile(filePath);
    summary.processed += 1;

    if (parsedSession.messages.length === 0) {
      summary.skippedEmpty += 1;
      console.log(
        `[${position}] Skipped ${parsedSession.sessionId}: no user/assistant text messages.`
      );
      continue;
    }

    const sessionDate = await resolveSessionTimestamp(parsedSession, filePath);
    if (options.since && sessionDate.getTime() < options.since.getTime()) {
      summary.skippedSince += 1;
      console.log(
        `[${position}] Skipped ${parsedSession.sessionId}: session is before --since (${sessionDate.toISOString()}).`
      );
      continue;
    }

    const payload = buildPayload(options, parsedSession, parsedSession.inferredAgentId);
    let result: SendResult;

    try {
      result = await sendIngestRequest(options.endpoint, token, payload);
    } catch (error) {
      summary.failed += 1;
      const message = error instanceof Error ? error.message : String(error);
      console.error(`[${position}] Failed ${parsedSession.sessionId}: request error: ${message}`);
      continue;
    }

    if (result.ok) {
      summary.ingested += 1;
      const note =
        parsedSession.parseErrors > 0 ? ` (parse warnings: ${parsedSession.parseErrors})` : "";
      console.log(
        `[${position}] Ingested ${parsedSession.sessionId} (${parsedSession.messages.length} messages)${note}.`
      );
      continue;
    }

    // A 409 means this session already exists in the archive; count it as a skip, not a failure.
    if (result.status === 409) {
      summary.skippedDuplicate += 1;
      console.log(`[${position}] Skipped ${parsedSession.sessionId}: already exists (409).`);
      continue;
    }

    summary.failed += 1;
    console.error(
      `[${position}] Failed ${parsedSession.sessionId}: HTTP ${result.status} ${summarizeFailureBody(result.body)}`
    );
  }

  console.log("\nIngestion summary:");
  console.log(`  Discovered: ${summary.discovered}`);
  console.log(`  Processed: ${summary.processed}`);
  console.log(`  Ingested: ${summary.ingested}`);
  console.log(`  Skipped (--since): ${summary.skippedSince}`);
  console.log(`  Skipped (empty): ${summary.skippedEmpty}`);
  console.log(`  Skipped (duplicate): ${summary.skippedDuplicate}`);
  console.log(`  Failed: ${summary.failed}`);

  if (summary.failed > 0) {
    process.exit(1);
  }
}

main().catch((error: unknown) => {
  const message = error instanceof Error ? error.message : String(error);
  console.error(`Fatal error: ${message}`);
  process.exit(1);
});