Compare commits
365 Commits
03d0c032e4
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 44fb402ef2 | |||
| f42c47e314 | |||
| 8069aeadb5 | |||
| 1f883c4c04 | |||
| 5207d8c0c9 | |||
| d1c9a747b9 | |||
| 3d669713d7 | |||
| 1a6cf113c8 | |||
| 48d734516a | |||
| 83477165d4 | |||
| c45cec3bba | |||
| b1baa70e00 | |||
| 55340dc661 | |||
| a8d426e3c0 | |||
| 40e12214cf | |||
| 892ffd637f | |||
| 394a46bef2 | |||
| 29a78890c9 | |||
| 0c88010123 | |||
| 7f94ecdc7a | |||
| 5b77774d91 | |||
| a16371c6f9 | |||
| 51d46b2e4a | |||
| 6582785ddd | |||
| ae0bebe2e0 | |||
| 173b429c62 | |||
| 7d505e75f8 | |||
| cd1c52c506 | |||
| a00f1e1fd7 | |||
| 9305cacd4a | |||
| 0d5aa5c3ae | |||
| eb34eb8104 | |||
| 5165a30fad | |||
| 6eb91c9eba | |||
| e7da4ca25e | |||
| e1e265804a | |||
| d361d00674 | |||
| 78ff8f8e70 | |||
| 2463b7b8ba | |||
| 5b235a668f | |||
| c5ab179071 | |||
| b4f4de6f7a | |||
| 2b6bed2480 | |||
| eba33fc93d | |||
| c23c33b0c5 | |||
| c5253e9d62 | |||
| e898551814 | |||
| 3607554902 | |||
| a25a77a43c | |||
| 861eff4686 | |||
| 99a4567e32 | |||
| 559c6b3831 | |||
| 631e5010b5 | |||
| 09e377ecd7 | |||
| deafcdc84b | |||
| 66d401461c | |||
| 01ae164b61 | |||
| 029c190c05 | |||
| 477d0c8fdf | |||
| 03af39def9 | |||
| dc7e0c805c | |||
| 2b010fadda | |||
| c25e753f35 | |||
| d3c8b8cadd | |||
| a3a0d7afca | |||
| ab2b68c93c | |||
| c1ec0ad7ef | |||
| e5b772f7cb | |||
| 7a46c81897 | |||
| 3688f89c37 | |||
| e59e517d5c | |||
| fab833a710 | |||
| 4294deda49 | |||
| 2fe858d61a | |||
| 512a29a240 | |||
| 8ea3c3ee67 | |||
| c4a6be5b6b | |||
| f4c1c9d816 | |||
| ac67697fe4 | |||
| 6521f655a8 | |||
| 0e74b03d9c | |||
| a925f91062 | |||
| 7106512fa9 | |||
| 1df20f0e13 | |||
| 8dab20c022 | |||
| 7073057e8d | |||
| 5e7346adc7 | |||
| d07a840f25 | |||
| 4b2e48af9c | |||
| 7b390d8be2 | |||
| e8502577b8 | |||
| af68f84dcd | |||
| b57f549d39 | |||
| 2c8d0a8daf | |||
| c939a541a7 | |||
| 895ea7fd14 | |||
| e93e7ffaa9 | |||
| 307639eca0 | |||
| 31814f181a | |||
| 5cd6b8622d | |||
| 20c9e68e1b | |||
| 127bf61fe2 | |||
| f99107fbfc | |||
| 5b782bafc9 | |||
| 85d3f930f3 | |||
| 0e6734bdae | |||
| 5bcaaeddd9 | |||
| 676a2a288b | |||
| ac16d6ed88 | |||
| 8388d49786 | |||
| 20f914ea85 | |||
| 1b84741f1a | |||
| ffc10c9a45 | |||
| 62d9ac0e5a | |||
| 8098504fb8 | |||
| 128431ba58 | |||
| d2c51eda91 | |||
| 78b643a945 | |||
| f93503ebcf | |||
| c0e679ab7c | |||
| 6ac63fe755 | |||
| 1667f28d71 | |||
| 66fe475fa1 | |||
| d39ab6aafc | |||
| 147e8ac574 | |||
| c38bfae16c | |||
| 36b4d8323d | |||
| 833662a64f | |||
| b3922e1d5b | |||
| 78b71a0ecc | |||
| dd0568cf15 | |||
| 8964226163 | |||
| 11f22a7e96 | |||
| edcff6a0e0 | |||
| e3cba37e8c | |||
| 21bf7e050f | |||
| 83d5aee53a | |||
| cc5b108b2f | |||
| 5ed0a859da | |||
| bf299bb672 | |||
| ad99cb9a03 | |||
| d05b870f08 | |||
| 1aaf5618ce | |||
| 9b2520ce1f | |||
| b110c469c4 | |||
| 859dcfc4b7 | |||
| 13aa52aa53 | |||
| 417c6ab49c | |||
| 8128eb7fbe | |||
| 7de0e734b0 | |||
| 6290fc3d53 | |||
| 9f4de1682f | |||
| 374ca7ace3 | |||
| 72c64d2eeb | |||
| 5f6c520a98 | |||
| 9a7673bea2 | |||
| 91934b9933 | |||
| 7f89682946 | |||
| 8b4c565f20 | |||
| d5ecc0b107 | |||
| a81c4a5edd | |||
| ff5a09c3fb | |||
| f93fa60fff | |||
| cc56f2cbe1 | |||
| f9cccd6965 | |||
| 90c3bbccdf | |||
| 79286e98c6 | |||
| cfd1def4a9 | |||
| f435d8e8c6 | |||
| 3d78b09064 | |||
| a7955b9b32 | |||
| 372cc100cc | |||
| 37cf813b88 | |||
| 3d5b50af11 | |||
| f30c2f790c | |||
| 05b1a93ccb | |||
| a78a8b88e1 | |||
| 172ed1d40f | |||
| ee2ddfc8b8 | |||
| 5a6d00a064 | |||
| ffda74ec12 | |||
| f97be2e6a3 | |||
| 97606713b5 | |||
| d0c720e6da | |||
| 64e817cfb8 | |||
| cd5c2218c8 | |||
| f643d2bc04 | |||
| 8957904ea9 | |||
| 458cac7cdd | |||
| 7581d26567 | |||
| 07f5225a76 | |||
| 7c55464d54 | |||
| ea1620fa7a | |||
| d218902cb0 | |||
| b43e860c40 | |||
| 716f230f72 | |||
| a5ed260fbd | |||
| 9b5c15ca56 | |||
| 74c8c376b7 | |||
| 9901fba61e | |||
| 17144b1c42 | |||
| a6f75cd587 | |||
| 06e54328d5 | |||
| 7480deff10 | |||
| 1b66417be5 | |||
| 23d610ba5b | |||
| 25ae14aba1 | |||
| 1425893318 | |||
| bc4c1f9c70 | |||
| d66451cf48 | |||
| c23ebca648 | |||
|
|
eae55bc4a3 | ||
| b5ac2630c1 | |||
| 8424a28faa | |||
| d2cec04cba | |||
| 9ac971e857 | |||
| 0c2a6b14cf | |||
| af299abdaf | |||
| fa9f173f8e | |||
| 7935d86015 | |||
| f43631671f | |||
| 8328f9509b | |||
| f72e8c2da9 | |||
| 1a668627a3 | |||
| bd3625ae1b | |||
| aeac188d40 | |||
| f219dd71a0 | |||
| 2c3c1f67ac | |||
| dedc1af080 | |||
| 3b16b2c743 | |||
|
|
6fd8e85266 | ||
|
|
d3474cdd74 | ||
| 157b702331 | |||
|
|
63c6a129bd | ||
| 4a4aee7b7c | |||
|
|
9d9a01f5f7 | ||
|
|
5bce7dbb05 | ||
|
|
ab902250f8 | ||
|
|
d34f097a5c | ||
|
|
f4ad7eba37 | ||
|
|
4d089cd020 | ||
|
|
3258cd4f4d | ||
| 35dd623ab5 | |||
|
|
758b2a839b | ||
| af113707d9 | |||
|
|
57d0f5d2a3 | ||
|
|
ad428598a9 | ||
|
|
cab8d690ab | ||
| 0a780a5062 | |||
| a1515676db | |||
|
|
254f85369b | ||
|
|
ddf6851bfd | ||
| 027fee1afa | |||
| abe57621cd | |||
| 7c7ad59002 | |||
| ca430d6fdf | |||
| 18e5f6312b | |||
| d2ed1f2817 | |||
| fb609d40e3 | |||
| 0c93be417a | |||
| b719fa0444 | |||
|
|
8961f5b18c | ||
| d58bf47cd7 | |||
|
|
c917a639c4 | ||
|
|
9d3a673e6c | ||
|
|
b96e2d7dc6 | ||
|
|
76756ad695 | ||
|
|
05ee6303c2 | ||
|
|
5328390f4c | ||
|
|
4d9b75994f | ||
|
|
d7de20e586 | ||
|
|
399d5a31c8 | ||
|
|
b675db1324 | ||
|
|
e0d6d585b3 | ||
|
|
0a2eaaa5e4 | ||
|
|
df495c67b5 | ||
|
|
3e2c1b69ea | ||
|
|
27c4c8edf3 | ||
|
|
e600cfd2d0 | ||
|
|
08e32d42a3 | ||
|
|
752e839054 | ||
|
|
8a572e8525 | ||
|
|
4f31690281 | ||
|
|
097f5f4ab6 | ||
|
|
ac492aab80 | ||
|
|
110e181272 | ||
|
|
9696e45265 | ||
|
|
7ead8b1076 | ||
|
|
3fbba135b9 | ||
|
|
c233d97ba0 | ||
|
|
f1ee0df933 | ||
|
|
07084208a7 | ||
|
|
f500300b1f | ||
|
|
24ee7c7f87 | ||
|
|
d9a3eeb9aa | ||
|
|
077bb042b7 | ||
|
|
1d7d5a9d01 | ||
|
|
2020c15545 | ||
|
|
3ab87362a9 | ||
|
|
81b5204258 | ||
|
|
9623a3be97 | ||
|
|
f37c83e280 | ||
|
|
7ebbcbf958 | ||
|
|
b316e98b64 | ||
|
|
447141f05d | ||
|
|
3b2356f5a0 | ||
|
|
d2605196ac | ||
|
|
2d59c4b2e4 | ||
|
|
a9090aca7f | ||
|
|
f6eadff5bf | ||
|
|
9ae21c4c15 | ||
|
|
976d14d94b | ||
|
|
b2eec3cf83 | ||
|
|
bd7470f5d7 | ||
| 491675b613 | |||
| 4b3eecf05a | |||
| 3376d8162e | |||
| e2ffaa71b1 | |||
| 444fa1116a | |||
| 31ce9e920c | |||
| ba54de88fd | |||
| ca21416efc | |||
| 1bad7a8cca | |||
| 6015ace1de | |||
| 92de2f282f | |||
| 1fde25760a | |||
| cf28efa880 | |||
| 11d284554d | |||
| 3cc2030446 | |||
| eca2c46e9d | |||
| c5a87df6e1 | |||
| 17ee28b6f6 | |||
| af9c5799af | |||
| dcbc8d1053 | |||
| d2c7602430 | |||
| 24065aa199 | |||
| bc86947d01 | |||
| 74d6c1092e | |||
| 28c9e6fe65 | |||
| b3d6d73348 | |||
| 527262af38 | |||
| 6c465566f6 | |||
| 7b4fda6011 | |||
| d37c78f503 | |||
| 79b1d81d27 | |||
| a943ae139a | |||
| 8e27f73f8f | |||
| b5edb4f37e | |||
| 3ae9e53bcc | |||
| 2eafa91e70 | |||
| 248f711571 | |||
| 306c2e5bd8 | |||
| 746ab20c38 | |||
| a5ee974765 | |||
| 5958569cba | |||
| d6c6af10d9 | |||
| ed23293e1a | |||
| fcecf3654b | |||
| 24c21f45b3 | |||
| 314dd24dce | |||
| 8d8d37dbf9 | |||
| c40373fa3b | |||
| 52553c8266 | |||
| 4cc43bece6 | |||
| fb53272fa9 |
204
.env.example
204
.env.example
@@ -15,11 +15,19 @@ WEB_PORT=3000
|
|||||||
# ======================
|
# ======================
|
||||||
NEXT_PUBLIC_APP_URL=http://localhost:3000
|
NEXT_PUBLIC_APP_URL=http://localhost:3000
|
||||||
NEXT_PUBLIC_API_URL=http://localhost:3001
|
NEXT_PUBLIC_API_URL=http://localhost:3001
|
||||||
|
# Frontend auth mode:
|
||||||
|
# - real: Normal auth/session flow
|
||||||
|
# - mock: Local-only seeded user for FE development (blocked outside NODE_ENV=development)
|
||||||
|
# Use `mock` locally to continue FE work when auth flow is unstable.
|
||||||
|
# If omitted, web runtime defaults:
|
||||||
|
# - development -> mock
|
||||||
|
# - production -> real
|
||||||
|
NEXT_PUBLIC_AUTH_MODE=real
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# PostgreSQL Database
|
# PostgreSQL Database
|
||||||
# ======================
|
# ======================
|
||||||
# Bundled PostgreSQL (when database profile enabled)
|
# Bundled PostgreSQL
|
||||||
# SECURITY: Change POSTGRES_PASSWORD to a strong random password in production
|
# SECURITY: Change POSTGRES_PASSWORD to a strong random password in production
|
||||||
DATABASE_URL=postgresql://mosaic:REPLACE_WITH_SECURE_PASSWORD@postgres:5432/mosaic
|
DATABASE_URL=postgresql://mosaic:REPLACE_WITH_SECURE_PASSWORD@postgres:5432/mosaic
|
||||||
POSTGRES_USER=mosaic
|
POSTGRES_USER=mosaic
|
||||||
@@ -28,7 +36,7 @@ POSTGRES_DB=mosaic
|
|||||||
POSTGRES_PORT=5432
|
POSTGRES_PORT=5432
|
||||||
|
|
||||||
# External PostgreSQL (managed service)
|
# External PostgreSQL (managed service)
|
||||||
# Disable 'database' profile and point DATABASE_URL to your external instance
|
# To use an external instance, update DATABASE_URL above
|
||||||
# Example: DATABASE_URL=postgresql://user:pass@rds.amazonaws.com:5432/mosaic
|
# Example: DATABASE_URL=postgresql://user:pass@rds.amazonaws.com:5432/mosaic
|
||||||
|
|
||||||
# PostgreSQL Performance Tuning (Optional)
|
# PostgreSQL Performance Tuning (Optional)
|
||||||
@@ -39,7 +47,7 @@ POSTGRES_MAX_CONNECTIONS=100
|
|||||||
# ======================
|
# ======================
|
||||||
# Valkey Cache (Redis-compatible)
|
# Valkey Cache (Redis-compatible)
|
||||||
# ======================
|
# ======================
|
||||||
# Bundled Valkey (when cache profile enabled)
|
# Bundled Valkey
|
||||||
VALKEY_URL=redis://valkey:6379
|
VALKEY_URL=redis://valkey:6379
|
||||||
VALKEY_HOST=valkey
|
VALKEY_HOST=valkey
|
||||||
VALKEY_PORT=6379
|
VALKEY_PORT=6379
|
||||||
@@ -47,7 +55,7 @@ VALKEY_PORT=6379
|
|||||||
VALKEY_MAXMEMORY=256mb
|
VALKEY_MAXMEMORY=256mb
|
||||||
|
|
||||||
# External Redis/Valkey (managed service)
|
# External Redis/Valkey (managed service)
|
||||||
# Disable 'cache' profile and point VALKEY_URL to your external instance
|
# To use an external instance, update VALKEY_URL above
|
||||||
# Example: VALKEY_URL=redis://elasticache.amazonaws.com:6379
|
# Example: VALKEY_URL=redis://elasticache.amazonaws.com:6379
|
||||||
# Example with auth: VALKEY_URL=redis://:password@redis.example.com:6379
|
# Example with auth: VALKEY_URL=redis://:password@redis.example.com:6379
|
||||||
|
|
||||||
@@ -61,7 +69,7 @@ KNOWLEDGE_CACHE_TTL=300
|
|||||||
# Authentication (Authentik OIDC)
|
# Authentication (Authentik OIDC)
|
||||||
# ======================
|
# ======================
|
||||||
# Set to 'true' to enable OIDC authentication with Authentik
|
# Set to 'true' to enable OIDC authentication with Authentik
|
||||||
# When enabled, OIDC_ISSUER, OIDC_CLIENT_ID, and OIDC_CLIENT_SECRET are required
|
# When enabled, OIDC_ISSUER, OIDC_CLIENT_ID, OIDC_CLIENT_SECRET, and OIDC_REDIRECT_URI are required
|
||||||
OIDC_ENABLED=false
|
OIDC_ENABLED=false
|
||||||
|
|
||||||
# Authentik Server URLs (required when OIDC_ENABLED=true)
|
# Authentik Server URLs (required when OIDC_ENABLED=true)
|
||||||
@@ -70,9 +78,9 @@ OIDC_ISSUER=https://auth.example.com/application/o/mosaic-stack/
|
|||||||
OIDC_CLIENT_ID=your-client-id-here
|
OIDC_CLIENT_ID=your-client-id-here
|
||||||
OIDC_CLIENT_SECRET=your-client-secret-here
|
OIDC_CLIENT_SECRET=your-client-secret-here
|
||||||
# Redirect URI must match what's configured in Authentik
|
# Redirect URI must match what's configured in Authentik
|
||||||
# Development: http://localhost:3001/auth/callback/authentik
|
# Development: http://localhost:3001/auth/oauth2/callback/authentik
|
||||||
# Production: https://api.mosaicstack.dev/auth/callback/authentik
|
# Production: https://mosaic-api.woltje.com/auth/oauth2/callback/authentik
|
||||||
OIDC_REDIRECT_URI=http://localhost:3001/auth/callback/authentik
|
OIDC_REDIRECT_URI=http://localhost:3001/auth/oauth2/callback/authentik
|
||||||
|
|
||||||
# Authentik PostgreSQL Database
|
# Authentik PostgreSQL Database
|
||||||
AUTHENTIK_POSTGRES_USER=authentik
|
AUTHENTIK_POSTGRES_USER=authentik
|
||||||
@@ -116,6 +124,17 @@ JWT_EXPIRATION=24h
|
|||||||
# This is used by BetterAuth for session management and CSRF protection
|
# This is used by BetterAuth for session management and CSRF protection
|
||||||
# Example: openssl rand -base64 32
|
# Example: openssl rand -base64 32
|
||||||
BETTER_AUTH_SECRET=REPLACE_WITH_RANDOM_SECRET_MINIMUM_32_CHARS
|
BETTER_AUTH_SECRET=REPLACE_WITH_RANDOM_SECRET_MINIMUM_32_CHARS
|
||||||
|
# Optional explicit BetterAuth origin for callback/error URL generation.
|
||||||
|
# When empty, backend falls back to NEXT_PUBLIC_API_URL.
|
||||||
|
BETTER_AUTH_URL=
|
||||||
|
|
||||||
|
# Trusted Origins (comma-separated list of additional trusted origins for CORS and auth)
|
||||||
|
# These are added to NEXT_PUBLIC_APP_URL and NEXT_PUBLIC_API_URL automatically
|
||||||
|
TRUSTED_ORIGINS=
|
||||||
|
|
||||||
|
# Cookie Domain (for cross-subdomain session sharing)
|
||||||
|
# Leave empty for single-domain setups. Set to ".example.com" for cross-subdomain.
|
||||||
|
COOKIE_DOMAIN=
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Encryption (Credential Security)
|
# Encryption (Credential Security)
|
||||||
@@ -196,11 +215,9 @@ NODE_ENV=development
|
|||||||
# Used by docker-compose.yml (pulls images) and docker-swarm.yml
|
# Used by docker-compose.yml (pulls images) and docker-swarm.yml
|
||||||
# For local builds, use docker-compose.build.yml instead
|
# For local builds, use docker-compose.build.yml instead
|
||||||
# Options:
|
# Options:
|
||||||
# - dev: Pull development images from registry (default, built from develop branch)
|
# - latest: Pull latest images from registry (default, built from main branch)
|
||||||
# - latest: Pull latest stable images from registry (built from main branch)
|
|
||||||
# - <commit-sha>: Use specific commit SHA tag (e.g., 658ec077)
|
|
||||||
# - <version>: Use specific version tag (e.g., v1.0.0)
|
# - <version>: Use specific version tag (e.g., v1.0.0)
|
||||||
IMAGE_TAG=dev
|
IMAGE_TAG=latest
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Docker Compose Profiles
|
# Docker Compose Profiles
|
||||||
@@ -236,12 +253,16 @@ MOSAIC_API_DOMAIN=api.mosaic.local
|
|||||||
MOSAIC_WEB_DOMAIN=mosaic.local
|
MOSAIC_WEB_DOMAIN=mosaic.local
|
||||||
MOSAIC_AUTH_DOMAIN=auth.mosaic.local
|
MOSAIC_AUTH_DOMAIN=auth.mosaic.local
|
||||||
|
|
||||||
# External Traefik network name (for upstream mode)
|
# External Traefik network name (for upstream mode and swarm)
|
||||||
# Must match the network name of your existing Traefik instance
|
# Must match the network name of your existing Traefik instance
|
||||||
TRAEFIK_NETWORK=traefik-public
|
TRAEFIK_NETWORK=traefik-public
|
||||||
|
TRAEFIK_DOCKER_NETWORK=traefik-public
|
||||||
|
|
||||||
# TLS/SSL Configuration
|
# TLS/SSL Configuration
|
||||||
TRAEFIK_TLS_ENABLED=true
|
TRAEFIK_TLS_ENABLED=true
|
||||||
|
TRAEFIK_ENTRYPOINT=websecure
|
||||||
|
# Cert resolver name (leave empty if TLS is handled externally or using self-signed certs)
|
||||||
|
TRAEFIK_CERTRESOLVER=
|
||||||
# For Let's Encrypt (production):
|
# For Let's Encrypt (production):
|
||||||
TRAEFIK_ACME_EMAIL=admin@example.com
|
TRAEFIK_ACME_EMAIL=admin@example.com
|
||||||
# For self-signed certificates (development), leave TRAEFIK_ACME_EMAIL empty
|
# For self-signed certificates (development), leave TRAEFIK_ACME_EMAIL empty
|
||||||
@@ -277,6 +298,15 @@ GITEA_WEBHOOK_SECRET=REPLACE_WITH_RANDOM_WEBHOOK_SECRET
|
|||||||
# The coordinator service uses this key to authenticate with the API
|
# The coordinator service uses this key to authenticate with the API
|
||||||
COORDINATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
COORDINATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
||||||
|
|
||||||
|
# Anthropic API Key (used by coordinator for issue parsing)
|
||||||
|
# Get your API key from: https://console.anthropic.com/
|
||||||
|
ANTHROPIC_API_KEY=REPLACE_WITH_ANTHROPIC_API_KEY
|
||||||
|
|
||||||
|
# Coordinator tuning
|
||||||
|
COORDINATOR_POLL_INTERVAL=5.0
|
||||||
|
COORDINATOR_MAX_CONCURRENT_AGENTS=10
|
||||||
|
COORDINATOR_ENABLED=true
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Rate Limiting
|
# Rate Limiting
|
||||||
# ======================
|
# ======================
|
||||||
@@ -284,17 +314,19 @@ COORDINATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
|||||||
# TTL is in seconds, limits are per TTL window
|
# TTL is in seconds, limits are per TTL window
|
||||||
|
|
||||||
# Global rate limit (applies to all endpoints unless overridden)
|
# Global rate limit (applies to all endpoints unless overridden)
|
||||||
RATE_LIMIT_TTL=60 # Time window in seconds
|
# Time window in seconds
|
||||||
RATE_LIMIT_GLOBAL_LIMIT=100 # Requests per window
|
RATE_LIMIT_TTL=60
|
||||||
|
# Requests per window
|
||||||
|
RATE_LIMIT_GLOBAL_LIMIT=100
|
||||||
|
|
||||||
# Webhook endpoints (/stitcher/webhook, /stitcher/dispatch)
|
# Webhook endpoints (/stitcher/webhook, /stitcher/dispatch) — requests per minute
|
||||||
RATE_LIMIT_WEBHOOK_LIMIT=60 # Requests per minute
|
RATE_LIMIT_WEBHOOK_LIMIT=60
|
||||||
|
|
||||||
# Coordinator endpoints (/coordinator/*)
|
# Coordinator endpoints (/coordinator/*) — requests per minute
|
||||||
RATE_LIMIT_COORDINATOR_LIMIT=100 # Requests per minute
|
RATE_LIMIT_COORDINATOR_LIMIT=100
|
||||||
|
|
||||||
# Health check endpoints (/coordinator/health)
|
# Health check endpoints (/coordinator/health) — requests per minute (higher for monitoring)
|
||||||
RATE_LIMIT_HEALTH_LIMIT=300 # Requests per minute (higher for monitoring)
|
RATE_LIMIT_HEALTH_LIMIT=300
|
||||||
|
|
||||||
# Storage backend for rate limiting (redis or memory)
|
# Storage backend for rate limiting (redis or memory)
|
||||||
# redis: Uses Valkey for distributed rate limiting (recommended for production)
|
# redis: Uses Valkey for distributed rate limiting (recommended for production)
|
||||||
@@ -321,16 +353,34 @@ RATE_LIMIT_STORAGE=redis
|
|||||||
# ======================
|
# ======================
|
||||||
# Matrix bot integration for chat-based control via Matrix protocol
|
# Matrix bot integration for chat-based control via Matrix protocol
|
||||||
# Requires a Matrix account with an access token for the bot user
|
# Requires a Matrix account with an access token for the bot user
|
||||||
# MATRIX_HOMESERVER_URL=https://matrix.example.com
|
# Set these AFTER deploying Synapse and creating the bot account.
|
||||||
# MATRIX_ACCESS_TOKEN=
|
|
||||||
# MATRIX_BOT_USER_ID=@mosaic-bot:example.com
|
|
||||||
# MATRIX_CONTROL_ROOM_ID=!roomid:example.com
|
|
||||||
# MATRIX_WORKSPACE_ID=your-workspace-uuid
|
|
||||||
#
|
#
|
||||||
# SECURITY: MATRIX_WORKSPACE_ID must be a valid workspace UUID from your database.
|
# SECURITY: MATRIX_WORKSPACE_ID must be a valid workspace UUID from your database.
|
||||||
# All Matrix commands will execute within this workspace context for proper
|
# All Matrix commands will execute within this workspace context for proper
|
||||||
# multi-tenant isolation. Each Matrix bot instance should be configured for
|
# multi-tenant isolation. Each Matrix bot instance should be configured for
|
||||||
# a single workspace.
|
# a single workspace.
|
||||||
|
MATRIX_HOMESERVER_URL=http://synapse:8008
|
||||||
|
MATRIX_ACCESS_TOKEN=
|
||||||
|
MATRIX_BOT_USER_ID=@mosaic-bot:matrix.woltje.com
|
||||||
|
MATRIX_SERVER_NAME=matrix.woltje.com
|
||||||
|
# MATRIX_CONTROL_ROOM_ID=!roomid:matrix.woltje.com
|
||||||
|
# MATRIX_WORKSPACE_ID=your-workspace-uuid
|
||||||
|
|
||||||
|
# ======================
|
||||||
|
# Matrix / Synapse Deployment
|
||||||
|
# ======================
|
||||||
|
# Domains for Traefik routing to Matrix services
|
||||||
|
MATRIX_DOMAIN=matrix.woltje.com
|
||||||
|
ELEMENT_DOMAIN=chat.woltje.com
|
||||||
|
|
||||||
|
# Synapse database (created automatically by synapse-db-init in the swarm compose)
|
||||||
|
SYNAPSE_POSTGRES_DB=synapse
|
||||||
|
SYNAPSE_POSTGRES_USER=synapse
|
||||||
|
SYNAPSE_POSTGRES_PASSWORD=REPLACE_WITH_SECURE_SYNAPSE_DB_PASSWORD
|
||||||
|
|
||||||
|
# Image tags for Matrix services
|
||||||
|
SYNAPSE_IMAGE_TAG=latest
|
||||||
|
ELEMENT_IMAGE_TAG=latest
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Orchestrator Configuration
|
# Orchestrator Configuration
|
||||||
@@ -342,6 +392,17 @@ RATE_LIMIT_STORAGE=redis
|
|||||||
# Health endpoints (/health/*) remain unauthenticated
|
# Health endpoints (/health/*) remain unauthenticated
|
||||||
ORCHESTRATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
ORCHESTRATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
||||||
|
|
||||||
|
# Runtime safety defaults (recommended for low-memory hosts)
|
||||||
|
MAX_CONCURRENT_AGENTS=2
|
||||||
|
SESSION_CLEANUP_DELAY_MS=30000
|
||||||
|
ORCHESTRATOR_QUEUE_NAME=orchestrator-tasks
|
||||||
|
ORCHESTRATOR_QUEUE_CONCURRENCY=1
|
||||||
|
ORCHESTRATOR_QUEUE_MAX_RETRIES=3
|
||||||
|
ORCHESTRATOR_QUEUE_BASE_DELAY_MS=1000
|
||||||
|
ORCHESTRATOR_QUEUE_MAX_DELAY_MS=60000
|
||||||
|
SANDBOX_DEFAULT_MEMORY_MB=256
|
||||||
|
SANDBOX_DEFAULT_CPU_LIMIT=1.0
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# AI Provider Configuration
|
# AI Provider Configuration
|
||||||
# ======================
|
# ======================
|
||||||
@@ -355,11 +416,10 @@ AI_PROVIDER=ollama
|
|||||||
# For remote Ollama: http://your-ollama-server:11434
|
# For remote Ollama: http://your-ollama-server:11434
|
||||||
OLLAMA_MODEL=llama3.1:latest
|
OLLAMA_MODEL=llama3.1:latest
|
||||||
|
|
||||||
# Claude API Configuration (when AI_PROVIDER=claude)
|
# Claude API Key
|
||||||
# OPTIONAL: Only required if AI_PROVIDER=claude
|
# Required only when AI_PROVIDER=claude.
|
||||||
# Get your API key from: https://console.anthropic.com/
|
# Get your API key from: https://console.anthropic.com/
|
||||||
# Note: Claude Max subscription users should use AI_PROVIDER=ollama instead
|
CLAUDE_API_KEY=REPLACE_WITH_CLAUDE_API_KEY
|
||||||
# CLAUDE_API_KEY=sk-ant-...
|
|
||||||
|
|
||||||
# OpenAI API Configuration (when AI_PROVIDER=openai)
|
# OpenAI API Configuration (when AI_PROVIDER=openai)
|
||||||
# OPTIONAL: Only required if AI_PROVIDER=openai
|
# OPTIONAL: Only required if AI_PROVIDER=openai
|
||||||
@@ -367,26 +427,72 @@ OLLAMA_MODEL=llama3.1:latest
|
|||||||
# OPENAI_API_KEY=sk-...
|
# OPENAI_API_KEY=sk-...
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Matrix Dev Environment (docker-compose.matrix.yml overlay)
|
# Speech Services (STT / TTS)
|
||||||
# ======================
|
# ======================
|
||||||
# These variables configure the local Matrix dev environment.
|
# Speech-to-Text (STT) - Whisper via Speaches
|
||||||
# Only used when running: docker compose -f docker/docker-compose.yml -f docker/docker-compose.matrix.yml up
|
# Set STT_ENABLED=true to enable speech-to-text transcription
|
||||||
#
|
# STT_BASE_URL is required when STT_ENABLED=true
|
||||||
# Synapse homeserver
|
STT_ENABLED=true
|
||||||
# SYNAPSE_CLIENT_PORT=8008
|
STT_BASE_URL=http://speaches:8000/v1
|
||||||
# SYNAPSE_FEDERATION_PORT=8448
|
STT_MODEL=Systran/faster-whisper-large-v3-turbo
|
||||||
# SYNAPSE_POSTGRES_DB=synapse
|
STT_LANGUAGE=en
|
||||||
# SYNAPSE_POSTGRES_USER=synapse
|
|
||||||
# SYNAPSE_POSTGRES_PASSWORD=synapse_dev_password
|
# Text-to-Speech (TTS) - Default Engine (Kokoro)
|
||||||
#
|
# Set TTS_ENABLED=true to enable text-to-speech synthesis
|
||||||
# Element Web client
|
# TTS_DEFAULT_URL is required when TTS_ENABLED=true
|
||||||
# ELEMENT_PORT=8501
|
TTS_ENABLED=true
|
||||||
#
|
TTS_DEFAULT_URL=http://kokoro-tts:8880/v1
|
||||||
# Matrix bridge connection (set after running docker/matrix/scripts/setup-bot.sh)
|
TTS_DEFAULT_VOICE=af_heart
|
||||||
# MATRIX_HOMESERVER_URL=http://localhost:8008
|
TTS_DEFAULT_FORMAT=mp3
|
||||||
# MATRIX_ACCESS_TOKEN=<obtained from setup-bot.sh>
|
|
||||||
# MATRIX_BOT_USER_ID=@mosaic-bot:localhost
|
# Text-to-Speech (TTS) - Premium Engine (Chatterbox) - Optional
|
||||||
# MATRIX_SERVER_NAME=localhost
|
# Higher quality voice cloning engine, disabled by default
|
||||||
|
# TTS_PREMIUM_URL is required when TTS_PREMIUM_ENABLED=true
|
||||||
|
TTS_PREMIUM_ENABLED=false
|
||||||
|
TTS_PREMIUM_URL=http://chatterbox-tts:8881/v1
|
||||||
|
|
||||||
|
# Text-to-Speech (TTS) - Fallback Engine (Piper/OpenedAI) - Optional
|
||||||
|
# Lightweight fallback engine, disabled by default
|
||||||
|
# TTS_FALLBACK_URL is required when TTS_FALLBACK_ENABLED=true
|
||||||
|
TTS_FALLBACK_ENABLED=false
|
||||||
|
TTS_FALLBACK_URL=http://openedai-speech:8000/v1
|
||||||
|
|
||||||
|
# Whisper model for Speaches STT engine
|
||||||
|
SPEACHES_WHISPER_MODEL=Systran/faster-whisper-large-v3-turbo
|
||||||
|
|
||||||
|
# Speech Service Limits
|
||||||
|
# Maximum upload file size in bytes (default: 25MB)
|
||||||
|
SPEECH_MAX_UPLOAD_SIZE=25000000
|
||||||
|
# Maximum audio duration in seconds (default: 600 = 10 minutes)
|
||||||
|
SPEECH_MAX_DURATION_SECONDS=600
|
||||||
|
# Maximum text length for TTS in characters (default: 4096)
|
||||||
|
SPEECH_MAX_TEXT_LENGTH=4096
|
||||||
|
|
||||||
|
# ======================
|
||||||
|
# Mosaic Telemetry (Task Completion Tracking & Predictions)
|
||||||
|
# ======================
|
||||||
|
# Telemetry tracks task completion patterns to provide time estimates and predictions.
|
||||||
|
# Data is sent to the Mosaic Telemetry API (a separate service).
|
||||||
|
|
||||||
|
# Master switch: set to false to completely disable telemetry (no HTTP calls will be made)
|
||||||
|
MOSAIC_TELEMETRY_ENABLED=true
|
||||||
|
|
||||||
|
# URL of the telemetry API server
|
||||||
|
# For Docker Compose (internal): http://telemetry-api:8000
|
||||||
|
# For production/swarm: https://tel-api.mosaicstack.dev
|
||||||
|
MOSAIC_TELEMETRY_SERVER_URL=http://telemetry-api:8000
|
||||||
|
|
||||||
|
# API key for authenticating with the telemetry server
|
||||||
|
# Generate with: openssl rand -hex 32
|
||||||
|
MOSAIC_TELEMETRY_API_KEY=your-64-char-hex-api-key-here
|
||||||
|
|
||||||
|
# Unique identifier for this Mosaic Stack instance
|
||||||
|
# Generate with: uuidgen or python -c "import uuid; print(uuid.uuid4())"
|
||||||
|
MOSAIC_TELEMETRY_INSTANCE_ID=your-instance-uuid-here
|
||||||
|
|
||||||
|
# Dry run mode: set to true to log telemetry events to console instead of sending HTTP requests
|
||||||
|
# Useful for development and debugging telemetry payloads
|
||||||
|
MOSAIC_TELEMETRY_DRY_RUN=false
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Logging & Debugging
|
# Logging & Debugging
|
||||||
|
|||||||
@@ -1,66 +0,0 @@
|
|||||||
# ==============================================
|
|
||||||
# Mosaic Stack Production Environment
|
|
||||||
# ==============================================
|
|
||||||
# Copy to .env and configure for production deployment
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# PostgreSQL Database
|
|
||||||
# ======================
|
|
||||||
# CRITICAL: Use a strong, unique password
|
|
||||||
POSTGRES_USER=mosaic
|
|
||||||
POSTGRES_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
|
|
||||||
POSTGRES_DB=mosaic
|
|
||||||
POSTGRES_SHARED_BUFFERS=256MB
|
|
||||||
POSTGRES_EFFECTIVE_CACHE_SIZE=1GB
|
|
||||||
POSTGRES_MAX_CONNECTIONS=100
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Valkey Cache
|
|
||||||
# ======================
|
|
||||||
VALKEY_MAXMEMORY=256mb
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# API Configuration
|
|
||||||
# ======================
|
|
||||||
API_PORT=3001
|
|
||||||
API_HOST=0.0.0.0
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Web Configuration
|
|
||||||
# ======================
|
|
||||||
WEB_PORT=3000
|
|
||||||
NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Authentication (Authentik OIDC)
|
|
||||||
# ======================
|
|
||||||
OIDC_ISSUER=https://auth.diversecanvas.com/application/o/mosaic-stack/
|
|
||||||
OIDC_CLIENT_ID=your-client-id
|
|
||||||
OIDC_CLIENT_SECRET=your-client-secret
|
|
||||||
OIDC_REDIRECT_URI=https://api.mosaicstack.dev/auth/callback/authentik
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# JWT Configuration
|
|
||||||
# ======================
|
|
||||||
# CRITICAL: Generate a random secret (openssl rand -base64 32)
|
|
||||||
JWT_SECRET=REPLACE_WITH_RANDOM_SECRET
|
|
||||||
JWT_EXPIRATION=24h
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Traefik Integration
|
|
||||||
# ======================
|
|
||||||
# Set to true if using external Traefik
|
|
||||||
TRAEFIK_ENABLE=true
|
|
||||||
TRAEFIK_ENTRYPOINT=websecure
|
|
||||||
TRAEFIK_TLS_ENABLED=true
|
|
||||||
TRAEFIK_DOCKER_NETWORK=traefik-public
|
|
||||||
TRAEFIK_CERTRESOLVER=letsencrypt
|
|
||||||
|
|
||||||
# Domain configuration
|
|
||||||
MOSAIC_API_DOMAIN=api.mosaicstack.dev
|
|
||||||
MOSAIC_WEB_DOMAIN=app.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Optional: Ollama
|
|
||||||
# ======================
|
|
||||||
# OLLAMA_ENDPOINT=http://ollama.diversecanvas.com:11434
|
|
||||||
@@ -1,161 +0,0 @@
|
|||||||
# ==============================================
|
|
||||||
# Mosaic Stack - Docker Swarm Configuration
|
|
||||||
# ==============================================
|
|
||||||
# Copy this file to .env for Docker Swarm deployment
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Application Ports (Internal)
|
|
||||||
# ======================
|
|
||||||
API_PORT=3001
|
|
||||||
API_HOST=0.0.0.0
|
|
||||||
WEB_PORT=3000
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Domain Configuration (Traefik)
|
|
||||||
# ======================
|
|
||||||
# These domains must be configured in your DNS or /etc/hosts
|
|
||||||
MOSAIC_API_DOMAIN=api.mosaicstack.dev
|
|
||||||
MOSAIC_WEB_DOMAIN=mosaic.mosaicstack.dev
|
|
||||||
MOSAIC_AUTH_DOMAIN=auth.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Web Configuration
|
|
||||||
# ======================
|
|
||||||
# Use the Traefik domain for the API URL
|
|
||||||
NEXT_PUBLIC_APP_URL=http://mosaic.mosaicstack.dev
|
|
||||||
NEXT_PUBLIC_API_URL=http://api.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# PostgreSQL Database
|
|
||||||
# ======================
|
|
||||||
DATABASE_URL=postgresql://mosaic:REPLACE_WITH_SECURE_PASSWORD@postgres:5432/mosaic
|
|
||||||
POSTGRES_USER=mosaic
|
|
||||||
POSTGRES_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
|
|
||||||
POSTGRES_DB=mosaic
|
|
||||||
POSTGRES_PORT=5432
|
|
||||||
|
|
||||||
# PostgreSQL Performance Tuning
|
|
||||||
POSTGRES_SHARED_BUFFERS=256MB
|
|
||||||
POSTGRES_EFFECTIVE_CACHE_SIZE=1GB
|
|
||||||
POSTGRES_MAX_CONNECTIONS=100
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Valkey Cache
|
|
||||||
# ======================
|
|
||||||
VALKEY_URL=redis://valkey:6379
|
|
||||||
VALKEY_HOST=valkey
|
|
||||||
VALKEY_PORT=6379
|
|
||||||
VALKEY_MAXMEMORY=256mb
|
|
||||||
|
|
||||||
# Knowledge Module Cache Configuration
|
|
||||||
KNOWLEDGE_CACHE_ENABLED=true
|
|
||||||
KNOWLEDGE_CACHE_TTL=300
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Authentication (Authentik OIDC)
|
|
||||||
# ======================
|
|
||||||
# NOTE: Authentik services are COMMENTED OUT in docker-compose.swarm.yml by default
|
|
||||||
# Uncomment those services if you want to run Authentik internally
|
|
||||||
# Otherwise, use external Authentik by configuring OIDC_* variables below
|
|
||||||
|
|
||||||
# External Authentik Configuration (default)
|
|
||||||
OIDC_ENABLED=true
|
|
||||||
OIDC_ISSUER=https://auth.example.com/application/o/mosaic-stack/
|
|
||||||
OIDC_CLIENT_ID=your-client-id-here
|
|
||||||
OIDC_CLIENT_SECRET=your-client-secret-here
|
|
||||||
OIDC_REDIRECT_URI=https://api.mosaicstack.dev/auth/callback/authentik
|
|
||||||
|
|
||||||
# Internal Authentik Configuration (only needed if uncommenting Authentik services)
|
|
||||||
# Authentik PostgreSQL Database
|
|
||||||
AUTHENTIK_POSTGRES_USER=authentik
|
|
||||||
AUTHENTIK_POSTGRES_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
|
|
||||||
AUTHENTIK_POSTGRES_DB=authentik
|
|
||||||
|
|
||||||
# Authentik Server Configuration
|
|
||||||
AUTHENTIK_SECRET_KEY=REPLACE_WITH_RANDOM_SECRET_MINIMUM_50_CHARS
|
|
||||||
AUTHENTIK_ERROR_REPORTING=false
|
|
||||||
AUTHENTIK_BOOTSTRAP_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
|
|
||||||
AUTHENTIK_BOOTSTRAP_EMAIL=admin@mosaicstack.dev
|
|
||||||
AUTHENTIK_COOKIE_DOMAIN=.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# JWT Configuration
|
|
||||||
# ======================
|
|
||||||
JWT_SECRET=REPLACE_WITH_RANDOM_SECRET_MINIMUM_32_CHARS
|
|
||||||
JWT_EXPIRATION=24h
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Encryption (Credential Security)
|
|
||||||
# ======================
|
|
||||||
# Generate with: openssl rand -hex 32
|
|
||||||
ENCRYPTION_KEY=REPLACE_WITH_64_CHAR_HEX_STRING_GENERATE_WITH_OPENSSL_RAND_HEX_32
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# OpenBao Secrets Management
|
|
||||||
# ======================
|
|
||||||
OPENBAO_ADDR=http://openbao:8200
|
|
||||||
OPENBAO_PORT=8200
|
|
||||||
# For development only - remove in production
|
|
||||||
OPENBAO_DEV_ROOT_TOKEN_ID=root
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Ollama (Optional AI Service)
|
|
||||||
# ======================
|
|
||||||
OLLAMA_ENDPOINT=http://ollama:11434
|
|
||||||
OLLAMA_PORT=11434
|
|
||||||
OLLAMA_EMBEDDING_MODEL=mxbai-embed-large
|
|
||||||
|
|
||||||
# Semantic Search Configuration
|
|
||||||
SEMANTIC_SEARCH_SIMILARITY_THRESHOLD=0.5
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# OpenAI API (Optional)
|
|
||||||
# ======================
|
|
||||||
# OPENAI_API_KEY=sk-...
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Application Environment
|
|
||||||
# ======================
|
|
||||||
NODE_ENV=production
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Gitea Integration (Coordinator)
|
|
||||||
# ======================
|
|
||||||
GITEA_URL=https://git.mosaicstack.dev
|
|
||||||
GITEA_BOT_USERNAME=mosaic
|
|
||||||
GITEA_BOT_TOKEN=REPLACE_WITH_COORDINATOR_BOT_API_TOKEN
|
|
||||||
GITEA_BOT_PASSWORD=REPLACE_WITH_COORDINATOR_BOT_PASSWORD
|
|
||||||
GITEA_REPO_OWNER=mosaic
|
|
||||||
GITEA_REPO_NAME=stack
|
|
||||||
GITEA_WEBHOOK_SECRET=REPLACE_WITH_RANDOM_WEBHOOK_SECRET
|
|
||||||
COORDINATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Coordinator Service
|
|
||||||
# ======================
|
|
||||||
ANTHROPIC_API_KEY=REPLACE_WITH_ANTHROPIC_API_KEY
|
|
||||||
COORDINATOR_POLL_INTERVAL=5.0
|
|
||||||
COORDINATOR_MAX_CONCURRENT_AGENTS=10
|
|
||||||
COORDINATOR_ENABLED=true
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Rate Limiting
|
|
||||||
# ======================
|
|
||||||
RATE_LIMIT_TTL=60
|
|
||||||
RATE_LIMIT_GLOBAL_LIMIT=100
|
|
||||||
RATE_LIMIT_WEBHOOK_LIMIT=60
|
|
||||||
RATE_LIMIT_COORDINATOR_LIMIT=100
|
|
||||||
RATE_LIMIT_HEALTH_LIMIT=300
|
|
||||||
RATE_LIMIT_STORAGE=redis
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Orchestrator Configuration
|
|
||||||
# ======================
|
|
||||||
ORCHESTRATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
|
||||||
CLAUDE_API_KEY=REPLACE_WITH_CLAUDE_API_KEY
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Logging & Debugging
|
|
||||||
# ======================
|
|
||||||
LOG_LEVEL=info
|
|
||||||
DEBUG=false
|
|
||||||
10
.gitignore
vendored
10
.gitignore
vendored
@@ -59,3 +59,13 @@ yarn-error.log*
|
|||||||
|
|
||||||
# Orchestrator reports (generated by QA automation, cleaned up after processing)
|
# Orchestrator reports (generated by QA automation, cleaned up after processing)
|
||||||
docs/reports/qa-automation/
|
docs/reports/qa-automation/
|
||||||
|
|
||||||
|
# Repo-local orchestrator runtime artifacts
|
||||||
|
.mosaic/orchestrator/orchestrator.pid
|
||||||
|
.mosaic/orchestrator/state.json
|
||||||
|
.mosaic/orchestrator/tasks.json
|
||||||
|
.mosaic/orchestrator/matrix_state.json
|
||||||
|
.mosaic/orchestrator/logs/*.log
|
||||||
|
.mosaic/orchestrator/results/*
|
||||||
|
!.mosaic/orchestrator/logs/.gitkeep
|
||||||
|
!.mosaic/orchestrator/results/.gitkeep
|
||||||
|
|||||||
15
.mosaic/README.md
Normal file
15
.mosaic/README.md
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
# Repo Mosaic Linkage
|
||||||
|
|
||||||
|
This repository is attached to the machine-wide Mosaic framework.
|
||||||
|
|
||||||
|
## Load Order for Agents
|
||||||
|
|
||||||
|
1. `~/.config/mosaic/STANDARDS.md`
|
||||||
|
2. `AGENTS.md` (this repository)
|
||||||
|
3. `.mosaic/repo-hooks.sh` (repo-specific automation hooks)
|
||||||
|
|
||||||
|
## Purpose
|
||||||
|
|
||||||
|
- Keep universal standards in `~/.config/mosaic`
|
||||||
|
- Keep repo-specific behavior in this repo
|
||||||
|
- Avoid copying large runtime configs into each project
|
||||||
18
.mosaic/orchestrator/config.json
Normal file
18
.mosaic/orchestrator/config.json
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"enabled": true,
|
||||||
|
"transport": "matrix",
|
||||||
|
"matrix": {
|
||||||
|
"control_room_id": "",
|
||||||
|
"workspace_id": "",
|
||||||
|
"homeserver_url": "",
|
||||||
|
"access_token": "",
|
||||||
|
"bot_user_id": ""
|
||||||
|
},
|
||||||
|
"worker": {
|
||||||
|
"runtime": "codex",
|
||||||
|
"command_template": "bash scripts/agent/orchestrator-worker.sh {task_file}",
|
||||||
|
"timeout_seconds": 7200,
|
||||||
|
"max_attempts": 1
|
||||||
|
},
|
||||||
|
"quality_gates": ["pnpm lint", "pnpm typecheck", "pnpm test"]
|
||||||
|
}
|
||||||
1
.mosaic/orchestrator/logs/.gitkeep
Normal file
1
.mosaic/orchestrator/logs/.gitkeep
Normal file
@@ -0,0 +1 @@
|
|||||||
|
|
||||||
90
.mosaic/orchestrator/mission.json
Normal file
90
.mosaic/orchestrator/mission.json
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
{
|
||||||
|
"schema_version": 1,
|
||||||
|
"mission_id": "ms21-multi-tenant-rbac-data-migration-20260228",
|
||||||
|
"name": "MS21 Multi-Tenant RBAC Data Migration",
|
||||||
|
"description": "Build multi-tenant user/workspace/team management, break-glass auth, RBAC UI enforcement, and migrate jarvis-brain data into Mosaic Stack",
|
||||||
|
"project_path": "/home/jwoltje/src/mosaic-stack",
|
||||||
|
"created_at": "2026-02-28T17:10:22Z",
|
||||||
|
"status": "active",
|
||||||
|
"task_prefix": "MS21",
|
||||||
|
"quality_gates": "pnpm lint && pnpm build && pnpm test",
|
||||||
|
"milestone_version": "0.0.21",
|
||||||
|
"milestones": [
|
||||||
|
{
|
||||||
|
"id": "phase-1",
|
||||||
|
"name": "Schema and Admin API",
|
||||||
|
"status": "pending",
|
||||||
|
"branch": "schema-and-admin-api",
|
||||||
|
"issue_ref": "",
|
||||||
|
"started_at": "",
|
||||||
|
"completed_at": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "phase-2",
|
||||||
|
"name": "Break-Glass Authentication",
|
||||||
|
"status": "pending",
|
||||||
|
"branch": "break-glass-authentication",
|
||||||
|
"issue_ref": "",
|
||||||
|
"started_at": "",
|
||||||
|
"completed_at": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "phase-3",
|
||||||
|
"name": "Data Migration",
|
||||||
|
"status": "pending",
|
||||||
|
"branch": "data-migration",
|
||||||
|
"issue_ref": "",
|
||||||
|
"started_at": "",
|
||||||
|
"completed_at": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "phase-4",
|
||||||
|
"name": "Admin UI",
|
||||||
|
"status": "pending",
|
||||||
|
"branch": "admin-ui",
|
||||||
|
"issue_ref": "",
|
||||||
|
"started_at": "",
|
||||||
|
"completed_at": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "phase-5",
|
||||||
|
"name": "RBAC UI Enforcement",
|
||||||
|
"status": "pending",
|
||||||
|
"branch": "rbac-ui-enforcement",
|
||||||
|
"issue_ref": "",
|
||||||
|
"started_at": "",
|
||||||
|
"completed_at": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "phase-6",
|
||||||
|
"name": "Verification",
|
||||||
|
"status": "pending",
|
||||||
|
"branch": "verification",
|
||||||
|
"issue_ref": "",
|
||||||
|
"started_at": "",
|
||||||
|
"completed_at": ""
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"sessions": [
|
||||||
|
{
|
||||||
|
"session_id": "sess-001",
|
||||||
|
"runtime": "unknown",
|
||||||
|
"started_at": "2026-02-28T17:48:51Z",
|
||||||
|
"ended_at": "",
|
||||||
|
"ended_reason": "",
|
||||||
|
"milestone_at_end": "",
|
||||||
|
"tasks_completed": [],
|
||||||
|
"last_task_id": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"session_id": "sess-002",
|
||||||
|
"runtime": "unknown",
|
||||||
|
"started_at": "2026-02-28T20:30:13Z",
|
||||||
|
"ended_at": "",
|
||||||
|
"ended_reason": "",
|
||||||
|
"milestone_at_end": "",
|
||||||
|
"tasks_completed": [],
|
||||||
|
"last_task_id": ""
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
1
.mosaic/orchestrator/results/.gitkeep
Normal file
1
.mosaic/orchestrator/results/.gitkeep
Normal file
@@ -0,0 +1 @@
|
|||||||
|
|
||||||
8
.mosaic/orchestrator/session.lock
Normal file
8
.mosaic/orchestrator/session.lock
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"session_id": "sess-002",
|
||||||
|
"runtime": "unknown",
|
||||||
|
"pid": 3178395,
|
||||||
|
"started_at": "2026-02-28T20:30:13Z",
|
||||||
|
"project_path": "/tmp/ms21-ui-001",
|
||||||
|
"milestone_id": ""
|
||||||
|
}
|
||||||
10
.mosaic/quality-rails.yml
Normal file
10
.mosaic/quality-rails.yml
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
enabled: false
|
||||||
|
template: ""
|
||||||
|
|
||||||
|
# Set enabled: true and choose one template:
|
||||||
|
# - typescript-node
|
||||||
|
# - typescript-nextjs
|
||||||
|
# - monorepo
|
||||||
|
#
|
||||||
|
# Apply manually:
|
||||||
|
# ~/.config/mosaic/bin/mosaic-quality-apply --template <template> --target <repo>
|
||||||
29
.mosaic/repo-hooks.sh
Executable file
29
.mosaic/repo-hooks.sh
Executable file
@@ -0,0 +1,29 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# Repo-specific hooks used by scripts/agent/*.sh for Mosaic Stack.
|
||||||
|
|
||||||
|
mosaic_hook_session_start() {
|
||||||
|
echo "[mosaic-stack] Branch: $(git rev-parse --abbrev-ref HEAD)"
|
||||||
|
echo "[mosaic-stack] Remotes:"
|
||||||
|
git remote -v | sed 's/^/[mosaic-stack] /'
|
||||||
|
if command -v node >/dev/null 2>&1; then
|
||||||
|
echo "[mosaic-stack] Node: $(node -v)"
|
||||||
|
fi
|
||||||
|
if command -v pnpm >/dev/null 2>&1; then
|
||||||
|
echo "[mosaic-stack] pnpm: $(pnpm -v)"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
mosaic_hook_critical() {
|
||||||
|
echo "[mosaic-stack] Recent commits:"
|
||||||
|
git log --oneline --decorate -n 5 | sed 's/^/[mosaic-stack] /'
|
||||||
|
echo "[mosaic-stack] Open TODO/FIXME markers (top 20):"
|
||||||
|
rg -n "(TODO|FIXME|HACK|SECURITY)" apps packages plugins docs --glob '!**/node_modules/**' -S \
|
||||||
|
| head -n 20 \
|
||||||
|
| sed 's/^/[mosaic-stack] /' \
|
||||||
|
|| true
|
||||||
|
}
|
||||||
|
|
||||||
|
mosaic_hook_session_end() {
|
||||||
|
echo "[mosaic-stack] Working tree summary:"
|
||||||
|
git status --short | sed 's/^/[mosaic-stack] /' || true
|
||||||
|
}
|
||||||
3
.npmrc
Normal file
3
.npmrc
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
@mosaicstack:registry=https://git.mosaicstack.dev/api/packages/mosaic/npm/
|
||||||
|
supportedArchitectures[libc][]=glibc
|
||||||
|
supportedArchitectures[cpu][]=x64
|
||||||
19
.trivyignore
19
.trivyignore
@@ -6,7 +6,7 @@
|
|||||||
# - npm bundled CVEs (5): npm removed from production Node.js images
|
# - npm bundled CVEs (5): npm removed from production Node.js images
|
||||||
# - Node.js 20 → 24 LTS migration (#367): base images updated
|
# - Node.js 20 → 24 LTS migration (#367): base images updated
|
||||||
#
|
#
|
||||||
# REMAINING: OpenBao (5 CVEs) + Next.js bundled tar (3 CVEs)
|
# REMAINING: OpenBao (5 CVEs) + Next.js bundled tar/minimatch (5 CVEs)
|
||||||
# Re-evaluate when upgrading openbao image beyond 2.5.0 or Next.js beyond 16.1.6.
|
# Re-evaluate when upgrading openbao image beyond 2.5.0 or Next.js beyond 16.1.6.
|
||||||
|
|
||||||
# === OpenBao false positives ===
|
# === OpenBao false positives ===
|
||||||
@@ -17,17 +17,26 @@ CVE-2024-9180 # HIGH: privilege escalation (fixed in 2.0.3)
|
|||||||
CVE-2025-59043 # HIGH: DoS via malicious JSON (fixed in 2.4.1)
|
CVE-2025-59043 # HIGH: DoS via malicious JSON (fixed in 2.4.1)
|
||||||
CVE-2025-64761 # HIGH: identity group root escalation (fixed in 2.4.4)
|
CVE-2025-64761 # HIGH: identity group root escalation (fixed in 2.4.4)
|
||||||
|
|
||||||
# === Next.js bundled tar CVEs (upstream — waiting on Next.js release) ===
|
# === Next.js bundled tar/minimatch CVEs (upstream — waiting on Next.js release) ===
|
||||||
# Next.js 16.1.6 bundles tar@7.5.2 in next/dist/compiled/tar/ (pre-compiled).
|
# Next.js 16.1.6 bundles tar@7.5.2 and minimatch@9.0.5 in next/dist/compiled/ (pre-compiled).
|
||||||
# This is NOT a pnpm dependency — it's embedded in the Next.js package itself.
|
# These are NOT pnpm dependencies — they're embedded in the Next.js package itself.
|
||||||
|
# pnpm overrides cannot reach these; only a Next.js upgrade can fix them.
|
||||||
# Affects web image only (orchestrator and API are clean).
|
# Affects web image only (orchestrator and API are clean).
|
||||||
# npm was also removed from all production images, eliminating the npm-bundled copy.
|
# npm was also removed from all production images, eliminating the npm-bundled copy.
|
||||||
# To resolve: upgrade Next.js when a release bundles tar >= 7.5.7.
|
# To resolve: upgrade Next.js when a release bundles tar >= 7.5.8 and minimatch >= 10.2.1.
|
||||||
CVE-2026-23745 # HIGH: tar arbitrary file overwrite via unsanitized linkpaths (fixed in 7.5.3)
|
CVE-2026-23745 # HIGH: tar arbitrary file overwrite via unsanitized linkpaths (fixed in 7.5.3)
|
||||||
CVE-2026-23950 # HIGH: tar arbitrary file overwrite via Unicode path collision (fixed in 7.5.4)
|
CVE-2026-23950 # HIGH: tar arbitrary file overwrite via Unicode path collision (fixed in 7.5.4)
|
||||||
CVE-2026-24842 # HIGH: tar arbitrary file creation via hardlink path traversal (needs tar >= 7.5.7)
|
CVE-2026-24842 # HIGH: tar arbitrary file creation via hardlink path traversal (needs tar >= 7.5.7)
|
||||||
|
CVE-2026-26960 # HIGH: tar arbitrary file read/write via malicious archive hardlink (needs tar >= 7.5.8)
|
||||||
|
CVE-2026-26996 # HIGH: minimatch DoS via specially crafted glob patterns (needs minimatch >= 10.2.1)
|
||||||
|
|
||||||
# === OpenBao Go stdlib (waiting on upstream rebuild) ===
|
# === OpenBao Go stdlib (waiting on upstream rebuild) ===
|
||||||
# OpenBao 2.5.0 compiled with Go 1.25.6, fix needs Go >= 1.25.7.
|
# OpenBao 2.5.0 compiled with Go 1.25.6, fix needs Go >= 1.25.7.
|
||||||
# Cannot build OpenBao from source (large project). Waiting for upstream release.
|
# Cannot build OpenBao from source (large project). Waiting for upstream release.
|
||||||
CVE-2025-68121 # CRITICAL: crypto/tls session resumption
|
CVE-2025-68121 # CRITICAL: crypto/tls session resumption
|
||||||
|
|
||||||
|
# === multer CVEs (upstream via @nestjs/platform-express) ===
|
||||||
|
# multer <2.1.0 — waiting on NestJS to update their dependency
|
||||||
|
# These are DoS vulnerabilities in file upload handling
|
||||||
|
GHSA-xf7r-hgr6-v32p # HIGH: DoS via incomplete cleanup
|
||||||
|
GHSA-v52c-386h-88mc # HIGH: DoS via resource exhaustion
|
||||||
|
|||||||
@@ -85,12 +85,11 @@ install -> [ruff-check, mypy, security-bandit, security-pip-audit, test]
|
|||||||
|
|
||||||
## Image Tagging
|
## Image Tagging
|
||||||
|
|
||||||
| Condition | Tag | Purpose |
|
| Condition | Tag | Purpose |
|
||||||
| ---------------- | -------------------------- | -------------------------- |
|
| ------------- | -------------------------- | -------------------------- |
|
||||||
| Always | `${CI_COMMIT_SHA:0:8}` | Immutable commit reference |
|
| Always | `${CI_COMMIT_SHA:0:8}` | Immutable commit reference |
|
||||||
| `main` branch | `latest` | Current production release |
|
| `main` branch | `latest` | Current latest build |
|
||||||
| `develop` branch | `dev` | Current development build |
|
| Git tag | tag value (e.g., `v1.0.0`) | Semantic version release |
|
||||||
| Git tag | tag value (e.g., `v1.0.0`) | Semantic version release |
|
|
||||||
|
|
||||||
## Required Secrets
|
## Required Secrets
|
||||||
|
|
||||||
@@ -138,5 +137,5 @@ Fails on blockers or critical/high severity security findings.
|
|||||||
|
|
||||||
### Pipeline runs Docker builds on pull requests
|
### Pipeline runs Docker builds on pull requests
|
||||||
|
|
||||||
- Docker build steps have `when: branch: [main, develop]` guards
|
- Docker build steps have `when: branch: [main]` guards
|
||||||
- PRs only run quality gates, not Docker builds
|
- PRs only run quality gates, not Docker builds
|
||||||
|
|||||||
@@ -1,235 +0,0 @@
|
|||||||
# API Pipeline - Mosaic Stack
|
|
||||||
# Quality gates, build, and Docker publish for @mosaic/api
|
|
||||||
#
|
|
||||||
# Triggers on: apps/api/**, packages/**, root configs
|
|
||||||
# Security chain: source audit + Trivy container scan
|
|
||||||
|
|
||||||
when:
|
|
||||||
- event: [push, pull_request, manual]
|
|
||||||
path:
|
|
||||||
include:
|
|
||||||
- "apps/api/**"
|
|
||||||
- "packages/**"
|
|
||||||
- "pnpm-lock.yaml"
|
|
||||||
- "pnpm-workspace.yaml"
|
|
||||||
- "turbo.json"
|
|
||||||
- "package.json"
|
|
||||||
- ".woodpecker/api.yml"
|
|
||||||
|
|
||||||
variables:
|
|
||||||
- &node_image "node:24-alpine"
|
|
||||||
- &install_deps |
|
|
||||||
corepack enable
|
|
||||||
pnpm install --frozen-lockfile
|
|
||||||
- &use_deps |
|
|
||||||
corepack enable
|
|
||||||
- &kaniko_setup |
|
|
||||||
mkdir -p /kaniko/.docker
|
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
|
||||||
|
|
||||||
services:
|
|
||||||
postgres:
|
|
||||||
image: postgres:17.7-alpine3.22
|
|
||||||
environment:
|
|
||||||
POSTGRES_DB: test_db
|
|
||||||
POSTGRES_USER: test_user
|
|
||||||
POSTGRES_PASSWORD: test_password
|
|
||||||
|
|
||||||
steps:
|
|
||||||
# === Quality Gates ===
|
|
||||||
|
|
||||||
install:
|
|
||||||
image: *node_image
|
|
||||||
commands:
|
|
||||||
- *install_deps
|
|
||||||
|
|
||||||
security-audit:
|
|
||||||
image: *node_image
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm audit --audit-level=high
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
lint:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/api" lint
|
|
||||||
depends_on:
|
|
||||||
- prisma-generate
|
|
||||||
- build-shared
|
|
||||||
|
|
||||||
prisma-generate:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/api" prisma:generate
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
build-shared:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/shared" build
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
typecheck:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/api" typecheck
|
|
||||||
depends_on:
|
|
||||||
- prisma-generate
|
|
||||||
- build-shared
|
|
||||||
|
|
||||||
prisma-migrate:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/api" prisma migrate deploy
|
|
||||||
depends_on:
|
|
||||||
- prisma-generate
|
|
||||||
|
|
||||||
test:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
|
|
||||||
ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/api" exec vitest run --exclude 'src/auth/auth-rls.integration.spec.ts' --exclude 'src/credentials/user-credential.model.spec.ts' --exclude 'src/job-events/job-events.performance.spec.ts' --exclude 'src/knowledge/services/fulltext-search.spec.ts'
|
|
||||||
depends_on:
|
|
||||||
- prisma-migrate
|
|
||||||
|
|
||||||
# === Build ===
|
|
||||||
|
|
||||||
build:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
NODE_ENV: "production"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm turbo build --filter=@mosaic/api
|
|
||||||
depends_on:
|
|
||||||
- lint
|
|
||||||
- typecheck
|
|
||||||
- test
|
|
||||||
- security-audit
|
|
||||||
|
|
||||||
# === Docker Build & Push ===
|
|
||||||
|
|
||||||
docker-build-api:
|
|
||||||
image: gcr.io/kaniko-project/executor:debug
|
|
||||||
environment:
|
|
||||||
GITEA_USER:
|
|
||||||
from_secret: gitea_username
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
|
||||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
|
||||||
commands:
|
|
||||||
- *kaniko_setup
|
|
||||||
- |
|
|
||||||
DESTINATIONS=""
|
|
||||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:$CI_COMMIT_TAG"
|
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:latest"
|
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:dev"
|
|
||||||
fi
|
|
||||||
/kaniko/executor --context . --dockerfile apps/api/Dockerfile $DESTINATIONS
|
|
||||||
when:
|
|
||||||
- branch: [main, develop]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- build
|
|
||||||
|
|
||||||
# === Container Security Scan ===
|
|
||||||
|
|
||||||
security-trivy-api:
|
|
||||||
image: aquasec/trivy:latest
|
|
||||||
environment:
|
|
||||||
GITEA_USER:
|
|
||||||
from_secret: gitea_username
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
|
||||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
|
||||||
commands:
|
|
||||||
- |
|
|
||||||
if [ -n "$$CI_COMMIT_TAG" ]; then
|
|
||||||
SCAN_TAG="$$CI_COMMIT_TAG"
|
|
||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
|
||||||
SCAN_TAG="latest"
|
|
||||||
else
|
|
||||||
SCAN_TAG="dev"
|
|
||||||
fi
|
|
||||||
mkdir -p ~/.docker
|
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
|
||||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
|
|
||||||
--ignorefile .trivyignore \
|
|
||||||
git.mosaicstack.dev/mosaic/stack-api:$$SCAN_TAG
|
|
||||||
when:
|
|
||||||
- branch: [main, develop]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- docker-build-api
|
|
||||||
|
|
||||||
# === Package Linking ===
|
|
||||||
|
|
||||||
link-packages:
|
|
||||||
image: alpine:3
|
|
||||||
environment:
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
commands:
|
|
||||||
- apk add --no-cache curl
|
|
||||||
- sleep 10
|
|
||||||
- |
|
|
||||||
set -e
|
|
||||||
link_package() {
|
|
||||||
PKG="$$1"
|
|
||||||
echo "Linking $$PKG..."
|
|
||||||
for attempt in 1 2 3; do
|
|
||||||
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
|
||||||
-H "Authorization: token $$GITEA_TOKEN" \
|
|
||||||
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
|
||||||
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
|
||||||
echo " Linked $$PKG"
|
|
||||||
return 0
|
|
||||||
elif [ "$$STATUS" = "400" ]; then
|
|
||||||
echo " $$PKG already linked"
|
|
||||||
return 0
|
|
||||||
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
|
||||||
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
|
||||||
sleep 5
|
|
||||||
else
|
|
||||||
echo " FAILED: $$PKG status $$STATUS"
|
|
||||||
cat /tmp/link-response.txt
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
}
|
|
||||||
link_package "stack-api"
|
|
||||||
when:
|
|
||||||
- branch: [main, develop]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- security-trivy-api
|
|
||||||
27
.woodpecker/base-image.yml
Normal file
27
.woodpecker/base-image.yml
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
when:
|
||||||
|
- event: manual
|
||||||
|
- event: cron
|
||||||
|
cron: weekly-base-image
|
||||||
|
|
||||||
|
variables:
|
||||||
|
- &kaniko_setup |
|
||||||
|
mkdir -p /kaniko/.docker
|
||||||
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
||||||
|
|
||||||
|
steps:
|
||||||
|
build-base:
|
||||||
|
image: gcr.io/kaniko-project/executor:debug
|
||||||
|
environment:
|
||||||
|
GITEA_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
commands:
|
||||||
|
- *kaniko_setup
|
||||||
|
- /kaniko/executor
|
||||||
|
--context .
|
||||||
|
--dockerfile docker/base.Dockerfile
|
||||||
|
--destination git.mosaicstack.dev/mosaic/node-base:24-slim
|
||||||
|
--destination git.mosaicstack.dev/mosaic/node-base:latest
|
||||||
|
--cache=true
|
||||||
|
--cache-repo git.mosaicstack.dev/mosaic/node-base/cache
|
||||||
382
.woodpecker/ci.yml
Normal file
382
.woodpecker/ci.yml
Normal file
@@ -0,0 +1,382 @@
|
|||||||
|
# Unified CI Pipeline - Mosaic Stack
|
||||||
|
# Single install, parallel quality gates, sequential deploy
|
||||||
|
#
|
||||||
|
# Replaces: api.yml, orchestrator.yml, web.yml
|
||||||
|
# Keeps: coordinator.yml (Python), infra.yml (separate concerns)
|
||||||
|
#
|
||||||
|
# Flow:
|
||||||
|
# install → security-audit
|
||||||
|
# → prisma-generate → lint + typecheck (parallel)
|
||||||
|
# → prisma-migrate → test
|
||||||
|
# → build (after all gates pass)
|
||||||
|
# → docker builds (main only, parallel)
|
||||||
|
# → trivy scans (main only, parallel)
|
||||||
|
# → package linking (main only)
|
||||||
|
|
||||||
|
when:
|
||||||
|
- event: [push, pull_request, manual]
|
||||||
|
path:
|
||||||
|
include:
|
||||||
|
- "apps/api/**"
|
||||||
|
- "apps/orchestrator/**"
|
||||||
|
- "apps/web/**"
|
||||||
|
- "packages/**"
|
||||||
|
- "pnpm-lock.yaml"
|
||||||
|
- "pnpm-workspace.yaml"
|
||||||
|
- "turbo.json"
|
||||||
|
- "package.json"
|
||||||
|
- ".woodpecker/ci.yml"
|
||||||
|
- ".trivyignore"
|
||||||
|
|
||||||
|
variables:
|
||||||
|
- &node_image "node:24-slim"
|
||||||
|
- &install_deps |
|
||||||
|
corepack enable
|
||||||
|
apt-get update && apt-get install -y --no-install-recommends python3 make g++
|
||||||
|
pnpm config set store-dir /root/.local/share/pnpm/store
|
||||||
|
pnpm install --frozen-lockfile
|
||||||
|
- &use_deps |
|
||||||
|
corepack enable
|
||||||
|
- &turbo_env
|
||||||
|
TURBO_API:
|
||||||
|
from_secret: turbo_api
|
||||||
|
TURBO_TOKEN:
|
||||||
|
from_secret: turbo_token
|
||||||
|
TURBO_TEAM:
|
||||||
|
from_secret: turbo_team
|
||||||
|
- &kaniko_setup |
|
||||||
|
mkdir -p /kaniko/.docker
|
||||||
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
||||||
|
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:17.7-alpine3.22
|
||||||
|
environment:
|
||||||
|
POSTGRES_DB: test_db
|
||||||
|
POSTGRES_USER: test_user
|
||||||
|
POSTGRES_PASSWORD: test_password
|
||||||
|
|
||||||
|
steps:
|
||||||
|
# ─── Install (once) ─────────────────────────────────────────
|
||||||
|
install:
|
||||||
|
image: *node_image
|
||||||
|
commands:
|
||||||
|
- *install_deps
|
||||||
|
|
||||||
|
# ─── Security Audit (once) ──────────────────────────────────
|
||||||
|
security-audit:
|
||||||
|
image: *node_image
|
||||||
|
commands:
|
||||||
|
- *use_deps
|
||||||
|
- pnpm audit --audit-level=high
|
||||||
|
depends_on:
|
||||||
|
- install
|
||||||
|
|
||||||
|
# ─── Prisma Generate ────────────────────────────────────────
|
||||||
|
prisma-generate:
|
||||||
|
image: *node_image
|
||||||
|
environment:
|
||||||
|
SKIP_ENV_VALIDATION: "true"
|
||||||
|
commands:
|
||||||
|
- *use_deps
|
||||||
|
- pnpm --filter "@mosaic/api" prisma:generate
|
||||||
|
depends_on:
|
||||||
|
- install
|
||||||
|
|
||||||
|
# ─── Lint (all packages) ────────────────────────────────────
|
||||||
|
lint:
|
||||||
|
image: *node_image
|
||||||
|
environment:
|
||||||
|
SKIP_ENV_VALIDATION: "true"
|
||||||
|
<<: *turbo_env
|
||||||
|
commands:
|
||||||
|
- *use_deps
|
||||||
|
- pnpm turbo lint
|
||||||
|
depends_on:
|
||||||
|
- prisma-generate
|
||||||
|
|
||||||
|
# ─── Typecheck (all packages, parallel with lint) ───────────
|
||||||
|
typecheck:
|
||||||
|
image: *node_image
|
||||||
|
environment:
|
||||||
|
SKIP_ENV_VALIDATION: "true"
|
||||||
|
<<: *turbo_env
|
||||||
|
commands:
|
||||||
|
- *use_deps
|
||||||
|
- pnpm turbo typecheck
|
||||||
|
depends_on:
|
||||||
|
- prisma-generate
|
||||||
|
|
||||||
|
# ─── Prisma Migrate (test DB) ──────────────────────────────
|
||||||
|
prisma-migrate:
|
||||||
|
image: *node_image
|
||||||
|
environment:
|
||||||
|
SKIP_ENV_VALIDATION: "true"
|
||||||
|
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
|
||||||
|
commands:
|
||||||
|
- *use_deps
|
||||||
|
- pnpm --filter "@mosaic/api" prisma migrate deploy
|
||||||
|
depends_on:
|
||||||
|
- prisma-generate
|
||||||
|
|
||||||
|
# ─── Test (all packages) ───────────────────────────────────
|
||||||
|
test:
|
||||||
|
image: *node_image
|
||||||
|
environment:
|
||||||
|
SKIP_ENV_VALIDATION: "true"
|
||||||
|
DATABASE_URL: "postgresql://test_user:test_password@postgres:5432/test_db?schema=public"
|
||||||
|
ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
|
||||||
|
<<: *turbo_env
|
||||||
|
commands:
|
||||||
|
- *use_deps
|
||||||
|
- pnpm --filter "@mosaic/api" exec vitest run --exclude 'src/auth/auth-rls.integration.spec.ts' --exclude 'src/credentials/user-credential.model.spec.ts' --exclude 'src/job-events/job-events.performance.spec.ts' --exclude 'src/knowledge/services/fulltext-search.spec.ts' --exclude 'src/mosaic-telemetry/mosaic-telemetry.module.spec.ts'
|
||||||
|
- pnpm turbo test --filter=@mosaic/orchestrator --filter=@mosaic/web
|
||||||
|
depends_on:
|
||||||
|
- prisma-migrate
|
||||||
|
|
||||||
|
# ─── Build (all packages) ──────────────────────────────────
|
||||||
|
build:
|
||||||
|
image: *node_image
|
||||||
|
environment:
|
||||||
|
SKIP_ENV_VALIDATION: "true"
|
||||||
|
NODE_ENV: "production"
|
||||||
|
<<: *turbo_env
|
||||||
|
commands:
|
||||||
|
- *use_deps
|
||||||
|
- pnpm turbo build
|
||||||
|
depends_on:
|
||||||
|
- lint
|
||||||
|
- typecheck
|
||||||
|
- test
|
||||||
|
- security-audit
|
||||||
|
|
||||||
|
# ─── Docker Builds (main only, parallel) ───────────────────
|
||||||
|
|
||||||
|
docker-build-api:
|
||||||
|
image: gcr.io/kaniko-project/executor:debug
|
||||||
|
environment:
|
||||||
|
GITEA_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||||
|
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||||
|
commands:
|
||||||
|
- *kaniko_setup
|
||||||
|
- |
|
||||||
|
DESTINATIONS=""
|
||||||
|
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||||
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:$CI_COMMIT_TAG"
|
||||||
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:latest"
|
||||||
|
fi
|
||||||
|
/kaniko/executor --context . --dockerfile apps/api/Dockerfile --snapshot-mode=redo --cache=true --cache-repo git.mosaicstack.dev/mosaic/stack-api/cache $DESTINATIONS
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- build
|
||||||
|
|
||||||
|
docker-build-orchestrator:
|
||||||
|
image: gcr.io/kaniko-project/executor:debug
|
||||||
|
environment:
|
||||||
|
GITEA_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||||
|
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||||
|
commands:
|
||||||
|
- *kaniko_setup
|
||||||
|
- |
|
||||||
|
DESTINATIONS=""
|
||||||
|
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||||
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:$CI_COMMIT_TAG"
|
||||||
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:latest"
|
||||||
|
fi
|
||||||
|
/kaniko/executor --context . --dockerfile apps/orchestrator/Dockerfile --snapshot-mode=redo --cache=true --cache-repo git.mosaicstack.dev/mosaic/stack-orchestrator/cache $DESTINATIONS
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- build
|
||||||
|
|
||||||
|
docker-build-web:
|
||||||
|
image: gcr.io/kaniko-project/executor:debug
|
||||||
|
environment:
|
||||||
|
GITEA_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||||
|
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||||
|
commands:
|
||||||
|
- *kaniko_setup
|
||||||
|
- |
|
||||||
|
DESTINATIONS=""
|
||||||
|
if [ -n "$CI_COMMIT_TAG" ]; then
|
||||||
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:$CI_COMMIT_TAG"
|
||||||
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:latest"
|
||||||
|
fi
|
||||||
|
/kaniko/executor --context . --dockerfile apps/web/Dockerfile --snapshot-mode=redo --cache=true --cache-repo git.mosaicstack.dev/mosaic/stack-web/cache --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- build
|
||||||
|
|
||||||
|
# ─── Container Security Scans (main only) ──────────────────
|
||||||
|
|
||||||
|
security-trivy-api:
|
||||||
|
image: aquasec/trivy:latest
|
||||||
|
environment:
|
||||||
|
GITEA_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||||
|
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||||
|
commands:
|
||||||
|
- |
|
||||||
|
if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
|
||||||
|
mkdir -p ~/.docker
|
||||||
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
|
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-api:$$SCAN_TAG
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- docker-build-api
|
||||||
|
|
||||||
|
security-trivy-orchestrator:
|
||||||
|
image: aquasec/trivy:latest
|
||||||
|
environment:
|
||||||
|
GITEA_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||||
|
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||||
|
commands:
|
||||||
|
- |
|
||||||
|
if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
|
||||||
|
mkdir -p ~/.docker
|
||||||
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
|
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-orchestrator:$$SCAN_TAG
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- docker-build-orchestrator
|
||||||
|
|
||||||
|
security-trivy-web:
|
||||||
|
image: aquasec/trivy:latest
|
||||||
|
environment:
|
||||||
|
GITEA_USER:
|
||||||
|
from_secret: gitea_username
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
||||||
|
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
||||||
|
commands:
|
||||||
|
- |
|
||||||
|
if [ -n "$$CI_COMMIT_TAG" ]; then SCAN_TAG="$$CI_COMMIT_TAG"; else SCAN_TAG="latest"; fi
|
||||||
|
mkdir -p ~/.docker
|
||||||
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
|
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed --ignorefile .trivyignore git.mosaicstack.dev/mosaic/stack-web:$$SCAN_TAG
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- docker-build-web
|
||||||
|
|
||||||
|
# ─── Package Linking (main only, once) ─────────────────────
|
||||||
|
|
||||||
|
link-packages:
|
||||||
|
image: alpine:3
|
||||||
|
environment:
|
||||||
|
GITEA_TOKEN:
|
||||||
|
from_secret: gitea_token
|
||||||
|
commands:
|
||||||
|
- apk add --no-cache curl
|
||||||
|
- sleep 10
|
||||||
|
- |
|
||||||
|
set -e
|
||||||
|
link_package() {
|
||||||
|
PKG="$$1"
|
||||||
|
echo "Linking $$PKG..."
|
||||||
|
for attempt in 1 2 3; do
|
||||||
|
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
||||||
|
-H "Authorization: token $$GITEA_TOKEN" \
|
||||||
|
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
||||||
|
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
||||||
|
echo " Linked $$PKG"
|
||||||
|
return 0
|
||||||
|
elif [ "$$STATUS" = "400" ]; then
|
||||||
|
echo " $$PKG already linked"
|
||||||
|
return 0
|
||||||
|
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
||||||
|
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
||||||
|
sleep 5
|
||||||
|
else
|
||||||
|
echo " FAILED: $$PKG status $$STATUS"
|
||||||
|
cat /tmp/link-response.txt
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
}
|
||||||
|
link_package "stack-api"
|
||||||
|
link_package "stack-orchestrator"
|
||||||
|
link_package "stack-web"
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- security-trivy-api
|
||||||
|
- security-trivy-orchestrator
|
||||||
|
- security-trivy-web
|
||||||
|
|
||||||
|
# ─── Deploy to Docker Swarm via Portainer API (main only) ─────────────────────
|
||||||
|
|
||||||
|
deploy-swarm:
|
||||||
|
image: alpine:3
|
||||||
|
environment:
|
||||||
|
PORTAINER_URL:
|
||||||
|
from_secret: portainer_url
|
||||||
|
PORTAINER_API_KEY:
|
||||||
|
from_secret: portainer_api_key
|
||||||
|
PORTAINER_STACK_ID: "121"
|
||||||
|
commands:
|
||||||
|
- apk add --no-cache curl
|
||||||
|
- |
|
||||||
|
set -e
|
||||||
|
echo "🚀 Deploying to Docker Swarm via Portainer API..."
|
||||||
|
|
||||||
|
# Use Portainer API to update the stack (forces pull of new images)
|
||||||
|
RESPONSE=$(curl -s -w "\n%{http_code}" -X POST \
|
||||||
|
-H "X-API-Key: $PORTAINER_API_KEY" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
"$PORTAINER_URL/api/stacks/$PORTAINER_STACK_ID/git/redeploy")
|
||||||
|
|
||||||
|
HTTP_CODE=$(echo "$RESPONSE" | tail -1)
|
||||||
|
BODY=$(echo "$RESPONSE" | head -n -1)
|
||||||
|
|
||||||
|
if [ "$HTTP_CODE" = "200" ] || [ "$HTTP_CODE" = "202" ]; then
|
||||||
|
echo "✅ Stack update triggered successfully"
|
||||||
|
else
|
||||||
|
echo "❌ Stack update failed (HTTP $HTTP_CODE)"
|
||||||
|
echo "$BODY"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Wait for services to converge
|
||||||
|
echo "⏳ Waiting for services to converge..."
|
||||||
|
sleep 30
|
||||||
|
echo "✅ Deploy complete"
|
||||||
|
when:
|
||||||
|
- branch: [main]
|
||||||
|
event: [push, manual, tag]
|
||||||
|
depends_on:
|
||||||
|
- link-packages
|
||||||
@@ -12,7 +12,7 @@ when:
|
|||||||
event: pull_request
|
event: pull_request
|
||||||
|
|
||||||
variables:
|
variables:
|
||||||
- &node_image "node:22-slim"
|
- &node_image "node:24-slim"
|
||||||
- &install_codex "npm i -g @openai/codex"
|
- &install_codex "npm i -g @openai/codex"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
|
|||||||
@@ -30,7 +30,7 @@ steps:
|
|||||||
- python -m venv venv
|
- python -m venv venv
|
||||||
- . venv/bin/activate
|
- . venv/bin/activate
|
||||||
- pip install --no-cache-dir --upgrade "pip>=25.3"
|
- pip install --no-cache-dir --upgrade "pip>=25.3"
|
||||||
- pip install --no-cache-dir -e ".[dev]"
|
- pip install --no-cache-dir --extra-index-url https://git.mosaicstack.dev/api/packages/mosaic/pypi/simple/ -e ".[dev]"
|
||||||
- pip install --no-cache-dir bandit pip-audit
|
- pip install --no-cache-dir bandit pip-audit
|
||||||
|
|
||||||
ruff-check:
|
ruff-check:
|
||||||
@@ -92,12 +92,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context apps/coordinator --dockerfile apps/coordinator/Dockerfile $DESTINATIONS
|
/kaniko/executor --context apps/coordinator --dockerfile apps/coordinator/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- ruff-check
|
- ruff-check
|
||||||
@@ -124,7 +122,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -132,7 +130,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-coordinator:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-coordinator:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-coordinator
|
- docker-build-coordinator
|
||||||
@@ -174,7 +172,7 @@ steps:
|
|||||||
}
|
}
|
||||||
link_package "stack-coordinator"
|
link_package "stack-coordinator"
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- security-trivy-coordinator
|
- security-trivy-coordinator
|
||||||
|
|||||||
@@ -36,12 +36,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context docker/postgres --dockerfile docker/postgres/Dockerfile $DESTINATIONS
|
/kaniko/executor --context docker/postgres --dockerfile docker/postgres/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
|
|
||||||
docker-build-openbao:
|
docker-build-openbao:
|
||||||
@@ -61,12 +59,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context docker/openbao --dockerfile docker/openbao/Dockerfile $DESTINATIONS
|
/kaniko/executor --context docker/openbao --dockerfile docker/openbao/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
|
|
||||||
# === Container Security Scans ===
|
# === Container Security Scans ===
|
||||||
@@ -87,7 +83,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -95,7 +91,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-postgres:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-postgres:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-postgres
|
- docker-build-postgres
|
||||||
@@ -116,7 +112,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -124,7 +120,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-openbao:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-openbao:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-openbao
|
- docker-build-openbao
|
||||||
@@ -167,7 +163,7 @@ steps:
|
|||||||
link_package "stack-postgres"
|
link_package "stack-postgres"
|
||||||
link_package "stack-openbao"
|
link_package "stack-openbao"
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- security-trivy-postgres
|
- security-trivy-postgres
|
||||||
|
|||||||
@@ -1,192 +0,0 @@
|
|||||||
# Orchestrator Pipeline - Mosaic Stack
|
|
||||||
# Quality gates, build, and Docker publish for @mosaic/orchestrator
|
|
||||||
#
|
|
||||||
# Triggers on: apps/orchestrator/**, packages/**, root configs
|
|
||||||
# Security chain: source audit + Trivy container scan
|
|
||||||
|
|
||||||
when:
|
|
||||||
- event: [push, pull_request, manual]
|
|
||||||
path:
|
|
||||||
include:
|
|
||||||
- "apps/orchestrator/**"
|
|
||||||
- "packages/**"
|
|
||||||
- "pnpm-lock.yaml"
|
|
||||||
- "pnpm-workspace.yaml"
|
|
||||||
- "turbo.json"
|
|
||||||
- "package.json"
|
|
||||||
- ".woodpecker/orchestrator.yml"
|
|
||||||
|
|
||||||
variables:
|
|
||||||
- &node_image "node:24-alpine"
|
|
||||||
- &install_deps |
|
|
||||||
corepack enable
|
|
||||||
pnpm install --frozen-lockfile
|
|
||||||
- &use_deps |
|
|
||||||
corepack enable
|
|
||||||
- &kaniko_setup |
|
|
||||||
mkdir -p /kaniko/.docker
|
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
|
||||||
|
|
||||||
steps:
|
|
||||||
# === Quality Gates ===
|
|
||||||
|
|
||||||
install:
|
|
||||||
image: *node_image
|
|
||||||
commands:
|
|
||||||
- *install_deps
|
|
||||||
|
|
||||||
security-audit:
|
|
||||||
image: *node_image
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm audit --audit-level=high
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
lint:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/orchestrator" lint
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
typecheck:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/orchestrator" typecheck
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
test:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/orchestrator" test
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
# === Build ===
|
|
||||||
|
|
||||||
build:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
NODE_ENV: "production"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm turbo build --filter=@mosaic/orchestrator
|
|
||||||
depends_on:
|
|
||||||
- lint
|
|
||||||
- typecheck
|
|
||||||
- test
|
|
||||||
- security-audit
|
|
||||||
|
|
||||||
# === Docker Build & Push ===
|
|
||||||
|
|
||||||
docker-build-orchestrator:
|
|
||||||
image: gcr.io/kaniko-project/executor:debug
|
|
||||||
environment:
|
|
||||||
GITEA_USER:
|
|
||||||
from_secret: gitea_username
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
|
||||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
|
||||||
commands:
|
|
||||||
- *kaniko_setup
|
|
||||||
- |
|
|
||||||
DESTINATIONS=""
|
|
||||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:$CI_COMMIT_TAG"
|
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:latest"
|
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:dev"
|
|
||||||
fi
|
|
||||||
/kaniko/executor --context . --dockerfile apps/orchestrator/Dockerfile $DESTINATIONS
|
|
||||||
when:
|
|
||||||
- branch: [main, develop]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- build
|
|
||||||
|
|
||||||
# === Container Security Scan ===
|
|
||||||
|
|
||||||
security-trivy-orchestrator:
|
|
||||||
image: aquasec/trivy:latest
|
|
||||||
environment:
|
|
||||||
GITEA_USER:
|
|
||||||
from_secret: gitea_username
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
|
||||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
|
||||||
commands:
|
|
||||||
- |
|
|
||||||
if [ -n "$$CI_COMMIT_TAG" ]; then
|
|
||||||
SCAN_TAG="$$CI_COMMIT_TAG"
|
|
||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
|
||||||
SCAN_TAG="latest"
|
|
||||||
else
|
|
||||||
SCAN_TAG="dev"
|
|
||||||
fi
|
|
||||||
mkdir -p ~/.docker
|
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
|
||||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
|
|
||||||
--ignorefile .trivyignore \
|
|
||||||
git.mosaicstack.dev/mosaic/stack-orchestrator:$$SCAN_TAG
|
|
||||||
when:
|
|
||||||
- branch: [main, develop]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- docker-build-orchestrator
|
|
||||||
|
|
||||||
# === Package Linking ===
|
|
||||||
|
|
||||||
link-packages:
|
|
||||||
image: alpine:3
|
|
||||||
environment:
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
commands:
|
|
||||||
- apk add --no-cache curl
|
|
||||||
- sleep 10
|
|
||||||
- |
|
|
||||||
set -e
|
|
||||||
link_package() {
|
|
||||||
PKG="$$1"
|
|
||||||
echo "Linking $$PKG..."
|
|
||||||
for attempt in 1 2 3; do
|
|
||||||
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
|
||||||
-H "Authorization: token $$GITEA_TOKEN" \
|
|
||||||
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
|
||||||
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
|
||||||
echo " Linked $$PKG"
|
|
||||||
return 0
|
|
||||||
elif [ "$$STATUS" = "400" ]; then
|
|
||||||
echo " $$PKG already linked"
|
|
||||||
return 0
|
|
||||||
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
|
||||||
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
|
||||||
sleep 5
|
|
||||||
else
|
|
||||||
echo " FAILED: $$PKG status $$STATUS"
|
|
||||||
cat /tmp/link-response.txt
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
}
|
|
||||||
link_package "stack-orchestrator"
|
|
||||||
when:
|
|
||||||
- branch: [main, develop]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- security-trivy-orchestrator
|
|
||||||
@@ -1,203 +0,0 @@
|
|||||||
# Web Pipeline - Mosaic Stack
|
|
||||||
# Quality gates, build, and Docker publish for @mosaic/web
|
|
||||||
#
|
|
||||||
# Triggers on: apps/web/**, packages/**, root configs
|
|
||||||
# Security chain: source audit + Trivy container scan
|
|
||||||
|
|
||||||
when:
|
|
||||||
- event: [push, pull_request, manual]
|
|
||||||
path:
|
|
||||||
include:
|
|
||||||
- "apps/web/**"
|
|
||||||
- "packages/**"
|
|
||||||
- "pnpm-lock.yaml"
|
|
||||||
- "pnpm-workspace.yaml"
|
|
||||||
- "turbo.json"
|
|
||||||
- "package.json"
|
|
||||||
- ".woodpecker/web.yml"
|
|
||||||
|
|
||||||
variables:
|
|
||||||
- &node_image "node:24-alpine"
|
|
||||||
- &install_deps |
|
|
||||||
corepack enable
|
|
||||||
pnpm install --frozen-lockfile
|
|
||||||
- &use_deps |
|
|
||||||
corepack enable
|
|
||||||
- &kaniko_setup |
|
|
||||||
mkdir -p /kaniko/.docker
|
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json
|
|
||||||
|
|
||||||
steps:
|
|
||||||
# === Quality Gates ===
|
|
||||||
|
|
||||||
install:
|
|
||||||
image: *node_image
|
|
||||||
commands:
|
|
||||||
- *install_deps
|
|
||||||
|
|
||||||
security-audit:
|
|
||||||
image: *node_image
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm audit --audit-level=high
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
build-shared:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/shared" build
|
|
||||||
- pnpm --filter "@mosaic/ui" build
|
|
||||||
depends_on:
|
|
||||||
- install
|
|
||||||
|
|
||||||
lint:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/web" lint
|
|
||||||
depends_on:
|
|
||||||
- build-shared
|
|
||||||
|
|
||||||
typecheck:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/web" typecheck
|
|
||||||
depends_on:
|
|
||||||
- build-shared
|
|
||||||
|
|
||||||
test:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm --filter "@mosaic/web" test
|
|
||||||
depends_on:
|
|
||||||
- build-shared
|
|
||||||
|
|
||||||
# === Build ===
|
|
||||||
|
|
||||||
build:
|
|
||||||
image: *node_image
|
|
||||||
environment:
|
|
||||||
SKIP_ENV_VALIDATION: "true"
|
|
||||||
NODE_ENV: "production"
|
|
||||||
commands:
|
|
||||||
- *use_deps
|
|
||||||
- pnpm turbo build --filter=@mosaic/web
|
|
||||||
depends_on:
|
|
||||||
- lint
|
|
||||||
- typecheck
|
|
||||||
- test
|
|
||||||
- security-audit
|
|
||||||
|
|
||||||
# === Docker Build & Push ===
|
|
||||||
|
|
||||||
docker-build-web:
|
|
||||||
image: gcr.io/kaniko-project/executor:debug
|
|
||||||
environment:
|
|
||||||
GITEA_USER:
|
|
||||||
from_secret: gitea_username
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
|
||||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
|
||||||
commands:
|
|
||||||
- *kaniko_setup
|
|
||||||
- |
|
|
||||||
DESTINATIONS=""
|
|
||||||
if [ -n "$CI_COMMIT_TAG" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:$CI_COMMIT_TAG"
|
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:latest"
|
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:dev"
|
|
||||||
fi
|
|
||||||
/kaniko/executor --context . --dockerfile apps/web/Dockerfile --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
|
|
||||||
when:
|
|
||||||
- branch: [main, develop]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- build
|
|
||||||
|
|
||||||
# === Container Security Scan ===
|
|
||||||
|
|
||||||
security-trivy-web:
|
|
||||||
image: aquasec/trivy:latest
|
|
||||||
environment:
|
|
||||||
GITEA_USER:
|
|
||||||
from_secret: gitea_username
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
|
|
||||||
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
|
|
||||||
commands:
|
|
||||||
- |
|
|
||||||
if [ -n "$$CI_COMMIT_TAG" ]; then
|
|
||||||
SCAN_TAG="$$CI_COMMIT_TAG"
|
|
||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
|
||||||
SCAN_TAG="latest"
|
|
||||||
else
|
|
||||||
SCAN_TAG="dev"
|
|
||||||
fi
|
|
||||||
mkdir -p ~/.docker
|
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
|
||||||
trivy image --exit-code 1 --severity HIGH,CRITICAL --ignore-unfixed \
|
|
||||||
--ignorefile .trivyignore \
|
|
||||||
git.mosaicstack.dev/mosaic/stack-web:$$SCAN_TAG
|
|
||||||
when:
|
|
||||||
- branch: [main, develop]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- docker-build-web
|
|
||||||
|
|
||||||
# === Package Linking ===
|
|
||||||
|
|
||||||
link-packages:
|
|
||||||
image: alpine:3
|
|
||||||
environment:
|
|
||||||
GITEA_TOKEN:
|
|
||||||
from_secret: gitea_token
|
|
||||||
commands:
|
|
||||||
- apk add --no-cache curl
|
|
||||||
- sleep 10
|
|
||||||
- |
|
|
||||||
set -e
|
|
||||||
link_package() {
|
|
||||||
PKG="$$1"
|
|
||||||
echo "Linking $$PKG..."
|
|
||||||
for attempt in 1 2 3; do
|
|
||||||
STATUS=$$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \
|
|
||||||
-H "Authorization: token $$GITEA_TOKEN" \
|
|
||||||
"https://git.mosaicstack.dev/api/v1/packages/mosaic/container/$$PKG/-/link/stack")
|
|
||||||
if [ "$$STATUS" = "201" ] || [ "$$STATUS" = "204" ]; then
|
|
||||||
echo " Linked $$PKG"
|
|
||||||
return 0
|
|
||||||
elif [ "$$STATUS" = "400" ]; then
|
|
||||||
echo " $$PKG already linked"
|
|
||||||
return 0
|
|
||||||
elif [ "$$STATUS" = "404" ] && [ $$attempt -lt 3 ]; then
|
|
||||||
echo " $$PKG not found yet, retrying in 5s (attempt $$attempt/3)..."
|
|
||||||
sleep 5
|
|
||||||
else
|
|
||||||
echo " FAILED: $$PKG status $$STATUS"
|
|
||||||
cat /tmp/link-response.txt
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
}
|
|
||||||
link_package "stack-web"
|
|
||||||
when:
|
|
||||||
- branch: [main, develop]
|
|
||||||
event: [push, manual, tag]
|
|
||||||
depends_on:
|
|
||||||
- security-trivy-web
|
|
||||||
89
AGENTS.md
89
AGENTS.md
@@ -1,37 +1,82 @@
|
|||||||
# Mosaic Stack — Agent Guidelines
|
# Mosaic Stack — Agent Guidelines
|
||||||
|
|
||||||
> **Any AI model, coding assistant, or framework working in this codebase MUST read and follow `CLAUDE.md` in the project root.**
|
## Load Order
|
||||||
|
|
||||||
`CLAUDE.md` is the authoritative source for:
|
1. `SOUL.md` (repo identity + behavior invariants)
|
||||||
|
2. `~/.config/mosaic/STANDARDS.md` (machine-wide standards rails)
|
||||||
|
3. `AGENTS.md` (repo-specific overlay)
|
||||||
|
4. `.mosaic/repo-hooks.sh` (repo lifecycle hooks)
|
||||||
|
|
||||||
- Technology stack and versions
|
## Runtime Contract
|
||||||
- TypeScript strict mode requirements
|
|
||||||
- ESLint Quality Rails (error-level enforcement)
|
|
||||||
- Prettier formatting rules
|
|
||||||
- Testing requirements (85% coverage, TDD)
|
|
||||||
- API conventions and database patterns
|
|
||||||
- Commit format and branch strategy
|
|
||||||
- PDA-friendly design principles
|
|
||||||
|
|
||||||
## Quick Rules (Read CLAUDE.md for Details)
|
- This file is authoritative for repo-local operations.
|
||||||
|
- `CLAUDE.md` is a compatibility pointer to `AGENTS.md`.
|
||||||
|
- Follow universal rails from `~/.config/mosaic/guides/` and `~/.config/mosaic/rails/`.
|
||||||
|
|
||||||
- **No `any` types** — use `unknown`, generics, or proper types
|
## Session Lifecycle
|
||||||
- **Explicit return types** on all functions
|
|
||||||
- **Type-only imports** — `import type { Foo }` for types
|
|
||||||
- **Double quotes**, semicolons, 2-space indent, 100 char width
|
|
||||||
- **`??` not `||`** for defaults, **`?.`** not `&&` chains
|
|
||||||
- **All promises** must be awaited or returned
|
|
||||||
- **85% test coverage** minimum, tests before implementation
|
|
||||||
|
|
||||||
## Updating Conventions
|
```bash
|
||||||
|
bash scripts/agent/session-start.sh
|
||||||
|
bash scripts/agent/critical.sh
|
||||||
|
bash scripts/agent/session-end.sh
|
||||||
|
```
|
||||||
|
|
||||||
If you discover new patterns, gotchas, or conventions while working in this codebase, **update `CLAUDE.md`** — not this file. This file exists solely to redirect agents that look for `AGENTS.md` to the canonical source.
|
Optional:
|
||||||
|
|
||||||
## Per-App Context
|
```bash
|
||||||
|
bash scripts/agent/log-limitation.sh "Short Name"
|
||||||
|
bash scripts/agent/orchestrator-daemon.sh status
|
||||||
|
bash scripts/agent/orchestrator-events.sh recent --limit 50
|
||||||
|
```
|
||||||
|
|
||||||
Each app directory has its own `AGENTS.md` for app-specific patterns:
|
## Repo Context
|
||||||
|
|
||||||
|
- Platform: multi-tenant personal assistant stack
|
||||||
|
- Monorepo: `pnpm` workspaces + Turborepo
|
||||||
|
- Core apps: `apps/api` (NestJS), `apps/web` (Next.js), orchestrator/coordinator services
|
||||||
|
- Infrastructure: Docker Compose + PostgreSQL + Valkey + Authentik
|
||||||
|
|
||||||
|
## Quick Command Set
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pnpm install
|
||||||
|
pnpm dev
|
||||||
|
pnpm test
|
||||||
|
pnpm lint
|
||||||
|
pnpm build
|
||||||
|
```
|
||||||
|
|
||||||
|
## Versioning Protocol (HARD GATE)
|
||||||
|
|
||||||
|
**This project is ALPHA. All versions MUST be `0.0.x`.**
|
||||||
|
|
||||||
|
- The `0.1.0` release is FORBIDDEN until Jason explicitly authorizes it.
|
||||||
|
- Every milestone bump increments the patch: `0.0.20` → `0.0.21` → `0.0.22`, etc.
|
||||||
|
- ALL package.json files in the monorepo MUST stay in sync at the same version.
|
||||||
|
- Use `scripts/version-bump.sh <version>` to bump — it enforces the alpha constraint and updates all packages atomically.
|
||||||
|
- The script rejects any version >= `0.1.0`.
|
||||||
|
- When creating a release tag, the tag MUST match the package version: `v0.0.x`.
|
||||||
|
|
||||||
|
**Milestone-to-version mapping** is defined in the PRD (`docs/PRD.md`) under "Delivery/Milestone Intent". Agents MUST use the version from that table when tagging a milestone release.
|
||||||
|
|
||||||
|
**Violation of this protocol is a blocking error.** If an agent attempts to set a version >= `0.1.0`, stop and escalate.
|
||||||
|
|
||||||
|
## Standards and Quality
|
||||||
|
|
||||||
|
- Enforce strict typing and no unsafe shortcuts.
|
||||||
|
- Keep lint/typecheck/tests green before completion.
|
||||||
|
- Prefer small, focused commits and clear change descriptions.
|
||||||
|
|
||||||
|
## App-Specific Overlays
|
||||||
|
|
||||||
- `apps/api/AGENTS.md`
|
- `apps/api/AGENTS.md`
|
||||||
- `apps/web/AGENTS.md`
|
- `apps/web/AGENTS.md`
|
||||||
- `apps/coordinator/AGENTS.md`
|
- `apps/coordinator/AGENTS.md`
|
||||||
- `apps/orchestrator/AGENTS.md`
|
- `apps/orchestrator/AGENTS.md`
|
||||||
|
|
||||||
|
## Additional Guidance
|
||||||
|
|
||||||
|
- Orchestrator guidance: `docs/claude/orchestrator.md`
|
||||||
|
- Security remediation context: `docs/reports/codebase-review-2026-02-05/01-security-review.md`
|
||||||
|
- Code quality context: `docs/reports/codebase-review-2026-02-05/02-code-quality-review.md`
|
||||||
|
- QA context: `docs/reports/codebase-review-2026-02-05/03-qa-test-coverage.md`
|
||||||
|
|||||||
479
CLAUDE.md
479
CLAUDE.md
@@ -1,477 +1,10 @@
|
|||||||
**Multi-tenant personal assistant platform with PostgreSQL backend, Authentik SSO, and MoltBot
|
# CLAUDE Compatibility Pointer
|
||||||
integration.**
|
|
||||||
|
|
||||||
## Conditional Documentation Loading
|
This file exists so Claude Code sessions load Mosaic standards.
|
||||||
|
|
||||||
| When working on... | Load this guide |
|
## MANDATORY — Read Before Any Response
|
||||||
| ---------------------------------------- | ------------------------------------------------------------------- |
|
|
||||||
| Orchestrating autonomous task completion | `docs/claude/orchestrator.md` |
|
|
||||||
| Security remediation (review findings) | `docs/reports/codebase-review-2026-02-05/01-security-review.md` |
|
|
||||||
| Code quality fixes | `docs/reports/codebase-review-2026-02-05/02-code-quality-review.md` |
|
|
||||||
| Test coverage gaps | `docs/reports/codebase-review-2026-02-05/03-qa-test-coverage.md` |
|
|
||||||
|
|
||||||
## Platform Templates
|
BEFORE responding to any user message, READ `~/.config/mosaic/AGENTS.md`.
|
||||||
|
|
||||||
Bootstrap templates are at `docs/templates/`. See `docs/templates/README.md` for usage.
|
That file is the universal agent configuration. Do NOT respond until you have loaded it.
|
||||||
|
Then read the project-local `AGENTS.md` in this repository for project-specific guidance.
|
||||||
## Project Overview
|
|
||||||
|
|
||||||
Mosaic Stack is a standalone platform that provides:
|
|
||||||
|
|
||||||
- Multi-user workspaces with team sharing
|
|
||||||
- Task, event, and project management
|
|
||||||
- Gantt charts and Kanban boards
|
|
||||||
- MoltBot integration via plugins (stock MoltBot + mosaic-plugin-\*)
|
|
||||||
- PDA-friendly design throughout
|
|
||||||
|
|
||||||
**Repository:** git.mosaicstack.dev/mosaic/stack
|
|
||||||
**Versioning:** Start at 0.0.1, MVP = 0.1.0
|
|
||||||
|
|
||||||
## Technology Stack
|
|
||||||
|
|
||||||
| Layer | Technology |
|
|
||||||
| ---------- | -------------------------------------------- |
|
|
||||||
| Frontend | Next.js 16 + React + TailwindCSS + Shadcn/ui |
|
|
||||||
| Backend | NestJS + Prisma ORM |
|
|
||||||
| Database | PostgreSQL 17 + pgvector |
|
|
||||||
| Cache | Valkey (Redis-compatible) |
|
|
||||||
| Auth | Authentik (OIDC) |
|
|
||||||
| AI | Ollama (configurable: local or remote) |
|
|
||||||
| Messaging | MoltBot (stock + Mosaic plugins) |
|
|
||||||
| Real-time | WebSockets (Socket.io) |
|
|
||||||
| Monorepo | pnpm workspaces + TurboRepo |
|
|
||||||
| Testing | Vitest + Playwright |
|
|
||||||
| Deployment | Docker + docker-compose |
|
|
||||||
|
|
||||||
## Repository Structure
|
|
||||||
|
|
||||||
mosaic-stack/
|
|
||||||
├── apps/
|
|
||||||
│ ├── api/ # mosaic-api (NestJS)
|
|
||||||
│ │ ├── src/
|
|
||||||
│ │ │ ├── auth/ # Authentik OIDC
|
|
||||||
│ │ │ ├── tasks/ # Task management
|
|
||||||
│ │ │ ├── events/ # Calendar/events
|
|
||||||
│ │ │ ├── projects/ # Project management
|
|
||||||
│ │ │ ├── brain/ # MoltBot integration
|
|
||||||
│ │ │ └── activity/ # Activity logging
|
|
||||||
│ │ ├── prisma/
|
|
||||||
│ │ │ └── schema.prisma
|
|
||||||
│ │ └── Dockerfile
|
|
||||||
│ └── web/ # mosaic-web (Next.js 16)
|
|
||||||
│ ├── app/
|
|
||||||
│ ├── components/
|
|
||||||
│ └── Dockerfile
|
|
||||||
├── packages/
|
|
||||||
│ ├── shared/ # Shared types, utilities
|
|
||||||
│ ├── ui/ # Shared UI components
|
|
||||||
│ └── config/ # Shared configuration
|
|
||||||
├── plugins/
|
|
||||||
│ ├── mosaic-plugin-brain/ # MoltBot skill: API queries
|
|
||||||
│ ├── mosaic-plugin-calendar/ # MoltBot skill: Calendar
|
|
||||||
│ ├── mosaic-plugin-tasks/ # MoltBot skill: Tasks
|
|
||||||
│ └── mosaic-plugin-gantt/ # MoltBot skill: Gantt
|
|
||||||
├── docker/
|
|
||||||
│ ├── docker-compose.yml # Turnkey deployment
|
|
||||||
│ └── init-scripts/ # PostgreSQL init
|
|
||||||
├── docs/
|
|
||||||
│ ├── SETUP.md
|
|
||||||
│ ├── CONFIGURATION.md
|
|
||||||
│ └── DESIGN-PRINCIPLES.md
|
|
||||||
├── .env.example
|
|
||||||
├── turbo.json
|
|
||||||
├── pnpm-workspace.yaml
|
|
||||||
└── README.md
|
|
||||||
|
|
||||||
## Development Workflow
|
|
||||||
|
|
||||||
### Branch Strategy
|
|
||||||
|
|
||||||
- `main` — stable releases only
|
|
||||||
- `develop` — active development (default working branch)
|
|
||||||
- `feature/*` — feature branches from develop
|
|
||||||
- `fix/*` — bug fix branches
|
|
||||||
|
|
||||||
### Starting Work
|
|
||||||
|
|
||||||
````bash
|
|
||||||
git checkout develop
|
|
||||||
git pull --rebase
|
|
||||||
pnpm install
|
|
||||||
|
|
||||||
Running Locally
|
|
||||||
|
|
||||||
# Start all services (Docker)
|
|
||||||
docker compose up -d
|
|
||||||
|
|
||||||
# Or run individually for development
|
|
||||||
pnpm dev # All apps
|
|
||||||
pnpm dev:api # API only
|
|
||||||
pnpm dev:web # Web only
|
|
||||||
|
|
||||||
Testing
|
|
||||||
|
|
||||||
pnpm test # Run all tests
|
|
||||||
pnpm test:api # API tests only
|
|
||||||
pnpm test:web # Web tests only
|
|
||||||
pnpm test:e2e # Playwright E2E
|
|
||||||
|
|
||||||
Building
|
|
||||||
|
|
||||||
pnpm build # Build all
|
|
||||||
pnpm build:api # Build API
|
|
||||||
pnpm build:web # Build Web
|
|
||||||
|
|
||||||
Design Principles (NON-NEGOTIABLE)
|
|
||||||
|
|
||||||
PDA-Friendly Language
|
|
||||||
|
|
||||||
NEVER use demanding language. This is critical.
|
|
||||||
┌─────────────┬──────────────────────┐
|
|
||||||
│ ❌ NEVER │ ✅ ALWAYS │
|
|
||||||
├─────────────┼──────────────────────┤
|
|
||||||
│ OVERDUE │ Target passed │
|
|
||||||
├─────────────┼──────────────────────┤
|
|
||||||
│ URGENT │ Approaching target │
|
|
||||||
├─────────────┼──────────────────────┤
|
|
||||||
│ MUST DO │ Scheduled for │
|
|
||||||
├─────────────┼──────────────────────┤
|
|
||||||
│ CRITICAL │ High priority │
|
|
||||||
├─────────────┼──────────────────────┤
|
|
||||||
│ YOU NEED TO │ Consider / Option to │
|
|
||||||
├─────────────┼──────────────────────┤
|
|
||||||
│ REQUIRED │ Recommended │
|
|
||||||
└─────────────┴──────────────────────┘
|
|
||||||
Visual Indicators
|
|
||||||
|
|
||||||
Use status indicators consistently:
|
|
||||||
- 🟢 On track / Active
|
|
||||||
- 🔵 Upcoming / Scheduled
|
|
||||||
- ⏸️ Paused / On hold
|
|
||||||
- 💤 Dormant / Inactive
|
|
||||||
- ⚪ Not started
|
|
||||||
|
|
||||||
Display Principles
|
|
||||||
|
|
||||||
1. 10-second scannability — Key info visible immediately
|
|
||||||
2. Visual chunking — Clear sections with headers
|
|
||||||
3. Single-line items — Compact, scannable lists
|
|
||||||
4. Date grouping — Today, Tomorrow, This Week headers
|
|
||||||
5. Progressive disclosure — Details on click, not upfront
|
|
||||||
6. Calm colors — No aggressive reds for status
|
|
||||||
|
|
||||||
Reference
|
|
||||||
|
|
||||||
See docs/DESIGN-PRINCIPLES.md for complete guidelines.
|
|
||||||
For original patterns, see: jarvis-brain/docs/DESIGN-PRINCIPLES.md
|
|
||||||
|
|
||||||
API Conventions
|
|
||||||
|
|
||||||
Endpoints
|
|
||||||
|
|
||||||
GET /api/{resource} # List (with pagination, filters)
|
|
||||||
GET /api/{resource}/:id # Get single
|
|
||||||
POST /api/{resource} # Create
|
|
||||||
PATCH /api/{resource}/:id # Update
|
|
||||||
DELETE /api/{resource}/:id # Delete
|
|
||||||
|
|
||||||
Response Format
|
|
||||||
|
|
||||||
// Success
|
|
||||||
{
|
|
||||||
data: T | T[],
|
|
||||||
meta?: { total, page, limit }
|
|
||||||
}
|
|
||||||
|
|
||||||
// Error
|
|
||||||
{
|
|
||||||
error: {
|
|
||||||
code: string,
|
|
||||||
message: string,
|
|
||||||
details?: any
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Brain Query API
|
|
||||||
|
|
||||||
POST /api/brain/query
|
|
||||||
{
|
|
||||||
query: "what's on my calendar",
|
|
||||||
context?: { view: "dashboard", workspace_id: "..." }
|
|
||||||
}
|
|
||||||
|
|
||||||
Database Conventions
|
|
||||||
|
|
||||||
Multi-Tenant (RLS)
|
|
||||||
|
|
||||||
All workspace-scoped tables use Row-Level Security:
|
|
||||||
- Always include workspace_id in queries
|
|
||||||
- RLS policies enforce isolation
|
|
||||||
- Set session context for current user
|
|
||||||
|
|
||||||
Prisma Commands
|
|
||||||
|
|
||||||
pnpm prisma:generate # Generate client
|
|
||||||
pnpm prisma:migrate # Run migrations
|
|
||||||
pnpm prisma:studio # Open Prisma Studio
|
|
||||||
pnpm prisma:seed # Seed development data
|
|
||||||
|
|
||||||
MoltBot Plugin Development
|
|
||||||
|
|
||||||
Plugins live in plugins/mosaic-plugin-*/ and follow MoltBot skill format:
|
|
||||||
|
|
||||||
# plugins/mosaic-plugin-brain/SKILL.md
|
|
||||||
---
|
|
||||||
name: mosaic-plugin-brain
|
|
||||||
description: Query Mosaic Stack for tasks, events, projects
|
|
||||||
version: 0.0.1
|
|
||||||
triggers:
|
|
||||||
- "what's on my calendar"
|
|
||||||
- "show my tasks"
|
|
||||||
- "morning briefing"
|
|
||||||
tools:
|
|
||||||
- mosaic_api
|
|
||||||
---
|
|
||||||
|
|
||||||
# Plugin instructions here...
|
|
||||||
|
|
||||||
Key principle: MoltBot remains stock. All customization via plugins only.
|
|
||||||
|
|
||||||
Environment Variables
|
|
||||||
|
|
||||||
See .env.example for all variables. Key ones:
|
|
||||||
|
|
||||||
# Database
|
|
||||||
DATABASE_URL=postgresql://mosaic:password@localhost:5432/mosaic
|
|
||||||
|
|
||||||
# Auth
|
|
||||||
AUTHENTIK_URL=https://auth.example.com
|
|
||||||
AUTHENTIK_CLIENT_ID=mosaic-stack
|
|
||||||
AUTHENTIK_CLIENT_SECRET=...
|
|
||||||
|
|
||||||
# Ollama
|
|
||||||
OLLAMA_MODE=local|remote
|
|
||||||
OLLAMA_ENDPOINT=http://localhost:11434
|
|
||||||
|
|
||||||
# MoltBot
|
|
||||||
MOSAIC_API_TOKEN=...
|
|
||||||
|
|
||||||
Issue Tracking
|
|
||||||
|
|
||||||
Issues are tracked at: https://git.mosaicstack.dev/mosaic/stack/issues
|
|
||||||
|
|
||||||
Labels
|
|
||||||
|
|
||||||
- Priority: p0 (critical), p1 (high), p2 (medium), p3 (low)
|
|
||||||
- Type: api, web, database, auth, plugin, ai, devops, docs, migration, security, testing,
|
|
||||||
performance, setup
|
|
||||||
|
|
||||||
Milestones
|
|
||||||
|
|
||||||
- M1-Foundation (0.0.x)
|
|
||||||
- M2-MultiTenant (0.0.x)
|
|
||||||
- M3-Features (0.0.x)
|
|
||||||
- M4-MoltBot (0.0.x)
|
|
||||||
- M5-Migration (0.1.0 MVP)
|
|
||||||
|
|
||||||
Commit Format
|
|
||||||
|
|
||||||
<type>(#issue): Brief description
|
|
||||||
|
|
||||||
Detailed explanation if needed.
|
|
||||||
|
|
||||||
Fixes #123
|
|
||||||
Types: feat, fix, docs, test, refactor, chore
|
|
||||||
|
|
||||||
Test-Driven Development (TDD) - REQUIRED
|
|
||||||
|
|
||||||
**All code must follow TDD principles. This is non-negotiable.**
|
|
||||||
|
|
||||||
TDD Workflow (Red-Green-Refactor)
|
|
||||||
|
|
||||||
1. **RED** — Write a failing test first
|
|
||||||
- Write the test for new functionality BEFORE writing any implementation code
|
|
||||||
- Run the test to verify it fails (proves the test works)
|
|
||||||
- Commit message: `test(#issue): add test for [feature]`
|
|
||||||
|
|
||||||
2. **GREEN** — Write minimal code to make the test pass
|
|
||||||
- Implement only enough code to pass the test
|
|
||||||
- Run tests to verify they pass
|
|
||||||
- Commit message: `feat(#issue): implement [feature]`
|
|
||||||
|
|
||||||
3. **REFACTOR** — Clean up the code while keeping tests green
|
|
||||||
- Improve code quality, remove duplication, enhance readability
|
|
||||||
- Ensure all tests still pass after refactoring
|
|
||||||
- Commit message: `refactor(#issue): improve [component]`
|
|
||||||
|
|
||||||
Testing Requirements
|
|
||||||
|
|
||||||
- **Minimum 85% code coverage** for all new code
|
|
||||||
- **Write tests BEFORE implementation** — no exceptions
|
|
||||||
- Test files must be co-located with source files:
|
|
||||||
- `feature.service.ts` → `feature.service.spec.ts`
|
|
||||||
- `component.tsx` → `component.test.tsx`
|
|
||||||
- All tests must pass before creating a PR
|
|
||||||
- Use descriptive test names: `it("should return user when valid token provided")`
|
|
||||||
- Group related tests with `describe()` blocks
|
|
||||||
- Mock external dependencies (database, APIs, file system)
|
|
||||||
|
|
||||||
Test Types
|
|
||||||
|
|
||||||
- **Unit Tests** — Test individual functions/methods in isolation
|
|
||||||
- **Integration Tests** — Test module interactions (e.g., service + database)
|
|
||||||
- **E2E Tests** — Test complete user workflows with Playwright
|
|
||||||
|
|
||||||
Running Tests
|
|
||||||
|
|
||||||
```bash
|
|
||||||
pnpm test # Run all tests
|
|
||||||
pnpm test:watch # Watch mode for active development
|
|
||||||
pnpm test:coverage # Generate coverage report
|
|
||||||
pnpm test:api # API tests only
|
|
||||||
pnpm test:web # Web tests only
|
|
||||||
pnpm test:e2e # Playwright E2E tests
|
|
||||||
````
|
|
||||||
|
|
||||||
Coverage Verification
|
|
||||||
|
|
||||||
After implementing a feature, verify coverage meets requirements:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
pnpm test:coverage
|
|
||||||
# Check the coverage report in coverage/index.html
|
|
||||||
# Ensure your files show ≥85% coverage
|
|
||||||
```
|
|
||||||
|
|
||||||
TDD Anti-Patterns to Avoid
|
|
||||||
|
|
||||||
❌ Writing implementation code before tests
|
|
||||||
❌ Writing tests after implementation is complete
|
|
||||||
❌ Skipping tests for "simple" code
|
|
||||||
❌ Testing implementation details instead of behavior
|
|
||||||
❌ Writing tests that don't fail when they should
|
|
||||||
❌ Committing code with failing tests
|
|
||||||
|
|
||||||
Quality Rails - Mechanical Code Quality Enforcement
|
|
||||||
|
|
||||||
**Status:** ACTIVE (2026-01-30) - Strict enforcement enabled ✅
|
|
||||||
|
|
||||||
Quality Rails provides mechanical enforcement of code quality standards through pre-commit hooks
|
|
||||||
and CI/CD pipelines. See `docs/quality-rails-status.md` for full details.
|
|
||||||
|
|
||||||
What's Enforced (NOW ACTIVE):
|
|
||||||
|
|
||||||
- ✅ **Type Safety** - Blocks explicit `any` types (@typescript-eslint/no-explicit-any: error)
|
|
||||||
- ✅ **Return Types** - Requires explicit return types on exported functions
|
|
||||||
- ✅ **Security** - Detects SQL injection, XSS, unsafe regex (eslint-plugin-security)
|
|
||||||
- ✅ **Promise Safety** - Blocks floating promises and misused promises
|
|
||||||
- ✅ **Code Formatting** - Auto-formats with Prettier on commit
|
|
||||||
- ✅ **Build Verification** - Type-checks before allowing commit
|
|
||||||
- ✅ **Secret Scanning** - Blocks hardcoded passwords/API keys (git-secrets)
|
|
||||||
|
|
||||||
Current Status:
|
|
||||||
|
|
||||||
- ✅ **Pre-commit hooks**: ACTIVE - Blocks commits with violations
|
|
||||||
- ✅ **Strict enforcement**: ENABLED - Package-level enforcement
|
|
||||||
- 🟡 **CI/CD pipeline**: Ready (.woodpecker.yml created, not yet configured)
|
|
||||||
|
|
||||||
How It Works:
|
|
||||||
|
|
||||||
**Package-Level Enforcement** - If you touch ANY file in a package with violations,
|
|
||||||
you must fix ALL violations in that package before committing. This forces incremental
|
|
||||||
cleanup while preventing new violations.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
- Edit `apps/api/src/tasks/tasks.service.ts`
|
|
||||||
- Pre-commit hook runs lint on ENTIRE `@mosaic/api` package
|
|
||||||
- If `@mosaic/api` has violations → Commit BLOCKED
|
|
||||||
- Fix all violations in `@mosaic/api` → Commit allowed
|
|
||||||
|
|
||||||
Next Steps:
|
|
||||||
|
|
||||||
1. Fix violations package-by-package as you work in them
|
|
||||||
2. Priority: Fix explicit `any` types and type safety issues first
|
|
||||||
3. Configure Woodpecker CI to run quality gates on all PRs
|
|
||||||
|
|
||||||
Why This Matters:
|
|
||||||
|
|
||||||
Based on validation of 50 real production issues, Quality Rails mechanically prevents ~70%
|
|
||||||
of quality issues including:
|
|
||||||
|
|
||||||
- Hardcoded passwords
|
|
||||||
- Type safety violations
|
|
||||||
- SQL injection vulnerabilities
|
|
||||||
- Build failures
|
|
||||||
- Test coverage gaps
|
|
||||||
|
|
||||||
**Mechanical enforcement works. Process compliance doesn't.**
|
|
||||||
|
|
||||||
See `docs/quality-rails-status.md` for detailed roadmap and violation breakdown.
|
|
||||||
|
|
||||||
Example TDD Session
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# 1. RED - Write failing test
|
|
||||||
# Edit: feature.service.spec.ts
|
|
||||||
# Add test for getUserById()
|
|
||||||
pnpm test:watch # Watch it fail
|
|
||||||
git add feature.service.spec.ts
|
|
||||||
git commit -m "test(#42): add test for getUserById"
|
|
||||||
|
|
||||||
# 2. GREEN - Implement minimal code
|
|
||||||
# Edit: feature.service.ts
|
|
||||||
# Add getUserById() method
|
|
||||||
pnpm test:watch # Watch it pass
|
|
||||||
git add feature.service.ts
|
|
||||||
git commit -m "feat(#42): implement getUserById"
|
|
||||||
|
|
||||||
# 3. REFACTOR - Improve code quality
|
|
||||||
# Edit: feature.service.ts
|
|
||||||
# Extract helper, improve naming
|
|
||||||
pnpm test:watch # Ensure still passing
|
|
||||||
git add feature.service.ts
|
|
||||||
git commit -m "refactor(#42): extract user mapping logic"
|
|
||||||
```
|
|
||||||
|
|
||||||
Docker Deployment
|
|
||||||
|
|
||||||
Turnkey (includes everything)
|
|
||||||
|
|
||||||
docker compose up -d
|
|
||||||
|
|
||||||
Customized (external services)
|
|
||||||
|
|
||||||
Create docker-compose.override.yml to:
|
|
||||||
|
|
||||||
- Point to external PostgreSQL/Valkey/Ollama
|
|
||||||
- Disable bundled services
|
|
||||||
|
|
||||||
See docs/DOCKER.md for details.
|
|
||||||
|
|
||||||
Key Documentation
|
|
||||||
┌───────────────────────────┬───────────────────────┐
|
|
||||||
│ Document │ Purpose │
|
|
||||||
├───────────────────────────┼───────────────────────┤
|
|
||||||
│ docs/SETUP.md │ Installation guide │
|
|
||||||
├───────────────────────────┼───────────────────────┤
|
|
||||||
│ docs/CONFIGURATION.md │ All config options │
|
|
||||||
├───────────────────────────┼───────────────────────┤
|
|
||||||
│ docs/DESIGN-PRINCIPLES.md │ PDA-friendly patterns │
|
|
||||||
├───────────────────────────┼───────────────────────┤
|
|
||||||
│ docs/DOCKER.md │ Docker deployment │
|
|
||||||
├───────────────────────────┼───────────────────────┤
|
|
||||||
│ docs/API.md │ API documentation │
|
|
||||||
└───────────────────────────┴───────────────────────┘
|
|
||||||
Related Repositories
|
|
||||||
┌──────────────┬──────────────────────────────────────────────┐
|
|
||||||
│ Repo │ Purpose │
|
|
||||||
├──────────────┼──────────────────────────────────────────────┤
|
|
||||||
│ jarvis-brain │ Original JSON-based brain (migration source) │
|
|
||||||
├──────────────┼──────────────────────────────────────────────┤
|
|
||||||
│ MoltBot │ Stock messaging gateway │
|
|
||||||
└──────────────┴──────────────────────────────────────────────┘
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
Mosaic Stack v0.0.x — Building the future of personal assistants.
|
|
||||||
|
|||||||
17
Makefile
17
Makefile
@@ -1,4 +1,4 @@
|
|||||||
.PHONY: help install dev build test docker-up docker-down docker-logs docker-ps docker-build docker-restart docker-test clean matrix-up matrix-down matrix-logs matrix-setup-bot
|
.PHONY: help install dev build test docker-up docker-down docker-logs docker-ps docker-build docker-restart docker-test speech-up speech-down speech-logs clean matrix-up matrix-down matrix-logs matrix-setup-bot
|
||||||
|
|
||||||
# Default target
|
# Default target
|
||||||
help:
|
help:
|
||||||
@@ -24,6 +24,11 @@ help:
|
|||||||
@echo " make docker-test Run Docker smoke test"
|
@echo " make docker-test Run Docker smoke test"
|
||||||
@echo " make docker-test-traefik Run Traefik integration tests"
|
@echo " make docker-test-traefik Run Traefik integration tests"
|
||||||
@echo ""
|
@echo ""
|
||||||
|
@echo "Speech Services:"
|
||||||
|
@echo " make speech-up Start speech services (STT + TTS)"
|
||||||
|
@echo " make speech-down Stop speech services"
|
||||||
|
@echo " make speech-logs View speech service logs"
|
||||||
|
@echo ""
|
||||||
@echo "Matrix Dev Environment:"
|
@echo "Matrix Dev Environment:"
|
||||||
@echo " make matrix-up Start Matrix services (Synapse + Element)"
|
@echo " make matrix-up Start Matrix services (Synapse + Element)"
|
||||||
@echo " make matrix-down Stop Matrix services"
|
@echo " make matrix-down Stop Matrix services"
|
||||||
@@ -91,6 +96,16 @@ docker-test:
|
|||||||
docker-test-traefik:
|
docker-test-traefik:
|
||||||
./tests/integration/docker/traefik.test.sh all
|
./tests/integration/docker/traefik.test.sh all
|
||||||
|
|
||||||
|
# Speech services
|
||||||
|
speech-up:
|
||||||
|
docker compose -f docker-compose.yml -f docker-compose.speech.yml up -d speaches kokoro-tts
|
||||||
|
|
||||||
|
speech-down:
|
||||||
|
docker compose -f docker-compose.yml -f docker-compose.speech.yml down --remove-orphans
|
||||||
|
|
||||||
|
speech-logs:
|
||||||
|
docker compose -f docker-compose.yml -f docker-compose.speech.yml logs -f speaches kokoro-tts
|
||||||
|
|
||||||
# Matrix Dev Environment
|
# Matrix Dev Environment
|
||||||
matrix-up:
|
matrix-up:
|
||||||
docker compose -f docker/docker-compose.yml -f docker/docker-compose.matrix.yml up -d
|
docker compose -f docker/docker-compose.yml -f docker/docker-compose.matrix.yml up -d
|
||||||
|
|||||||
64
README.md
64
README.md
@@ -19,19 +19,20 @@ Mosaic Stack is a modern, PDA-friendly platform designed to help users manage th
|
|||||||
|
|
||||||
## Technology Stack
|
## Technology Stack
|
||||||
|
|
||||||
| Layer | Technology |
|
| Layer | Technology |
|
||||||
| -------------- | -------------------------------------------- |
|
| -------------- | ---------------------------------------------- |
|
||||||
| **Frontend** | Next.js 16 + React + TailwindCSS + Shadcn/ui |
|
| **Frontend** | Next.js 16 + React + TailwindCSS + Shadcn/ui |
|
||||||
| **Backend** | NestJS + Prisma ORM |
|
| **Backend** | NestJS + Prisma ORM |
|
||||||
| **Database** | PostgreSQL 17 + pgvector |
|
| **Database** | PostgreSQL 17 + pgvector |
|
||||||
| **Cache** | Valkey (Redis-compatible) |
|
| **Cache** | Valkey (Redis-compatible) |
|
||||||
| **Auth** | Authentik (OIDC) via BetterAuth |
|
| **Auth** | Authentik (OIDC) via BetterAuth |
|
||||||
| **AI** | Ollama (local or remote) |
|
| **AI** | Ollama (local or remote) |
|
||||||
| **Messaging** | MoltBot (stock + plugins) |
|
| **Messaging** | MoltBot (stock + plugins) |
|
||||||
| **Real-time** | WebSockets (Socket.io) |
|
| **Real-time** | WebSockets (Socket.io) |
|
||||||
| **Monorepo** | pnpm workspaces + TurboRepo |
|
| **Speech** | Speaches (STT) + Kokoro/Chatterbox/Piper (TTS) |
|
||||||
| **Testing** | Vitest + Playwright |
|
| **Monorepo** | pnpm workspaces + TurboRepo |
|
||||||
| **Deployment** | Docker + docker-compose |
|
| **Testing** | Vitest + Playwright |
|
||||||
|
| **Deployment** | Docker + docker-compose |
|
||||||
|
|
||||||
## Quick Start
|
## Quick Start
|
||||||
|
|
||||||
@@ -89,7 +90,7 @@ docker compose down
|
|||||||
If you prefer manual installation, you'll need:
|
If you prefer manual installation, you'll need:
|
||||||
|
|
||||||
- **Docker mode:** Docker 24+ and Docker Compose
|
- **Docker mode:** Docker 24+ and Docker Compose
|
||||||
- **Native mode:** Node.js 22+, pnpm 10+, PostgreSQL 17+
|
- **Native mode:** Node.js 24+, pnpm 10+, PostgreSQL 17+
|
||||||
|
|
||||||
The installer handles these automatically.
|
The installer handles these automatically.
|
||||||
|
|
||||||
@@ -231,7 +232,7 @@ docker compose -f docker-compose.openbao.yml up -d
|
|||||||
sleep 30 # Wait for auto-initialization
|
sleep 30 # Wait for auto-initialization
|
||||||
|
|
||||||
# 5. Deploy swarm stack
|
# 5. Deploy swarm stack
|
||||||
IMAGE_TAG=dev ./scripts/deploy-swarm.sh mosaic
|
IMAGE_TAG=latest ./scripts/deploy-swarm.sh mosaic
|
||||||
|
|
||||||
# 6. Check deployment status
|
# 6. Check deployment status
|
||||||
docker stack services mosaic
|
docker stack services mosaic
|
||||||
@@ -356,6 +357,29 @@ Mosaic Stack includes a sophisticated agent orchestration system for autonomous
|
|||||||
|
|
||||||
See [Agent Orchestration Design](docs/design/agent-orchestration.md) for architecture details.
|
See [Agent Orchestration Design](docs/design/agent-orchestration.md) for architecture details.
|
||||||
|
|
||||||
|
## Speech Services
|
||||||
|
|
||||||
|
Mosaic Stack includes integrated speech-to-text (STT) and text-to-speech (TTS) capabilities through a modular provider architecture. Each component is optional and independently configurable.
|
||||||
|
|
||||||
|
- **Speech-to-Text** - Transcribe audio files and real-time audio streams using Whisper (via Speaches)
|
||||||
|
- **Text-to-Speech** - Synthesize speech with 54+ voices across 8 languages (via Kokoro, CPU-based)
|
||||||
|
- **Premium Voice Cloning** - Clone voices from audio samples with emotion control (via Chatterbox, GPU)
|
||||||
|
- **Fallback TTS** - Ultra-lightweight CPU fallback for low-resource environments (via Piper/OpenedAI Speech)
|
||||||
|
- **WebSocket Streaming** - Real-time streaming transcription via Socket.IO `/speech` namespace
|
||||||
|
- **Automatic Fallback** - TTS tier system with graceful degradation (premium -> default -> fallback)
|
||||||
|
|
||||||
|
**Quick Start:**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Start speech services alongside core stack
|
||||||
|
make speech-up
|
||||||
|
|
||||||
|
# Or with Docker Compose directly
|
||||||
|
docker compose -f docker-compose.yml -f docker-compose.speech.yml up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
See [Speech Services Documentation](docs/SPEECH.md) for architecture details, API reference, provider configuration, and deployment options.
|
||||||
|
|
||||||
## Current Implementation Status
|
## Current Implementation Status
|
||||||
|
|
||||||
### ✅ Completed (v0.0.1-0.0.6)
|
### ✅ Completed (v0.0.1-0.0.6)
|
||||||
@@ -502,10 +526,9 @@ KNOWLEDGE_CACHE_TTL=300 # 5 minutes
|
|||||||
|
|
||||||
### Branch Strategy
|
### Branch Strategy
|
||||||
|
|
||||||
- `main` — Stable releases only
|
- `main` — Trunk branch (all development merges here)
|
||||||
- `develop` — Active development (default working branch)
|
- `feature/*` — Feature branches from main
|
||||||
- `feature/*` — Feature branches from develop
|
- `fix/*` — Bug fix branches from main
|
||||||
- `fix/*` — Bug fix branches
|
|
||||||
|
|
||||||
### Running Locally
|
### Running Locally
|
||||||
|
|
||||||
@@ -715,7 +738,7 @@ See [Type Sharing Strategy](docs/2-development/3-type-sharing/1-strategy.md) for
|
|||||||
4. Run tests: `pnpm test`
|
4. Run tests: `pnpm test`
|
||||||
5. Build: `pnpm build`
|
5. Build: `pnpm build`
|
||||||
6. Commit with conventional format: `feat(#issue): Description`
|
6. Commit with conventional format: `feat(#issue): Description`
|
||||||
7. Push and create a pull request to `develop`
|
7. Push and create a pull request to `main`
|
||||||
|
|
||||||
### Commit Format
|
### Commit Format
|
||||||
|
|
||||||
@@ -758,6 +781,7 @@ Complete documentation is organized in a Bookstack-compatible structure in the `
|
|||||||
- **[Overview](docs/3-architecture/1-overview/)** — System design and components
|
- **[Overview](docs/3-architecture/1-overview/)** — System design and components
|
||||||
- **[Authentication](docs/3-architecture/2-authentication/)** — BetterAuth and OIDC integration
|
- **[Authentication](docs/3-architecture/2-authentication/)** — BetterAuth and OIDC integration
|
||||||
- **[Design Principles](docs/3-architecture/3-design-principles/1-pda-friendly.md)** — PDA-friendly patterns (non-negotiable)
|
- **[Design Principles](docs/3-architecture/3-design-principles/1-pda-friendly.md)** — PDA-friendly patterns (non-negotiable)
|
||||||
|
- **[Telemetry](docs/telemetry.md)** — AI task completion tracking, predictions, and SDK reference
|
||||||
|
|
||||||
### 🔌 API Reference
|
### 🔌 API Reference
|
||||||
|
|
||||||
|
|||||||
20
SOUL.md
Normal file
20
SOUL.md
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# Mosaic Stack Soul
|
||||||
|
|
||||||
|
You are Jarvis for the Mosaic Stack repository, running on the current agent runtime.
|
||||||
|
|
||||||
|
## Behavioral Invariants
|
||||||
|
|
||||||
|
- Identity first: answer identity prompts as Jarvis for this repository.
|
||||||
|
- Implementation detail second: runtime (Codex/Claude/OpenCode/etc.) is secondary metadata.
|
||||||
|
- Be proactive: surface risks, blockers, and next actions without waiting.
|
||||||
|
- Be calm and clear: keep responses concise, chunked, and PDA-friendly.
|
||||||
|
- Respect canonical sources:
|
||||||
|
- Repo operations and conventions: `AGENTS.md`
|
||||||
|
- Machine-wide rails: `~/.config/mosaic/STANDARDS.md`
|
||||||
|
- Repo lifecycle hooks: `.mosaic/repo-hooks.sh`
|
||||||
|
|
||||||
|
## Guardrails
|
||||||
|
|
||||||
|
- Do not claim completion without verification evidence.
|
||||||
|
- Do not bypass lint/type/test quality gates.
|
||||||
|
- Prefer explicit assumptions and concrete file/command references.
|
||||||
@@ -4,15 +4,22 @@
|
|||||||
|
|
||||||
## Patterns
|
## Patterns
|
||||||
|
|
||||||
<!-- Add module-specific patterns as you discover them -->
|
- **Config validation pattern**: Config files use exported validation functions + typed getter functions (not class-validator). See `auth.config.ts`, `federation.config.ts`, `speech/speech.config.ts`. Pattern: export `isXEnabled()`, `validateXConfig()`, and `getXConfig()` functions.
|
||||||
|
- **Config registerAs**: `speech.config.ts` also exports a `registerAs("speech", ...)` factory for NestJS ConfigModule namespaced injection. Use `ConfigModule.forFeature(speechConfig)` in module imports and access via `this.config.get<string>('speech.stt.baseUrl')`.
|
||||||
|
- **Conditional config validation**: When a service has an enabled flag (e.g., `STT_ENABLED`), URL/connection vars are only required when enabled. Validation throws with a helpful message suggesting how to disable.
|
||||||
|
- **Boolean env parsing**: Use `value === "true" || value === "1"` pattern. No default-true -- all services default to disabled when env var is unset.
|
||||||
|
|
||||||
## Gotchas
|
## Gotchas
|
||||||
|
|
||||||
<!-- Add things that trip up agents in this module -->
|
- **Prisma client must be generated** before `tsc --noEmit` will pass. Run `pnpm prisma:generate` first. Pre-existing type errors from Prisma are expected in worktrees without generated client.
|
||||||
|
- **Pre-commit hooks**: lint-staged runs on staged files. If other packages' files are staged, their lint must pass too. Only stage files you intend to commit.
|
||||||
|
- **vitest runs all test files**: Even when targeting a specific test file, vitest loads all spec files. Many will fail if Prisma client isn't generated -- this is expected. Check only your target file's pass/fail status.
|
||||||
|
|
||||||
## Key Files
|
## Key Files
|
||||||
|
|
||||||
| File | Purpose |
|
| File | Purpose |
|
||||||
| ---- | ------- |
|
| ------------------------------------- | ---------------------------------------------------------------------- |
|
||||||
|
| `src/speech/speech.config.ts` | Speech services env var validation and typed config (STT, TTS, limits) |
|
||||||
<!-- Add important files in this directory -->
|
| `src/speech/speech.config.spec.ts` | Unit tests for speech config validation (51 tests) |
|
||||||
|
| `src/auth/auth.config.ts` | Auth/OIDC config validation (reference pattern) |
|
||||||
|
| `src/federation/federation.config.ts` | Federation config validation (reference pattern) |
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
# syntax=docker/dockerfile:1
|
|
||||||
# Enable BuildKit features for cache mounts
|
|
||||||
|
|
||||||
# Base image for all stages
|
# Base image for all stages
|
||||||
FROM node:24-alpine AS base
|
# Uses Debian slim (glibc) instead of Alpine (musl) because native Node.js addons
|
||||||
|
# (matrix-sdk-crypto-nodejs, Prisma engines) require glibc-compatible binaries.
|
||||||
|
FROM git.mosaicstack.dev/mosaic/node-base:24-slim AS base
|
||||||
|
|
||||||
# Install pnpm globally
|
# Install pnpm globally
|
||||||
RUN corepack enable && corepack prepare pnpm@10.27.0 --activate
|
RUN corepack enable && corepack prepare pnpm@10.27.0 --activate
|
||||||
@@ -19,15 +18,27 @@ COPY turbo.json ./
|
|||||||
# ======================
|
# ======================
|
||||||
FROM base AS deps
|
FROM base AS deps
|
||||||
|
|
||||||
|
# Install build tools for native addons (node-pty requires node-gyp compilation)
|
||||||
|
# Note: openssl and ca-certificates pre-installed in base image
|
||||||
|
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
python3 make g++ \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
# Copy all package.json files for workspace resolution
|
# Copy all package.json files for workspace resolution
|
||||||
COPY packages/shared/package.json ./packages/shared/
|
COPY packages/shared/package.json ./packages/shared/
|
||||||
COPY packages/ui/package.json ./packages/ui/
|
COPY packages/ui/package.json ./packages/ui/
|
||||||
COPY packages/config/package.json ./packages/config/
|
COPY packages/config/package.json ./packages/config/
|
||||||
COPY apps/api/package.json ./apps/api/
|
COPY apps/api/package.json ./apps/api/
|
||||||
|
|
||||||
# Install dependencies with pnpm store cache
|
# Copy npm configuration for native binary architecture hints
|
||||||
RUN --mount=type=cache,id=pnpm-store,target=/root/.local/share/pnpm/store \
|
COPY .npmrc ./
|
||||||
pnpm install --frozen-lockfile
|
|
||||||
|
# Install dependencies (no cache mount — Kaniko builds are ephemeral in CI)
|
||||||
|
# Then explicitly rebuild node-pty from source since pnpm may skip postinstall
|
||||||
|
# scripts or fail to find prebuilt binaries for this Node.js version
|
||||||
|
RUN pnpm install --frozen-lockfile \
|
||||||
|
&& cd node_modules/.pnpm/node-pty@*/node_modules/node-pty \
|
||||||
|
&& npx node-gyp rebuild 2>&1 || true
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Builder stage
|
# Builder stage
|
||||||
@@ -53,16 +64,15 @@ RUN pnpm turbo build --filter=@mosaic/api --force
|
|||||||
# ======================
|
# ======================
|
||||||
# Production stage
|
# Production stage
|
||||||
# ======================
|
# ======================
|
||||||
FROM node:24-alpine AS production
|
FROM git.mosaicstack.dev/mosaic/node-base:24-slim AS production
|
||||||
|
|
||||||
# Remove npm (unused in production — we use pnpm) to reduce attack surface
|
# dumb-init, openssl, ca-certificates pre-installed in base image
|
||||||
RUN rm -rf /usr/local/lib/node_modules/npm /usr/local/bin/npm /usr/local/bin/npx
|
|
||||||
|
|
||||||
# Install dumb-init for proper signal handling
|
# Single RUN to minimize Kaniko filesystem snapshots (each RUN = full snapshot)
|
||||||
RUN apk add --no-cache dumb-init
|
# - Remove npm/npx to reduce image size (not used in production)
|
||||||
|
# - Create non-root user
|
||||||
# Create non-root user
|
RUN rm -rf /usr/local/lib/node_modules/npm /usr/local/bin/npm /usr/local/bin/npx \
|
||||||
RUN addgroup -g 1001 -S nodejs && adduser -S nestjs -u 1001
|
&& groupadd -g 1001 nodejs && useradd -m -u 1001 -g nodejs nestjs
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@mosaic/api",
|
"name": "@mosaic/api",
|
||||||
"version": "0.0.1",
|
"version": "0.0.20",
|
||||||
"private": true,
|
"private": true,
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "nest build",
|
"build": "nest build",
|
||||||
@@ -27,6 +27,7 @@
|
|||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@anthropic-ai/sdk": "^0.72.1",
|
"@anthropic-ai/sdk": "^0.72.1",
|
||||||
"@mosaic/shared": "workspace:*",
|
"@mosaic/shared": "workspace:*",
|
||||||
|
"@mosaicstack/telemetry-client": "^0.1.1",
|
||||||
"@nestjs/axios": "^4.0.1",
|
"@nestjs/axios": "^4.0.1",
|
||||||
"@nestjs/bullmq": "^11.0.4",
|
"@nestjs/bullmq": "^11.0.4",
|
||||||
"@nestjs/common": "^11.1.12",
|
"@nestjs/common": "^11.1.12",
|
||||||
@@ -35,6 +36,7 @@
|
|||||||
"@nestjs/mapped-types": "^2.1.0",
|
"@nestjs/mapped-types": "^2.1.0",
|
||||||
"@nestjs/platform-express": "^11.1.12",
|
"@nestjs/platform-express": "^11.1.12",
|
||||||
"@nestjs/platform-socket.io": "^11.1.12",
|
"@nestjs/platform-socket.io": "^11.1.12",
|
||||||
|
"@nestjs/schedule": "^6.1.1",
|
||||||
"@nestjs/throttler": "^6.5.0",
|
"@nestjs/throttler": "^6.5.0",
|
||||||
"@nestjs/websockets": "^11.1.12",
|
"@nestjs/websockets": "^11.1.12",
|
||||||
"@opentelemetry/api": "^1.9.0",
|
"@opentelemetry/api": "^1.9.0",
|
||||||
@@ -51,13 +53,16 @@
|
|||||||
"adm-zip": "^0.5.16",
|
"adm-zip": "^0.5.16",
|
||||||
"archiver": "^7.0.1",
|
"archiver": "^7.0.1",
|
||||||
"axios": "^1.13.5",
|
"axios": "^1.13.5",
|
||||||
|
"bcryptjs": "^3.0.3",
|
||||||
"better-auth": "^1.4.17",
|
"better-auth": "^1.4.17",
|
||||||
"bullmq": "^5.67.2",
|
"bullmq": "^5.67.2",
|
||||||
"class-transformer": "^0.5.1",
|
"class-transformer": "^0.5.1",
|
||||||
"class-validator": "^0.14.3",
|
"class-validator": "^0.14.3",
|
||||||
"cookie-parser": "^1.4.7",
|
"cookie-parser": "^1.4.7",
|
||||||
"discord.js": "^14.25.1",
|
"discord.js": "^14.25.1",
|
||||||
|
"dockerode": "^4.0.9",
|
||||||
"gray-matter": "^4.0.3",
|
"gray-matter": "^4.0.3",
|
||||||
|
"helmet": "^8.1.0",
|
||||||
"highlight.js": "^11.11.1",
|
"highlight.js": "^11.11.1",
|
||||||
"ioredis": "^5.9.2",
|
"ioredis": "^5.9.2",
|
||||||
"jose": "^6.1.3",
|
"jose": "^6.1.3",
|
||||||
@@ -65,6 +70,7 @@
|
|||||||
"marked-gfm-heading-id": "^4.1.3",
|
"marked-gfm-heading-id": "^4.1.3",
|
||||||
"marked-highlight": "^2.2.3",
|
"marked-highlight": "^2.2.3",
|
||||||
"matrix-bot-sdk": "^0.8.0",
|
"matrix-bot-sdk": "^0.8.0",
|
||||||
|
"node-pty": "^1.0.0",
|
||||||
"ollama": "^0.6.3",
|
"ollama": "^0.6.3",
|
||||||
"openai": "^6.17.0",
|
"openai": "^6.17.0",
|
||||||
"reflect-metadata": "^0.2.2",
|
"reflect-metadata": "^0.2.2",
|
||||||
@@ -83,7 +89,9 @@
|
|||||||
"@swc/core": "^1.10.18",
|
"@swc/core": "^1.10.18",
|
||||||
"@types/adm-zip": "^0.5.7",
|
"@types/adm-zip": "^0.5.7",
|
||||||
"@types/archiver": "^7.0.0",
|
"@types/archiver": "^7.0.0",
|
||||||
|
"@types/bcryptjs": "^3.0.0",
|
||||||
"@types/cookie-parser": "^1.4.10",
|
"@types/cookie-parser": "^1.4.10",
|
||||||
|
"@types/dockerode": "^3.3.47",
|
||||||
"@types/express": "^5.0.1",
|
"@types/express": "^5.0.1",
|
||||||
"@types/highlight.js": "^10.1.0",
|
"@types/highlight.js": "^10.1.0",
|
||||||
"@types/node": "^22.13.4",
|
"@types/node": "^22.13.4",
|
||||||
|
|||||||
@@ -1,3 +1,38 @@
|
|||||||
|
-- RecreateEnum: FormalityLevel was dropped in 20260129235248_add_link_storage_fields
|
||||||
|
CREATE TYPE "FormalityLevel" AS ENUM ('VERY_CASUAL', 'CASUAL', 'NEUTRAL', 'FORMAL', 'VERY_FORMAL');
|
||||||
|
|
||||||
|
-- RecreateTable: personalities was dropped in 20260129235248_add_link_storage_fields
|
||||||
|
-- Recreated with current schema (display_name, system_prompt, temperature, etc.)
|
||||||
|
CREATE TABLE "personalities" (
|
||||||
|
"id" UUID NOT NULL,
|
||||||
|
"workspace_id" UUID NOT NULL,
|
||||||
|
"name" TEXT NOT NULL,
|
||||||
|
"display_name" TEXT NOT NULL,
|
||||||
|
"description" TEXT,
|
||||||
|
"system_prompt" TEXT NOT NULL,
|
||||||
|
"temperature" DOUBLE PRECISION,
|
||||||
|
"max_tokens" INTEGER,
|
||||||
|
"llm_provider_instance_id" UUID,
|
||||||
|
"is_default" BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
"is_enabled" BOOLEAN NOT NULL DEFAULT true,
|
||||||
|
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "personalities_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex: personalities
|
||||||
|
CREATE UNIQUE INDEX "personalities_id_workspace_id_key" ON "personalities"("id", "workspace_id");
|
||||||
|
CREATE UNIQUE INDEX "personalities_workspace_id_name_key" ON "personalities"("workspace_id", "name");
|
||||||
|
CREATE INDEX "personalities_workspace_id_idx" ON "personalities"("workspace_id");
|
||||||
|
CREATE INDEX "personalities_workspace_id_is_default_idx" ON "personalities"("workspace_id", "is_default");
|
||||||
|
CREATE INDEX "personalities_workspace_id_is_enabled_idx" ON "personalities"("workspace_id", "is_enabled");
|
||||||
|
CREATE INDEX "personalities_llm_provider_instance_id_idx" ON "personalities"("llm_provider_instance_id");
|
||||||
|
|
||||||
|
-- AddForeignKey: personalities
|
||||||
|
ALTER TABLE "personalities" ADD CONSTRAINT "personalities_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
|
ALTER TABLE "personalities" ADD CONSTRAINT "personalities_llm_provider_instance_id_fkey" FOREIGN KEY ("llm_provider_instance_id") REFERENCES "llm_provider_instances"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||||
|
|
||||||
-- CreateTable
|
-- CreateTable
|
||||||
CREATE TABLE "cron_schedules" (
|
CREATE TABLE "cron_schedules" (
|
||||||
"id" UUID NOT NULL,
|
"id" UUID NOT NULL,
|
||||||
|
|||||||
@@ -0,0 +1,49 @@
|
|||||||
|
-- Fix schema drift: tables, indexes, and constraints defined in schema.prisma
|
||||||
|
-- but never created (or dropped and never recreated) by prior migrations.
|
||||||
|
|
||||||
|
-- ============================================
|
||||||
|
-- CreateTable: instances (Federation module)
|
||||||
|
-- Never created in any prior migration
|
||||||
|
-- ============================================
|
||||||
|
CREATE TABLE "instances" (
|
||||||
|
"id" UUID NOT NULL,
|
||||||
|
"instance_id" TEXT NOT NULL,
|
||||||
|
"name" TEXT NOT NULL,
|
||||||
|
"url" TEXT NOT NULL,
|
||||||
|
"public_key" TEXT NOT NULL,
|
||||||
|
"private_key" TEXT NOT NULL,
|
||||||
|
"capabilities" JSONB NOT NULL DEFAULT '{}',
|
||||||
|
"metadata" JSONB NOT NULL DEFAULT '{}',
|
||||||
|
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "instances_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE UNIQUE INDEX "instances_instance_id_key" ON "instances"("instance_id");
|
||||||
|
|
||||||
|
-- ============================================
|
||||||
|
-- Recreate dropped unique index on knowledge_links
|
||||||
|
-- Created in 20260129220645_add_knowledge_module, dropped in
|
||||||
|
-- 20260129235248_add_link_storage_fields, never recreated.
|
||||||
|
-- ============================================
|
||||||
|
CREATE UNIQUE INDEX "knowledge_links_source_id_target_id_key" ON "knowledge_links"("source_id", "target_id");
|
||||||
|
|
||||||
|
-- ============================================
|
||||||
|
-- Missing @@unique([id, workspaceId]) composite indexes
|
||||||
|
-- Defined in schema.prisma but never created in migrations.
|
||||||
|
-- (agent_tasks and runner_jobs already have these.)
|
||||||
|
-- ============================================
|
||||||
|
CREATE UNIQUE INDEX "tasks_id_workspace_id_key" ON "tasks"("id", "workspace_id");
|
||||||
|
CREATE UNIQUE INDEX "events_id_workspace_id_key" ON "events"("id", "workspace_id");
|
||||||
|
CREATE UNIQUE INDEX "projects_id_workspace_id_key" ON "projects"("id", "workspace_id");
|
||||||
|
CREATE UNIQUE INDEX "activity_logs_id_workspace_id_key" ON "activity_logs"("id", "workspace_id");
|
||||||
|
CREATE UNIQUE INDEX "domains_id_workspace_id_key" ON "domains"("id", "workspace_id");
|
||||||
|
CREATE UNIQUE INDEX "ideas_id_workspace_id_key" ON "ideas"("id", "workspace_id");
|
||||||
|
CREATE UNIQUE INDEX "user_layouts_id_workspace_id_key" ON "user_layouts"("id", "workspace_id");
|
||||||
|
|
||||||
|
-- ============================================
|
||||||
|
-- Missing index on agent_tasks.agent_type
|
||||||
|
-- Defined as @@index([agentType]) in schema.prisma
|
||||||
|
-- ============================================
|
||||||
|
CREATE INDEX "agent_tasks_agent_type_idx" ON "agent_tasks"("agent_type");
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
-- CreateEnum
|
||||||
|
CREATE TYPE "TerminalSessionStatus" AS ENUM ('ACTIVE', 'CLOSED');
|
||||||
|
|
||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "terminal_sessions" (
|
||||||
|
"id" UUID NOT NULL,
|
||||||
|
"workspace_id" UUID NOT NULL,
|
||||||
|
"name" TEXT NOT NULL DEFAULT 'Terminal',
|
||||||
|
"status" "TerminalSessionStatus" NOT NULL DEFAULT 'ACTIVE',
|
||||||
|
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"closed_at" TIMESTAMPTZ,
|
||||||
|
|
||||||
|
CONSTRAINT "terminal_sessions_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "terminal_sessions_workspace_id_idx" ON "terminal_sessions"("workspace_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "terminal_sessions_workspace_id_status_idx" ON "terminal_sessions"("workspace_id", "status");
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "terminal_sessions" ADD CONSTRAINT "terminal_sessions_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
@@ -0,0 +1,3 @@
|
|||||||
|
-- AlterTable: add tone and formality_level columns to personalities
|
||||||
|
ALTER TABLE "personalities" ADD COLUMN "tone" TEXT NOT NULL DEFAULT 'neutral';
|
||||||
|
ALTER TABLE "personalities" ADD COLUMN "formality_level" "FormalityLevel" NOT NULL DEFAULT 'NEUTRAL';
|
||||||
@@ -0,0 +1,24 @@
|
|||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "agent_memories" (
|
||||||
|
"id" UUID NOT NULL,
|
||||||
|
"workspace_id" UUID NOT NULL,
|
||||||
|
"agent_id" TEXT NOT NULL,
|
||||||
|
"key" TEXT NOT NULL,
|
||||||
|
"value" JSONB NOT NULL,
|
||||||
|
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "agent_memories_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "agent_memories_workspace_id_agent_id_key_key" ON "agent_memories"("workspace_id", "agent_id", "key");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "agent_memories_workspace_id_idx" ON "agent_memories"("workspace_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "agent_memories_agent_id_idx" ON "agent_memories"("agent_id");
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "agent_memories" ADD CONSTRAINT "agent_memories_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
@@ -0,0 +1,33 @@
|
|||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "conversation_archives" (
|
||||||
|
"id" UUID NOT NULL,
|
||||||
|
"workspace_id" UUID NOT NULL,
|
||||||
|
"session_id" TEXT NOT NULL,
|
||||||
|
"agent_id" TEXT NOT NULL,
|
||||||
|
"messages" JSONB NOT NULL,
|
||||||
|
"message_count" INTEGER NOT NULL,
|
||||||
|
"summary" TEXT NOT NULL,
|
||||||
|
"embedding" vector(1536),
|
||||||
|
"started_at" TIMESTAMPTZ NOT NULL,
|
||||||
|
"ended_at" TIMESTAMPTZ,
|
||||||
|
"metadata" JSONB NOT NULL DEFAULT '{}',
|
||||||
|
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "conversation_archives_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "conversation_archives_workspace_id_session_id_key" ON "conversation_archives"("workspace_id", "session_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "conversation_archives_workspace_id_idx" ON "conversation_archives"("workspace_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "conversation_archives_agent_id_idx" ON "conversation_archives"("agent_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "conversation_archives_started_at_idx" ON "conversation_archives"("started_at");
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "conversation_archives" ADD CONSTRAINT "conversation_archives_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
@@ -0,0 +1,109 @@
|
|||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "SystemConfig" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"key" TEXT NOT NULL,
|
||||||
|
"value" TEXT NOT NULL,
|
||||||
|
"encrypted" BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "SystemConfig_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "BreakglassUser" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"username" TEXT NOT NULL,
|
||||||
|
"passwordHash" TEXT NOT NULL,
|
||||||
|
"isActive" BOOLEAN NOT NULL DEFAULT true,
|
||||||
|
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "BreakglassUser_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "LlmProvider" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"userId" TEXT NOT NULL,
|
||||||
|
"name" TEXT NOT NULL,
|
||||||
|
"displayName" TEXT NOT NULL,
|
||||||
|
"type" TEXT NOT NULL,
|
||||||
|
"baseUrl" TEXT,
|
||||||
|
"apiKey" TEXT,
|
||||||
|
"apiType" TEXT NOT NULL DEFAULT 'openai-completions',
|
||||||
|
"models" JSONB NOT NULL DEFAULT '[]',
|
||||||
|
"isActive" BOOLEAN NOT NULL DEFAULT true,
|
||||||
|
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "LlmProvider_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "UserContainer" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"userId" TEXT NOT NULL,
|
||||||
|
"containerId" TEXT,
|
||||||
|
"containerName" TEXT NOT NULL,
|
||||||
|
"gatewayPort" INTEGER,
|
||||||
|
"gatewayToken" TEXT NOT NULL,
|
||||||
|
"status" TEXT NOT NULL DEFAULT 'stopped',
|
||||||
|
"lastActiveAt" TIMESTAMP(3),
|
||||||
|
"idleTimeoutMin" INTEGER NOT NULL DEFAULT 30,
|
||||||
|
"config" JSONB NOT NULL DEFAULT '{}',
|
||||||
|
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "UserContainer_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "SystemContainer" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"name" TEXT NOT NULL,
|
||||||
|
"role" TEXT NOT NULL,
|
||||||
|
"containerId" TEXT,
|
||||||
|
"gatewayPort" INTEGER,
|
||||||
|
"gatewayToken" TEXT NOT NULL,
|
||||||
|
"status" TEXT NOT NULL DEFAULT 'stopped',
|
||||||
|
"primaryModel" TEXT NOT NULL,
|
||||||
|
"isActive" BOOLEAN NOT NULL DEFAULT true,
|
||||||
|
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "SystemContainer_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "UserAgentConfig" (
|
||||||
|
"id" TEXT NOT NULL,
|
||||||
|
"userId" TEXT NOT NULL,
|
||||||
|
"primaryModel" TEXT,
|
||||||
|
"fallbackModels" JSONB NOT NULL DEFAULT '[]',
|
||||||
|
"personality" TEXT,
|
||||||
|
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "UserAgentConfig_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "SystemConfig_key_key" ON "SystemConfig"("key");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "BreakglassUser_username_key" ON "BreakglassUser"("username");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "LlmProvider_userId_idx" ON "LlmProvider"("userId");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "LlmProvider_userId_name_key" ON "LlmProvider"("userId", "name");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "UserContainer_userId_key" ON "UserContainer"("userId");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "SystemContainer_name_key" ON "SystemContainer"("name");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "UserAgentConfig_userId_key" ON "UserAgentConfig"("userId");
|
||||||
@@ -0,0 +1,37 @@
|
|||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "findings" (
|
||||||
|
"id" UUID NOT NULL,
|
||||||
|
"workspace_id" UUID NOT NULL,
|
||||||
|
"task_id" UUID,
|
||||||
|
"agent_id" TEXT NOT NULL,
|
||||||
|
"type" TEXT NOT NULL,
|
||||||
|
"title" TEXT NOT NULL,
|
||||||
|
"data" JSONB NOT NULL,
|
||||||
|
"summary" TEXT NOT NULL,
|
||||||
|
"embedding" vector(1536),
|
||||||
|
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "findings_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX "findings_id_workspace_id_key" ON "findings"("id", "workspace_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "findings_workspace_id_idx" ON "findings"("workspace_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "findings_agent_id_idx" ON "findings"("agent_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "findings_type_idx" ON "findings"("type");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "findings_task_id_idx" ON "findings"("task_id");
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "findings" ADD CONSTRAINT "findings_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "findings" ADD CONSTRAINT "findings_task_id_fkey" FOREIGN KEY ("task_id") REFERENCES "agent_tasks"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
-- AlterTable
|
||||||
|
ALTER TABLE "tasks" ADD COLUMN "assigned_agent" TEXT;
|
||||||
@@ -0,0 +1,13 @@
|
|||||||
|
-- MS21: Add admin, local auth, and invitation fields to users table
|
||||||
|
-- These columns were added to schema.prisma but never captured in a migration.
|
||||||
|
|
||||||
|
ALTER TABLE "users"
|
||||||
|
ADD COLUMN IF NOT EXISTS "deactivated_at" TIMESTAMPTZ,
|
||||||
|
ADD COLUMN IF NOT EXISTS "is_local_auth" BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
ADD COLUMN IF NOT EXISTS "password_hash" TEXT,
|
||||||
|
ADD COLUMN IF NOT EXISTS "invited_by" UUID,
|
||||||
|
ADD COLUMN IF NOT EXISTS "invitation_token" TEXT,
|
||||||
|
ADD COLUMN IF NOT EXISTS "invited_at" TIMESTAMPTZ;
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE UNIQUE INDEX IF NOT EXISTS "users_invitation_token_key" ON "users"("invitation_token");
|
||||||
@@ -3,6 +3,7 @@
|
|||||||
|
|
||||||
generator client {
|
generator client {
|
||||||
provider = "prisma-client-js"
|
provider = "prisma-client-js"
|
||||||
|
binaryTargets = ["native", "debian-openssl-3.0.x"]
|
||||||
previewFeatures = ["postgresqlExtensions"]
|
previewFeatures = ["postgresqlExtensions"]
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -206,6 +207,11 @@ enum CredentialScope {
|
|||||||
SYSTEM
|
SYSTEM
|
||||||
}
|
}
|
||||||
|
|
||||||
|
enum TerminalSessionStatus {
|
||||||
|
ACTIVE
|
||||||
|
CLOSED
|
||||||
|
}
|
||||||
|
|
||||||
// ============================================
|
// ============================================
|
||||||
// MODELS
|
// MODELS
|
||||||
// ============================================
|
// ============================================
|
||||||
@@ -221,6 +227,14 @@ model User {
|
|||||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||||
|
|
||||||
|
// MS21: Admin, local auth, and invitation fields
|
||||||
|
deactivatedAt DateTime? @map("deactivated_at") @db.Timestamptz
|
||||||
|
isLocalAuth Boolean @default(false) @map("is_local_auth")
|
||||||
|
passwordHash String? @map("password_hash")
|
||||||
|
invitedBy String? @map("invited_by") @db.Uuid
|
||||||
|
invitationToken String? @unique @map("invitation_token")
|
||||||
|
invitedAt DateTime? @map("invited_at") @db.Timestamptz
|
||||||
|
|
||||||
// Relations
|
// Relations
|
||||||
ownedWorkspaces Workspace[] @relation("WorkspaceOwner")
|
ownedWorkspaces Workspace[] @relation("WorkspaceOwner")
|
||||||
workspaceMemberships WorkspaceMember[]
|
workspaceMemberships WorkspaceMember[]
|
||||||
@@ -284,6 +298,8 @@ model Workspace {
|
|||||||
agents Agent[]
|
agents Agent[]
|
||||||
agentSessions AgentSession[]
|
agentSessions AgentSession[]
|
||||||
agentTasks AgentTask[]
|
agentTasks AgentTask[]
|
||||||
|
findings Finding[]
|
||||||
|
agentMemories AgentMemory[]
|
||||||
userLayouts UserLayout[]
|
userLayouts UserLayout[]
|
||||||
knowledgeEntries KnowledgeEntry[]
|
knowledgeEntries KnowledgeEntry[]
|
||||||
knowledgeTags KnowledgeTag[]
|
knowledgeTags KnowledgeTag[]
|
||||||
@@ -297,6 +313,8 @@ model Workspace {
|
|||||||
federationEventSubscriptions FederationEventSubscription[]
|
federationEventSubscriptions FederationEventSubscription[]
|
||||||
llmUsageLogs LlmUsageLog[]
|
llmUsageLogs LlmUsageLog[]
|
||||||
userCredentials UserCredential[]
|
userCredentials UserCredential[]
|
||||||
|
terminalSessions TerminalSession[]
|
||||||
|
conversationArchives ConversationArchive[]
|
||||||
|
|
||||||
@@index([ownerId])
|
@@index([ownerId])
|
||||||
@@map("workspaces")
|
@@map("workspaces")
|
||||||
@@ -361,6 +379,7 @@ model Task {
|
|||||||
creatorId String @map("creator_id") @db.Uuid
|
creatorId String @map("creator_id") @db.Uuid
|
||||||
projectId String? @map("project_id") @db.Uuid
|
projectId String? @map("project_id") @db.Uuid
|
||||||
parentId String? @map("parent_id") @db.Uuid
|
parentId String? @map("parent_id") @db.Uuid
|
||||||
|
assignedAgent String? @map("assigned_agent")
|
||||||
domainId String? @map("domain_id") @db.Uuid
|
domainId String? @map("domain_id") @db.Uuid
|
||||||
sortOrder Int @default(0) @map("sort_order")
|
sortOrder Int @default(0) @map("sort_order")
|
||||||
metadata Json @default("{}")
|
metadata Json @default("{}")
|
||||||
@@ -674,6 +693,7 @@ model AgentTask {
|
|||||||
createdBy User @relation("AgentTaskCreator", fields: [createdById], references: [id], onDelete: Cascade)
|
createdBy User @relation("AgentTaskCreator", fields: [createdById], references: [id], onDelete: Cascade)
|
||||||
createdById String @map("created_by_id") @db.Uuid
|
createdById String @map("created_by_id") @db.Uuid
|
||||||
runnerJobs RunnerJob[]
|
runnerJobs RunnerJob[]
|
||||||
|
findings Finding[]
|
||||||
|
|
||||||
@@unique([id, workspaceId])
|
@@unique([id, workspaceId])
|
||||||
@@index([workspaceId])
|
@@index([workspaceId])
|
||||||
@@ -683,6 +703,33 @@ model AgentTask {
|
|||||||
@@map("agent_tasks")
|
@@map("agent_tasks")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
model Finding {
|
||||||
|
id String @id @default(uuid()) @db.Uuid
|
||||||
|
workspaceId String @map("workspace_id") @db.Uuid
|
||||||
|
taskId String? @map("task_id") @db.Uuid
|
||||||
|
|
||||||
|
agentId String @map("agent_id")
|
||||||
|
type String
|
||||||
|
title String
|
||||||
|
data Json
|
||||||
|
summary String @db.Text
|
||||||
|
// Note: vector dimension (1536) must match EMBEDDING_DIMENSION constant in @mosaic/shared
|
||||||
|
embedding Unsupported("vector(1536)")?
|
||||||
|
|
||||||
|
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||||
|
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||||
|
|
||||||
|
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||||
|
task AgentTask? @relation(fields: [taskId], references: [id], onDelete: SetNull)
|
||||||
|
|
||||||
|
@@unique([id, workspaceId])
|
||||||
|
@@index([workspaceId])
|
||||||
|
@@index([agentId])
|
||||||
|
@@index([type])
|
||||||
|
@@index([taskId])
|
||||||
|
@@map("findings")
|
||||||
|
}
|
||||||
|
|
||||||
model AgentSession {
|
model AgentSession {
|
||||||
id String @id @default(uuid()) @db.Uuid
|
id String @id @default(uuid()) @db.Uuid
|
||||||
workspaceId String @map("workspace_id") @db.Uuid
|
workspaceId String @map("workspace_id") @db.Uuid
|
||||||
@@ -720,6 +767,23 @@ model AgentSession {
|
|||||||
@@map("agent_sessions")
|
@@map("agent_sessions")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
model AgentMemory {
|
||||||
|
id String @id @default(uuid()) @db.Uuid
|
||||||
|
workspaceId String @map("workspace_id") @db.Uuid
|
||||||
|
agentId String @map("agent_id")
|
||||||
|
key String
|
||||||
|
value Json
|
||||||
|
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||||
|
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||||
|
|
||||||
|
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
|
@@unique([workspaceId, agentId, key])
|
||||||
|
@@index([workspaceId])
|
||||||
|
@@index([agentId])
|
||||||
|
@@map("agent_memories")
|
||||||
|
}
|
||||||
|
|
||||||
model WidgetDefinition {
|
model WidgetDefinition {
|
||||||
id String @id @default(uuid()) @db.Uuid
|
id String @id @default(uuid()) @db.Uuid
|
||||||
|
|
||||||
@@ -1061,6 +1125,10 @@ model Personality {
|
|||||||
displayName String @map("display_name")
|
displayName String @map("display_name")
|
||||||
description String? @db.Text
|
description String? @db.Text
|
||||||
|
|
||||||
|
// Tone and formality
|
||||||
|
tone String @default("neutral")
|
||||||
|
formalityLevel FormalityLevel @default(NEUTRAL) @map("formality_level")
|
||||||
|
|
||||||
// System prompt
|
// System prompt
|
||||||
systemPrompt String @map("system_prompt") @db.Text
|
systemPrompt String @map("system_prompt") @db.Text
|
||||||
|
|
||||||
@@ -1507,3 +1575,131 @@ model LlmUsageLog {
|
|||||||
@@index([conversationId])
|
@@index([conversationId])
|
||||||
@@map("llm_usage_logs")
|
@@map("llm_usage_logs")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================
|
||||||
|
// TERMINAL MODULE
|
||||||
|
// ============================================
|
||||||
|
|
||||||
|
model TerminalSession {
|
||||||
|
id String @id @default(uuid()) @db.Uuid
|
||||||
|
workspaceId String @map("workspace_id") @db.Uuid
|
||||||
|
name String @default("Terminal")
|
||||||
|
status TerminalSessionStatus @default(ACTIVE)
|
||||||
|
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||||
|
closedAt DateTime? @map("closed_at") @db.Timestamptz
|
||||||
|
|
||||||
|
// Relations
|
||||||
|
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
|
@@index([workspaceId])
|
||||||
|
@@index([workspaceId, status])
|
||||||
|
@@map("terminal_sessions")
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================
|
||||||
|
// CONVERSATION ARCHIVE MODULE
|
||||||
|
// ============================================
|
||||||
|
|
||||||
|
model ConversationArchive {
|
||||||
|
id String @id @default(uuid()) @db.Uuid
|
||||||
|
workspaceId String @map("workspace_id") @db.Uuid
|
||||||
|
sessionId String @map("session_id")
|
||||||
|
agentId String @map("agent_id")
|
||||||
|
messages Json
|
||||||
|
messageCount Int @map("message_count")
|
||||||
|
summary String @db.Text
|
||||||
|
// Note: vector dimension (1536) must match EMBEDDING_DIMENSION constant in @mosaic/shared
|
||||||
|
embedding Unsupported("vector(1536)")?
|
||||||
|
startedAt DateTime @map("started_at") @db.Timestamptz
|
||||||
|
endedAt DateTime? @map("ended_at") @db.Timestamptz
|
||||||
|
metadata Json @default("{}")
|
||||||
|
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||||
|
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||||
|
|
||||||
|
// Relations
|
||||||
|
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
|
@@unique([workspaceId, sessionId])
|
||||||
|
@@index([workspaceId])
|
||||||
|
@@index([agentId])
|
||||||
|
@@index([startedAt])
|
||||||
|
@@map("conversation_archives")
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================================
|
||||||
|
// AGENT FLEET MODULE
|
||||||
|
// ============================================
|
||||||
|
|
||||||
|
model SystemConfig {
|
||||||
|
id String @id @default(cuid())
|
||||||
|
key String @unique
|
||||||
|
value String
|
||||||
|
encrypted Boolean @default(false)
|
||||||
|
updatedAt DateTime @updatedAt
|
||||||
|
}
|
||||||
|
|
||||||
|
model BreakglassUser {
|
||||||
|
id String @id @default(cuid())
|
||||||
|
username String @unique
|
||||||
|
passwordHash String
|
||||||
|
isActive Boolean @default(true)
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
updatedAt DateTime @updatedAt
|
||||||
|
}
|
||||||
|
|
||||||
|
model LlmProvider {
|
||||||
|
id String @id @default(cuid())
|
||||||
|
userId String
|
||||||
|
name String
|
||||||
|
displayName String
|
||||||
|
type String
|
||||||
|
baseUrl String?
|
||||||
|
apiKey String?
|
||||||
|
apiType String @default("openai-completions")
|
||||||
|
models Json @default("[]")
|
||||||
|
isActive Boolean @default(true)
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
updatedAt DateTime @updatedAt
|
||||||
|
|
||||||
|
@@unique([userId, name])
|
||||||
|
@@index([userId])
|
||||||
|
}
|
||||||
|
|
||||||
|
model UserContainer {
|
||||||
|
id String @id @default(cuid())
|
||||||
|
userId String @unique
|
||||||
|
containerId String?
|
||||||
|
containerName String
|
||||||
|
gatewayPort Int?
|
||||||
|
gatewayToken String
|
||||||
|
status String @default("stopped")
|
||||||
|
lastActiveAt DateTime?
|
||||||
|
idleTimeoutMin Int @default(30)
|
||||||
|
config Json @default("{}")
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
updatedAt DateTime @updatedAt
|
||||||
|
}
|
||||||
|
|
||||||
|
model SystemContainer {
|
||||||
|
id String @id @default(cuid())
|
||||||
|
name String @unique
|
||||||
|
role String
|
||||||
|
containerId String?
|
||||||
|
gatewayPort Int?
|
||||||
|
gatewayToken String
|
||||||
|
status String @default("stopped")
|
||||||
|
primaryModel String
|
||||||
|
isActive Boolean @default(true)
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
updatedAt DateTime @updatedAt
|
||||||
|
}
|
||||||
|
|
||||||
|
model UserAgentConfig {
|
||||||
|
id String @id @default(cuid())
|
||||||
|
userId String @unique
|
||||||
|
primaryModel String?
|
||||||
|
fallbackModels Json @default("[]")
|
||||||
|
personality String?
|
||||||
|
createdAt DateTime @default(now())
|
||||||
|
updatedAt DateTime @updatedAt
|
||||||
|
}
|
||||||
|
|||||||
@@ -65,6 +65,136 @@ async function main() {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// ============================================
|
||||||
|
// WIDGET DEFINITIONS (global, not workspace-scoped)
|
||||||
|
// ============================================
|
||||||
|
const widgetDefs = [
|
||||||
|
{
|
||||||
|
name: "TasksWidget",
|
||||||
|
displayName: "Tasks",
|
||||||
|
description: "View and manage your tasks",
|
||||||
|
component: "TasksWidget",
|
||||||
|
defaultWidth: 2,
|
||||||
|
defaultHeight: 2,
|
||||||
|
minWidth: 1,
|
||||||
|
minHeight: 2,
|
||||||
|
maxWidth: 4,
|
||||||
|
maxHeight: null,
|
||||||
|
configSchema: {},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "CalendarWidget",
|
||||||
|
displayName: "Calendar",
|
||||||
|
description: "View upcoming events and schedule",
|
||||||
|
component: "CalendarWidget",
|
||||||
|
defaultWidth: 2,
|
||||||
|
defaultHeight: 2,
|
||||||
|
minWidth: 2,
|
||||||
|
minHeight: 2,
|
||||||
|
maxWidth: 4,
|
||||||
|
maxHeight: null,
|
||||||
|
configSchema: {},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "QuickCaptureWidget",
|
||||||
|
displayName: "Quick Capture",
|
||||||
|
description: "Quickly capture notes and tasks",
|
||||||
|
component: "QuickCaptureWidget",
|
||||||
|
defaultWidth: 2,
|
||||||
|
defaultHeight: 1,
|
||||||
|
minWidth: 2,
|
||||||
|
minHeight: 1,
|
||||||
|
maxWidth: 4,
|
||||||
|
maxHeight: 2,
|
||||||
|
configSchema: {},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "AgentStatusWidget",
|
||||||
|
displayName: "Agent Status",
|
||||||
|
description: "Monitor agent activity and status",
|
||||||
|
component: "AgentStatusWidget",
|
||||||
|
defaultWidth: 2,
|
||||||
|
defaultHeight: 2,
|
||||||
|
minWidth: 1,
|
||||||
|
minHeight: 2,
|
||||||
|
maxWidth: 3,
|
||||||
|
maxHeight: null,
|
||||||
|
configSchema: {},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "ActiveProjectsWidget",
|
||||||
|
displayName: "Active Projects & Agent Chains",
|
||||||
|
description: "View active projects and running agent sessions",
|
||||||
|
component: "ActiveProjectsWidget",
|
||||||
|
defaultWidth: 2,
|
||||||
|
defaultHeight: 3,
|
||||||
|
minWidth: 2,
|
||||||
|
minHeight: 2,
|
||||||
|
maxWidth: 4,
|
||||||
|
maxHeight: null,
|
||||||
|
configSchema: {},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "TaskProgressWidget",
|
||||||
|
displayName: "Task Progress",
|
||||||
|
description: "Live progress of orchestrator agent tasks",
|
||||||
|
component: "TaskProgressWidget",
|
||||||
|
defaultWidth: 2,
|
||||||
|
defaultHeight: 2,
|
||||||
|
minWidth: 1,
|
||||||
|
minHeight: 2,
|
||||||
|
maxWidth: 3,
|
||||||
|
maxHeight: null,
|
||||||
|
configSchema: {},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "OrchestratorEventsWidget",
|
||||||
|
displayName: "Orchestrator Events",
|
||||||
|
description: "Recent orchestration events with stream/Matrix visibility",
|
||||||
|
component: "OrchestratorEventsWidget",
|
||||||
|
defaultWidth: 2,
|
||||||
|
defaultHeight: 2,
|
||||||
|
minWidth: 1,
|
||||||
|
minHeight: 2,
|
||||||
|
maxWidth: 4,
|
||||||
|
maxHeight: null,
|
||||||
|
configSchema: {},
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const wd of widgetDefs) {
|
||||||
|
await prisma.widgetDefinition.upsert({
|
||||||
|
where: { name: wd.name },
|
||||||
|
update: {
|
||||||
|
displayName: wd.displayName,
|
||||||
|
description: wd.description,
|
||||||
|
component: wd.component,
|
||||||
|
defaultWidth: wd.defaultWidth,
|
||||||
|
defaultHeight: wd.defaultHeight,
|
||||||
|
minWidth: wd.minWidth,
|
||||||
|
minHeight: wd.minHeight,
|
||||||
|
maxWidth: wd.maxWidth,
|
||||||
|
maxHeight: wd.maxHeight,
|
||||||
|
configSchema: wd.configSchema,
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
name: wd.name,
|
||||||
|
displayName: wd.displayName,
|
||||||
|
description: wd.description,
|
||||||
|
component: wd.component,
|
||||||
|
defaultWidth: wd.defaultWidth,
|
||||||
|
defaultHeight: wd.defaultHeight,
|
||||||
|
minWidth: wd.minWidth,
|
||||||
|
minHeight: wd.minHeight,
|
||||||
|
maxWidth: wd.maxWidth,
|
||||||
|
maxHeight: wd.maxHeight,
|
||||||
|
configSchema: wd.configSchema,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Seeded ${widgetDefs.length} widget definitions`);
|
||||||
|
|
||||||
// Use transaction for atomic seed data reset and creation
|
// Use transaction for atomic seed data reset and creation
|
||||||
await prisma.$transaction(async (tx) => {
|
await prisma.$transaction(async (tx) => {
|
||||||
// Delete existing seed data for idempotency (avoids duplicates on re-run)
|
// Delete existing seed data for idempotency (avoids duplicates on re-run)
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import { Controller, Get, Query, Param, UseGuards } from "@nestjs/common";
|
import { Controller, Get, Query, Param, UseGuards } from "@nestjs/common";
|
||||||
import { ActivityService } from "./activity.service";
|
import { ActivityService } from "./activity.service";
|
||||||
import { EntityType } from "@prisma/client";
|
import { EntityType } from "@prisma/client";
|
||||||
import type { QueryActivityLogDto } from "./dto";
|
import { QueryActivityLogDto } from "./dto";
|
||||||
import { AuthGuard } from "../auth/guards/auth.guard";
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||||
import { Workspace, Permission, RequirePermission } from "../common/decorators";
|
import { Workspace, Permission, RequirePermission } from "../common/decorators";
|
||||||
|
|||||||
@@ -117,12 +117,13 @@ export class ActivityService {
|
|||||||
/**
|
/**
|
||||||
* Get a single activity log by ID
|
* Get a single activity log by ID
|
||||||
*/
|
*/
|
||||||
async findOne(id: string, workspaceId: string): Promise<ActivityLogResult | null> {
|
async findOne(id: string, workspaceId?: string): Promise<ActivityLogResult | null> {
|
||||||
|
const where: Prisma.ActivityLogWhereUniqueInput = { id };
|
||||||
|
if (workspaceId) {
|
||||||
|
where.workspaceId = workspaceId;
|
||||||
|
}
|
||||||
return await this.prisma.activityLog.findUnique({
|
return await this.prisma.activityLog.findUnique({
|
||||||
where: {
|
where,
|
||||||
id,
|
|
||||||
workspaceId,
|
|
||||||
},
|
|
||||||
include: {
|
include: {
|
||||||
user: {
|
user: {
|
||||||
select: {
|
select: {
|
||||||
|
|||||||
@@ -384,10 +384,18 @@ describe("ActivityLoggingInterceptor", () => {
|
|||||||
const context = createMockExecutionContext("POST", {}, body, user);
|
const context = createMockExecutionContext("POST", {}, body, user);
|
||||||
const next = createMockCallHandler(result);
|
const next = createMockCallHandler(result);
|
||||||
|
|
||||||
|
mockActivityService.logActivity.mockResolvedValue({
|
||||||
|
id: "activity-123",
|
||||||
|
});
|
||||||
|
|
||||||
await new Promise<void>((resolve) => {
|
await new Promise<void>((resolve) => {
|
||||||
interceptor.intercept(context, next).subscribe(() => {
|
interceptor.intercept(context, next).subscribe(() => {
|
||||||
// Should not call logActivity when workspaceId is missing
|
// workspaceId is now optional, so logActivity should be called without it
|
||||||
expect(mockActivityService.logActivity).not.toHaveBeenCalled();
|
expect(mockActivityService.logActivity).toHaveBeenCalled();
|
||||||
|
const callArgs = mockActivityService.logActivity.mock.calls[0][0];
|
||||||
|
expect(callArgs.userId).toBe("user-123");
|
||||||
|
expect(callArgs.entityId).toBe("task-123");
|
||||||
|
expect(callArgs.workspaceId).toBeUndefined();
|
||||||
resolve();
|
resolve();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -412,10 +420,18 @@ describe("ActivityLoggingInterceptor", () => {
|
|||||||
const context = createMockExecutionContext("POST", {}, body, user);
|
const context = createMockExecutionContext("POST", {}, body, user);
|
||||||
const next = createMockCallHandler(result);
|
const next = createMockCallHandler(result);
|
||||||
|
|
||||||
|
mockActivityService.logActivity.mockResolvedValue({
|
||||||
|
id: "activity-123",
|
||||||
|
});
|
||||||
|
|
||||||
await new Promise<void>((resolve) => {
|
await new Promise<void>((resolve) => {
|
||||||
interceptor.intercept(context, next).subscribe(() => {
|
interceptor.intercept(context, next).subscribe(() => {
|
||||||
// Should not call logActivity when workspaceId is missing
|
// workspaceId is now optional, so logActivity should be called without it
|
||||||
expect(mockActivityService.logActivity).not.toHaveBeenCalled();
|
expect(mockActivityService.logActivity).toHaveBeenCalled();
|
||||||
|
const callArgs = mockActivityService.logActivity.mock.calls[0][0];
|
||||||
|
expect(callArgs.userId).toBe("user-123");
|
||||||
|
expect(callArgs.entityId).toBe("task-123");
|
||||||
|
expect(callArgs.workspaceId).toBeUndefined();
|
||||||
resolve();
|
resolve();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { tap } from "rxjs/operators";
|
|||||||
import { ActivityService } from "../activity.service";
|
import { ActivityService } from "../activity.service";
|
||||||
import { ActivityAction, EntityType } from "@prisma/client";
|
import { ActivityAction, EntityType } from "@prisma/client";
|
||||||
import type { Prisma } from "@prisma/client";
|
import type { Prisma } from "@prisma/client";
|
||||||
|
import type { CreateActivityLogInput } from "../interfaces/activity.interface";
|
||||||
import type { AuthenticatedRequest } from "../../common/types/user.types";
|
import type { AuthenticatedRequest } from "../../common/types/user.types";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -61,10 +62,13 @@ export class ActivityLoggingInterceptor implements NestInterceptor {
|
|||||||
// Extract entity information
|
// Extract entity information
|
||||||
const resultObj = result as Record<string, unknown> | undefined;
|
const resultObj = result as Record<string, unknown> | undefined;
|
||||||
const entityId = params.id ?? (resultObj?.id as string | undefined);
|
const entityId = params.id ?? (resultObj?.id as string | undefined);
|
||||||
|
|
||||||
|
// workspaceId is now optional - log events even when missing
|
||||||
const workspaceId = user.workspaceId ?? (body.workspaceId as string | undefined);
|
const workspaceId = user.workspaceId ?? (body.workspaceId as string | undefined);
|
||||||
|
|
||||||
if (!entityId || !workspaceId) {
|
// Log with warning if entityId is missing, but still proceed with logging if workspaceId exists
|
||||||
this.logger.warn("Cannot log activity: missing entityId or workspaceId");
|
if (!entityId) {
|
||||||
|
this.logger.warn("Cannot log activity: missing entityId");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -92,9 +96,8 @@ export class ActivityLoggingInterceptor implements NestInterceptor {
|
|||||||
const userAgent =
|
const userAgent =
|
||||||
typeof userAgentHeader === "string" ? userAgentHeader : userAgentHeader?.[0];
|
typeof userAgentHeader === "string" ? userAgentHeader : userAgentHeader?.[0];
|
||||||
|
|
||||||
// Log the activity
|
// Log the activity — workspaceId is optional
|
||||||
await this.activityService.logActivity({
|
const activityInput: CreateActivityLogInput = {
|
||||||
workspaceId,
|
|
||||||
userId: user.id,
|
userId: user.id,
|
||||||
action,
|
action,
|
||||||
entityType,
|
entityType,
|
||||||
@@ -102,7 +105,11 @@ export class ActivityLoggingInterceptor implements NestInterceptor {
|
|||||||
details,
|
details,
|
||||||
ipAddress: ip ?? undefined,
|
ipAddress: ip ?? undefined,
|
||||||
userAgent: userAgent ?? undefined,
|
userAgent: userAgent ?? undefined,
|
||||||
});
|
};
|
||||||
|
if (workspaceId) {
|
||||||
|
activityInput.workspaceId = workspaceId;
|
||||||
|
}
|
||||||
|
await this.activityService.logActivity(activityInput);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
// Don't fail the request if activity logging fails
|
// Don't fail the request if activity logging fails
|
||||||
this.logger.error(
|
this.logger.error(
|
||||||
|
|||||||
@@ -2,9 +2,10 @@ import type { ActivityAction, EntityType, Prisma } from "@prisma/client";
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Interface for creating a new activity log entry
|
* Interface for creating a new activity log entry
|
||||||
|
* workspaceId is optional - allows logging events without workspace context
|
||||||
*/
|
*/
|
||||||
export interface CreateActivityLogInput {
|
export interface CreateActivityLogInput {
|
||||||
workspaceId: string;
|
workspaceId?: string | null;
|
||||||
userId: string;
|
userId: string;
|
||||||
action: ActivityAction;
|
action: ActivityAction;
|
||||||
entityType: EntityType;
|
entityType: EntityType;
|
||||||
|
|||||||
258
apps/api/src/admin/admin.controller.spec.ts
Normal file
258
apps/api/src/admin/admin.controller.spec.ts
Normal file
@@ -0,0 +1,258 @@
|
|||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { AdminController } from "./admin.controller";
|
||||||
|
import { AdminService } from "./admin.service";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { AdminGuard } from "../auth/guards/admin.guard";
|
||||||
|
import { WorkspaceMemberRole } from "@prisma/client";
|
||||||
|
import type { ExecutionContext } from "@nestjs/common";
|
||||||
|
|
||||||
|
describe("AdminController", () => {
|
||||||
|
let controller: AdminController;
|
||||||
|
let service: AdminService;
|
||||||
|
|
||||||
|
const mockAdminService = {
|
||||||
|
listUsers: vi.fn(),
|
||||||
|
inviteUser: vi.fn(),
|
||||||
|
updateUser: vi.fn(),
|
||||||
|
deactivateUser: vi.fn(),
|
||||||
|
createWorkspace: vi.fn(),
|
||||||
|
updateWorkspace: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockAuthGuard = {
|
||||||
|
canActivate: vi.fn((context: ExecutionContext) => {
|
||||||
|
const request = context.switchToHttp().getRequest();
|
||||||
|
request.user = {
|
||||||
|
id: "550e8400-e29b-41d4-a716-446655440001",
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Admin User",
|
||||||
|
};
|
||||||
|
return true;
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockAdminGuard = {
|
||||||
|
canActivate: vi.fn(() => true),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockAdminId = "550e8400-e29b-41d4-a716-446655440001";
|
||||||
|
const mockUserId = "550e8400-e29b-41d4-a716-446655440002";
|
||||||
|
const mockWorkspaceId = "550e8400-e29b-41d4-a716-446655440003";
|
||||||
|
|
||||||
|
const mockAdminUser = {
|
||||||
|
id: mockAdminId,
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Admin User",
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockUserResponse = {
|
||||||
|
id: mockUserId,
|
||||||
|
name: "Test User",
|
||||||
|
email: "test@example.com",
|
||||||
|
emailVerified: false,
|
||||||
|
image: null,
|
||||||
|
createdAt: new Date("2026-01-01"),
|
||||||
|
deactivatedAt: null,
|
||||||
|
isLocalAuth: false,
|
||||||
|
invitedAt: null,
|
||||||
|
invitedBy: null,
|
||||||
|
workspaceMemberships: [],
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockWorkspaceResponse = {
|
||||||
|
id: mockWorkspaceId,
|
||||||
|
name: "Test Workspace",
|
||||||
|
ownerId: mockAdminId,
|
||||||
|
settings: {},
|
||||||
|
createdAt: new Date("2026-01-01"),
|
||||||
|
updatedAt: new Date("2026-01-01"),
|
||||||
|
memberCount: 1,
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
controllers: [AdminController],
|
||||||
|
providers: [
|
||||||
|
{
|
||||||
|
provide: AdminService,
|
||||||
|
useValue: mockAdminService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
})
|
||||||
|
.overrideGuard(AuthGuard)
|
||||||
|
.useValue(mockAuthGuard)
|
||||||
|
.overrideGuard(AdminGuard)
|
||||||
|
.useValue(mockAdminGuard)
|
||||||
|
.compile();
|
||||||
|
|
||||||
|
controller = module.get<AdminController>(AdminController);
|
||||||
|
service = module.get<AdminService>(AdminService);
|
||||||
|
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should be defined", () => {
|
||||||
|
expect(controller).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("listUsers", () => {
|
||||||
|
it("should return paginated users", async () => {
|
||||||
|
const paginatedResult = {
|
||||||
|
data: [mockUserResponse],
|
||||||
|
meta: { total: 1, page: 1, limit: 50, totalPages: 1 },
|
||||||
|
};
|
||||||
|
mockAdminService.listUsers.mockResolvedValue(paginatedResult);
|
||||||
|
|
||||||
|
const result = await controller.listUsers({ page: 1, limit: 50 });
|
||||||
|
|
||||||
|
expect(result).toEqual(paginatedResult);
|
||||||
|
expect(service.listUsers).toHaveBeenCalledWith(1, 50);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use default pagination", async () => {
|
||||||
|
const paginatedResult = {
|
||||||
|
data: [],
|
||||||
|
meta: { total: 0, page: 1, limit: 50, totalPages: 0 },
|
||||||
|
};
|
||||||
|
mockAdminService.listUsers.mockResolvedValue(paginatedResult);
|
||||||
|
|
||||||
|
await controller.listUsers({});
|
||||||
|
|
||||||
|
expect(service.listUsers).toHaveBeenCalledWith(undefined, undefined);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("inviteUser", () => {
|
||||||
|
it("should invite a user", async () => {
|
||||||
|
const inviteDto = { email: "new@example.com" };
|
||||||
|
const invitationResponse = {
|
||||||
|
userId: "new-id",
|
||||||
|
invitationToken: "token",
|
||||||
|
email: "new@example.com",
|
||||||
|
invitedAt: new Date(),
|
||||||
|
};
|
||||||
|
mockAdminService.inviteUser.mockResolvedValue(invitationResponse);
|
||||||
|
|
||||||
|
const result = await controller.inviteUser(inviteDto, mockAdminUser);
|
||||||
|
|
||||||
|
expect(result).toEqual(invitationResponse);
|
||||||
|
expect(service.inviteUser).toHaveBeenCalledWith(inviteDto, mockAdminId);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should invite a user with workspace and role", async () => {
|
||||||
|
const inviteDto = {
|
||||||
|
email: "new@example.com",
|
||||||
|
workspaceId: mockWorkspaceId,
|
||||||
|
role: WorkspaceMemberRole.ADMIN,
|
||||||
|
};
|
||||||
|
mockAdminService.inviteUser.mockResolvedValue({
|
||||||
|
userId: "new-id",
|
||||||
|
invitationToken: "token",
|
||||||
|
email: "new@example.com",
|
||||||
|
invitedAt: new Date(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await controller.inviteUser(inviteDto, mockAdminUser);
|
||||||
|
|
||||||
|
expect(service.inviteUser).toHaveBeenCalledWith(inviteDto, mockAdminId);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("updateUser", () => {
|
||||||
|
it("should update a user", async () => {
|
||||||
|
const updateDto = { name: "Updated Name" };
|
||||||
|
mockAdminService.updateUser.mockResolvedValue({
|
||||||
|
...mockUserResponse,
|
||||||
|
name: "Updated Name",
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await controller.updateUser(mockUserId, updateDto);
|
||||||
|
|
||||||
|
expect(result.name).toBe("Updated Name");
|
||||||
|
expect(service.updateUser).toHaveBeenCalledWith(mockUserId, updateDto);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should deactivate a user via update", async () => {
|
||||||
|
const deactivatedAt = "2026-02-28T00:00:00.000Z";
|
||||||
|
const updateDto = { deactivatedAt };
|
||||||
|
mockAdminService.updateUser.mockResolvedValue({
|
||||||
|
...mockUserResponse,
|
||||||
|
deactivatedAt: new Date(deactivatedAt),
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await controller.updateUser(mockUserId, updateDto);
|
||||||
|
|
||||||
|
expect(result.deactivatedAt).toEqual(new Date(deactivatedAt));
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("deactivateUser", () => {
|
||||||
|
it("should soft-delete a user", async () => {
|
||||||
|
mockAdminService.deactivateUser.mockResolvedValue({
|
||||||
|
...mockUserResponse,
|
||||||
|
deactivatedAt: new Date(),
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await controller.deactivateUser(mockUserId);
|
||||||
|
|
||||||
|
expect(result.deactivatedAt).toBeDefined();
|
||||||
|
expect(service.deactivateUser).toHaveBeenCalledWith(mockUserId);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("createWorkspace", () => {
|
||||||
|
it("should create a workspace", async () => {
|
||||||
|
const createDto = { name: "New Workspace", ownerId: mockAdminId };
|
||||||
|
mockAdminService.createWorkspace.mockResolvedValue(mockWorkspaceResponse);
|
||||||
|
|
||||||
|
const result = await controller.createWorkspace(createDto);
|
||||||
|
|
||||||
|
expect(result).toEqual(mockWorkspaceResponse);
|
||||||
|
expect(service.createWorkspace).toHaveBeenCalledWith(createDto);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should create workspace with settings", async () => {
|
||||||
|
const createDto = {
|
||||||
|
name: "New Workspace",
|
||||||
|
ownerId: mockAdminId,
|
||||||
|
settings: { feature: true },
|
||||||
|
};
|
||||||
|
mockAdminService.createWorkspace.mockResolvedValue({
|
||||||
|
...mockWorkspaceResponse,
|
||||||
|
settings: { feature: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await controller.createWorkspace(createDto);
|
||||||
|
|
||||||
|
expect(result.settings).toEqual({ feature: true });
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("updateWorkspace", () => {
|
||||||
|
it("should update a workspace", async () => {
|
||||||
|
const updateDto = { name: "Updated Workspace" };
|
||||||
|
mockAdminService.updateWorkspace.mockResolvedValue({
|
||||||
|
...mockWorkspaceResponse,
|
||||||
|
name: "Updated Workspace",
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await controller.updateWorkspace(mockWorkspaceId, updateDto);
|
||||||
|
|
||||||
|
expect(result.name).toBe("Updated Workspace");
|
||||||
|
expect(service.updateWorkspace).toHaveBeenCalledWith(mockWorkspaceId, updateDto);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should update workspace settings", async () => {
|
||||||
|
const updateDto = { settings: { notifications: false } };
|
||||||
|
mockAdminService.updateWorkspace.mockResolvedValue({
|
||||||
|
...mockWorkspaceResponse,
|
||||||
|
settings: { notifications: false },
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await controller.updateWorkspace(mockWorkspaceId, updateDto);
|
||||||
|
|
||||||
|
expect(result.settings).toEqual({ notifications: false });
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
64
apps/api/src/admin/admin.controller.ts
Normal file
64
apps/api/src/admin/admin.controller.ts
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
import {
|
||||||
|
Controller,
|
||||||
|
Get,
|
||||||
|
Post,
|
||||||
|
Patch,
|
||||||
|
Delete,
|
||||||
|
Body,
|
||||||
|
Param,
|
||||||
|
Query,
|
||||||
|
UseGuards,
|
||||||
|
ParseUUIDPipe,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { AdminService } from "./admin.service";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { AdminGuard } from "../auth/guards/admin.guard";
|
||||||
|
import { CurrentUser } from "../auth/decorators/current-user.decorator";
|
||||||
|
import type { AuthUser } from "@mosaic/shared";
|
||||||
|
import { InviteUserDto } from "./dto/invite-user.dto";
|
||||||
|
import { UpdateUserDto } from "./dto/update-user.dto";
|
||||||
|
import { CreateWorkspaceDto } from "./dto/create-workspace.dto";
|
||||||
|
import { UpdateWorkspaceDto } from "./dto/update-workspace.dto";
|
||||||
|
import { QueryUsersDto } from "./dto/query-users.dto";
|
||||||
|
|
||||||
|
/**
 * Admin-only HTTP API for user and workspace management.
 *
 * Every route requires an authenticated session (AuthGuard) AND admin
 * privileges (AdminGuard); both are applied at the controller level.
 * The controller is a thin delegation layer: validation lives in the DTOs
 * and ParseUUIDPipe, business rules in AdminService.
 */
@Controller("admin")
@UseGuards(AuthGuard, AdminGuard)
export class AdminController {
  constructor(private readonly adminService: AdminService) {}

  /** GET /admin/users — paginated user listing; defaults are applied by the service. */
  @Get("users")
  async listUsers(@Query() query: QueryUsersDto) {
    return this.adminService.listUsers(query.page, query.limit);
  }

  /** POST /admin/users/invite — invite a user; the inviter is the current admin. */
  @Post("users/invite")
  async inviteUser(@Body() dto: InviteUserDto, @CurrentUser() user: AuthUser) {
    return this.adminService.inviteUser(dto, user.id);
  }

  /** PATCH /admin/users/:id — partial update; :id must be a v4 UUID. */
  @Patch("users/:id")
  async updateUser(
    @Param("id", new ParseUUIDPipe({ version: "4" })) id: string,
    @Body() dto: UpdateUserDto
  ) {
    return this.adminService.updateUser(id, dto);
  }

  /** DELETE /admin/users/:id — soft delete: deactivates the user (service-defined). */
  @Delete("users/:id")
  async deactivateUser(@Param("id", new ParseUUIDPipe({ version: "4" })) id: string) {
    return this.adminService.deactivateUser(id);
  }

  /** POST /admin/workspaces — create a workspace on behalf of an owner. */
  @Post("workspaces")
  async createWorkspace(@Body() dto: CreateWorkspaceDto) {
    return this.adminService.createWorkspace(dto);
  }

  /** PATCH /admin/workspaces/:id — partial update; :id must be a v4 UUID. */
  @Patch("workspaces/:id")
  async updateWorkspace(
    @Param("id", new ParseUUIDPipe({ version: "4" })) id: string,
    @Body() dto: UpdateWorkspaceDto
  ) {
    return this.adminService.updateWorkspace(id, dto);
  }
}
|
||||||
13
apps/api/src/admin/admin.module.ts
Normal file
13
apps/api/src/admin/admin.module.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
import { Module } from "@nestjs/common";
|
||||||
|
import { AdminController } from "./admin.controller";
|
||||||
|
import { AdminService } from "./admin.service";
|
||||||
|
import { PrismaModule } from "../prisma/prisma.module";
|
||||||
|
import { AuthModule } from "../auth/auth.module";
|
||||||
|
|
||||||
|
/**
 * Wires the admin feature: controller + service.
 *
 * PrismaModule supplies database access; AuthModule supplies the guards
 * (AuthGuard/AdminGuard) referenced by AdminController. AdminService is
 * exported so other feature modules can reuse it.
 */
@Module({
  imports: [PrismaModule, AuthModule],
  controllers: [AdminController],
  providers: [AdminService],
  exports: [AdminService],
})
export class AdminModule {}
|
||||||
477
apps/api/src/admin/admin.service.spec.ts
Normal file
477
apps/api/src/admin/admin.service.spec.ts
Normal file
@@ -0,0 +1,477 @@
|
|||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { AdminService } from "./admin.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { BadRequestException, ConflictException, NotFoundException } from "@nestjs/common";
|
||||||
|
import { WorkspaceMemberRole } from "@prisma/client";
|
||||||
|
|
||||||
|
describe("AdminService", () => {
|
||||||
|
let service: AdminService;
|
||||||
|
|
||||||
|
// In-memory stand-in for PrismaService: every model delegate the service
// touches is a bare vi.fn() stub, configured per test.
const mockPrismaService = {
  user: {
    findMany: vi.fn(),
    findUnique: vi.fn(),
    count: vi.fn(),
    create: vi.fn(),
    update: vi.fn(),
  },
  workspace: {
    findUnique: vi.fn(),
    create: vi.fn(),
    update: vi.fn(),
  },
  workspaceMember: {
    create: vi.fn(),
  },
  session: {
    deleteMany: vi.fn(),
  },
  // Mimics both $transaction call shapes: an interactive callback receives
  // this same mock as the "transaction client"; an array of promises is
  // simply awaited in order via Promise.all.
  $transaction: vi.fn(async (ops) => {
    if (typeof ops === "function") {
      return ops(mockPrismaService);
    }
    return Promise.all(ops);
  }),
};
|
||||||
|
|
||||||
|
// Fixed v4 UUIDs so assertions can reference stable ids across tests.
const mockAdminId = "550e8400-e29b-41d4-a716-446655440001";
const mockUserId = "550e8400-e29b-41d4-a716-446655440002";
const mockWorkspaceId = "550e8400-e29b-41d4-a716-446655440003";

// A full user row in the shape the service queries it: scalar columns plus
// the workspaceMemberships relation with a trimmed { id, name } workspace.
const mockUser = {
  id: mockUserId,
  name: "Test User",
  email: "test@example.com",
  emailVerified: false,
  image: null,
  createdAt: new Date("2026-01-01"),
  updatedAt: new Date("2026-01-01"),
  deactivatedAt: null, // null = active account
  isLocalAuth: false,
  passwordHash: null,
  invitedBy: null,
  invitationToken: null,
  invitedAt: null,
  authProviderId: null,
  preferences: {},
  workspaceMemberships: [
    {
      workspaceId: mockWorkspaceId,
      userId: mockUserId,
      role: WorkspaceMemberRole.MEMBER,
      joinedAt: new Date("2026-01-01"),
      workspace: { id: mockWorkspaceId, name: "Test Workspace" },
    },
  ],
};

// A workspace row owned by the admin fixture above.
const mockWorkspace = {
  id: mockWorkspaceId,
  name: "Test Workspace",
  ownerId: mockAdminId,
  settings: {},
  createdAt: new Date("2026-01-01"),
  updatedAt: new Date("2026-01-01"),
  matrixRoomId: null,
};
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
AdminService,
|
||||||
|
{
|
||||||
|
provide: PrismaService,
|
||||||
|
useValue: mockPrismaService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
service = module.get<AdminService>(AdminService);
|
||||||
|
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should be defined", () => {
|
||||||
|
expect(service).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("listUsers", () => {
|
||||||
|
it("should return paginated users with memberships", async () => {
|
||||||
|
mockPrismaService.user.findMany.mockResolvedValue([mockUser]);
|
||||||
|
mockPrismaService.user.count.mockResolvedValue(1);
|
||||||
|
|
||||||
|
const result = await service.listUsers(1, 50);
|
||||||
|
|
||||||
|
expect(result.data).toHaveLength(1);
|
||||||
|
expect(result.data[0]?.id).toBe(mockUserId);
|
||||||
|
expect(result.data[0]?.workspaceMemberships).toHaveLength(1);
|
||||||
|
expect(result.meta).toEqual({
|
||||||
|
total: 1,
|
||||||
|
page: 1,
|
||||||
|
limit: 50,
|
||||||
|
totalPages: 1,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use default pagination when not provided", async () => {
|
||||||
|
mockPrismaService.user.findMany.mockResolvedValue([]);
|
||||||
|
mockPrismaService.user.count.mockResolvedValue(0);
|
||||||
|
|
||||||
|
await service.listUsers();
|
||||||
|
|
||||||
|
expect(mockPrismaService.user.findMany).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
skip: 0,
|
||||||
|
take: 50,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should calculate pagination correctly", async () => {
|
||||||
|
mockPrismaService.user.findMany.mockResolvedValue([]);
|
||||||
|
mockPrismaService.user.count.mockResolvedValue(150);
|
||||||
|
|
||||||
|
const result = await service.listUsers(3, 25);
|
||||||
|
|
||||||
|
expect(mockPrismaService.user.findMany).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
skip: 50,
|
||||||
|
take: 25,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
expect(result.meta.totalPages).toBe(6);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// AdminService.inviteUser — token generation, workspace enrolment, defaults,
// and the two failure modes (duplicate email, missing workspace).
describe("inviteUser", () => {
  it("should create a user with invitation token", async () => {
    // No existing user with this email.
    mockPrismaService.user.findUnique.mockResolvedValue(null);
    const createdUser = {
      id: "new-user-id",
      email: "new@example.com",
      name: "new",
      invitationToken: "some-token",
    };
    mockPrismaService.user.create.mockResolvedValue(createdUser);

    const result = await service.inviteUser({ email: "new@example.com" }, mockAdminId);

    expect(result.email).toBe("new@example.com");
    expect(result.invitationToken).toBeDefined();
    expect(result.userId).toBe("new-user-id");
    // The create payload must carry the inviter id and a generated token.
    expect(mockPrismaService.user.create).toHaveBeenCalledWith(
      expect.objectContaining({
        data: expect.objectContaining({
          email: "new@example.com",
          invitedBy: mockAdminId,
          invitationToken: expect.any(String),
        }),
      })
    );
  });

  it("should add user to workspace when workspaceId provided", async () => {
    mockPrismaService.user.findUnique.mockResolvedValue(null);
    // Workspace lookup succeeds, so enrolment should happen.
    mockPrismaService.workspace.findUnique.mockResolvedValue(mockWorkspace);
    const createdUser = { id: "new-user-id", email: "new@example.com", name: "new" };
    mockPrismaService.user.create.mockResolvedValue(createdUser);

    await service.inviteUser(
      {
        email: "new@example.com",
        workspaceId: mockWorkspaceId,
        role: WorkspaceMemberRole.ADMIN,
      },
      mockAdminId
    );

    // Membership is created with the requested (non-default) role.
    expect(mockPrismaService.workspaceMember.create).toHaveBeenCalledWith({
      data: {
        workspaceId: mockWorkspaceId,
        userId: "new-user-id",
        role: WorkspaceMemberRole.ADMIN,
      },
    });
  });

  it("should throw ConflictException if email already exists", async () => {
    mockPrismaService.user.findUnique.mockResolvedValue(mockUser);

    await expect(service.inviteUser({ email: "test@example.com" }, mockAdminId)).rejects.toThrow(
      ConflictException
    );
  });

  it("should throw NotFoundException if workspace does not exist", async () => {
    mockPrismaService.user.findUnique.mockResolvedValue(null);
    mockPrismaService.workspace.findUnique.mockResolvedValue(null);

    await expect(
      service.inviteUser({ email: "new@example.com", workspaceId: "non-existent" }, mockAdminId)
    ).rejects.toThrow(NotFoundException);
  });

  it("should use email prefix as default name", async () => {
    mockPrismaService.user.findUnique.mockResolvedValue(null);
    const createdUser = { id: "new-user-id", email: "jane.doe@example.com", name: "jane.doe" };
    mockPrismaService.user.create.mockResolvedValue(createdUser);

    await service.inviteUser({ email: "jane.doe@example.com" }, mockAdminId);

    // Local part of the email becomes the display name when none is given.
    expect(mockPrismaService.user.create).toHaveBeenCalledWith(
      expect.objectContaining({
        data: expect.objectContaining({
          name: "jane.doe",
        }),
      })
    );
  });

  it("should use provided name when given", async () => {
    mockPrismaService.user.findUnique.mockResolvedValue(null);
    const createdUser = { id: "new-user-id", email: "j@example.com", name: "Jane Doe" };
    mockPrismaService.user.create.mockResolvedValue(createdUser);

    await service.inviteUser({ email: "j@example.com", name: "Jane Doe" }, mockAdminId);

    expect(mockPrismaService.user.create).toHaveBeenCalledWith(
      expect.objectContaining({
        data: expect.objectContaining({
          name: "Jane Doe",
        }),
      })
    );
  });
});
|
||||||
|
|
||||||
|
describe("updateUser", () => {
|
||||||
|
it("should update user fields", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
mockPrismaService.user.update.mockResolvedValue({
|
||||||
|
...mockUser,
|
||||||
|
name: "Updated Name",
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await service.updateUser(mockUserId, { name: "Updated Name" });
|
||||||
|
|
||||||
|
expect(result.name).toBe("Updated Name");
|
||||||
|
expect(mockPrismaService.user.update).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
where: { id: mockUserId },
|
||||||
|
data: { name: "Updated Name" },
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set deactivatedAt when provided", async () => {
|
||||||
|
const deactivatedAt = "2026-02-28T00:00:00.000Z";
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
mockPrismaService.user.update.mockResolvedValue({
|
||||||
|
...mockUser,
|
||||||
|
deactivatedAt: new Date(deactivatedAt),
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await service.updateUser(mockUserId, { deactivatedAt });
|
||||||
|
|
||||||
|
expect(result.deactivatedAt).toEqual(new Date(deactivatedAt));
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should clear deactivatedAt when set to null", async () => {
|
||||||
|
const deactivatedUser = { ...mockUser, deactivatedAt: new Date() };
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(deactivatedUser);
|
||||||
|
mockPrismaService.user.update.mockResolvedValue({
|
||||||
|
...deactivatedUser,
|
||||||
|
deactivatedAt: null,
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await service.updateUser(mockUserId, { deactivatedAt: null });
|
||||||
|
|
||||||
|
expect(result.deactivatedAt).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException if user does not exist", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await expect(service.updateUser("non-existent", { name: "Test" })).rejects.toThrow(
|
||||||
|
NotFoundException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should update emailVerified", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
mockPrismaService.user.update.mockResolvedValue({
|
||||||
|
...mockUser,
|
||||||
|
emailVerified: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await service.updateUser(mockUserId, { emailVerified: true });
|
||||||
|
|
||||||
|
expect(result.emailVerified).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should update preferences", async () => {
|
||||||
|
const prefs = { theme: "dark" };
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
mockPrismaService.user.update.mockResolvedValue({
|
||||||
|
...mockUser,
|
||||||
|
preferences: prefs,
|
||||||
|
});
|
||||||
|
|
||||||
|
await service.updateUser(mockUserId, { preferences: prefs });
|
||||||
|
|
||||||
|
expect(mockPrismaService.user.update).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
data: expect.objectContaining({ preferences: prefs }),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("deactivateUser", () => {
|
||||||
|
it("should set deactivatedAt and invalidate sessions", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
mockPrismaService.user.update.mockResolvedValue({
|
||||||
|
...mockUser,
|
||||||
|
deactivatedAt: new Date(),
|
||||||
|
});
|
||||||
|
mockPrismaService.session.deleteMany.mockResolvedValue({ count: 3 });
|
||||||
|
|
||||||
|
const result = await service.deactivateUser(mockUserId);
|
||||||
|
|
||||||
|
expect(result.deactivatedAt).toBeDefined();
|
||||||
|
expect(mockPrismaService.user.update).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
where: { id: mockUserId },
|
||||||
|
data: { deactivatedAt: expect.any(Date) },
|
||||||
|
})
|
||||||
|
);
|
||||||
|
expect(mockPrismaService.session.deleteMany).toHaveBeenCalledWith({ where: { userId: mockUserId } });
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException if user does not exist", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await expect(service.deactivateUser("non-existent")).rejects.toThrow(NotFoundException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw BadRequestException if user is already deactivated", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue({
|
||||||
|
...mockUser,
|
||||||
|
deactivatedAt: new Date(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(service.deactivateUser(mockUserId)).rejects.toThrow(BadRequestException);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("createWorkspace", () => {
|
||||||
|
it("should create a workspace with owner membership", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
mockPrismaService.workspace.create.mockResolvedValue(mockWorkspace);
|
||||||
|
|
||||||
|
const result = await service.createWorkspace({
|
||||||
|
name: "New Workspace",
|
||||||
|
ownerId: mockAdminId,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.name).toBe("Test Workspace");
|
||||||
|
expect(result.memberCount).toBe(1);
|
||||||
|
expect(mockPrismaService.workspace.create).toHaveBeenCalled();
|
||||||
|
expect(mockPrismaService.workspaceMember.create).toHaveBeenCalledWith({
|
||||||
|
data: {
|
||||||
|
workspaceId: mockWorkspace.id,
|
||||||
|
userId: mockAdminId,
|
||||||
|
role: WorkspaceMemberRole.OWNER,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException if owner does not exist", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
service.createWorkspace({ name: "New Workspace", ownerId: "non-existent" })
|
||||||
|
).rejects.toThrow(NotFoundException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should pass settings when provided", async () => {
|
||||||
|
const settings = { theme: "dark", features: ["chat"] };
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(mockUser);
|
||||||
|
mockPrismaService.workspace.create.mockResolvedValue({
|
||||||
|
...mockWorkspace,
|
||||||
|
settings,
|
||||||
|
});
|
||||||
|
|
||||||
|
await service.createWorkspace({
|
||||||
|
name: "New Workspace",
|
||||||
|
ownerId: mockAdminId,
|
||||||
|
settings,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(mockPrismaService.workspace.create).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
data: expect.objectContaining({ settings }),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("updateWorkspace", () => {
|
||||||
|
it("should update workspace name", async () => {
|
||||||
|
mockPrismaService.workspace.findUnique.mockResolvedValue(mockWorkspace);
|
||||||
|
mockPrismaService.workspace.update.mockResolvedValue({
|
||||||
|
...mockWorkspace,
|
||||||
|
name: "Updated Workspace",
|
||||||
|
_count: { members: 3 },
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await service.updateWorkspace(mockWorkspaceId, {
|
||||||
|
name: "Updated Workspace",
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.name).toBe("Updated Workspace");
|
||||||
|
expect(result.memberCount).toBe(3);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should update workspace settings", async () => {
|
||||||
|
const newSettings = { notifications: true };
|
||||||
|
mockPrismaService.workspace.findUnique.mockResolvedValue(mockWorkspace);
|
||||||
|
mockPrismaService.workspace.update.mockResolvedValue({
|
||||||
|
...mockWorkspace,
|
||||||
|
settings: newSettings,
|
||||||
|
_count: { members: 1 },
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await service.updateWorkspace(mockWorkspaceId, {
|
||||||
|
settings: newSettings,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.settings).toEqual(newSettings);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException if workspace does not exist", async () => {
|
||||||
|
mockPrismaService.workspace.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await expect(service.updateWorkspace("non-existent", { name: "Test" })).rejects.toThrow(
|
||||||
|
NotFoundException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should only update provided fields", async () => {
|
||||||
|
mockPrismaService.workspace.findUnique.mockResolvedValue(mockWorkspace);
|
||||||
|
mockPrismaService.workspace.update.mockResolvedValue({
|
||||||
|
...mockWorkspace,
|
||||||
|
_count: { members: 1 },
|
||||||
|
});
|
||||||
|
|
||||||
|
await service.updateWorkspace(mockWorkspaceId, { name: "Only Name" });
|
||||||
|
|
||||||
|
expect(mockPrismaService.workspace.update).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
data: { name: "Only Name" },
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
309
apps/api/src/admin/admin.service.ts
Normal file
309
apps/api/src/admin/admin.service.ts
Normal file
@@ -0,0 +1,309 @@
|
|||||||
|
import {
|
||||||
|
BadRequestException,
|
||||||
|
ConflictException,
|
||||||
|
Injectable,
|
||||||
|
Logger,
|
||||||
|
NotFoundException,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { Prisma, WorkspaceMemberRole } from "@prisma/client";
|
||||||
|
import { randomUUID } from "node:crypto";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import type { InviteUserDto } from "./dto/invite-user.dto";
|
||||||
|
import type { UpdateUserDto } from "./dto/update-user.dto";
|
||||||
|
import type { CreateWorkspaceDto } from "./dto/create-workspace.dto";
|
||||||
|
import type {
|
||||||
|
AdminUserResponse,
|
||||||
|
AdminWorkspaceResponse,
|
||||||
|
InvitationResponse,
|
||||||
|
PaginatedResponse,
|
||||||
|
} from "./types/admin.types";
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class AdminService {
|
||||||
|
// Scoped logger for audit-style messages (invites, updates, deactivations).
private readonly logger = new Logger(AdminService.name);

constructor(private readonly prisma: PrismaService) {}
|
||||||
|
|
||||||
|
async listUsers(page = 1, limit = 50): Promise<PaginatedResponse<AdminUserResponse>> {
|
||||||
|
const skip = (page - 1) * limit;
|
||||||
|
|
||||||
|
const [users, total] = await Promise.all([
|
||||||
|
this.prisma.user.findMany({
|
||||||
|
include: {
|
||||||
|
workspaceMemberships: {
|
||||||
|
include: {
|
||||||
|
workspace: { select: { id: true, name: true } },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
orderBy: { createdAt: "desc" },
|
||||||
|
skip,
|
||||||
|
take: limit,
|
||||||
|
}),
|
||||||
|
this.prisma.user.count(),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
data: users.map((user) => ({
|
||||||
|
id: user.id,
|
||||||
|
name: user.name,
|
||||||
|
email: user.email,
|
||||||
|
emailVerified: user.emailVerified,
|
||||||
|
image: user.image,
|
||||||
|
createdAt: user.createdAt,
|
||||||
|
deactivatedAt: user.deactivatedAt,
|
||||||
|
isLocalAuth: user.isLocalAuth,
|
||||||
|
invitedAt: user.invitedAt,
|
||||||
|
invitedBy: user.invitedBy,
|
||||||
|
workspaceMemberships: user.workspaceMemberships.map((m) => ({
|
||||||
|
workspaceId: m.workspaceId,
|
||||||
|
workspaceName: m.workspace.name,
|
||||||
|
role: m.role,
|
||||||
|
joinedAt: m.joinedAt,
|
||||||
|
})),
|
||||||
|
})),
|
||||||
|
meta: {
|
||||||
|
total,
|
||||||
|
page,
|
||||||
|
limit,
|
||||||
|
totalPages: Math.ceil(total / limit),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Invite a new user by email, optionally enrolling them in a workspace.
 *
 * @param dto       invitation payload (email required; optional name,
 *                  workspaceId, role).
 * @param inviterId id of the admin issuing the invitation.
 * @returns the new user's id, the generated invitation token, email, and
 *          invitation timestamp.
 * @throws ConflictException when a user with dto.email already exists.
 * @throws NotFoundException when dto.workspaceId is set but no such
 *         workspace exists.
 */
async inviteUser(dto: InviteUserDto, inviterId: string): Promise<InvitationResponse> {
  // Reject duplicate emails up front.
  // NOTE(review): this check runs outside the transaction below, so two
  // concurrent invites for the same email can race; a DB unique constraint
  // on email would be the real safety net — confirm one exists.
  const existing = await this.prisma.user.findUnique({
    where: { email: dto.email },
  });

  if (existing) {
    throw new ConflictException(`User with email ${dto.email} already exists`);
  }

  // Validate the target workspace before creating anything.
  if (dto.workspaceId) {
    const workspace = await this.prisma.workspace.findUnique({
      where: { id: dto.workspaceId },
    });
    if (!workspace) {
      throw new NotFoundException(`Workspace ${dto.workspaceId} not found`);
    }
  }

  const invitationToken = randomUUID();
  const now = new Date();

  // Create the user and (optionally) the workspace membership atomically.
  const user = await this.prisma.$transaction(async (tx) => {
    const created = await tx.user.create({
      data: {
        email: dto.email,
        // Default display name: the local part of the email address.
        name: dto.name ?? dto.email.split("@")[0] ?? dto.email,
        emailVerified: false,
        invitedBy: inviterId,
        invitationToken,
        invitedAt: now,
      },
    });

    if (dto.workspaceId) {
      await tx.workspaceMember.create({
        data: {
          workspaceId: dto.workspaceId,
          userId: created.id,
          // Role defaults to MEMBER unless the admin asked for more.
          role: dto.role ?? WorkspaceMemberRole.MEMBER,
        },
      });
    }

    return created;
  });

  this.logger.log(`User invited: ${user.email} by ${inviterId}`);

  return {
    userId: user.id,
    invitationToken,
    email: user.email,
    invitedAt: now,
  };
}
|
||||||
|
|
||||||
|
/**
 * Partially update a user. Only fields explicitly present in the dto are
 * written; omitted (undefined) fields are left unchanged.
 *
 * @param id  user id to update.
 * @param dto sparse patch; `deactivatedAt` accepts a date string to
 *            deactivate, or an explicit null to reactivate.
 * @returns the updated user in the admin API shape, memberships included.
 * @throws NotFoundException when no user with `id` exists.
 */
async updateUser(id: string, dto: UpdateUserDto): Promise<AdminUserResponse> {
  const existing = await this.prisma.user.findUnique({ where: { id } });
  if (!existing) {
    throw new NotFoundException(`User ${id} not found`);
  }

  // Build a sparse patch: undefined means "leave unchanged".
  const data: Prisma.UserUpdateInput = {};

  if (dto.name !== undefined) {
    data.name = dto.name;
  }
  if (dto.emailVerified !== undefined) {
    data.emailVerified = dto.emailVerified;
  }
  if (dto.preferences !== undefined) {
    data.preferences = dto.preferences as Prisma.InputJsonValue;
  }
  if (dto.deactivatedAt !== undefined) {
    // Truthy string => deactivate at that instant; explicit null => reactivate.
    data.deactivatedAt = dto.deactivatedAt ? new Date(dto.deactivatedAt) : null;
  }

  const user = await this.prisma.user.update({
    where: { id },
    data,
    include: {
      workspaceMemberships: {
        include: {
          workspace: { select: { id: true, name: true } },
        },
      },
    },
  });

  this.logger.log(`User updated: ${id}`);

  // Map the Prisma row (plus relations) to the admin API response shape.
  return {
    id: user.id,
    name: user.name,
    email: user.email,
    emailVerified: user.emailVerified,
    image: user.image,
    createdAt: user.createdAt,
    deactivatedAt: user.deactivatedAt,
    isLocalAuth: user.isLocalAuth,
    invitedAt: user.invitedAt,
    invitedBy: user.invitedBy,
    workspaceMemberships: user.workspaceMemberships.map((m) => ({
      workspaceId: m.workspaceId,
      workspaceName: m.workspace.name,
      role: m.role,
      joinedAt: m.joinedAt,
    })),
  };
}
|
||||||
|
|
||||||
|
async deactivateUser(id: string): Promise<AdminUserResponse> {
|
||||||
|
const existing = await this.prisma.user.findUnique({ where: { id } });
|
||||||
|
if (!existing) {
|
||||||
|
throw new NotFoundException(`User ${id} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (existing.deactivatedAt) {
|
||||||
|
throw new BadRequestException(`User ${id} is already deactivated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const [user] = await this.prisma.$transaction([
|
||||||
|
this.prisma.user.update({
|
||||||
|
where: { id },
|
||||||
|
data: { deactivatedAt: new Date() },
|
||||||
|
include: {
|
||||||
|
workspaceMemberships: {
|
||||||
|
include: {
|
||||||
|
workspace: { select: { id: true, name: true } },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
this.prisma.session.deleteMany({ where: { userId: id } }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
this.logger.log(`User deactivated and sessions invalidated: ${id}`);
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: user.id,
|
||||||
|
name: user.name,
|
||||||
|
email: user.email,
|
||||||
|
emailVerified: user.emailVerified,
|
||||||
|
image: user.image,
|
||||||
|
createdAt: user.createdAt,
|
||||||
|
deactivatedAt: user.deactivatedAt,
|
||||||
|
isLocalAuth: user.isLocalAuth,
|
||||||
|
invitedAt: user.invitedAt,
|
||||||
|
invitedBy: user.invitedBy,
|
||||||
|
workspaceMemberships: user.workspaceMemberships.map((m) => ({
|
||||||
|
workspaceId: m.workspaceId,
|
||||||
|
workspaceName: m.workspace.name,
|
||||||
|
role: m.role,
|
||||||
|
joinedAt: m.joinedAt,
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async createWorkspace(dto: CreateWorkspaceDto): Promise<AdminWorkspaceResponse> {
|
||||||
|
const owner = await this.prisma.user.findUnique({ where: { id: dto.ownerId } });
|
||||||
|
if (!owner) {
|
||||||
|
throw new NotFoundException(`User ${dto.ownerId} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const workspace = await this.prisma.$transaction(async (tx) => {
|
||||||
|
const created = await tx.workspace.create({
|
||||||
|
data: {
|
||||||
|
name: dto.name,
|
||||||
|
ownerId: dto.ownerId,
|
||||||
|
settings: dto.settings ? (dto.settings as Prisma.InputJsonValue) : {},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await tx.workspaceMember.create({
|
||||||
|
data: {
|
||||||
|
workspaceId: created.id,
|
||||||
|
userId: dto.ownerId,
|
||||||
|
role: WorkspaceMemberRole.OWNER,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return created;
|
||||||
|
});
|
||||||
|
|
||||||
|
this.logger.log(`Workspace created: ${workspace.id} with owner ${dto.ownerId}`);
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: workspace.id,
|
||||||
|
name: workspace.name,
|
||||||
|
ownerId: workspace.ownerId,
|
||||||
|
settings: workspace.settings as Record<string, unknown>,
|
||||||
|
createdAt: workspace.createdAt,
|
||||||
|
updatedAt: workspace.updatedAt,
|
||||||
|
memberCount: 1,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async updateWorkspace(
|
||||||
|
id: string,
|
||||||
|
dto: { name?: string; settings?: Record<string, unknown> }
|
||||||
|
): Promise<AdminWorkspaceResponse> {
|
||||||
|
const existing = await this.prisma.workspace.findUnique({ where: { id } });
|
||||||
|
if (!existing) {
|
||||||
|
throw new NotFoundException(`Workspace ${id} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const data: Prisma.WorkspaceUpdateInput = {};
|
||||||
|
|
||||||
|
if (dto.name !== undefined) {
|
||||||
|
data.name = dto.name;
|
||||||
|
}
|
||||||
|
if (dto.settings !== undefined) {
|
||||||
|
data.settings = dto.settings as Prisma.InputJsonValue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const workspace = await this.prisma.workspace.update({
|
||||||
|
where: { id },
|
||||||
|
data,
|
||||||
|
include: {
|
||||||
|
_count: { select: { members: true } },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
this.logger.log(`Workspace updated: ${id}`);
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: workspace.id,
|
||||||
|
name: workspace.name,
|
||||||
|
ownerId: workspace.ownerId,
|
||||||
|
settings: workspace.settings as Record<string, unknown>,
|
||||||
|
createdAt: workspace.createdAt,
|
||||||
|
updatedAt: workspace.updatedAt,
|
||||||
|
memberCount: workspace._count.members,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
15
apps/api/src/admin/dto/create-workspace.dto.ts
Normal file
15
apps/api/src/admin/dto/create-workspace.dto.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import { IsObject, IsOptional, IsString, IsUUID, MaxLength, MinLength } from "class-validator";
|
||||||
|
|
||||||
|
/** Payload for the admin "create workspace" endpoint. */
export class CreateWorkspaceDto {
  /** Display name of the workspace (1–255 characters). */
  @IsString({ message: "name must be a string" })
  @MinLength(1, { message: "name must not be empty" })
  @MaxLength(255, { message: "name must not exceed 255 characters" })
  name!: string;

  /** ID of the user who will own the workspace (UUID v4). */
  @IsUUID("4", { message: "ownerId must be a valid UUID" })
  ownerId!: string;

  /** Optional free-form settings blob stored as JSON on the workspace. */
  @IsOptional()
  @IsObject({ message: "settings must be an object" })
  settings?: Record<string, unknown>;
}
|
||||||
20
apps/api/src/admin/dto/invite-user.dto.ts
Normal file
20
apps/api/src/admin/dto/invite-user.dto.ts
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import { WorkspaceMemberRole } from "@prisma/client";
|
||||||
|
import { IsEmail, IsEnum, IsOptional, IsString, IsUUID, MaxLength } from "class-validator";
|
||||||
|
|
||||||
|
/** Payload for the admin "invite user" endpoint. */
export class InviteUserDto {
  /** Email address the invitation is sent to; also the account email. */
  @IsEmail({}, { message: "email must be a valid email address" })
  email!: string;

  /** Optional display name; when absent the service derives one from the email. */
  @IsOptional()
  @IsString({ message: "name must be a string" })
  @MaxLength(255, { message: "name must not exceed 255 characters" })
  name?: string;

  /** Optional workspace to enroll the invited user into (UUID v4). */
  @IsOptional()
  @IsUUID("4", { message: "workspaceId must be a valid UUID" })
  workspaceId?: string;

  /** Membership role in that workspace; service defaults apply when omitted. */
  @IsOptional()
  @IsEnum(WorkspaceMemberRole, { message: "role must be a valid WorkspaceMemberRole" })
  role?: WorkspaceMemberRole;
}
|
||||||
15
apps/api/src/admin/dto/manage-member.dto.ts
Normal file
15
apps/api/src/admin/dto/manage-member.dto.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import { WorkspaceMemberRole } from "@prisma/client";
|
||||||
|
import { IsEnum, IsUUID } from "class-validator";
|
||||||
|
|
||||||
|
/** Payload for adding an existing user to a workspace. */
export class AddMemberDto {
  /** ID of the user to add (UUID v4). */
  @IsUUID("4", { message: "userId must be a valid UUID" })
  userId!: string;

  /** Role the user receives within the workspace. */
  @IsEnum(WorkspaceMemberRole, { message: "role must be a valid WorkspaceMemberRole" })
  role!: WorkspaceMemberRole;
}
|
||||||
|
|
||||||
|
/** Payload for changing an existing workspace member's role. */
export class UpdateMemberRoleDto {
  /** New role to assign to the member. */
  @IsEnum(WorkspaceMemberRole, { message: "role must be a valid WorkspaceMemberRole" })
  role!: WorkspaceMemberRole;
}
|
||||||
17
apps/api/src/admin/dto/query-users.dto.ts
Normal file
17
apps/api/src/admin/dto/query-users.dto.ts
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
import { IsInt, IsOptional, Max, Min } from "class-validator";
|
||||||
|
import { Type } from "class-transformer";
|
||||||
|
|
||||||
|
/** Pagination query parameters for the admin user listing. */
export class QueryUsersDto {
  /** 1-based page number; service default applies when omitted. */
  @IsOptional()
  @Type(() => Number)
  @IsInt({ message: "page must be an integer" })
  @Min(1, { message: "page must be at least 1" })
  page?: number;

  /** Page size, capped at 100; service default applies when omitted. */
  @IsOptional()
  @Type(() => Number)
  @IsInt({ message: "limit must be an integer" })
  @Min(1, { message: "limit must be at least 1" })
  @Max(100, { message: "limit must not exceed 100" })
  limit?: number;
}
|
||||||
27
apps/api/src/admin/dto/update-user.dto.ts
Normal file
27
apps/api/src/admin/dto/update-user.dto.ts
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
import {
|
||||||
|
IsBoolean,
|
||||||
|
IsDateString,
|
||||||
|
IsObject,
|
||||||
|
IsOptional,
|
||||||
|
IsString,
|
||||||
|
MaxLength,
|
||||||
|
} from "class-validator";
|
||||||
|
|
||||||
|
/** Partial-update payload for the admin "update user" endpoint. All fields optional. */
export class UpdateUserDto {
  /** New display name. */
  @IsOptional()
  @IsString({ message: "name must be a string" })
  @MaxLength(255, { message: "name must not exceed 255 characters" })
  name?: string;

  /** ISO 8601 timestamp to deactivate at, or null to reactivate the user. */
  @IsOptional()
  @IsDateString({}, { message: "deactivatedAt must be a valid ISO 8601 date string" })
  deactivatedAt?: string | null;

  /** Overrides the email-verification flag. */
  @IsOptional()
  @IsBoolean({ message: "emailVerified must be a boolean" })
  emailVerified?: boolean;

  /** Replaces the user's preferences JSON blob. */
  @IsOptional()
  @IsObject({ message: "preferences must be an object" })
  preferences?: Record<string, unknown>;
}
|
||||||
13
apps/api/src/admin/dto/update-workspace.dto.ts
Normal file
13
apps/api/src/admin/dto/update-workspace.dto.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
import { IsObject, IsOptional, IsString, MaxLength, MinLength } from "class-validator";
|
||||||
|
|
||||||
|
export class UpdateWorkspaceDto {
|
||||||
|
@IsOptional()
|
||||||
|
@IsString({ message: "name must be a string" })
|
||||||
|
@MinLength(1, { message: "name must not be empty" })
|
||||||
|
@MaxLength(255, { message: "name must not exceed 255 characters" })
|
||||||
|
name?: string;
|
||||||
|
|
||||||
|
@IsOptional()
|
||||||
|
@IsObject({ message: "settings must be an object" })
|
||||||
|
settings?: Record<string, unknown>;
|
||||||
|
}
|
||||||
49
apps/api/src/admin/types/admin.types.ts
Normal file
49
apps/api/src/admin/types/admin.types.ts
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
import type { WorkspaceMemberRole } from "@prisma/client";
|
||||||
|
|
||||||
|
/** Full admin-facing view of a user, including workspace memberships. */
export interface AdminUserResponse {
  id: string;
  name: string;
  email: string;
  emailVerified: boolean;
  image: string | null;
  createdAt: Date;
  /** Set when the user has been soft-deactivated; null while active. */
  deactivatedAt: Date | null;
  isLocalAuth: boolean;
  /** When the user was invited; null for users who signed up directly. */
  invitedAt: Date | null;
  /** ID of the inviting user; null for users who signed up directly. */
  invitedBy: string | null;
  workspaceMemberships: WorkspaceMembershipResponse[];
}

/** One workspace membership row as presented in admin responses. */
export interface WorkspaceMembershipResponse {
  workspaceId: string;
  workspaceName: string;
  role: WorkspaceMemberRole;
  joinedAt: Date;
}

/** Generic paginated list envelope used by admin list endpoints. */
export interface PaginatedResponse<T> {
  data: T[];
  meta: {
    total: number;
    /** 1-based page number. */
    page: number;
    limit: number;
    totalPages: number;
  };
}

/** Result of inviting a new user. */
export interface InvitationResponse {
  userId: string;
  /** Token embedded in the invitation link for the invitee to redeem. */
  invitationToken: string;
  email: string;
  invitedAt: Date;
}

/** Admin-facing view of a workspace. */
export interface AdminWorkspaceResponse {
  id: string;
  name: string;
  ownerId: string;
  settings: Record<string, unknown>;
  createdAt: Date;
  updatedAt: Date;
  memberCount: number;
}
|
||||||
40
apps/api/src/agent-config/agent-config.controller.ts
Normal file
40
apps/api/src/agent-config/agent-config.controller.ts
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
import {
|
||||||
|
Controller,
|
||||||
|
ForbiddenException,
|
||||||
|
Get,
|
||||||
|
Param,
|
||||||
|
Req,
|
||||||
|
UnauthorizedException,
|
||||||
|
UseGuards,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { AgentConfigService } from "./agent-config.service";
|
||||||
|
import { AgentConfigGuard, type AgentConfigRequest } from "./agent-config.guard";
|
||||||
|
|
||||||
|
/**
 * Internal endpoint that serves assembled agent configuration to containers.
 * Every route is protected by AgentConfigGuard, which validates the caller's
 * Bearer token and attaches the resolved container identity to the request.
 */
@Controller("internal")
@UseGuards(AgentConfigGuard)
export class AgentConfigController {
  constructor(private readonly agentConfigService: AgentConfigService) {}

  // GET /api/internal/agent-config/:id
  // Auth: Bearer token (validated against UserContainer.gatewayToken or SystemContainer.gatewayToken)
  // Returns: assembled openclaw.json
  //
  // The :id param is the container record ID (cuid)
  // Token must match the container requesting its own config
  @Get("agent-config/:id")
  async getAgentConfig(
    @Param("id") id: string,
    @Req() request: AgentConfigRequest
  ): Promise<object> {
    // The guard ran first; absence of containerAuth here means the guard was
    // bypassed or misconfigured, so treat it as an auth failure.
    const containerAuth = request.containerAuth;
    if (!containerAuth) {
      throw new UnauthorizedException("Missing container authentication context");
    }

    // A container may only fetch its own config: the token's container id
    // must equal the id in the URL.
    if (containerAuth.id !== id) {
      throw new ForbiddenException("Token is not authorized for the requested container");
    }

    return this.agentConfigService.generateConfigForContainer(containerAuth.type, id);
  }
}
|
||||||
43
apps/api/src/agent-config/agent-config.guard.ts
Normal file
43
apps/api/src/agent-config/agent-config.guard.ts
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
import { CanActivate, ExecutionContext, Injectable, UnauthorizedException } from "@nestjs/common";
|
||||||
|
import type { Request } from "express";
|
||||||
|
import { AgentConfigService, type ContainerTokenValidation } from "./agent-config.service";
|
||||||
|
|
||||||
|
/** Express request augmented with the container identity resolved by AgentConfigGuard. */
export interface AgentConfigRequest extends Request {
  /** Set by AgentConfigGuard after successful token validation. */
  containerAuth?: ContainerTokenValidation;
}
|
||||||
|
|
||||||
|
/**
 * Guard for internal agent-config routes.
 *
 * Extracts the Bearer token from the Authorization header, validates it
 * against known container gateway tokens, and stores the resolved container
 * identity on the request as `containerAuth` for downstream handlers.
 */
@Injectable()
export class AgentConfigGuard implements CanActivate {
  constructor(private readonly agentConfigService: AgentConfigService) {}

  /**
   * @throws UnauthorizedException when the token is missing or unknown.
   */
  async canActivate(context: ExecutionContext): Promise<boolean> {
    const request = context.switchToHttp().getRequest<AgentConfigRequest>();
    const token = this.extractBearerToken(request.headers.authorization);

    if (!token) {
      throw new UnauthorizedException("Missing Bearer token");
    }

    const containerAuth = await this.agentConfigService.validateContainerToken(token);
    if (!containerAuth) {
      throw new UnauthorizedException("Invalid container token");
    }

    // Expose the resolved identity to the controller.
    request.containerAuth = containerAuth;
    return true;
  }

  /**
   * Pull the token out of an `Authorization: Bearer <token>` header.
   * Returns null for a missing header, a non-Bearer scheme, or a malformed value.
   */
  private extractBearerToken(headerValue: string | string[] | undefined): string | null {
    // Express may surface repeated headers as an array; use the first.
    const normalizedHeader = Array.isArray(headerValue) ? headerValue[0] : headerValue;
    if (!normalizedHeader) {
      return null;
    }

    const [scheme, token] = normalizedHeader.split(" ");
    if (!scheme || !token || scheme.toLowerCase() !== "bearer") {
      return null;
    }

    return token;
  }
}
|
||||||
14
apps/api/src/agent-config/agent-config.module.ts
Normal file
14
apps/api/src/agent-config/agent-config.module.ts
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
import { Module } from "@nestjs/common";
|
||||||
|
import { PrismaModule } from "../prisma/prisma.module";
|
||||||
|
import { CryptoModule } from "../crypto/crypto.module";
|
||||||
|
import { AgentConfigController } from "./agent-config.controller";
|
||||||
|
import { AgentConfigService } from "./agent-config.service";
|
||||||
|
import { AgentConfigGuard } from "./agent-config.guard";
|
||||||
|
|
||||||
|
/**
 * Wires the agent-config feature: the internal controller, the config
 * assembly service, and its guard. Needs Prisma for container/provider
 * lookups and Crypto for decrypting stored secrets. The service is exported
 * so other modules can assemble configs directly.
 */
@Module({
  imports: [PrismaModule, CryptoModule],
  controllers: [AgentConfigController],
  providers: [AgentConfigService, AgentConfigGuard],
  exports: [AgentConfigService],
})
export class AgentConfigModule {}
|
||||||
215
apps/api/src/agent-config/agent-config.service.spec.ts
Normal file
215
apps/api/src/agent-config/agent-config.service.spec.ts
Normal file
@@ -0,0 +1,215 @@
|
|||||||
|
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||||
|
import { AgentConfigService } from "./agent-config.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { CryptoService } from "../crypto/crypto.service";
|
||||||
|
|
||||||
|
// Unit tests for AgentConfigService using hand-rolled Prisma/Crypto mocks.
// The crypto mock treats any value prefixed with "enc:" as encrypted and
// "decrypts" by stripping that prefix.
describe("AgentConfigService", () => {
  let service: AgentConfigService;

  // Only the Prisma delegates the service touches are mocked.
  const mockPrismaService = {
    userAgentConfig: {
      findUnique: vi.fn(),
    },
    llmProvider: {
      findMany: vi.fn(),
    },
    userContainer: {
      findUnique: vi.fn(),
      findMany: vi.fn(),
    },
    systemContainer: {
      findUnique: vi.fn(),
      findMany: vi.fn(),
    },
  };

  const mockCryptoService = {
    isEncrypted: vi.fn((value: string) => value.startsWith("enc:")),
    decrypt: vi.fn((value: string) => value.replace(/^enc:/, "")),
  };

  beforeEach(() => {
    vi.clearAllMocks();

    service = new AgentConfigService(
      mockPrismaService as unknown as PrismaService,
      mockCryptoService as unknown as CryptoService
    );
  });

  // Happy path: config is assembled from the user's agent config, container
  // port, and active providers, with the provider API key decrypted.
  it("generateUserConfig returns valid openclaw.json structure", async () => {
    mockPrismaService.userAgentConfig.findUnique.mockResolvedValue({
      id: "cfg-1",
      userId: "user-1",
      primaryModel: "my-zai/glm-5",
    });

    mockPrismaService.userContainer.findUnique.mockResolvedValue({
      id: "container-1",
      userId: "user-1",
      gatewayPort: 19001,
    });

    mockPrismaService.llmProvider.findMany.mockResolvedValue([
      {
        id: "provider-1",
        userId: "user-1",
        name: "my-zai",
        displayName: "Z.ai",
        type: "zai",
        baseUrl: "https://api.z.ai/v1",
        apiKey: "enc:secret-zai-key",
        apiType: "openai-completions",
        models: [{ id: "glm-5" }],
        isActive: true,
        createdAt: new Date(),
        updatedAt: new Date(),
      },
    ]);

    const result = await service.generateUserConfig("user-1");

    expect(result).toEqual({
      gateway: {
        mode: "local",
        port: 19001,
        bind: "lan",
        auth: { mode: "token" },
        http: {
          endpoints: {
            chatCompletions: { enabled: true },
          },
        },
      },
      agents: {
        defaults: {
          model: {
            primary: "my-zai/glm-5",
          },
        },
      },
      models: {
        providers: {
          "my-zai": {
            apiKey: "secret-zai-key",
            baseUrl: "https://api.z.ai/v1",
            models: {
              "glm-5": {},
            },
          },
        },
      },
    });
  });

  // Encrypted API keys must pass through CryptoService.decrypt before being
  // emitted into the provider config.
  it("generateUserConfig decrypts API keys correctly", async () => {
    mockPrismaService.userAgentConfig.findUnique.mockResolvedValue({
      id: "cfg-1",
      userId: "user-1",
      primaryModel: "openai-work/gpt-4.1",
    });

    mockPrismaService.userContainer.findUnique.mockResolvedValue({
      id: "container-1",
      userId: "user-1",
      gatewayPort: 18789,
    });

    mockPrismaService.llmProvider.findMany.mockResolvedValue([
      {
        id: "provider-1",
        userId: "user-1",
        name: "openai-work",
        displayName: "OpenAI Work",
        type: "openai",
        baseUrl: "https://api.openai.com/v1",
        apiKey: "enc:encrypted-openai-key",
        apiType: "openai-completions",
        models: [{ id: "gpt-4.1" }],
        isActive: true,
        createdAt: new Date(),
        updatedAt: new Date(),
      },
    ]);

    const result = await service.generateUserConfig("user-1");

    expect(mockCryptoService.decrypt).toHaveBeenCalledWith("enc:encrypted-openai-key");
    expect(result.models.providers["openai-work"]?.apiKey).toBe("encrypted-openai-key");
  });

  // No providers and a null gateway port: providers map is empty and the
  // default port (18789) is used.
  it("generateUserConfig handles user with no providers", async () => {
    mockPrismaService.userAgentConfig.findUnique.mockResolvedValue({
      id: "cfg-1",
      userId: "user-2",
      primaryModel: "openai/gpt-4o-mini",
    });

    mockPrismaService.userContainer.findUnique.mockResolvedValue({
      id: "container-2",
      userId: "user-2",
      gatewayPort: null,
    });

    mockPrismaService.llmProvider.findMany.mockResolvedValue([]);

    const result = await service.generateUserConfig("user-2");

    expect(result.models.providers).toEqual({});
    expect(result.gateway.port).toBe(18789);
  });

  it("validateContainerToken returns correct type for user container", async () => {
    mockPrismaService.userContainer.findMany.mockResolvedValue([
      {
        id: "user-container-1",
        gatewayToken: "enc:user-token-1",
      },
    ]);
    mockPrismaService.systemContainer.findMany.mockResolvedValue([]);

    const result = await service.validateContainerToken("user-token-1");

    expect(result).toEqual({
      type: "user",
      id: "user-container-1",
    });
  });

  it("validateContainerToken returns correct type for system container", async () => {
    mockPrismaService.userContainer.findMany.mockResolvedValue([]);
    mockPrismaService.systemContainer.findMany.mockResolvedValue([
      {
        id: "system-container-1",
        gatewayToken: "enc:system-token-1",
      },
    ]);

    const result = await service.validateContainerToken("system-token-1");

    expect(result).toEqual({
      type: "system",
      id: "system-container-1",
    });
  });

  it("validateContainerToken returns null for invalid token", async () => {
    mockPrismaService.userContainer.findMany.mockResolvedValue([
      {
        id: "user-container-1",
        gatewayToken: "enc:user-token-1",
      },
    ]);

    mockPrismaService.systemContainer.findMany.mockResolvedValue([
      {
        id: "system-container-1",
        gatewayToken: "enc:system-token-1",
      },
    ]);

    const result = await service.validateContainerToken("no-match");

    expect(result).toBeNull();
  });
});
|
||||||
285
apps/api/src/agent-config/agent-config.service.ts
Normal file
285
apps/api/src/agent-config/agent-config.service.ts
Normal file
@@ -0,0 +1,285 @@
|
|||||||
|
import { Injectable, NotFoundException } from "@nestjs/common";
|
||||||
|
import type { LlmProvider } from "@prisma/client";
|
||||||
|
import { createHash, timingSafeEqual } from "node:crypto";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { CryptoService } from "../crypto/crypto.service";
|
||||||
|
|
||||||
|
// Port used when the container record has no gatewayPort set.
const DEFAULT_GATEWAY_PORT = 18789;
// Fallback primary model when neither the user's agent config nor any
// provider yields one.
const DEFAULT_PRIMARY_MODEL = "openai/gpt-4o-mini";

/** Which kind of container a gateway token belongs to. */
type ContainerType = "user" | "system";

/** Identity resolved from a valid container gateway token. */
export interface ContainerTokenValidation {
  type: ContainerType;
  /** Container record ID. */
  id: string;
}

// Model id -> empty per-model config object (openclaw.json shape).
type OpenClawModelMap = Record<string, Record<string, never>>;

/** One provider entry in the `models.providers` section of openclaw.json. */
interface OpenClawProviderConfig {
  apiKey?: string;
  baseUrl?: string;
  models: OpenClawModelMap;
}

/** Shape of the assembled openclaw.json handed to a container. */
interface OpenClawConfig {
  gateway: {
    mode: "local";
    port: number;
    bind: "lan";
    auth: { mode: "token" };
    http: {
      endpoints: {
        chatCompletions: { enabled: true };
      };
    };
  };
  agents: {
    defaults: {
      model: {
        primary: string;
      };
    };
  };
  models: {
    providers: Record<string, OpenClawProviderConfig>;
  };
}
|
||||||
|
|
||||||
|
/**
 * Assembles openclaw.json configuration for user and system containers and
 * validates container gateway tokens.
 */
@Injectable()
export class AgentConfigService {
  constructor(
    private readonly prisma: PrismaService,
    private readonly crypto: CryptoService
  ) {}

  // Generate complete openclaw.json for a user container
  //
  // Primary model resolution order: user's agent config, then the first
  // model of the first active provider, then DEFAULT_PRIMARY_MODEL.
  // Throws NotFoundException when the user has no container record.
  async generateUserConfig(userId: string): Promise<OpenClawConfig> {
    // The three lookups are independent; fetch them in parallel.
    const [userAgentConfig, providers, userContainer] = await Promise.all([
      this.prisma.userAgentConfig.findUnique({
        where: { userId },
      }),
      this.prisma.llmProvider.findMany({
        where: {
          userId,
          isActive: true,
        },
        orderBy: {
          createdAt: "asc",
        },
      }),
      this.prisma.userContainer.findUnique({
        where: { userId },
      }),
    ]);

    if (!userContainer) {
      throw new NotFoundException(`User container not found for user ${userId}`);
    }

    const primaryModel =
      userAgentConfig?.primaryModel ??
      this.resolvePrimaryModelFromProviders(providers) ??
      DEFAULT_PRIMARY_MODEL;

    return this.buildOpenClawConfig(primaryModel, userContainer.gatewayPort, providers);
  }

  // Generate config for a system container
  //
  // System containers get no provider entries; only the container's own
  // primary model (or the default) and its gateway port are used.
  async generateSystemConfig(containerId: string): Promise<OpenClawConfig> {
    const systemContainer = await this.prisma.systemContainer.findUnique({
      where: { id: containerId },
    });

    if (!systemContainer) {
      throw new NotFoundException(`System container ${containerId} not found`);
    }

    return this.buildOpenClawConfig(
      systemContainer.primaryModel || DEFAULT_PRIMARY_MODEL,
      systemContainer.gatewayPort,
      []
    );
  }

  /**
   * Dispatch to the system or user config generator based on container type.
   * For user containers, the container id is first resolved to its owning
   * user id. Throws NotFoundException when the container does not exist.
   */
  async generateConfigForContainer(
    type: ContainerType,
    containerId: string
  ): Promise<OpenClawConfig> {
    if (type === "system") {
      return this.generateSystemConfig(containerId);
    }

    const userContainer = await this.prisma.userContainer.findUnique({
      where: { id: containerId },
      select: { userId: true },
    });

    if (!userContainer) {
      throw new NotFoundException(`User container ${containerId} not found`);
    }

    return this.generateUserConfig(userContainer.userId);
  }

  // Validate a container's bearer token
  //
  // Compares the presented token against every stored user- and
  // system-container gateway token; returns the matching container's
  // identity, or null when nothing matches. Note the loops intentionally do
  // not break on a match — every candidate is still hashed and compared —
  // presumably to keep timing uniform; confirm before "optimizing" this.
  async validateContainerToken(token: string): Promise<ContainerTokenValidation | null> {
    if (!token) {
      return null;
    }

    const [userContainers, systemContainers] = await Promise.all([
      this.prisma.userContainer.findMany({
        select: {
          id: true,
          gatewayToken: true,
        },
      }),
      this.prisma.systemContainer.findMany({
        select: {
          id: true,
          gatewayToken: true,
        },
      }),
    ]);

    let match: ContainerTokenValidation | null = null;

    for (const container of userContainers) {
      const storedToken = this.decryptContainerToken(container.gatewayToken);
      if (!match && storedToken && this.tokensEqual(storedToken, token)) {
        match = { type: "user", id: container.id };
      }
    }

    for (const container of systemContainers) {
      const storedToken = this.decryptContainerToken(container.gatewayToken);
      if (!match && storedToken && this.tokensEqual(storedToken, token)) {
        match = { type: "system", id: container.id };
      }
    }

    return match;
  }

  // Assemble the full openclaw.json document from its resolved pieces.
  private buildOpenClawConfig(
    primaryModel: string,
    gatewayPort: number | null,
    providers: LlmProvider[]
  ): OpenClawConfig {
    return {
      gateway: {
        mode: "local",
        port: gatewayPort ?? DEFAULT_GATEWAY_PORT,
        bind: "lan",
        auth: { mode: "token" },
        http: {
          endpoints: {
            chatCompletions: { enabled: true },
          },
        },
      },
      agents: {
        defaults: {
          model: {
            primary: primaryModel,
          },
        },
      },
      models: {
        providers: this.buildProviderConfig(providers),
      },
    };
  }

  // Map provider rows to the `models.providers` section, decrypting API keys
  // and omitting apiKey/baseUrl entries that are absent.
  private buildProviderConfig(providers: LlmProvider[]): Record<string, OpenClawProviderConfig> {
    const providerConfig: Record<string, OpenClawProviderConfig> = {};

    for (const provider of providers) {
      const config: OpenClawProviderConfig = {
        models: this.extractModels(provider.models),
      };

      const apiKey = this.decryptIfNeeded(provider.apiKey);
      if (apiKey) {
        config.apiKey = apiKey;
      }

      if (provider.baseUrl) {
        config.baseUrl = provider.baseUrl;
      }

      providerConfig[provider.name] = config;
    }

    return providerConfig;
  }

  // Normalize the provider's stored `models` JSON into a model-id map.
  // Accepts an array of strings or of objects with a string `id`; anything
  // else yields an empty map.
  private extractModels(models: unknown): OpenClawModelMap {
    const modelMap: OpenClawModelMap = {};

    if (!Array.isArray(models)) {
      return modelMap;
    }

    for (const modelEntry of models) {
      if (typeof modelEntry === "string") {
        modelMap[modelEntry] = {};
        continue;
      }

      if (this.hasModelId(modelEntry)) {
        modelMap[modelEntry.id] = {};
      }
    }

    return modelMap;
  }

  // First "<provider>/<model>" pair found across the providers, or null.
  private resolvePrimaryModelFromProviders(providers: LlmProvider[]): string | null {
    for (const provider of providers) {
      const modelIds = Object.keys(this.extractModels(provider.models));
      const firstModelId = modelIds[0];

      if (firstModelId) {
        return `${provider.name}/${firstModelId}`;
      }
    }

    return null;
  }

  // Decrypt a stored secret when it is in encrypted form; pass plaintext
  // through unchanged; undefined for empty input.
  private decryptIfNeeded(value: string | null | undefined): string | undefined {
    if (!value) {
      return undefined;
    }

    if (this.crypto.isEncrypted(value)) {
      return this.crypto.decrypt(value);
    }

    return value;
  }

  // Like decryptIfNeeded, but a failed decryption yields null instead of
  // throwing — an undecryptable token simply never matches.
  private decryptContainerToken(value: string): string | null {
    try {
      return this.decryptIfNeeded(value) ?? null;
    } catch {
      return null;
    }
  }

  // Constant-time token comparison: hash both sides to equal-length digests
  // so timingSafeEqual can be used on inputs of differing lengths.
  private tokensEqual(left: string, right: string): boolean {
    const leftDigest = createHash("sha256").update(left, "utf8").digest();
    const rightDigest = createHash("sha256").update(right, "utf8").digest();
    return timingSafeEqual(leftDigest, rightDigest);
  }

  // Type guard: entry is an object with a string `id` property.
  private hasModelId(modelEntry: unknown): modelEntry is { id: string } {
    if (typeof modelEntry !== "object" || modelEntry === null || !("id" in modelEntry)) {
      return false;
    }

    return typeof (modelEntry as { id?: unknown }).id === "string";
  }
}
|
||||||
102
apps/api/src/agent-memory/agent-memory.controller.spec.ts
Normal file
102
apps/api/src/agent-memory/agent-memory.controller.spec.ts
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { AgentMemoryController } from "./agent-memory.controller";
|
||||||
|
import { AgentMemoryService } from "./agent-memory.service";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
|
||||||
|
describe("AgentMemoryController", () => {
|
||||||
|
let controller: AgentMemoryController;
|
||||||
|
|
||||||
|
const mockAgentMemoryService = {
|
||||||
|
upsert: vi.fn(),
|
||||||
|
findAll: vi.fn(),
|
||||||
|
findOne: vi.fn(),
|
||||||
|
remove: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockGuard = { canActivate: vi.fn(() => true) };
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
controllers: [AgentMemoryController],
|
||||||
|
providers: [
|
||||||
|
{
|
||||||
|
provide: AgentMemoryService,
|
||||||
|
useValue: mockAgentMemoryService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
})
|
||||||
|
.overrideGuard(AuthGuard)
|
||||||
|
.useValue(mockGuard)
|
||||||
|
.overrideGuard(WorkspaceGuard)
|
||||||
|
.useValue(mockGuard)
|
||||||
|
.overrideGuard(PermissionGuard)
|
||||||
|
.useValue(mockGuard)
|
||||||
|
.compile();
|
||||||
|
|
||||||
|
controller = module.get<AgentMemoryController>(AgentMemoryController);
|
||||||
|
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
const workspaceId = "workspace-1";
|
||||||
|
const agentId = "agent-1";
|
||||||
|
const key = "context";
|
||||||
|
|
||||||
|
describe("upsert", () => {
|
||||||
|
it("should upsert a memory entry", async () => {
|
||||||
|
const dto = { value: { foo: "bar" } };
|
||||||
|
const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: dto.value };
|
||||||
|
|
||||||
|
mockAgentMemoryService.upsert.mockResolvedValue(mockEntry);
|
||||||
|
|
||||||
|
const result = await controller.upsert(agentId, key, dto, workspaceId);
|
||||||
|
|
||||||
|
expect(mockAgentMemoryService.upsert).toHaveBeenCalledWith(workspaceId, agentId, key, dto);
|
||||||
|
expect(result).toEqual(mockEntry);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("findAll", () => {
|
||||||
|
it("should list all memory entries for an agent", async () => {
|
||||||
|
const mockEntries = [
|
||||||
|
{ id: "mem-1", key: "a", value: 1 },
|
||||||
|
{ id: "mem-2", key: "b", value: 2 },
|
||||||
|
];
|
||||||
|
|
||||||
|
mockAgentMemoryService.findAll.mockResolvedValue(mockEntries);
|
||||||
|
|
||||||
|
const result = await controller.findAll(agentId, workspaceId);
|
||||||
|
|
||||||
|
expect(mockAgentMemoryService.findAll).toHaveBeenCalledWith(workspaceId, agentId);
|
||||||
|
expect(result).toEqual(mockEntries);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("findOne", () => {
|
||||||
|
it("should get a single memory entry", async () => {
|
||||||
|
const mockEntry = { id: "mem-1", key, value: "v" };
|
||||||
|
|
||||||
|
mockAgentMemoryService.findOne.mockResolvedValue(mockEntry);
|
||||||
|
|
||||||
|
const result = await controller.findOne(agentId, key, workspaceId);
|
||||||
|
|
||||||
|
expect(mockAgentMemoryService.findOne).toHaveBeenCalledWith(workspaceId, agentId, key);
|
||||||
|
expect(result).toEqual(mockEntry);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("remove", () => {
|
||||||
|
it("should delete a memory entry", async () => {
|
||||||
|
const mockResponse = { message: "Memory entry deleted successfully" };
|
||||||
|
|
||||||
|
mockAgentMemoryService.remove.mockResolvedValue(mockResponse);
|
||||||
|
|
||||||
|
const result = await controller.remove(agentId, key, workspaceId);
|
||||||
|
|
||||||
|
expect(mockAgentMemoryService.remove).toHaveBeenCalledWith(workspaceId, agentId, key);
|
||||||
|
expect(result).toEqual(mockResponse);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
89
apps/api/src/agent-memory/agent-memory.controller.ts
Normal file
89
apps/api/src/agent-memory/agent-memory.controller.ts
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
import {
|
||||||
|
Controller,
|
||||||
|
Get,
|
||||||
|
Put,
|
||||||
|
Delete,
|
||||||
|
Body,
|
||||||
|
Param,
|
||||||
|
UseGuards,
|
||||||
|
HttpCode,
|
||||||
|
HttpStatus,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { AgentMemoryService } from "./agent-memory.service";
|
||||||
|
import { UpsertAgentMemoryDto } from "./dto";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||||
|
import { Workspace, Permission, RequirePermission } from "../common/decorators";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Controller for per-agent key/value memory endpoints.
|
||||||
|
* All endpoints require authentication and workspace context.
|
||||||
|
*
|
||||||
|
* Guards are applied in order:
|
||||||
|
* 1. AuthGuard - Verifies user authentication
|
||||||
|
* 2. WorkspaceGuard - Validates workspace access
|
||||||
|
* 3. PermissionGuard - Checks role-based permissions
|
||||||
|
*/
|
||||||
|
@Controller("agents/:agentId/memory")
|
||||||
|
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
|
||||||
|
export class AgentMemoryController {
|
||||||
|
constructor(private readonly agentMemoryService: AgentMemoryService) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* PUT /api/agents/:agentId/memory/:key
|
||||||
|
* Upsert a memory entry for an agent
|
||||||
|
* Requires: MEMBER role or higher
|
||||||
|
*/
|
||||||
|
@Put(":key")
|
||||||
|
@RequirePermission(Permission.WORKSPACE_MEMBER)
|
||||||
|
async upsert(
|
||||||
|
@Param("agentId") agentId: string,
|
||||||
|
@Param("key") key: string,
|
||||||
|
@Body() dto: UpsertAgentMemoryDto,
|
||||||
|
@Workspace() workspaceId: string
|
||||||
|
) {
|
||||||
|
return this.agentMemoryService.upsert(workspaceId, agentId, key, dto);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET /api/agents/:agentId/memory
|
||||||
|
* List all memory entries for an agent
|
||||||
|
* Requires: Any workspace member (including GUEST)
|
||||||
|
*/
|
||||||
|
@Get()
|
||||||
|
@RequirePermission(Permission.WORKSPACE_ANY)
|
||||||
|
async findAll(@Param("agentId") agentId: string, @Workspace() workspaceId: string) {
|
||||||
|
return this.agentMemoryService.findAll(workspaceId, agentId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET /api/agents/:agentId/memory/:key
|
||||||
|
* Get a single memory entry by key
|
||||||
|
* Requires: Any workspace member (including GUEST)
|
||||||
|
*/
|
||||||
|
@Get(":key")
|
||||||
|
@RequirePermission(Permission.WORKSPACE_ANY)
|
||||||
|
async findOne(
|
||||||
|
@Param("agentId") agentId: string,
|
||||||
|
@Param("key") key: string,
|
||||||
|
@Workspace() workspaceId: string
|
||||||
|
) {
|
||||||
|
return this.agentMemoryService.findOne(workspaceId, agentId, key);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DELETE /api/agents/:agentId/memory/:key
|
||||||
|
* Remove a memory entry
|
||||||
|
* Requires: MEMBER role or higher
|
||||||
|
*/
|
||||||
|
@Delete(":key")
|
||||||
|
@HttpCode(HttpStatus.OK)
|
||||||
|
@RequirePermission(Permission.WORKSPACE_MEMBER)
|
||||||
|
async remove(
|
||||||
|
@Param("agentId") agentId: string,
|
||||||
|
@Param("key") key: string,
|
||||||
|
@Workspace() workspaceId: string
|
||||||
|
) {
|
||||||
|
return this.agentMemoryService.remove(workspaceId, agentId, key);
|
||||||
|
}
|
||||||
|
}
|
||||||
198
apps/api/src/agent-memory/agent-memory.integration.spec.ts
Normal file
198
apps/api/src/agent-memory/agent-memory.integration.spec.ts
Normal file
@@ -0,0 +1,198 @@
|
|||||||
|
import { beforeAll, beforeEach, describe, expect, it, afterAll } from "vitest";
|
||||||
|
import { randomUUID as uuid } from "crypto";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { NotFoundException } from "@nestjs/common";
|
||||||
|
import { PrismaClient } from "@prisma/client";
|
||||||
|
import { AgentMemoryService } from "./agent-memory.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
|
||||||
|
const shouldRunDbIntegrationTests =
|
||||||
|
process.env.RUN_DB_TESTS === "true" && Boolean(process.env.DATABASE_URL);
|
||||||
|
const describeFn = shouldRunDbIntegrationTests ? describe : describe.skip;
|
||||||
|
|
||||||
|
async function createWorkspace(
|
||||||
|
prisma: PrismaClient,
|
||||||
|
label: string
|
||||||
|
): Promise<{ workspaceId: string; ownerId: string }> {
|
||||||
|
const workspace = await prisma.workspace.create({
|
||||||
|
data: {
|
||||||
|
name: `${label} ${Date.now()}`,
|
||||||
|
owner: {
|
||||||
|
create: {
|
||||||
|
email: `${label.toLowerCase().replace(/\s+/g, "-")}-${Date.now()}@example.com`,
|
||||||
|
name: `${label} Owner`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
workspaceId: workspace.id,
|
||||||
|
ownerId: workspace.ownerId,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
describeFn("AgentMemoryService Integration", () => {
|
||||||
|
let moduleRef: TestingModule;
|
||||||
|
let prisma: PrismaClient;
|
||||||
|
let service: AgentMemoryService;
|
||||||
|
let setupComplete = false;
|
||||||
|
|
||||||
|
let workspaceAId: string;
|
||||||
|
let workspaceAOwnerId: string;
|
||||||
|
let workspaceBId: string;
|
||||||
|
let workspaceBOwnerId: string;
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
prisma = new PrismaClient();
|
||||||
|
await prisma.$connect();
|
||||||
|
|
||||||
|
const workspaceA = await createWorkspace(prisma, "Agent Memory Integration A");
|
||||||
|
workspaceAId = workspaceA.workspaceId;
|
||||||
|
workspaceAOwnerId = workspaceA.ownerId;
|
||||||
|
|
||||||
|
const workspaceB = await createWorkspace(prisma, "Agent Memory Integration B");
|
||||||
|
workspaceBId = workspaceB.workspaceId;
|
||||||
|
workspaceBOwnerId = workspaceB.ownerId;
|
||||||
|
|
||||||
|
moduleRef = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
AgentMemoryService,
|
||||||
|
{
|
||||||
|
provide: PrismaService,
|
||||||
|
useValue: prisma,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
service = moduleRef.get<AgentMemoryService>(AgentMemoryService);
|
||||||
|
setupComplete = true;
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
if (!setupComplete) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
await prisma.agentMemory.deleteMany({
|
||||||
|
where: {
|
||||||
|
workspaceId: {
|
||||||
|
in: [workspaceAId, workspaceBId],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(async () => {
|
||||||
|
if (!prisma) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const workspaceIds = [workspaceAId, workspaceBId].filter(
|
||||||
|
(id): id is string => typeof id === "string"
|
||||||
|
);
|
||||||
|
const ownerIds = [workspaceAOwnerId, workspaceBOwnerId].filter(
|
||||||
|
(id): id is string => typeof id === "string"
|
||||||
|
);
|
||||||
|
|
||||||
|
if (workspaceIds.length > 0) {
|
||||||
|
await prisma.agentMemory.deleteMany({
|
||||||
|
where: {
|
||||||
|
workspaceId: {
|
||||||
|
in: workspaceIds,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
await prisma.workspace.deleteMany({ where: { id: { in: workspaceIds } } });
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ownerIds.length > 0) {
|
||||||
|
await prisma.user.deleteMany({ where: { id: { in: ownerIds } } });
|
||||||
|
}
|
||||||
|
|
||||||
|
if (moduleRef) {
|
||||||
|
await moduleRef.close();
|
||||||
|
}
|
||||||
|
await prisma.$disconnect();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("upserts and lists memory entries", async () => {
|
||||||
|
if (!setupComplete) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const agentId = `agent-${uuid()}`;
|
||||||
|
|
||||||
|
const entry = await service.upsert(workspaceAId, agentId, "session-context", {
|
||||||
|
value: { intent: "create-tests", depth: "integration" },
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(entry.workspaceId).toBe(workspaceAId);
|
||||||
|
expect(entry.agentId).toBe(agentId);
|
||||||
|
expect(entry.key).toBe("session-context");
|
||||||
|
|
||||||
|
const listed = await service.findAll(workspaceAId, agentId);
|
||||||
|
|
||||||
|
expect(listed).toHaveLength(1);
|
||||||
|
expect(listed[0]?.id).toBe(entry.id);
|
||||||
|
expect(listed[0]?.value).toMatchObject({ intent: "create-tests" });
|
||||||
|
});
|
||||||
|
|
||||||
|
it("updates existing key via upsert without creating duplicates", async () => {
|
||||||
|
if (!setupComplete) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const agentId = `agent-${uuid()}`;
|
||||||
|
|
||||||
|
const first = await service.upsert(workspaceAId, agentId, "preferences", {
|
||||||
|
value: { model: "fast" },
|
||||||
|
});
|
||||||
|
|
||||||
|
const second = await service.upsert(workspaceAId, agentId, "preferences", {
|
||||||
|
value: { model: "accurate" },
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(second.id).toBe(first.id);
|
||||||
|
expect(second.value).toMatchObject({ model: "accurate" });
|
||||||
|
|
||||||
|
const rowCount = await prisma.agentMemory.count({
|
||||||
|
where: {
|
||||||
|
workspaceId: workspaceAId,
|
||||||
|
agentId,
|
||||||
|
key: "preferences",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(rowCount).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("lists keys in sorted order and isolates by workspace", async () => {
|
||||||
|
if (!setupComplete) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const agentId = `agent-${uuid()}`;
|
||||||
|
|
||||||
|
await service.upsert(workspaceAId, agentId, "beta", { value: { v: 2 } });
|
||||||
|
await service.upsert(workspaceAId, agentId, "alpha", { value: { v: 1 } });
|
||||||
|
await service.upsert(workspaceBId, agentId, "alpha", { value: { v: 99 } });
|
||||||
|
|
||||||
|
const workspaceAEntries = await service.findAll(workspaceAId, agentId);
|
||||||
|
const workspaceBEntries = await service.findAll(workspaceBId, agentId);
|
||||||
|
|
||||||
|
expect(workspaceAEntries.map((row) => row.key)).toEqual(["alpha", "beta"]);
|
||||||
|
expect(workspaceBEntries).toHaveLength(1);
|
||||||
|
expect(workspaceBEntries[0]?.value).toMatchObject({ v: 99 });
|
||||||
|
});
|
||||||
|
|
||||||
|
it("throws NotFoundException when requesting unknown key", async () => {
|
||||||
|
if (!setupComplete) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
await expect(service.findOne(workspaceAId, `agent-${uuid()}`, "missing")).rejects.toThrow(
|
||||||
|
NotFoundException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
13
apps/api/src/agent-memory/agent-memory.module.ts
Normal file
13
apps/api/src/agent-memory/agent-memory.module.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
import { Module } from "@nestjs/common";
|
||||||
|
import { AgentMemoryController } from "./agent-memory.controller";
|
||||||
|
import { AgentMemoryService } from "./agent-memory.service";
|
||||||
|
import { PrismaModule } from "../prisma/prisma.module";
|
||||||
|
import { AuthModule } from "../auth/auth.module";
|
||||||
|
|
||||||
|
@Module({
|
||||||
|
imports: [PrismaModule, AuthModule],
|
||||||
|
controllers: [AgentMemoryController],
|
||||||
|
providers: [AgentMemoryService],
|
||||||
|
exports: [AgentMemoryService],
|
||||||
|
})
|
||||||
|
export class AgentMemoryModule {}
|
||||||
126
apps/api/src/agent-memory/agent-memory.service.spec.ts
Normal file
126
apps/api/src/agent-memory/agent-memory.service.spec.ts
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { AgentMemoryService } from "./agent-memory.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { NotFoundException } from "@nestjs/common";
|
||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
|
||||||
|
describe("AgentMemoryService", () => {
|
||||||
|
let service: AgentMemoryService;
|
||||||
|
|
||||||
|
const mockPrismaService = {
|
||||||
|
agentMemory: {
|
||||||
|
upsert: vi.fn(),
|
||||||
|
findMany: vi.fn(),
|
||||||
|
findUnique: vi.fn(),
|
||||||
|
delete: vi.fn(),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
AgentMemoryService,
|
||||||
|
{
|
||||||
|
provide: PrismaService,
|
||||||
|
useValue: mockPrismaService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
service = module.get<AgentMemoryService>(AgentMemoryService);
|
||||||
|
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
const workspaceId = "workspace-1";
|
||||||
|
const agentId = "agent-1";
|
||||||
|
const key = "session-context";
|
||||||
|
|
||||||
|
describe("upsert", () => {
|
||||||
|
it("should upsert a memory entry", async () => {
|
||||||
|
const dto = { value: { data: "some context" } };
|
||||||
|
const mockEntry = {
|
||||||
|
id: "mem-1",
|
||||||
|
workspaceId,
|
||||||
|
agentId,
|
||||||
|
key,
|
||||||
|
value: dto.value,
|
||||||
|
createdAt: new Date(),
|
||||||
|
updatedAt: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
mockPrismaService.agentMemory.upsert.mockResolvedValue(mockEntry);
|
||||||
|
|
||||||
|
const result = await service.upsert(workspaceId, agentId, key, dto);
|
||||||
|
|
||||||
|
expect(mockPrismaService.agentMemory.upsert).toHaveBeenCalledWith({
|
||||||
|
where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
|
||||||
|
create: { workspaceId, agentId, key, value: dto.value },
|
||||||
|
update: { value: dto.value },
|
||||||
|
});
|
||||||
|
expect(result).toEqual(mockEntry);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("findAll", () => {
|
||||||
|
it("should return all memory entries for an agent", async () => {
|
||||||
|
const mockEntries = [
|
||||||
|
{ id: "mem-1", key: "a", value: 1 },
|
||||||
|
{ id: "mem-2", key: "b", value: 2 },
|
||||||
|
];
|
||||||
|
|
||||||
|
mockPrismaService.agentMemory.findMany.mockResolvedValue(mockEntries);
|
||||||
|
|
||||||
|
const result = await service.findAll(workspaceId, agentId);
|
||||||
|
|
||||||
|
expect(mockPrismaService.agentMemory.findMany).toHaveBeenCalledWith({
|
||||||
|
where: { workspaceId, agentId },
|
||||||
|
orderBy: { key: "asc" },
|
||||||
|
});
|
||||||
|
expect(result).toEqual(mockEntries);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("findOne", () => {
|
||||||
|
it("should return a memory entry by key", async () => {
|
||||||
|
const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: "ctx" };
|
||||||
|
|
||||||
|
mockPrismaService.agentMemory.findUnique.mockResolvedValue(mockEntry);
|
||||||
|
|
||||||
|
const result = await service.findOne(workspaceId, agentId, key);
|
||||||
|
|
||||||
|
expect(mockPrismaService.agentMemory.findUnique).toHaveBeenCalledWith({
|
||||||
|
where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
|
||||||
|
});
|
||||||
|
expect(result).toEqual(mockEntry);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException when key not found", async () => {
|
||||||
|
mockPrismaService.agentMemory.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await expect(service.findOne(workspaceId, agentId, key)).rejects.toThrow(NotFoundException);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("remove", () => {
|
||||||
|
it("should delete a memory entry", async () => {
|
||||||
|
const mockEntry = { id: "mem-1", workspaceId, agentId, key, value: "x" };
|
||||||
|
|
||||||
|
mockPrismaService.agentMemory.findUnique.mockResolvedValue(mockEntry);
|
||||||
|
mockPrismaService.agentMemory.delete.mockResolvedValue(mockEntry);
|
||||||
|
|
||||||
|
const result = await service.remove(workspaceId, agentId, key);
|
||||||
|
|
||||||
|
expect(mockPrismaService.agentMemory.delete).toHaveBeenCalledWith({
|
||||||
|
where: { workspaceId_agentId_key: { workspaceId, agentId, key } },
|
||||||
|
});
|
||||||
|
expect(result).toEqual({ message: "Memory entry deleted successfully" });
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException when key not found", async () => {
|
||||||
|
mockPrismaService.agentMemory.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await expect(service.remove(workspaceId, agentId, key)).rejects.toThrow(NotFoundException);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
79
apps/api/src/agent-memory/agent-memory.service.ts
Normal file
79
apps/api/src/agent-memory/agent-memory.service.ts
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
import { Injectable, NotFoundException } from "@nestjs/common";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { Prisma } from "@prisma/client";
|
||||||
|
import type { UpsertAgentMemoryDto } from "./dto";
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class AgentMemoryService {
|
||||||
|
constructor(private readonly prisma: PrismaService) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Upsert a memory entry for an agent.
|
||||||
|
*/
|
||||||
|
async upsert(workspaceId: string, agentId: string, key: string, dto: UpsertAgentMemoryDto) {
|
||||||
|
return this.prisma.agentMemory.upsert({
|
||||||
|
where: {
|
||||||
|
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
workspaceId,
|
||||||
|
agentId,
|
||||||
|
key,
|
||||||
|
value: dto.value as Prisma.InputJsonValue,
|
||||||
|
},
|
||||||
|
update: {
|
||||||
|
value: dto.value as Prisma.InputJsonValue,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List all memory entries for an agent in a workspace.
|
||||||
|
*/
|
||||||
|
async findAll(workspaceId: string, agentId: string) {
|
||||||
|
return this.prisma.agentMemory.findMany({
|
||||||
|
where: { workspaceId, agentId },
|
||||||
|
orderBy: { key: "asc" },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a single memory entry by key.
|
||||||
|
*/
|
||||||
|
async findOne(workspaceId: string, agentId: string, key: string) {
|
||||||
|
const entry = await this.prisma.agentMemory.findUnique({
|
||||||
|
where: {
|
||||||
|
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!entry) {
|
||||||
|
throw new NotFoundException(`Memory key "${key}" not found for agent "${agentId}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return entry;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete a memory entry by key.
|
||||||
|
*/
|
||||||
|
async remove(workspaceId: string, agentId: string, key: string) {
|
||||||
|
const entry = await this.prisma.agentMemory.findUnique({
|
||||||
|
where: {
|
||||||
|
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!entry) {
|
||||||
|
throw new NotFoundException(`Memory key "${key}" not found for agent "${agentId}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.prisma.agentMemory.delete({
|
||||||
|
where: {
|
||||||
|
workspaceId_agentId_key: { workspaceId, agentId, key },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return { message: "Memory entry deleted successfully" };
|
||||||
|
}
|
||||||
|
}
|
||||||
1
apps/api/src/agent-memory/dto/index.ts
Normal file
1
apps/api/src/agent-memory/dto/index.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export * from "./upsert-agent-memory.dto";
|
||||||
10
apps/api/src/agent-memory/dto/upsert-agent-memory.dto.ts
Normal file
10
apps/api/src/agent-memory/dto/upsert-agent-memory.dto.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import { IsNotEmpty } from "class-validator";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DTO for upserting an agent memory entry.
|
||||||
|
* The value accepts any JSON-serializable data.
|
||||||
|
*/
|
||||||
|
export class UpsertAgentMemoryDto {
|
||||||
|
@IsNotEmpty({ message: "value must not be empty" })
|
||||||
|
value!: unknown;
|
||||||
|
}
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
import { Controller, Get } from "@nestjs/common";
|
import { Controller, Get } from "@nestjs/common";
|
||||||
|
import { SkipThrottle } from "@nestjs/throttler";
|
||||||
import { AppService } from "./app.service";
|
import { AppService } from "./app.service";
|
||||||
import { PrismaService } from "./prisma/prisma.service";
|
import { PrismaService } from "./prisma/prisma.service";
|
||||||
import type { ApiResponse, HealthStatus } from "@mosaic/shared";
|
import type { ApiResponse, HealthStatus } from "@mosaic/shared";
|
||||||
@@ -17,6 +18,7 @@ export class AppController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Get("health")
|
@Get("health")
|
||||||
|
@SkipThrottle()
|
||||||
async getHealth(): Promise<ApiResponse<HealthStatus>> {
|
async getHealth(): Promise<ApiResponse<HealthStatus>> {
|
||||||
const dbHealthy = await this.prisma.isHealthy();
|
const dbHealthy = await this.prisma.isHealthy();
|
||||||
const dbInfo = await this.prisma.getConnectionInfo();
|
const dbInfo = await this.prisma.getConnectionInfo();
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import { Module } from "@nestjs/common";
|
|||||||
import { APP_INTERCEPTOR, APP_GUARD } from "@nestjs/core";
|
import { APP_INTERCEPTOR, APP_GUARD } from "@nestjs/core";
|
||||||
import { ThrottlerModule } from "@nestjs/throttler";
|
import { ThrottlerModule } from "@nestjs/throttler";
|
||||||
import { BullModule } from "@nestjs/bullmq";
|
import { BullModule } from "@nestjs/bullmq";
|
||||||
|
import { ScheduleModule } from "@nestjs/schedule";
|
||||||
import { ThrottlerValkeyStorageService, ThrottlerApiKeyGuard } from "./common/throttler";
|
import { ThrottlerValkeyStorageService, ThrottlerApiKeyGuard } from "./common/throttler";
|
||||||
import { CsrfGuard } from "./common/guards/csrf.guard";
|
import { CsrfGuard } from "./common/guards/csrf.guard";
|
||||||
import { CsrfService } from "./common/services/csrf.service";
|
import { CsrfService } from "./common/services/csrf.service";
|
||||||
@@ -27,6 +28,8 @@ import { LlmUsageModule } from "./llm-usage/llm-usage.module";
|
|||||||
import { BrainModule } from "./brain/brain.module";
|
import { BrainModule } from "./brain/brain.module";
|
||||||
import { CronModule } from "./cron/cron.module";
|
import { CronModule } from "./cron/cron.module";
|
||||||
import { AgentTasksModule } from "./agent-tasks/agent-tasks.module";
|
import { AgentTasksModule } from "./agent-tasks/agent-tasks.module";
|
||||||
|
import { FindingsModule } from "./findings/findings.module";
|
||||||
|
import { AgentMemoryModule } from "./agent-memory/agent-memory.module";
|
||||||
import { ValkeyModule } from "./valkey/valkey.module";
|
import { ValkeyModule } from "./valkey/valkey.module";
|
||||||
import { BullMqModule } from "./bullmq/bullmq.module";
|
import { BullMqModule } from "./bullmq/bullmq.module";
|
||||||
import { StitcherModule } from "./stitcher/stitcher.module";
|
import { StitcherModule } from "./stitcher/stitcher.module";
|
||||||
@@ -37,7 +40,25 @@ import { JobStepsModule } from "./job-steps/job-steps.module";
|
|||||||
import { CoordinatorIntegrationModule } from "./coordinator-integration/coordinator-integration.module";
|
import { CoordinatorIntegrationModule } from "./coordinator-integration/coordinator-integration.module";
|
||||||
import { FederationModule } from "./federation/federation.module";
|
import { FederationModule } from "./federation/federation.module";
|
||||||
import { CredentialsModule } from "./credentials/credentials.module";
|
import { CredentialsModule } from "./credentials/credentials.module";
|
||||||
|
import { CryptoModule } from "./crypto/crypto.module";
|
||||||
|
import { MosaicTelemetryModule } from "./mosaic-telemetry";
|
||||||
|
import { SpeechModule } from "./speech/speech.module";
|
||||||
|
import { DashboardModule } from "./dashboard/dashboard.module";
|
||||||
|
import { TerminalModule } from "./terminal/terminal.module";
|
||||||
|
import { PersonalitiesModule } from "./personalities/personalities.module";
|
||||||
|
import { WorkspacesModule } from "./workspaces/workspaces.module";
|
||||||
|
import { AdminModule } from "./admin/admin.module";
|
||||||
|
import { TeamsModule } from "./teams/teams.module";
|
||||||
|
import { ImportModule } from "./import/import.module";
|
||||||
|
import { ConversationArchiveModule } from "./conversation-archive/conversation-archive.module";
|
||||||
import { RlsContextInterceptor } from "./common/interceptors/rls-context.interceptor";
|
import { RlsContextInterceptor } from "./common/interceptors/rls-context.interceptor";
|
||||||
|
import { AgentConfigModule } from "./agent-config/agent-config.module";
|
||||||
|
import { ContainerLifecycleModule } from "./container-lifecycle/container-lifecycle.module";
|
||||||
|
import { ContainerReaperModule } from "./container-reaper/container-reaper.module";
|
||||||
|
import { FleetSettingsModule } from "./fleet-settings/fleet-settings.module";
|
||||||
|
import { OnboardingModule } from "./onboarding/onboarding.module";
|
||||||
|
import { ChatProxyModule } from "./chat-proxy/chat-proxy.module";
|
||||||
|
import { OrchestratorModule } from "./orchestrator/orchestrator.module";
|
||||||
|
|
||||||
@Module({
|
@Module({
|
||||||
imports: [
|
imports: [
|
||||||
@@ -68,6 +89,7 @@ import { RlsContextInterceptor } from "./common/interceptors/rls-context.interce
|
|||||||
};
|
};
|
||||||
})(),
|
})(),
|
||||||
}),
|
}),
|
||||||
|
ScheduleModule.forRoot(),
|
||||||
TelemetryModule,
|
TelemetryModule,
|
||||||
PrismaModule,
|
PrismaModule,
|
||||||
DatabaseModule,
|
DatabaseModule,
|
||||||
@@ -91,12 +113,32 @@ import { RlsContextInterceptor } from "./common/interceptors/rls-context.interce
|
|||||||
BrainModule,
|
BrainModule,
|
||||||
CronModule,
|
CronModule,
|
||||||
AgentTasksModule,
|
AgentTasksModule,
|
||||||
|
FindingsModule,
|
||||||
|
AgentMemoryModule,
|
||||||
RunnerJobsModule,
|
RunnerJobsModule,
|
||||||
JobEventsModule,
|
JobEventsModule,
|
||||||
JobStepsModule,
|
JobStepsModule,
|
||||||
CoordinatorIntegrationModule,
|
CoordinatorIntegrationModule,
|
||||||
FederationModule,
|
FederationModule,
|
||||||
CredentialsModule,
|
CredentialsModule,
|
||||||
|
CryptoModule,
|
||||||
|
MosaicTelemetryModule,
|
||||||
|
SpeechModule,
|
||||||
|
DashboardModule,
|
||||||
|
TerminalModule,
|
||||||
|
PersonalitiesModule,
|
||||||
|
WorkspacesModule,
|
||||||
|
AdminModule,
|
||||||
|
TeamsModule,
|
||||||
|
ImportModule,
|
||||||
|
ConversationArchiveModule,
|
||||||
|
AgentConfigModule,
|
||||||
|
ContainerLifecycleModule,
|
||||||
|
ContainerReaperModule,
|
||||||
|
FleetSettingsModule,
|
||||||
|
OnboardingModule,
|
||||||
|
ChatProxyModule,
|
||||||
|
OrchestratorModule,
|
||||||
],
|
],
|
||||||
controllers: [AppController, CsrfController],
|
controllers: [AppController, CsrfController],
|
||||||
providers: [
|
providers: [
|
||||||
|
|||||||
@@ -12,7 +12,10 @@ import { PrismaClient, Prisma } from "@prisma/client";
|
|||||||
import { randomUUID as uuid } from "crypto";
|
import { randomUUID as uuid } from "crypto";
|
||||||
import { runWithRlsClient, getRlsClient } from "../prisma/rls-context.provider";
|
import { runWithRlsClient, getRlsClient } from "../prisma/rls-context.provider";
|
||||||
|
|
||||||
describe.skipIf(!process.env.DATABASE_URL)(
|
const shouldRunDbIntegrationTests =
|
||||||
|
process.env.RUN_DB_TESTS === "true" && Boolean(process.env.DATABASE_URL);
|
||||||
|
|
||||||
|
describe.skipIf(!shouldRunDbIntegrationTests)(
|
||||||
"Auth Tables RLS Policies (requires DATABASE_URL)",
|
"Auth Tables RLS Policies (requires DATABASE_URL)",
|
||||||
() => {
|
() => {
|
||||||
let prisma: PrismaClient;
|
let prisma: PrismaClient;
|
||||||
@@ -28,7 +31,7 @@ describe.skipIf(!process.env.DATABASE_URL)(
|
|||||||
|
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
// Skip setup if DATABASE_URL is not available
|
// Skip setup if DATABASE_URL is not available
|
||||||
if (!process.env.DATABASE_URL) {
|
if (!shouldRunDbIntegrationTests) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -49,7 +52,7 @@ describe.skipIf(!process.env.DATABASE_URL)(
|
|||||||
|
|
||||||
afterAll(async () => {
|
afterAll(async () => {
|
||||||
// Skip cleanup if DATABASE_URL is not available or prisma not initialized
|
// Skip cleanup if DATABASE_URL is not available or prisma not initialized
|
||||||
if (!process.env.DATABASE_URL || !prisma) {
|
if (!shouldRunDbIntegrationTests || !prisma) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,30 @@
|
|||||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||||
import { isOidcEnabled, validateOidcConfig } from "./auth.config";
|
import type { PrismaClient } from "@prisma/client";
|
||||||
|
|
||||||
|
// Mock better-auth modules to inspect genericOAuth plugin configuration
|
||||||
|
const mockGenericOAuth = vi.fn().mockReturnValue({ id: "generic-oauth" });
|
||||||
|
const mockBetterAuth = vi.fn().mockReturnValue({ handler: vi.fn() });
|
||||||
|
const mockPrismaAdapter = vi.fn().mockReturnValue({});
|
||||||
|
|
||||||
|
vi.mock("better-auth/plugins", () => ({
|
||||||
|
genericOAuth: (...args: unknown[]) => mockGenericOAuth(...args),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth", () => ({
|
||||||
|
betterAuth: (...args: unknown[]) => mockBetterAuth(...args),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth/adapters/prisma", () => ({
|
||||||
|
prismaAdapter: (...args: unknown[]) => mockPrismaAdapter(...args),
|
||||||
|
}));
|
||||||
|
|
||||||
|
import {
|
||||||
|
isOidcEnabled,
|
||||||
|
validateOidcConfig,
|
||||||
|
createAuth,
|
||||||
|
getTrustedOrigins,
|
||||||
|
getBetterAuthBaseUrl,
|
||||||
|
} from "./auth.config";
|
||||||
|
|
||||||
describe("auth.config", () => {
|
describe("auth.config", () => {
|
||||||
// Store original env vars to restore after each test
|
// Store original env vars to restore after each test
|
||||||
@@ -11,6 +36,13 @@ describe("auth.config", () => {
|
|||||||
delete process.env.OIDC_ISSUER;
|
delete process.env.OIDC_ISSUER;
|
||||||
delete process.env.OIDC_CLIENT_ID;
|
delete process.env.OIDC_CLIENT_ID;
|
||||||
delete process.env.OIDC_CLIENT_SECRET;
|
delete process.env.OIDC_CLIENT_SECRET;
|
||||||
|
delete process.env.OIDC_REDIRECT_URI;
|
||||||
|
delete process.env.NODE_ENV;
|
||||||
|
delete process.env.BETTER_AUTH_URL;
|
||||||
|
delete process.env.NEXT_PUBLIC_APP_URL;
|
||||||
|
delete process.env.NEXT_PUBLIC_API_URL;
|
||||||
|
delete process.env.TRUSTED_ORIGINS;
|
||||||
|
delete process.env.COOKIE_DOMAIN;
|
||||||
});
|
});
|
||||||
|
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
@@ -70,6 +102,7 @@ describe("auth.config", () => {
|
|||||||
it("should throw when OIDC_ISSUER is missing", () => {
|
it("should throw when OIDC_ISSUER is missing", () => {
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER");
|
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER");
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC authentication is enabled");
|
expect(() => validateOidcConfig()).toThrow("OIDC authentication is enabled");
|
||||||
@@ -78,6 +111,7 @@ describe("auth.config", () => {
|
|||||||
it("should throw when OIDC_CLIENT_ID is missing", () => {
|
it("should throw when OIDC_CLIENT_ID is missing", () => {
|
||||||
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_CLIENT_ID");
|
expect(() => validateOidcConfig()).toThrow("OIDC_CLIENT_ID");
|
||||||
});
|
});
|
||||||
@@ -85,13 +119,22 @@ describe("auth.config", () => {
|
|||||||
it("should throw when OIDC_CLIENT_SECRET is missing", () => {
|
it("should throw when OIDC_CLIENT_SECRET is missing", () => {
|
||||||
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_CLIENT_SECRET");
|
expect(() => validateOidcConfig()).toThrow("OIDC_CLIENT_SECRET");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should throw when OIDC_REDIRECT_URI is missing", () => {
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||||
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).toThrow("OIDC_REDIRECT_URI");
|
||||||
|
});
|
||||||
|
|
||||||
it("should throw when all required vars are missing", () => {
|
it("should throw when all required vars are missing", () => {
|
||||||
expect(() => validateOidcConfig()).toThrow(
|
expect(() => validateOidcConfig()).toThrow(
|
||||||
"OIDC_ISSUER, OIDC_CLIENT_ID, OIDC_CLIENT_SECRET"
|
"OIDC_ISSUER, OIDC_CLIENT_ID, OIDC_CLIENT_SECRET, OIDC_REDIRECT_URI"
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -99,9 +142,10 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ISSUER = "";
|
process.env.OIDC_ISSUER = "";
|
||||||
process.env.OIDC_CLIENT_ID = "";
|
process.env.OIDC_CLIENT_ID = "";
|
||||||
process.env.OIDC_CLIENT_SECRET = "";
|
process.env.OIDC_CLIENT_SECRET = "";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow(
|
expect(() => validateOidcConfig()).toThrow(
|
||||||
"OIDC_ISSUER, OIDC_CLIENT_ID, OIDC_CLIENT_SECRET"
|
"OIDC_ISSUER, OIDC_CLIENT_ID, OIDC_CLIENT_SECRET, OIDC_REDIRECT_URI"
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -109,6 +153,7 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ISSUER = " ";
|
process.env.OIDC_ISSUER = " ";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER");
|
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER");
|
||||||
});
|
});
|
||||||
@@ -117,6 +162,7 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic";
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER must end with a trailing slash");
|
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER must end with a trailing slash");
|
||||||
expect(() => validateOidcConfig()).toThrow("https://auth.example.com/application/o/mosaic");
|
expect(() => validateOidcConfig()).toThrow("https://auth.example.com/application/o/mosaic");
|
||||||
@@ -126,6 +172,7 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).not.toThrow();
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
});
|
});
|
||||||
@@ -133,6 +180,537 @@ describe("auth.config", () => {
|
|||||||
it("should suggest disabling OIDC in error message", () => {
|
it("should suggest disabling OIDC in error message", () => {
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_ENABLED=false");
|
expect(() => validateOidcConfig()).toThrow("OIDC_ENABLED=false");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe("OIDC_REDIRECT_URI validation", () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||||
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw when OIDC_REDIRECT_URI is not a valid URL", () => {
|
||||||
|
process.env.OIDC_REDIRECT_URI = "not-a-url";
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).toThrow("OIDC_REDIRECT_URI must be a valid URL");
|
||||||
|
expect(() => validateOidcConfig()).toThrow("not-a-url");
|
||||||
|
expect(() => validateOidcConfig()).toThrow("Parse error:");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw when OIDC_REDIRECT_URI path does not start with /auth/oauth2/callback", () => {
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/oauth/callback";
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).toThrow(
|
||||||
|
'OIDC_REDIRECT_URI path must start with "/auth/oauth2/callback"'
|
||||||
|
);
|
||||||
|
expect(() => validateOidcConfig()).toThrow("/oauth/callback");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should accept a valid OIDC_REDIRECT_URI with /auth/oauth2/callback path", () => {
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should accept OIDC_REDIRECT_URI with exactly /auth/oauth2/callback path", () => {
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback";
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should warn but not throw when using localhost in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "http://localhost:3000/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
|
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
|
expect(warnSpy).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining("OIDC_REDIRECT_URI uses localhost")
|
||||||
|
);
|
||||||
|
|
||||||
|
warnSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should warn but not throw when using 127.0.0.1 in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "http://127.0.0.1:3000/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
|
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
|
expect(warnSpy).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining("OIDC_REDIRECT_URI uses localhost")
|
||||||
|
);
|
||||||
|
|
||||||
|
warnSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not warn about localhost when not in production", () => {
|
||||||
|
process.env.NODE_ENV = "development";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "http://localhost:3000/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
|
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
|
expect(warnSpy).not.toHaveBeenCalled();
|
||||||
|
|
||||||
|
warnSpy.mockRestore();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("createAuth - genericOAuth PKCE configuration", () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
mockGenericOAuth.mockClear();
|
||||||
|
mockBetterAuth.mockClear();
|
||||||
|
mockPrismaAdapter.mockClear();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should enable PKCE in the genericOAuth provider config when OIDC is enabled", () => {
|
||||||
|
process.env.OIDC_ENABLED = "true";
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||||
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockGenericOAuth).toHaveBeenCalledOnce();
|
||||||
|
const callArgs = mockGenericOAuth.mock.calls[0][0] as {
|
||||||
|
config: Array<{ pkce?: boolean; redirectURI?: string }>;
|
||||||
|
};
|
||||||
|
expect(callArgs.config[0].pkce).toBe(true);
|
||||||
|
expect(callArgs.config[0].redirectURI).toBe(
|
||||||
|
"https://app.example.com/auth/oauth2/callback/authentik"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not call genericOAuth when OIDC is disabled", () => {
|
||||||
|
process.env.OIDC_ENABLED = "false";
|
||||||
|
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockGenericOAuth).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw if OIDC_CLIENT_ID is missing when OIDC is enabled", () => {
|
||||||
|
process.env.OIDC_ENABLED = "true";
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||||
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
// OIDC_CLIENT_ID deliberately not set
|
||||||
|
|
||||||
|
// validateOidcConfig will throw first, so we need to bypass it
|
||||||
|
// by setting the var then deleting it after validation
|
||||||
|
// Instead, test via the validation path which is fine — but let's
|
||||||
|
// verify the plugin-level guard by using a direct approach:
|
||||||
|
// Set env to pass validateOidcConfig, then delete OIDC_CLIENT_ID
|
||||||
|
// The validateOidcConfig will catch this first, which is correct behavior
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
expect(() => createAuth(mockPrisma)).toThrow("OIDC_CLIENT_ID");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw if OIDC_CLIENT_SECRET is missing when OIDC is enabled", () => {
|
||||||
|
process.env.OIDC_ENABLED = "true";
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||||
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
// OIDC_CLIENT_SECRET deliberately not set
|
||||||
|
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
expect(() => createAuth(mockPrisma)).toThrow("OIDC_CLIENT_SECRET");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw if OIDC_ISSUER is missing when OIDC is enabled", () => {
|
||||||
|
process.env.OIDC_ENABLED = "true";
|
||||||
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
// OIDC_ISSUER deliberately not set
|
||||||
|
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
expect(() => createAuth(mockPrisma)).toThrow("OIDC_ISSUER");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("getTrustedOrigins", () => {
|
||||||
|
it("should return localhost URLs when NODE_ENV is not production", () => {
|
||||||
|
process.env.NODE_ENV = "development";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("http://localhost:3000");
|
||||||
|
expect(origins).toContain("http://localhost:3001");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return localhost URLs when NODE_ENV is not set", () => {
|
||||||
|
// NODE_ENV is deleted in beforeEach, so it's undefined here
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("http://localhost:3000");
|
||||||
|
expect(origins).toContain("http://localhost:3001");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should exclude localhost URLs in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).not.toContain("http://localhost:3000");
|
||||||
|
expect(origins).not.toContain("http://localhost:3001");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should parse TRUSTED_ORIGINS comma-separated values", () => {
|
||||||
|
process.env.TRUSTED_ORIGINS = "https://app.mosaicstack.dev,https://api.mosaicstack.dev";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://app.mosaicstack.dev");
|
||||||
|
expect(origins).toContain("https://api.mosaicstack.dev");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should trim whitespace from TRUSTED_ORIGINS entries", () => {
|
||||||
|
process.env.TRUSTED_ORIGINS = " https://app.mosaicstack.dev , https://api.mosaicstack.dev ";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://app.mosaicstack.dev");
|
||||||
|
expect(origins).toContain("https://api.mosaicstack.dev");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should filter out empty strings from TRUSTED_ORIGINS", () => {
|
||||||
|
process.env.TRUSTED_ORIGINS = "https://app.mosaicstack.dev,,, ,";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://app.mosaicstack.dev");
|
||||||
|
// No empty strings in the result
|
||||||
|
origins.forEach((o) => expect(o).not.toBe(""));
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should include NEXT_PUBLIC_APP_URL", () => {
|
||||||
|
process.env.NEXT_PUBLIC_APP_URL = "https://my-app.example.com";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://my-app.example.com");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should include NEXT_PUBLIC_API_URL", () => {
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://my-api.example.com";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://my-api.example.com");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should deduplicate origins", () => {
|
||||||
|
process.env.NEXT_PUBLIC_APP_URL = "http://localhost:3000";
|
||||||
|
process.env.TRUSTED_ORIGINS = "http://localhost:3000,http://localhost:3001";
|
||||||
|
// NODE_ENV not set, so localhost fallbacks are also added
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
const countLocalhost3000 = origins.filter((o) => o === "http://localhost:3000").length;
|
||||||
|
const countLocalhost3001 = origins.filter((o) => o === "http://localhost:3001").length;
|
||||||
|
expect(countLocalhost3000).toBe(1);
|
||||||
|
expect(countLocalhost3001).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle all env vars missing gracefully", () => {
|
||||||
|
// All env vars deleted in beforeEach; NODE_ENV is also deleted (not production)
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
// Should still return localhost fallbacks since not in production
|
||||||
|
expect(origins).toContain("http://localhost:3000");
|
||||||
|
expect(origins).toContain("http://localhost:3001");
|
||||||
|
expect(origins).toHaveLength(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return empty array when all env vars missing in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toHaveLength(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should combine all sources correctly", () => {
|
||||||
|
process.env.NEXT_PUBLIC_APP_URL = "https://app.mosaicstack.dev";
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://api.mosaicstack.dev";
|
||||||
|
process.env.TRUSTED_ORIGINS = "https://extra.example.com";
|
||||||
|
process.env.NODE_ENV = "development";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://app.mosaicstack.dev");
|
||||||
|
expect(origins).toContain("https://api.mosaicstack.dev");
|
||||||
|
expect(origins).toContain("https://extra.example.com");
|
||||||
|
expect(origins).toContain("http://localhost:3000");
|
||||||
|
expect(origins).toContain("http://localhost:3001");
|
||||||
|
expect(origins).toHaveLength(5);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reject invalid URLs in TRUSTED_ORIGINS with a warning including error details", () => {
|
||||||
|
process.env.TRUSTED_ORIGINS = "not-a-url,https://valid.example.com";
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
|
||||||
|
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://valid.example.com");
|
||||||
|
expect(origins).not.toContain("not-a-url");
|
||||||
|
expect(warnSpy).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining('Ignoring invalid URL in TRUSTED_ORIGINS: "not-a-url"')
|
||||||
|
);
|
||||||
|
// Verify that error detail is included in the warning
|
||||||
|
const warnCall = warnSpy.mock.calls.find(
|
||||||
|
(call) => typeof call[0] === "string" && call[0].includes("not-a-url")
|
||||||
|
);
|
||||||
|
expect(warnCall).toBeDefined();
|
||||||
|
expect(warnCall![0]).toMatch(/\(.*\)$/);
|
||||||
|
|
||||||
|
warnSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reject non-HTTP origins in TRUSTED_ORIGINS with a warning", () => {
|
||||||
|
process.env.TRUSTED_ORIGINS = "ftp://files.example.com,https://valid.example.com";
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
|
||||||
|
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://valid.example.com");
|
||||||
|
expect(origins).not.toContain("ftp://files.example.com");
|
||||||
|
expect(warnSpy).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining("Ignoring non-HTTP origin in TRUSTED_ORIGINS")
|
||||||
|
);
|
||||||
|
|
||||||
|
warnSpy.mockRestore();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("createAuth - session and cookie configuration", () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
mockGenericOAuth.mockClear();
|
||||||
|
mockBetterAuth.mockClear();
|
||||||
|
mockPrismaAdapter.mockClear();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should configure session expiresIn to 7 days (604800 seconds)", () => {
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
session: { expiresIn: number; updateAge: number };
|
||||||
|
};
|
||||||
|
expect(config.session.expiresIn).toBe(604800);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should configure session updateAge to 2 hours (7200 seconds)", () => {
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
session: { expiresIn: number; updateAge: number };
|
||||||
|
};
|
||||||
|
expect(config.session.updateAge).toBe(7200);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should configure BetterAuth database ID generation as UUID", () => {
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
database: {
|
||||||
|
generateId: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.database.generateId).toBe("uuid");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set httpOnly cookie attribute to true", () => {
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
defaultCookieAttributes: {
|
||||||
|
httpOnly: boolean;
|
||||||
|
secure: boolean;
|
||||||
|
sameSite: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.defaultCookieAttributes.httpOnly).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set sameSite cookie attribute to lax", () => {
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
defaultCookieAttributes: {
|
||||||
|
httpOnly: boolean;
|
||||||
|
secure: boolean;
|
||||||
|
sameSite: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.defaultCookieAttributes.sameSite).toBe("lax");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set secure cookie attribute to true in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://api.example.com";
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
defaultCookieAttributes: {
|
||||||
|
httpOnly: boolean;
|
||||||
|
secure: boolean;
|
||||||
|
sameSite: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.defaultCookieAttributes.secure).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set secure cookie attribute to false in non-production", () => {
|
||||||
|
process.env.NODE_ENV = "development";
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
defaultCookieAttributes: {
|
||||||
|
httpOnly: boolean;
|
||||||
|
secure: boolean;
|
||||||
|
sameSite: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.defaultCookieAttributes.secure).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set cookie domain when COOKIE_DOMAIN env var is present", () => {
|
||||||
|
process.env.COOKIE_DOMAIN = ".mosaicstack.dev";
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
defaultCookieAttributes: {
|
||||||
|
httpOnly: boolean;
|
||||||
|
secure: boolean;
|
||||||
|
sameSite: string;
|
||||||
|
domain?: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.defaultCookieAttributes.domain).toBe(".mosaicstack.dev");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not set cookie domain when COOKIE_DOMAIN env var is absent", () => {
|
||||||
|
delete process.env.COOKIE_DOMAIN;
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
defaultCookieAttributes: {
|
||||||
|
httpOnly: boolean;
|
||||||
|
secure: boolean;
|
||||||
|
sameSite: string;
|
||||||
|
domain?: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.defaultCookieAttributes.domain).toBeUndefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("getBetterAuthBaseUrl", () => {
|
||||||
|
it("should prefer BETTER_AUTH_URL when set", () => {
|
||||||
|
process.env.BETTER_AUTH_URL = "https://auth-base.example.com";
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://api.example.com";
|
||||||
|
|
||||||
|
expect(getBetterAuthBaseUrl()).toBe("https://auth-base.example.com");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should fall back to NEXT_PUBLIC_API_URL when BETTER_AUTH_URL is not set", () => {
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://api.example.com";
|
||||||
|
|
||||||
|
expect(getBetterAuthBaseUrl()).toBe("https://api.example.com");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw when base URL is invalid", () => {
|
||||||
|
process.env.BETTER_AUTH_URL = "not-a-url";
|
||||||
|
|
||||||
|
expect(() => getBetterAuthBaseUrl()).toThrow("BetterAuth base URL must be a valid URL");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw when base URL is missing in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
|
||||||
|
expect(() => getBetterAuthBaseUrl()).toThrow("Missing BetterAuth base URL in production");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw when base URL is not https in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
process.env.BETTER_AUTH_URL = "http://api.example.com";
|
||||||
|
|
||||||
|
expect(() => getBetterAuthBaseUrl()).toThrow(
|
||||||
|
"BetterAuth base URL must use https in production"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("createAuth - baseURL wiring", () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
mockBetterAuth.mockClear();
|
||||||
|
mockPrismaAdapter.mockClear();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should pass BETTER_AUTH_URL into BetterAuth config", () => {
|
||||||
|
process.env.BETTER_AUTH_URL = "https://api.mosaicstack.dev";
|
||||||
|
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as { baseURL?: string };
|
||||||
|
expect(config.baseURL).toBe("https://api.mosaicstack.dev");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should pass NEXT_PUBLIC_API_URL into BetterAuth config when BETTER_AUTH_URL is absent", () => {
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://api.fallback.dev";
|
||||||
|
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as { baseURL?: string };
|
||||||
|
expect(config.baseURL).toBe("https://api.fallback.dev");
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -6,7 +6,47 @@ import type { PrismaClient } from "@prisma/client";
|
|||||||
/**
|
/**
|
||||||
* Required OIDC environment variables when OIDC is enabled
|
* Required OIDC environment variables when OIDC is enabled
|
||||||
*/
|
*/
|
||||||
const REQUIRED_OIDC_ENV_VARS = ["OIDC_ISSUER", "OIDC_CLIENT_ID", "OIDC_CLIENT_SECRET"] as const;
|
const REQUIRED_OIDC_ENV_VARS = [
|
||||||
|
"OIDC_ISSUER",
|
||||||
|
"OIDC_CLIENT_ID",
|
||||||
|
"OIDC_CLIENT_SECRET",
|
||||||
|
"OIDC_REDIRECT_URI",
|
||||||
|
] as const;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve BetterAuth base URL from explicit auth URL or API URL.
|
||||||
|
* BetterAuth uses this to generate absolute callback/error URLs.
|
||||||
|
*/
|
||||||
|
export function getBetterAuthBaseUrl(): string | undefined {
|
||||||
|
const configured = process.env.BETTER_AUTH_URL ?? process.env.NEXT_PUBLIC_API_URL;
|
||||||
|
|
||||||
|
if (!configured || configured.trim() === "") {
|
||||||
|
if (process.env.NODE_ENV === "production") {
|
||||||
|
throw new Error(
|
||||||
|
"Missing BetterAuth base URL in production. Set BETTER_AUTH_URL (preferred) or NEXT_PUBLIC_API_URL."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
let parsed: URL;
|
||||||
|
try {
|
||||||
|
parsed = new URL(configured);
|
||||||
|
} catch (urlError: unknown) {
|
||||||
|
const detail = urlError instanceof Error ? urlError.message : String(urlError);
|
||||||
|
throw new Error(
|
||||||
|
`BetterAuth base URL must be a valid URL. Current value: "${configured}". Parse error: ${detail}.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.NODE_ENV === "production" && parsed.protocol !== "https:") {
|
||||||
|
throw new Error(
|
||||||
|
`BetterAuth base URL must use https in production. Current value: "${configured}".`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return parsed.origin;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Check if OIDC authentication is enabled via environment variable
|
* Check if OIDC authentication is enabled via environment variable
|
||||||
@@ -52,6 +92,54 @@ export function validateOidcConfig(): void {
|
|||||||
`The discovery URL is constructed by appending ".well-known/openid-configuration" to the issuer.`
|
`The discovery URL is constructed by appending ".well-known/openid-configuration" to the issuer.`
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Additional validation: OIDC_REDIRECT_URI must be a valid URL with /auth/oauth2/callback path
|
||||||
|
validateRedirectUri();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validates the OIDC_REDIRECT_URI environment variable.
|
||||||
|
* - Must be a parseable URL
|
||||||
|
* - Path must start with /auth/oauth2/callback
|
||||||
|
* - Warns (but does not throw) if using localhost in production
|
||||||
|
*
|
||||||
|
* @throws Error if URL is invalid or path does not start with /auth/oauth2/callback
|
||||||
|
*/
|
||||||
|
function validateRedirectUri(): void {
|
||||||
|
const redirectUri = process.env.OIDC_REDIRECT_URI;
|
||||||
|
if (!redirectUri || redirectUri.trim() === "") {
|
||||||
|
// Already caught by REQUIRED_OIDC_ENV_VARS check above
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let parsed: URL;
|
||||||
|
try {
|
||||||
|
parsed = new URL(redirectUri);
|
||||||
|
} catch (urlError: unknown) {
|
||||||
|
const detail = urlError instanceof Error ? urlError.message : String(urlError);
|
||||||
|
throw new Error(
|
||||||
|
`OIDC_REDIRECT_URI must be a valid URL. Current value: "${redirectUri}". ` +
|
||||||
|
`Parse error: ${detail}. ` +
|
||||||
|
`Example: "https://api.example.com/auth/oauth2/callback/authentik".`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!parsed.pathname.startsWith("/auth/oauth2/callback")) {
|
||||||
|
throw new Error(
|
||||||
|
`OIDC_REDIRECT_URI path must start with "/auth/oauth2/callback". Current path: "${parsed.pathname}". ` +
|
||||||
|
`Example: "https://api.example.com/auth/oauth2/callback/authentik".`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
process.env.NODE_ENV === "production" &&
|
||||||
|
(parsed.hostname === "localhost" || parsed.hostname === "127.0.0.1")
|
||||||
|
) {
|
||||||
|
console.warn(
|
||||||
|
`[AUTH WARNING] OIDC_REDIRECT_URI uses localhost ("${redirectUri}") in production. ` +
|
||||||
|
`This is likely a misconfiguration. Use a public domain for production deployments.`
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -63,14 +151,34 @@ function getOidcPlugins(): ReturnType<typeof genericOAuth>[] {
|
|||||||
return [];
|
return [];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const clientId = process.env.OIDC_CLIENT_ID;
|
||||||
|
const clientSecret = process.env.OIDC_CLIENT_SECRET;
|
||||||
|
const issuer = process.env.OIDC_ISSUER;
|
||||||
|
const redirectUri = process.env.OIDC_REDIRECT_URI;
|
||||||
|
|
||||||
|
if (!clientId) {
|
||||||
|
throw new Error("OIDC_CLIENT_ID is required when OIDC is enabled but was not set.");
|
||||||
|
}
|
||||||
|
if (!clientSecret) {
|
||||||
|
throw new Error("OIDC_CLIENT_SECRET is required when OIDC is enabled but was not set.");
|
||||||
|
}
|
||||||
|
if (!issuer) {
|
||||||
|
throw new Error("OIDC_ISSUER is required when OIDC is enabled but was not set.");
|
||||||
|
}
|
||||||
|
if (!redirectUri) {
|
||||||
|
throw new Error("OIDC_REDIRECT_URI is required when OIDC is enabled but was not set.");
|
||||||
|
}
|
||||||
|
|
||||||
return [
|
return [
|
||||||
genericOAuth({
|
genericOAuth({
|
||||||
config: [
|
config: [
|
||||||
{
|
{
|
||||||
providerId: "authentik",
|
providerId: "authentik",
|
||||||
clientId: process.env.OIDC_CLIENT_ID ?? "",
|
clientId,
|
||||||
clientSecret: process.env.OIDC_CLIENT_SECRET ?? "",
|
clientSecret,
|
||||||
discoveryUrl: `${process.env.OIDC_ISSUER ?? ""}.well-known/openid-configuration`,
|
discoveryUrl: `${issuer}.well-known/openid-configuration`,
|
||||||
|
redirectURI: redirectUri,
|
||||||
|
pkce: true,
|
||||||
scopes: ["openid", "profile", "email"],
|
scopes: ["openid", "profile", "email"],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
@@ -78,28 +186,95 @@ function getOidcPlugins(): ReturnType<typeof genericOAuth>[] {
|
|||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build the list of trusted origins from environment variables.
|
||||||
|
*
|
||||||
|
* Sources (in order):
|
||||||
|
* - NEXT_PUBLIC_APP_URL — primary frontend URL
|
||||||
|
* - NEXT_PUBLIC_API_URL — API's own origin
|
||||||
|
* - TRUSTED_ORIGINS — comma-separated additional origins
|
||||||
|
* - localhost fallbacks — only when NODE_ENV !== "production"
|
||||||
|
*
|
||||||
|
* The returned list is deduplicated and empty strings are filtered out.
|
||||||
|
*/
|
||||||
|
export function getTrustedOrigins(): string[] {
|
||||||
|
const origins: string[] = [];
|
||||||
|
|
||||||
|
// Environment-driven origins
|
||||||
|
if (process.env.NEXT_PUBLIC_APP_URL) {
|
||||||
|
origins.push(process.env.NEXT_PUBLIC_APP_URL);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.NEXT_PUBLIC_API_URL) {
|
||||||
|
origins.push(process.env.NEXT_PUBLIC_API_URL);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Comma-separated additional origins (validated)
|
||||||
|
if (process.env.TRUSTED_ORIGINS) {
|
||||||
|
const rawOrigins = process.env.TRUSTED_ORIGINS.split(",")
|
||||||
|
.map((o) => o.trim())
|
||||||
|
.filter((o) => o !== "");
|
||||||
|
for (const origin of rawOrigins) {
|
||||||
|
try {
|
||||||
|
const parsed = new URL(origin);
|
||||||
|
if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
|
||||||
|
console.warn(`[AUTH] Ignoring non-HTTP origin in TRUSTED_ORIGINS: "${origin}"`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
origins.push(origin);
|
||||||
|
} catch (urlError: unknown) {
|
||||||
|
const detail = urlError instanceof Error ? urlError.message : String(urlError);
|
||||||
|
console.warn(`[AUTH] Ignoring invalid URL in TRUSTED_ORIGINS: "${origin}" (${detail})`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Localhost fallbacks for development only
|
||||||
|
if (process.env.NODE_ENV !== "production") {
|
||||||
|
origins.push("http://localhost:3000", "http://localhost:3001");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deduplicate and filter empty strings
|
||||||
|
return [...new Set(origins)].filter((o) => o !== "");
|
||||||
|
}
|
||||||
|
|
||||||
export function createAuth(prisma: PrismaClient) {
|
export function createAuth(prisma: PrismaClient) {
|
||||||
// Validate OIDC configuration at startup - fail fast if misconfigured
|
// Validate OIDC configuration at startup - fail fast if misconfigured
|
||||||
validateOidcConfig();
|
validateOidcConfig();
|
||||||
|
|
||||||
|
const baseURL = getBetterAuthBaseUrl();
|
||||||
|
|
||||||
return betterAuth({
|
return betterAuth({
|
||||||
|
baseURL,
|
||||||
|
basePath: "/auth",
|
||||||
database: prismaAdapter(prisma, {
|
database: prismaAdapter(prisma, {
|
||||||
provider: "postgresql",
|
provider: "postgresql",
|
||||||
}),
|
}),
|
||||||
emailAndPassword: {
|
emailAndPassword: {
|
||||||
enabled: true, // Enable for now, can be disabled later
|
enabled: true,
|
||||||
},
|
},
|
||||||
plugins: [...getOidcPlugins()],
|
plugins: [...getOidcPlugins()],
|
||||||
session: {
|
logger: {
|
||||||
expiresIn: 60 * 60 * 24, // 24 hours
|
disabled: false,
|
||||||
updateAge: 60 * 60 * 24, // 24 hours
|
level: "error",
|
||||||
},
|
},
|
||||||
trustedOrigins: [
|
session: {
|
||||||
process.env.NEXT_PUBLIC_APP_URL ?? "http://localhost:3000",
|
expiresIn: 60 * 60 * 24 * 7, // 7 days absolute max
|
||||||
"http://localhost:3001", // API origin (dev)
|
updateAge: 60 * 60 * 2, // 2 hours — minimum session age before BetterAuth refreshes the expiry on next request
|
||||||
"https://app.mosaicstack.dev", // Production web
|
},
|
||||||
"https://api.mosaicstack.dev", // Production API
|
advanced: {
|
||||||
],
|
database: {
|
||||||
|
// BetterAuth's default ID generator emits opaque strings; our auth tables use UUID PKs.
|
||||||
|
generateId: "uuid",
|
||||||
|
},
|
||||||
|
defaultCookieAttributes: {
|
||||||
|
httpOnly: true,
|
||||||
|
secure: process.env.NODE_ENV === "production",
|
||||||
|
sameSite: "lax" as const,
|
||||||
|
...(process.env.COOKIE_DOMAIN ? { domain: process.env.COOKIE_DOMAIN } : {}),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
trustedOrigins: getTrustedOrigins(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,15 +1,41 @@
|
|||||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
|
||||||
|
// Mock better-auth modules before importing AuthService (pulled in by AuthController)
|
||||||
|
vi.mock("better-auth/node", () => ({
|
||||||
|
toNodeHandler: vi.fn().mockReturnValue(vi.fn()),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth", () => ({
|
||||||
|
betterAuth: vi.fn().mockReturnValue({
|
||||||
|
handler: vi.fn(),
|
||||||
|
api: { getSession: vi.fn() },
|
||||||
|
}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth/adapters/prisma", () => ({
|
||||||
|
prismaAdapter: vi.fn().mockReturnValue({}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth/plugins", () => ({
|
||||||
|
genericOAuth: vi.fn().mockReturnValue({ id: "generic-oauth" }),
|
||||||
|
}));
|
||||||
|
|
||||||
import { Test, TestingModule } from "@nestjs/testing";
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { HttpException, HttpStatus, UnauthorizedException } from "@nestjs/common";
|
||||||
import type { AuthUser, AuthSession } from "@mosaic/shared";
|
import type { AuthUser, AuthSession } from "@mosaic/shared";
|
||||||
|
import type { Request as ExpressRequest, Response as ExpressResponse } from "express";
|
||||||
import { AuthController } from "./auth.controller";
|
import { AuthController } from "./auth.controller";
|
||||||
import { AuthService } from "./auth.service";
|
import { AuthService } from "./auth.service";
|
||||||
|
|
||||||
describe("AuthController", () => {
|
describe("AuthController", () => {
|
||||||
let controller: AuthController;
|
let controller: AuthController;
|
||||||
let authService: AuthService;
|
|
||||||
|
const mockNodeHandler = vi.fn().mockResolvedValue(undefined);
|
||||||
|
|
||||||
const mockAuthService = {
|
const mockAuthService = {
|
||||||
getAuth: vi.fn(),
|
getAuth: vi.fn(),
|
||||||
|
getNodeHandler: vi.fn().mockReturnValue(mockNodeHandler),
|
||||||
|
getAuthConfig: vi.fn(),
|
||||||
};
|
};
|
||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
@@ -24,25 +50,239 @@ describe("AuthController", () => {
|
|||||||
}).compile();
|
}).compile();
|
||||||
|
|
||||||
controller = module.get<AuthController>(AuthController);
|
controller = module.get<AuthController>(AuthController);
|
||||||
authService = module.get<AuthService>(AuthService);
|
|
||||||
|
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
|
|
||||||
|
// Restore mock implementations after clearAllMocks
|
||||||
|
mockAuthService.getNodeHandler.mockReturnValue(mockNodeHandler);
|
||||||
|
mockNodeHandler.mockResolvedValue(undefined);
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("handleAuth", () => {
|
describe("handleAuth", () => {
|
||||||
it("should call BetterAuth handler", async () => {
|
it("should delegate to BetterAuth node handler with Express req/res", async () => {
|
||||||
const mockHandler = vi.fn().mockResolvedValue({ status: 200 });
|
|
||||||
mockAuthService.getAuth.mockReturnValue({ handler: mockHandler });
|
|
||||||
|
|
||||||
const mockRequest = {
|
const mockRequest = {
|
||||||
method: "GET",
|
method: "GET",
|
||||||
url: "/auth/session",
|
url: "/auth/session",
|
||||||
|
headers: {},
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
socket: { remoteAddress: "127.0.0.1" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
|
||||||
|
expect(mockAuthService.getNodeHandler).toHaveBeenCalled();
|
||||||
|
expect(mockNodeHandler).toHaveBeenCalledWith(mockRequest, mockResponse);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw HttpException with 500 when handler throws before headers sent", async () => {
|
||||||
|
const handlerError = new Error("BetterAuth internal failure");
|
||||||
|
mockNodeHandler.mockRejectedValueOnce(handlerError);
|
||||||
|
|
||||||
|
const mockRequest = {
|
||||||
|
method: "POST",
|
||||||
|
url: "/auth/sign-in",
|
||||||
|
headers: {},
|
||||||
|
ip: "192.168.1.10",
|
||||||
|
socket: { remoteAddress: "192.168.1.10" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
// Should not reach here
|
||||||
|
expect.unreachable("Expected HttpException to be thrown");
|
||||||
|
} catch (err) {
|
||||||
|
expect(err).toBeInstanceOf(HttpException);
|
||||||
|
expect((err as HttpException).getStatus()).toBe(HttpStatus.INTERNAL_SERVER_ERROR);
|
||||||
|
expect((err as HttpException).getResponse()).toBe(
|
||||||
|
"Unable to complete authentication. Please try again in a moment."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should preserve better-call status and body for handler APIError", async () => {
|
||||||
|
const apiError = {
|
||||||
|
statusCode: HttpStatus.BAD_REQUEST,
|
||||||
|
message: "Invalid OAuth configuration",
|
||||||
|
body: {
|
||||||
|
message: "Invalid OAuth configuration",
|
||||||
|
code: "INVALID_OAUTH_CONFIGURATION",
|
||||||
|
},
|
||||||
|
};
|
||||||
|
mockNodeHandler.mockRejectedValueOnce(apiError);
|
||||||
|
|
||||||
|
const mockRequest = {
|
||||||
|
method: "POST",
|
||||||
|
url: "/auth/sign-in/oauth2",
|
||||||
|
headers: {},
|
||||||
|
ip: "192.168.1.10",
|
||||||
|
socket: { remoteAddress: "192.168.1.10" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
expect.unreachable("Expected HttpException to be thrown");
|
||||||
|
} catch (err) {
|
||||||
|
expect(err).toBeInstanceOf(HttpException);
|
||||||
|
expect((err as HttpException).getStatus()).toBe(HttpStatus.BAD_REQUEST);
|
||||||
|
expect((err as HttpException).getResponse()).toMatchObject({
|
||||||
|
message: "Invalid OAuth configuration",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should log warning and not throw when handler throws after headers sent", async () => {
|
||||||
|
const handlerError = new Error("Stream interrupted");
|
||||||
|
mockNodeHandler.mockRejectedValueOnce(handlerError);
|
||||||
|
|
||||||
|
const mockRequest = {
|
||||||
|
method: "POST",
|
||||||
|
url: "/auth/sign-up",
|
||||||
|
headers: {},
|
||||||
|
ip: "10.0.0.5",
|
||||||
|
socket: { remoteAddress: "10.0.0.5" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: true,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
// Should not throw when headers already sent
|
||||||
|
await expect(controller.handleAuth(mockRequest, mockResponse)).resolves.toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle non-Error thrown values", async () => {
|
||||||
|
mockNodeHandler.mockRejectedValueOnce("string error");
|
||||||
|
|
||||||
|
const mockRequest = {
|
||||||
|
method: "GET",
|
||||||
|
url: "/auth/callback",
|
||||||
|
headers: {},
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
socket: { remoteAddress: "127.0.0.1" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
await expect(controller.handleAuth(mockRequest, mockResponse)).rejects.toThrow(HttpException);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("getConfig", () => {
|
||||||
|
it("should return auth config from service", async () => {
|
||||||
|
const mockConfig = {
|
||||||
|
providers: [
|
||||||
|
{ id: "email", name: "Email", type: "credentials" as const },
|
||||||
|
{ id: "authentik", name: "Authentik", type: "oauth" as const },
|
||||||
|
],
|
||||||
|
};
|
||||||
|
mockAuthService.getAuthConfig.mockResolvedValue(mockConfig);
|
||||||
|
|
||||||
|
const result = await controller.getConfig();
|
||||||
|
|
||||||
|
expect(result).toEqual(mockConfig);
|
||||||
|
expect(mockAuthService.getAuthConfig).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return correct response shape with only email provider", async () => {
|
||||||
|
const mockConfig = {
|
||||||
|
providers: [{ id: "email", name: "Email", type: "credentials" as const }],
|
||||||
|
};
|
||||||
|
mockAuthService.getAuthConfig.mockResolvedValue(mockConfig);
|
||||||
|
|
||||||
|
const result = await controller.getConfig();
|
||||||
|
|
||||||
|
expect(result).toEqual(mockConfig);
|
||||||
|
expect(result.providers).toHaveLength(1);
|
||||||
|
expect(result.providers[0]).toEqual({
|
||||||
|
id: "email",
|
||||||
|
name: "Email",
|
||||||
|
type: "credentials",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should never leak secrets in auth config response", async () => {
|
||||||
|
// Set ALL sensitive environment variables with known values
|
||||||
|
const sensitiveEnv: Record<string, string> = {
|
||||||
|
OIDC_CLIENT_SECRET: "test-client-secret",
|
||||||
|
OIDC_CLIENT_ID: "test-client-id",
|
||||||
|
OIDC_ISSUER: "https://auth.test.com/",
|
||||||
|
OIDC_REDIRECT_URI: "https://app.test.com/auth/oauth2/callback/authentik",
|
||||||
|
BETTER_AUTH_SECRET: "test-better-auth-secret",
|
||||||
|
JWT_SECRET: "test-jwt-secret",
|
||||||
|
CSRF_SECRET: "test-csrf-secret",
|
||||||
|
DATABASE_URL: "postgresql://user:password@localhost/db",
|
||||||
|
OIDC_ENABLED: "true",
|
||||||
};
|
};
|
||||||
|
|
||||||
await controller.handleAuth(mockRequest as unknown as Request);
|
const originalEnv: Record<string, string | undefined> = {};
|
||||||
|
for (const [key, value] of Object.entries(sensitiveEnv)) {
|
||||||
|
originalEnv[key] = process.env[key];
|
||||||
|
process.env[key] = value;
|
||||||
|
}
|
||||||
|
|
||||||
expect(mockAuthService.getAuth).toHaveBeenCalled();
|
try {
|
||||||
expect(mockHandler).toHaveBeenCalledWith(mockRequest);
|
// Mock the service to return a realistic config with both providers
|
||||||
|
const mockConfig = {
|
||||||
|
providers: [
|
||||||
|
{ id: "email", name: "Email", type: "credentials" as const },
|
||||||
|
{ id: "authentik", name: "Authentik", type: "oauth" as const },
|
||||||
|
],
|
||||||
|
};
|
||||||
|
mockAuthService.getAuthConfig.mockResolvedValue(mockConfig);
|
||||||
|
|
||||||
|
const result = await controller.getConfig();
|
||||||
|
const serialized = JSON.stringify(result);
|
||||||
|
|
||||||
|
// Assert no secret values leak into the serialized response
|
||||||
|
const forbiddenPatterns = [
|
||||||
|
"test-client-secret",
|
||||||
|
"test-client-id",
|
||||||
|
"test-better-auth-secret",
|
||||||
|
"test-jwt-secret",
|
||||||
|
"test-csrf-secret",
|
||||||
|
"auth.test.com",
|
||||||
|
"callback",
|
||||||
|
"password",
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const pattern of forbiddenPatterns) {
|
||||||
|
expect(serialized).not.toContain(pattern);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Assert response contains ONLY expected fields
|
||||||
|
expect(result).toHaveProperty("providers");
|
||||||
|
expect(Object.keys(result)).toEqual(["providers"]);
|
||||||
|
expect(Array.isArray(result.providers)).toBe(true);
|
||||||
|
|
||||||
|
for (const provider of result.providers) {
|
||||||
|
const keys = Object.keys(provider);
|
||||||
|
expect(keys).toEqual(expect.arrayContaining(["id", "name", "type"]));
|
||||||
|
expect(keys).toHaveLength(3);
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
// Restore original environment
|
||||||
|
for (const [key] of Object.entries(sensitiveEnv)) {
|
||||||
|
if (originalEnv[key] === undefined) {
|
||||||
|
delete process.env[key];
|
||||||
|
} else {
|
||||||
|
process.env[key] = originalEnv[key];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -80,19 +320,22 @@ describe("AuthController", () => {
|
|||||||
expect(result).toEqual(expected);
|
expect(result).toEqual(expected);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw error if user not found in request", () => {
|
it("should throw UnauthorizedException when req.user is undefined", () => {
|
||||||
const mockRequest = {
|
const mockRequest = {
|
||||||
session: {
|
session: {
|
||||||
id: "session-123",
|
id: "session-123",
|
||||||
token: "session-token",
|
token: "session-token",
|
||||||
expiresAt: new Date(),
|
expiresAt: new Date(Date.now() + 86400000),
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
expect(() => controller.getSession(mockRequest)).toThrow("User session not found");
|
expect(() => controller.getSession(mockRequest as never)).toThrow(UnauthorizedException);
|
||||||
|
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
||||||
|
"Missing authentication context"
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw error if session not found in request", () => {
|
it("should throw UnauthorizedException when req.session is undefined", () => {
|
||||||
const mockRequest = {
|
const mockRequest = {
|
||||||
user: {
|
user: {
|
||||||
id: "user-123",
|
id: "user-123",
|
||||||
@@ -101,21 +344,30 @@ describe("AuthController", () => {
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
expect(() => controller.getSession(mockRequest)).toThrow("User session not found");
|
expect(() => controller.getSession(mockRequest as never)).toThrow(UnauthorizedException);
|
||||||
|
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
||||||
|
"Missing authentication context"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw UnauthorizedException when both req.user and req.session are undefined", () => {
|
||||||
|
const mockRequest = {};
|
||||||
|
|
||||||
|
expect(() => controller.getSession(mockRequest as never)).toThrow(UnauthorizedException);
|
||||||
|
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
||||||
|
"Missing authentication context"
|
||||||
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("getProfile", () => {
|
describe("getProfile", () => {
|
||||||
it("should return complete user profile with workspace fields", () => {
|
it("should return complete user profile with identity fields", () => {
|
||||||
const mockUser: AuthUser = {
|
const mockUser: AuthUser = {
|
||||||
id: "user-123",
|
id: "user-123",
|
||||||
email: "test@example.com",
|
email: "test@example.com",
|
||||||
name: "Test User",
|
name: "Test User",
|
||||||
image: "https://example.com/avatar.jpg",
|
image: "https://example.com/avatar.jpg",
|
||||||
emailVerified: true,
|
emailVerified: true,
|
||||||
workspaceId: "workspace-123",
|
|
||||||
currentWorkspaceId: "workspace-456",
|
|
||||||
workspaceRole: "admin",
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const result = controller.getProfile(mockUser);
|
const result = controller.getProfile(mockUser);
|
||||||
@@ -126,13 +378,10 @@ describe("AuthController", () => {
|
|||||||
name: mockUser.name,
|
name: mockUser.name,
|
||||||
image: mockUser.image,
|
image: mockUser.image,
|
||||||
emailVerified: mockUser.emailVerified,
|
emailVerified: mockUser.emailVerified,
|
||||||
workspaceId: mockUser.workspaceId,
|
|
||||||
currentWorkspaceId: mockUser.currentWorkspaceId,
|
|
||||||
workspaceRole: mockUser.workspaceRole,
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should return user profile with optional fields undefined", () => {
|
it("should return user profile with only required fields", () => {
|
||||||
const mockUser: AuthUser = {
|
const mockUser: AuthUser = {
|
||||||
id: "user-123",
|
id: "user-123",
|
||||||
email: "test@example.com",
|
email: "test@example.com",
|
||||||
@@ -145,12 +394,96 @@ describe("AuthController", () => {
|
|||||||
id: mockUser.id,
|
id: mockUser.id,
|
||||||
email: mockUser.email,
|
email: mockUser.email,
|
||||||
name: mockUser.name,
|
name: mockUser.name,
|
||||||
image: undefined,
|
|
||||||
emailVerified: undefined,
|
|
||||||
workspaceId: undefined,
|
|
||||||
currentWorkspaceId: undefined,
|
|
||||||
workspaceRole: undefined,
|
|
||||||
});
|
});
|
||||||
|
// Workspace fields are not included — served by GET /api/workspaces
|
||||||
|
expect(result).not.toHaveProperty("workspaceId");
|
||||||
|
expect(result).not.toHaveProperty("currentWorkspaceId");
|
||||||
|
expect(result).not.toHaveProperty("workspaceRole");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("getClientIp (via handleAuth)", () => {
|
||||||
|
it("should extract IP from X-Forwarded-For with single IP", async () => {
|
||||||
|
const mockRequest = {
|
||||||
|
method: "GET",
|
||||||
|
url: "/auth/callback",
|
||||||
|
headers: { "x-forwarded-for": "203.0.113.50" },
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
socket: { remoteAddress: "127.0.0.1" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
// Spy on the logger to verify the extracted IP
|
||||||
|
const debugSpy = vi.spyOn(controller["logger"], "debug");
|
||||||
|
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
|
||||||
|
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("203.0.113.50"));
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should extract first IP from X-Forwarded-For with comma-separated IPs", async () => {
|
||||||
|
const mockRequest = {
|
||||||
|
method: "GET",
|
||||||
|
url: "/auth/callback",
|
||||||
|
headers: { "x-forwarded-for": "203.0.113.50, 70.41.3.18" },
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
socket: { remoteAddress: "127.0.0.1" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
const debugSpy = vi.spyOn(controller["logger"], "debug");
|
||||||
|
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
|
||||||
|
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("203.0.113.50"));
|
||||||
|
// Ensure it does NOT contain the second IP in the extracted position
|
||||||
|
expect(debugSpy).toHaveBeenCalledWith(expect.not.stringContaining("70.41.3.18"));
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should extract first IP from X-Forwarded-For as array", async () => {
|
||||||
|
const mockRequest = {
|
||||||
|
method: "GET",
|
||||||
|
url: "/auth/callback",
|
||||||
|
headers: { "x-forwarded-for": ["203.0.113.50", "70.41.3.18"] },
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
socket: { remoteAddress: "127.0.0.1" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
const debugSpy = vi.spyOn(controller["logger"], "debug");
|
||||||
|
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
|
||||||
|
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("203.0.113.50"));
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should fallback to req.ip when no X-Forwarded-For header", async () => {
|
||||||
|
const mockRequest = {
|
||||||
|
method: "GET",
|
||||||
|
url: "/auth/callback",
|
||||||
|
headers: {},
|
||||||
|
ip: "192.168.1.100",
|
||||||
|
socket: { remoteAddress: "192.168.1.100" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
const debugSpy = vi.spyOn(controller["logger"], "debug");
|
||||||
|
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
|
||||||
|
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("192.168.1.100"));
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,19 +1,25 @@
|
|||||||
import { Controller, All, Req, Get, UseGuards, Request, Logger } from "@nestjs/common";
|
import {
|
||||||
|
Controller,
|
||||||
|
All,
|
||||||
|
Req,
|
||||||
|
Res,
|
||||||
|
Get,
|
||||||
|
Header,
|
||||||
|
UseGuards,
|
||||||
|
Request,
|
||||||
|
Logger,
|
||||||
|
HttpException,
|
||||||
|
HttpStatus,
|
||||||
|
UnauthorizedException,
|
||||||
|
} from "@nestjs/common";
|
||||||
import { Throttle } from "@nestjs/throttler";
|
import { Throttle } from "@nestjs/throttler";
|
||||||
import type { AuthUser, AuthSession } from "@mosaic/shared";
|
import type { Request as ExpressRequest, Response as ExpressResponse } from "express";
|
||||||
|
import type { AuthUser, AuthSession, AuthConfigResponse } from "@mosaic/shared";
|
||||||
import { AuthService } from "./auth.service";
|
import { AuthService } from "./auth.service";
|
||||||
import { AuthGuard } from "./guards/auth.guard";
|
import { AuthGuard } from "./guards/auth.guard";
|
||||||
import { CurrentUser } from "./decorators/current-user.decorator";
|
import { CurrentUser } from "./decorators/current-user.decorator";
|
||||||
|
import { SkipCsrf } from "../common/decorators/skip-csrf.decorator";
|
||||||
interface RequestWithSession {
|
import type { AuthenticatedRequest } from "./types/better-auth-request.interface";
|
||||||
user?: AuthUser;
|
|
||||||
session?: {
|
|
||||||
id: string;
|
|
||||||
token: string;
|
|
||||||
expiresAt: Date;
|
|
||||||
[key: string]: unknown;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
@Controller("auth")
|
@Controller("auth")
|
||||||
export class AuthController {
|
export class AuthController {
|
||||||
@@ -27,10 +33,13 @@ export class AuthController {
|
|||||||
*/
|
*/
|
||||||
@Get("session")
|
@Get("session")
|
||||||
@UseGuards(AuthGuard)
|
@UseGuards(AuthGuard)
|
||||||
getSession(@Request() req: RequestWithSession): AuthSession {
|
getSession(@Request() req: AuthenticatedRequest): AuthSession {
|
||||||
|
// Defense-in-depth: AuthGuard should guarantee these, but if someone adds
|
||||||
|
// a route with AuthenticatedRequest and forgets @UseGuards(AuthGuard),
|
||||||
|
// TypeScript types won't help at runtime.
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
|
||||||
if (!req.user || !req.session) {
|
if (!req.user || !req.session) {
|
||||||
// This should never happen after AuthGuard, but TypeScript needs the check
|
throw new UnauthorizedException("Missing authentication context");
|
||||||
throw new Error("User session not found");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@@ -63,19 +72,25 @@ export class AuthController {
|
|||||||
if (user.emailVerified !== undefined) {
|
if (user.emailVerified !== undefined) {
|
||||||
profile.emailVerified = user.emailVerified;
|
profile.emailVerified = user.emailVerified;
|
||||||
}
|
}
|
||||||
if (user.workspaceId !== undefined) {
|
|
||||||
profile.workspaceId = user.workspaceId;
|
// Workspace context is served by GET /api/workspaces, not the auth profile.
|
||||||
}
|
// The deprecated workspaceId/currentWorkspaceId/workspaceRole fields on
|
||||||
if (user.currentWorkspaceId !== undefined) {
|
// AuthUser are never populated by BetterAuth and are omitted here.
|
||||||
profile.currentWorkspaceId = user.currentWorkspaceId;
|
|
||||||
}
|
|
||||||
if (user.workspaceRole !== undefined) {
|
|
||||||
profile.workspaceRole = user.workspaceRole;
|
|
||||||
}
|
|
||||||
|
|
||||||
return profile;
|
return profile;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get available authentication providers.
|
||||||
|
* Public endpoint (no auth guard) so the frontend can discover login options
|
||||||
|
* before the user is authenticated.
|
||||||
|
*/
|
||||||
|
@Get("config")
|
||||||
|
@Header("Cache-Control", "public, max-age=300")
|
||||||
|
async getConfig(): Promise<AuthConfigResponse> {
|
||||||
|
return this.authService.getAuthConfig();
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Handle all other auth routes (sign-in, sign-up, sign-out, etc.)
|
* Handle all other auth routes (sign-in, sign-up, sign-out, etc.)
|
||||||
* Delegates to BetterAuth
|
* Delegates to BetterAuth
|
||||||
@@ -87,38 +102,110 @@ export class AuthController {
|
|||||||
* Rate limiting and logging are applied to mitigate abuse (SEC-API-10).
|
* Rate limiting and logging are applied to mitigate abuse (SEC-API-10).
|
||||||
*/
|
*/
|
||||||
@All("*")
|
@All("*")
|
||||||
@Throttle({ strict: { limit: 10, ttl: 60000 } })
|
// BetterAuth handles CSRF internally (Fetch Metadata + SameSite=Lax cookies).
|
||||||
async handleAuth(@Req() req: Request): Promise<unknown> {
|
// @SkipCsrf avoids double-protection conflicts.
|
||||||
|
// See: https://www.better-auth.com/docs/reference/security
|
||||||
|
@SkipCsrf()
|
||||||
|
@Throttle({ default: { ttl: 60_000, limit: 5 } })
|
||||||
|
async handleAuth(@Req() req: ExpressRequest, @Res() res: ExpressResponse): Promise<void> {
|
||||||
// Extract client IP for logging
|
// Extract client IP for logging
|
||||||
const clientIp = this.getClientIp(req);
|
const clientIp = this.getClientIp(req);
|
||||||
const requestPath = (req as unknown as { url?: string }).url ?? "unknown";
|
|
||||||
const method = (req as unknown as { method?: string }).method ?? "UNKNOWN";
|
|
||||||
|
|
||||||
// Log auth catch-all hits for monitoring and debugging
|
// Log auth catch-all hits for monitoring and debugging
|
||||||
this.logger.debug(`Auth catch-all: ${method} ${requestPath} from ${clientIp}`);
|
this.logger.debug(`Auth catch-all: ${req.method} ${req.url} from ${clientIp}`);
|
||||||
|
|
||||||
const auth = this.authService.getAuth();
|
const handler = this.authService.getNodeHandler();
|
||||||
return auth.handler(req);
|
|
||||||
|
try {
|
||||||
|
await handler(req, res);
|
||||||
|
|
||||||
|
// BetterAuth writes responses directly — catch silent 500s that bypass NestJS error handling
|
||||||
|
if (res.statusCode >= 500) {
|
||||||
|
this.logger.error(
|
||||||
|
`BetterAuth returned ${String(res.statusCode)} for ${req.method} ${req.url} from ${clientIp}` +
|
||||||
|
` — check container stdout for '# SERVER_ERROR' details`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} catch (error: unknown) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
|
const stack = error instanceof Error ? error.stack : undefined;
|
||||||
|
|
||||||
|
this.logger.error(
|
||||||
|
`BetterAuth handler error: ${req.method} ${req.url} from ${clientIp} - ${message}`,
|
||||||
|
stack
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!res.headersSent) {
|
||||||
|
const mappedError = this.mapToHttpException(error);
|
||||||
|
if (mappedError) {
|
||||||
|
throw mappedError;
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new HttpException(
|
||||||
|
"Unable to complete authentication. Please try again in a moment.",
|
||||||
|
HttpStatus.INTERNAL_SERVER_ERROR
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.error(
|
||||||
|
`Headers already sent for failed auth request ${req.method} ${req.url} — client may have received partial response`
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Extract client IP from request, handling proxies
|
* Extract client IP from request, handling proxies
|
||||||
*/
|
*/
|
||||||
private getClientIp(req: Request): string {
|
private getClientIp(req: ExpressRequest): string {
|
||||||
const reqWithHeaders = req as unknown as {
|
|
||||||
headers?: Record<string, string | string[] | undefined>;
|
|
||||||
ip?: string;
|
|
||||||
socket?: { remoteAddress?: string };
|
|
||||||
};
|
|
||||||
|
|
||||||
// Check X-Forwarded-For header (for reverse proxy setups)
|
// Check X-Forwarded-For header (for reverse proxy setups)
|
||||||
const forwardedFor = reqWithHeaders.headers?.["x-forwarded-for"];
|
const forwardedFor = req.headers["x-forwarded-for"];
|
||||||
if (forwardedFor) {
|
if (forwardedFor) {
|
||||||
const ips = Array.isArray(forwardedFor) ? forwardedFor[0] : forwardedFor;
|
const ips = Array.isArray(forwardedFor) ? forwardedFor[0] : forwardedFor;
|
||||||
return ips?.split(",")[0]?.trim() ?? "unknown";
|
return ips?.split(",")[0]?.trim() ?? "unknown";
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fall back to direct IP
|
// Fall back to direct IP
|
||||||
return reqWithHeaders.ip ?? reqWithHeaders.socket?.remoteAddress ?? "unknown";
|
return req.ip ?? req.socket.remoteAddress ?? "unknown";
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Preserve known HTTP errors from BetterAuth/better-call instead of converting
|
||||||
|
* every failure into a generic 500.
|
||||||
|
*/
|
||||||
|
private mapToHttpException(error: unknown): HttpException | null {
|
||||||
|
if (error instanceof HttpException) {
|
||||||
|
return error;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!error || typeof error !== "object") {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const statusCode = "statusCode" in error ? error.statusCode : undefined;
|
||||||
|
if (!this.isHttpStatus(statusCode)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const responseBody = "body" in error && error.body !== undefined ? error.body : undefined;
|
||||||
|
if (
|
||||||
|
responseBody !== undefined &&
|
||||||
|
responseBody !== null &&
|
||||||
|
(typeof responseBody === "string" || typeof responseBody === "object")
|
||||||
|
) {
|
||||||
|
return new HttpException(responseBody, statusCode);
|
||||||
|
}
|
||||||
|
|
||||||
|
const message =
|
||||||
|
"message" in error && typeof error.message === "string" && error.message.length > 0
|
||||||
|
? error.message
|
||||||
|
: "Authentication request failed";
|
||||||
|
return new HttpException(message, statusCode);
|
||||||
|
}
|
||||||
|
|
||||||
|
private isHttpStatus(value: unknown): value is number {
|
||||||
|
if (typeof value !== "number" || !Number.isInteger(value)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return value >= 400 && value <= 599;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,11 +3,14 @@ import { PrismaModule } from "../prisma/prisma.module";
|
|||||||
import { AuthService } from "./auth.service";
|
import { AuthService } from "./auth.service";
|
||||||
import { AuthController } from "./auth.controller";
|
import { AuthController } from "./auth.controller";
|
||||||
import { AuthGuard } from "./guards/auth.guard";
|
import { AuthGuard } from "./guards/auth.guard";
|
||||||
|
import { LocalAuthController } from "./local/local-auth.controller";
|
||||||
|
import { LocalAuthService } from "./local/local-auth.service";
|
||||||
|
import { LocalAuthEnabledGuard } from "./local/local-auth.guard";
|
||||||
|
|
||||||
@Module({
|
@Module({
|
||||||
imports: [PrismaModule],
|
imports: [PrismaModule],
|
||||||
controllers: [AuthController],
|
controllers: [AuthController, LocalAuthController],
|
||||||
providers: [AuthService, AuthGuard],
|
providers: [AuthService, AuthGuard, LocalAuthService, LocalAuthEnabledGuard],
|
||||||
exports: [AuthService, AuthGuard],
|
exports: [AuthService, AuthGuard],
|
||||||
})
|
})
|
||||||
export class AuthModule {}
|
export class AuthModule {}
|
||||||
|
|||||||
@@ -23,10 +23,17 @@ describe("AuthController - Rate Limiting", () => {
|
|||||||
let app: INestApplication;
|
let app: INestApplication;
|
||||||
let loggerSpy: ReturnType<typeof vi.spyOn>;
|
let loggerSpy: ReturnType<typeof vi.spyOn>;
|
||||||
|
|
||||||
|
const mockNodeHandler = vi.fn(
|
||||||
|
(_req: unknown, res: { statusCode: number; end: (body: string) => void }) => {
|
||||||
|
res.statusCode = 200;
|
||||||
|
res.end(JSON.stringify({}));
|
||||||
|
return Promise.resolve();
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
const mockAuthService = {
|
const mockAuthService = {
|
||||||
getAuth: vi.fn().mockReturnValue({
|
getAuth: vi.fn(),
|
||||||
handler: vi.fn().mockResolvedValue({ status: 200, body: {} }),
|
getNodeHandler: vi.fn().mockReturnValue(mockNodeHandler),
|
||||||
}),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
@@ -76,7 +83,7 @@ describe("AuthController - Rate Limiting", () => {
|
|||||||
expect(response.status).not.toBe(HttpStatus.TOO_MANY_REQUESTS);
|
expect(response.status).not.toBe(HttpStatus.TOO_MANY_REQUESTS);
|
||||||
}
|
}
|
||||||
|
|
||||||
expect(mockAuthService.getAuth).toHaveBeenCalledTimes(3);
|
expect(mockAuthService.getNodeHandler).toHaveBeenCalledTimes(3);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should return 429 when rate limit is exceeded", async () => {
|
it("should return 429 when rate limit is exceeded", async () => {
|
||||||
|
|||||||
@@ -1,5 +1,26 @@
|
|||||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||||
import { Test, TestingModule } from "@nestjs/testing";
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
|
||||||
|
// Mock better-auth modules before importing AuthService
|
||||||
|
vi.mock("better-auth/node", () => ({
|
||||||
|
toNodeHandler: vi.fn().mockReturnValue(vi.fn()),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth", () => ({
|
||||||
|
betterAuth: vi.fn().mockReturnValue({
|
||||||
|
handler: vi.fn(),
|
||||||
|
api: { getSession: vi.fn() },
|
||||||
|
}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth/adapters/prisma", () => ({
|
||||||
|
prismaAdapter: vi.fn().mockReturnValue({}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth/plugins", () => ({
|
||||||
|
genericOAuth: vi.fn().mockReturnValue({ id: "generic-oauth" }),
|
||||||
|
}));
|
||||||
|
|
||||||
import { AuthService } from "./auth.service";
|
import { AuthService } from "./auth.service";
|
||||||
import { PrismaService } from "../prisma/prisma.service";
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
|
||||||
@@ -30,6 +51,12 @@ describe("AuthService", () => {
|
|||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks();
|
||||||
|
delete process.env.OIDC_ENABLED;
|
||||||
|
delete process.env.OIDC_ISSUER;
|
||||||
|
});
|
||||||
|
|
||||||
describe("getAuth", () => {
|
describe("getAuth", () => {
|
||||||
it("should return BetterAuth instance", () => {
|
it("should return BetterAuth instance", () => {
|
||||||
const auth = service.getAuth();
|
const auth = service.getAuth();
|
||||||
@@ -62,6 +89,23 @@ describe("AuthService", () => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should return null when user is not found", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
const result = await service.getUserById("nonexistent-id");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
expect(mockPrismaService.user.findUnique).toHaveBeenCalledWith({
|
||||||
|
where: { id: "nonexistent-id" },
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
email: true,
|
||||||
|
name: true,
|
||||||
|
authProviderId: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("getUserByEmail", () => {
|
describe("getUserByEmail", () => {
|
||||||
@@ -88,6 +132,269 @@ describe("AuthService", () => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should return null when user is not found", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
const result = await service.getUserByEmail("unknown@example.com");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
expect(mockPrismaService.user.findUnique).toHaveBeenCalledWith({
|
||||||
|
where: { email: "unknown@example.com" },
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
email: true,
|
||||||
|
name: true,
|
||||||
|
authProviderId: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("isOidcProviderReachable", () => {
|
||||||
|
const discoveryUrl = "https://auth.example.com/.well-known/openid-configuration";
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||||
|
// Reset the cache by accessing private fields via bracket notation
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0;
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthResult = false;
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).consecutiveHealthFailures = 0;
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return true when discovery URL returns 200", async () => {
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({
|
||||||
|
ok: true,
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const result = await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(result).toBe(true);
|
||||||
|
expect(mockFetch).toHaveBeenCalledWith(discoveryUrl, {
|
||||||
|
signal: expect.any(AbortSignal) as AbortSignal,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return false on network error", async () => {
|
||||||
|
const mockFetch = vi.fn().mockRejectedValue(new Error("ECONNREFUSED"));
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const result = await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(result).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return false on timeout", async () => {
|
||||||
|
const mockFetch = vi.fn().mockRejectedValue(new DOMException("The operation was aborted"));
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const result = await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(result).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return false when discovery URL returns non-200", async () => {
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({
|
||||||
|
ok: false,
|
||||||
|
status: 503,
|
||||||
|
});
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const result = await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(result).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should cache result for 30 seconds", async () => {
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({
|
||||||
|
ok: true,
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
// First call - fetches
|
||||||
|
const result1 = await service.isOidcProviderReachable();
|
||||||
|
expect(result1).toBe(true);
|
||||||
|
expect(mockFetch).toHaveBeenCalledTimes(1);
|
||||||
|
|
||||||
|
// Second call within 30s - uses cache
|
||||||
|
const result2 = await service.isOidcProviderReachable();
|
||||||
|
expect(result2).toBe(true);
|
||||||
|
expect(mockFetch).toHaveBeenCalledTimes(1); // Still 1, no new fetch
|
||||||
|
|
||||||
|
// Simulate cache expiry by moving lastHealthCheck back
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = Date.now() - 31_000;
|
||||||
|
|
||||||
|
// Third call after cache expiry - fetches again
|
||||||
|
const result3 = await service.isOidcProviderReachable();
|
||||||
|
expect(result3).toBe(true);
|
||||||
|
expect(mockFetch).toHaveBeenCalledTimes(2); // Now 2
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should cache false results too", async () => {
|
||||||
|
const mockFetch = vi
|
||||||
|
.fn()
|
||||||
|
.mockRejectedValueOnce(new Error("ECONNREFUSED"))
|
||||||
|
.mockResolvedValueOnce({ ok: true, status: 200 });
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
// First call - fails
|
||||||
|
const result1 = await service.isOidcProviderReachable();
|
||||||
|
expect(result1).toBe(false);
|
||||||
|
expect(mockFetch).toHaveBeenCalledTimes(1);
|
||||||
|
|
||||||
|
// Second call within 30s - returns cached false
|
||||||
|
const result2 = await service.isOidcProviderReachable();
|
||||||
|
expect(result2).toBe(false);
|
||||||
|
expect(mockFetch).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should escalate to error level after 3 consecutive failures", async () => {
|
||||||
|
const mockFetch = vi.fn().mockRejectedValue(new Error("ECONNREFUSED"));
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||||
|
const loggerError = vi.spyOn(service["logger"], "error");
|
||||||
|
|
||||||
|
// Failures 1 and 2 should log at warn level
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0; // Reset cache
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(loggerWarn).toHaveBeenCalledTimes(2);
|
||||||
|
expect(loggerError).not.toHaveBeenCalled();
|
||||||
|
|
||||||
|
// Failure 3 should escalate to error level
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0;
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(loggerError).toHaveBeenCalledTimes(1);
|
||||||
|
expect(loggerError).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining("OIDC provider unreachable")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should escalate to error level after 3 consecutive non-OK responses", async () => {
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({ ok: false, status: 503 });
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||||
|
const loggerError = vi.spyOn(service["logger"], "error");
|
||||||
|
|
||||||
|
// Failures 1 and 2 at warn level
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0;
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(loggerWarn).toHaveBeenCalledTimes(2);
|
||||||
|
expect(loggerError).not.toHaveBeenCalled();
|
||||||
|
|
||||||
|
// Failure 3 at error level
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0;
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(loggerError).toHaveBeenCalledTimes(1);
|
||||||
|
expect(loggerError).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining("OIDC provider returned non-OK status")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reset failure counter and log recovery on success after failures", async () => {
|
||||||
|
const mockFetch = vi
|
||||||
|
.fn()
|
||||||
|
.mockRejectedValueOnce(new Error("ECONNREFUSED"))
|
||||||
|
.mockRejectedValueOnce(new Error("ECONNREFUSED"))
|
||||||
|
.mockResolvedValueOnce({ ok: true, status: 200 });
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const loggerLog = vi.spyOn(service["logger"], "log");
|
||||||
|
|
||||||
|
// Two failures
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0;
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0;
|
||||||
|
|
||||||
|
// Recovery
|
||||||
|
const result = await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(result).toBe(true);
|
||||||
|
expect(loggerLog).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining("OIDC provider recovered after 2 consecutive failure(s)")
|
||||||
|
);
|
||||||
|
// Verify counter reset
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
expect((service as any).consecutiveHealthFailures).toBe(0);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("getAuthConfig", () => {
|
||||||
|
it("should return only email provider when OIDC is disabled", async () => {
|
||||||
|
delete process.env.OIDC_ENABLED;
|
||||||
|
|
||||||
|
const result = await service.getAuthConfig();
|
||||||
|
|
||||||
|
expect(result).toEqual({
|
||||||
|
providers: [{ id: "email", name: "Email", type: "credentials" }],
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return both email and authentik providers when OIDC is enabled and reachable", async () => {
|
||||||
|
process.env.OIDC_ENABLED = "true";
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||||
|
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({ ok: true, status: 200 });
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const result = await service.getAuthConfig();
|
||||||
|
|
||||||
|
expect(result).toEqual({
|
||||||
|
providers: [
|
||||||
|
{ id: "email", name: "Email", type: "credentials" },
|
||||||
|
{ id: "authentik", name: "Authentik", type: "oauth" },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return only email provider when OIDC_ENABLED is false", async () => {
|
||||||
|
process.env.OIDC_ENABLED = "false";
|
||||||
|
|
||||||
|
const result = await service.getAuthConfig();
|
||||||
|
|
||||||
|
expect(result).toEqual({
|
||||||
|
providers: [{ id: "email", name: "Email", type: "credentials" }],
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should omit authentik when OIDC is enabled but provider is unreachable", async () => {
|
||||||
|
process.env.OIDC_ENABLED = "true";
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||||
|
|
||||||
|
// Reset cache
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0;
|
||||||
|
|
||||||
|
const mockFetch = vi.fn().mockRejectedValue(new Error("ECONNREFUSED"));
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const result = await service.getAuthConfig();
|
||||||
|
|
||||||
|
expect(result).toEqual({
|
||||||
|
providers: [{ id: "email", name: "Email", type: "credentials" }],
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("verifySession", () => {
|
describe("verifySession", () => {
|
||||||
@@ -103,7 +410,7 @@ describe("AuthService", () => {
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
it("should return session data for valid token", async () => {
|
it("should validate session token using secure BetterAuth cookie header", async () => {
|
||||||
const auth = service.getAuth();
|
const auth = service.getAuth();
|
||||||
const mockGetSession = vi.fn().mockResolvedValue(mockSessionData);
|
const mockGetSession = vi.fn().mockResolvedValue(mockSessionData);
|
||||||
auth.api = { getSession: mockGetSession } as any;
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
@@ -111,7 +418,58 @@ describe("AuthService", () => {
|
|||||||
const result = await service.verifySession("valid-token");
|
const result = await service.verifySession("valid-token");
|
||||||
|
|
||||||
expect(result).toEqual(mockSessionData);
|
expect(result).toEqual(mockSessionData);
|
||||||
|
expect(mockGetSession).toHaveBeenCalledTimes(1);
|
||||||
expect(mockGetSession).toHaveBeenCalledWith({
|
expect(mockGetSession).toHaveBeenCalledWith({
|
||||||
|
headers: {
|
||||||
|
cookie: "__Secure-better-auth.session_token=valid-token",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should preserve raw cookie token value without URL re-encoding", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockResolvedValue(mockSessionData);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("tok/with+=chars=");
|
||||||
|
|
||||||
|
expect(result).toEqual(mockSessionData);
|
||||||
|
expect(mockGetSession).toHaveBeenCalledWith({
|
||||||
|
headers: {
|
||||||
|
cookie: "__Secure-better-auth.session_token=tok/with+=chars=",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should fall back to Authorization header when cookie-based lookups miss", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi
|
||||||
|
.fn()
|
||||||
|
.mockResolvedValueOnce(null)
|
||||||
|
.mockResolvedValueOnce(null)
|
||||||
|
.mockResolvedValueOnce(null)
|
||||||
|
.mockResolvedValueOnce(mockSessionData);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("valid-token");
|
||||||
|
|
||||||
|
expect(result).toEqual(mockSessionData);
|
||||||
|
expect(mockGetSession).toHaveBeenNthCalledWith(1, {
|
||||||
|
headers: {
|
||||||
|
cookie: "__Secure-better-auth.session_token=valid-token",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
expect(mockGetSession).toHaveBeenNthCalledWith(2, {
|
||||||
|
headers: {
|
||||||
|
cookie: "better-auth.session_token=valid-token",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
expect(mockGetSession).toHaveBeenNthCalledWith(3, {
|
||||||
|
headers: {
|
||||||
|
cookie: "__Host-better-auth.session_token=valid-token",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
expect(mockGetSession).toHaveBeenNthCalledWith(4, {
|
||||||
headers: {
|
headers: {
|
||||||
authorization: "Bearer valid-token",
|
authorization: "Bearer valid-token",
|
||||||
},
|
},
|
||||||
@@ -128,14 +486,264 @@ describe("AuthService", () => {
|
|||||||
expect(result).toBeNull();
|
expect(result).toBeNull();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should return null and log error on verification failure", async () => {
|
it("should return null for 'invalid token' auth error", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Invalid token provided"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("bad-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for 'expired' auth error", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Token expired"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("expired-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for 'session not found' auth error", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Session not found"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("missing-session");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for 'unauthorized' auth error", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Unauthorized"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("unauth-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for 'invalid session' auth error", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Invalid session"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("invalid-session");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for 'session expired' auth error", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Session expired"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("expired-session");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for bare 'unauthorized' (exact match)", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("unauthorized"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("unauth-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for bare 'expired' (exact match)", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("expired"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("expired-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should re-throw 'certificate has expired' as infrastructure error (not auth)", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("certificate has expired"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
await expect(service.verifySession("any-token")).rejects.toThrow("certificate has expired");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should re-throw 'Unauthorized: Access denied for user' as infrastructure error (not auth)", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi
|
||||||
|
.fn()
|
||||||
|
.mockRejectedValue(new Error("Unauthorized: Access denied for user"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
await expect(service.verifySession("any-token")).rejects.toThrow(
|
||||||
|
"Unauthorized: Access denied for user"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null when a non-Error value is thrown", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue("string-error");
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("any-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null when getSession throws a non-Error value (string)", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue("some error");
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("any-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null when getSession throws a non-Error value (object)", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue({ code: "ERR_UNKNOWN" });
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("any-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should re-throw unexpected errors that are not known auth errors", async () => {
|
||||||
const auth = service.getAuth();
|
const auth = service.getAuth();
|
||||||
const mockGetSession = vi.fn().mockRejectedValue(new Error("Verification failed"));
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Verification failed"));
|
||||||
auth.api = { getSession: mockGetSession } as any;
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
const result = await service.verifySession("error-token");
|
await expect(service.verifySession("error-token")).rejects.toThrow("Verification failed");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should re-throw Prisma infrastructure errors", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const prismaError = new Error("connect ECONNREFUSED 127.0.0.1:5432");
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(prismaError);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
await expect(service.verifySession("any-token")).rejects.toThrow("ECONNREFUSED");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should re-throw timeout errors as infrastructure errors", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const timeoutError = new Error("Connection timeout after 5000ms");
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(timeoutError);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
await expect(service.verifySession("any-token")).rejects.toThrow("timeout");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should re-throw errors with Prisma-prefixed constructor name", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
class PrismaClientKnownRequestError extends Error {
|
||||||
|
constructor(message: string) {
|
||||||
|
super(message);
|
||||||
|
this.name = "PrismaClientKnownRequestError";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const prismaError = new PrismaClientKnownRequestError("Database connection lost");
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(prismaError);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
await expect(service.verifySession("any-token")).rejects.toThrow("Database connection lost");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should redact Bearer tokens from logged error messages", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const errorWithToken = new Error(
|
||||||
|
"Request failed: Bearer eyJhbGciOiJIUzI1NiJ9.secret-payload in header"
|
||||||
|
);
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(errorWithToken);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const loggerError = vi.spyOn(service["logger"], "error");
|
||||||
|
|
||||||
|
await expect(service.verifySession("any-token")).rejects.toThrow();
|
||||||
|
|
||||||
|
expect(loggerError).toHaveBeenCalledWith(
|
||||||
|
"Session verification failed due to unexpected error",
|
||||||
|
expect.stringContaining("Bearer [REDACTED]")
|
||||||
|
);
|
||||||
|
expect(loggerError).toHaveBeenCalledWith(
|
||||||
|
"Session verification failed due to unexpected error",
|
||||||
|
expect.not.stringContaining("eyJhbGciOiJIUzI1NiJ9")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should redact Bearer tokens from error stack traces", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const errorWithToken = new Error("Something went wrong");
|
||||||
|
errorWithToken.stack =
|
||||||
|
"Error: Something went wrong\n at fetch (Bearer abc123-secret-token)\n at verifySession";
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(errorWithToken);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const loggerError = vi.spyOn(service["logger"], "error");
|
||||||
|
|
||||||
|
await expect(service.verifySession("any-token")).rejects.toThrow();
|
||||||
|
|
||||||
|
expect(loggerError).toHaveBeenCalledWith(
|
||||||
|
"Session verification failed due to unexpected error",
|
||||||
|
expect.stringContaining("Bearer [REDACTED]")
|
||||||
|
);
|
||||||
|
expect(loggerError).toHaveBeenCalledWith(
|
||||||
|
"Session verification failed due to unexpected error",
|
||||||
|
expect.not.stringContaining("abc123-secret-token")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should warn when a non-Error string value is thrown", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue("string-error");
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||||
|
|
||||||
|
const result = await service.verifySession("any-token");
|
||||||
|
|
||||||
expect(result).toBeNull();
|
expect(result).toBeNull();
|
||||||
|
expect(loggerWarn).toHaveBeenCalledWith(
|
||||||
|
"Session verification received non-Error thrown value",
|
||||||
|
"string-error"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should warn with JSON when a non-Error object is thrown", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue({ code: "ERR_UNKNOWN" });
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||||
|
|
||||||
|
const result = await service.verifySession("any-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
expect(loggerWarn).toHaveBeenCalledWith(
|
||||||
|
"Session verification received non-Error thrown value",
|
||||||
|
JSON.stringify({ code: "ERR_UNKNOWN" })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not warn for expected auth errors (Error instances)", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Invalid token provided"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||||
|
|
||||||
|
const result = await service.verifySession("bad-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
expect(loggerWarn).not.toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,17 +1,49 @@
|
|||||||
import { Injectable, Logger } from "@nestjs/common";
|
import { Injectable, Logger } from "@nestjs/common";
|
||||||
import type { PrismaClient } from "@prisma/client";
|
import type { PrismaClient } from "@prisma/client";
|
||||||
|
import type { IncomingMessage, ServerResponse } from "http";
|
||||||
|
import { toNodeHandler } from "better-auth/node";
|
||||||
|
import type { AuthConfigResponse, AuthProviderConfig } from "@mosaic/shared";
|
||||||
import { PrismaService } from "../prisma/prisma.service";
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
import { createAuth, type Auth } from "./auth.config";
|
import { createAuth, isOidcEnabled, type Auth } from "./auth.config";
|
||||||
|
|
||||||
|
/** Duration in milliseconds to cache the OIDC health check result */
|
||||||
|
const OIDC_HEALTH_CACHE_TTL_MS = 30_000;
|
||||||
|
|
||||||
|
/** Timeout in milliseconds for the OIDC discovery URL fetch */
|
||||||
|
const OIDC_HEALTH_TIMEOUT_MS = 2_000;
|
||||||
|
|
||||||
|
/** Number of consecutive health-check failures before escalating to error level */
|
||||||
|
const HEALTH_ESCALATION_THRESHOLD = 3;
|
||||||
|
|
||||||
|
/** Verified session shape returned by BetterAuth's getSession */
|
||||||
|
interface VerifiedSession {
|
||||||
|
user: Record<string, unknown>;
|
||||||
|
session: Record<string, unknown>;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface SessionHeaderCandidate {
|
||||||
|
headers: Record<string, string>;
|
||||||
|
}
|
||||||
|
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class AuthService {
|
export class AuthService {
|
||||||
private readonly logger = new Logger(AuthService.name);
|
private readonly logger = new Logger(AuthService.name);
|
||||||
private readonly auth: Auth;
|
private readonly auth: Auth;
|
||||||
|
private readonly nodeHandler: (req: IncomingMessage, res: ServerResponse) => Promise<void>;
|
||||||
|
|
||||||
|
/** Timestamp of the last OIDC health check */
|
||||||
|
private lastHealthCheck = 0;
|
||||||
|
/** Cached result of the last OIDC health check */
|
||||||
|
private lastHealthResult = false;
|
||||||
|
/** Consecutive OIDC health check failure count for log-level escalation */
|
||||||
|
private consecutiveHealthFailures = 0;
|
||||||
|
|
||||||
constructor(private readonly prisma: PrismaService) {
|
constructor(private readonly prisma: PrismaService) {
|
||||||
// PrismaService extends PrismaClient and is compatible with BetterAuth's adapter
|
// PrismaService extends PrismaClient and is compatible with BetterAuth's adapter
|
||||||
// Cast is safe as PrismaService provides all required PrismaClient methods
|
// Cast is safe as PrismaService provides all required PrismaClient methods
|
||||||
|
// TODO(#411): BetterAuth returns opaque types — replace when upstream exports typed interfaces
|
||||||
this.auth = createAuth(this.prisma as unknown as PrismaClient);
|
this.auth = createAuth(this.prisma as unknown as PrismaClient);
|
||||||
|
this.nodeHandler = toNodeHandler(this.auth);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -21,6 +53,14 @@ export class AuthService {
|
|||||||
return this.auth;
|
return this.auth;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get Node.js-compatible request handler for BetterAuth.
|
||||||
|
* Wraps BetterAuth's Web API handler to work with Express/Node.js req/res.
|
||||||
|
*/
|
||||||
|
getNodeHandler(): (req: IncomingMessage, res: ServerResponse) => Promise<void> {
|
||||||
|
return this.nodeHandler;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get user by ID
|
* Get user by ID
|
||||||
*/
|
*/
|
||||||
@@ -63,32 +103,159 @@ export class AuthService {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Verify session token
|
* Verify session token
|
||||||
* Returns session data if valid, null if invalid or expired
|
* Returns session data if valid, null if invalid or expired.
|
||||||
|
* Only known-safe auth errors return null; everything else propagates as 500.
|
||||||
*/
|
*/
|
||||||
async verifySession(
|
async verifySession(token: string): Promise<VerifiedSession | null> {
|
||||||
token: string
|
let sawNonError = false;
|
||||||
): Promise<{ user: Record<string, unknown>; session: Record<string, unknown> } | null> {
|
|
||||||
try {
|
for (const candidate of this.buildSessionHeaderCandidates(token)) {
|
||||||
const session = await this.auth.api.getSession({
|
try {
|
||||||
|
// TODO(#411): BetterAuth getSession returns opaque types — replace when upstream exports typed interfaces
|
||||||
|
const session = await this.auth.api.getSession(candidate);
|
||||||
|
|
||||||
|
if (!session) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
user: session.user as Record<string, unknown>,
|
||||||
|
session: session.session as Record<string, unknown>,
|
||||||
|
};
|
||||||
|
} catch (error: unknown) {
|
||||||
|
if (error instanceof Error) {
|
||||||
|
if (this.isExpectedAuthError(error.message)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Infrastructure or unexpected — propagate as 500
|
||||||
|
const safeMessage = (error.stack ?? error.message).replace(
|
||||||
|
/Bearer\s+\S+/gi,
|
||||||
|
"Bearer [REDACTED]"
|
||||||
|
);
|
||||||
|
this.logger.error("Session verification failed due to unexpected error", safeMessage);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Non-Error thrown values — log once for observability, treat as auth failure
|
||||||
|
if (!sawNonError) {
|
||||||
|
const errorDetail = typeof error === "string" ? error : JSON.stringify(error);
|
||||||
|
this.logger.warn("Session verification received non-Error thrown value", errorDetail);
|
||||||
|
sawNonError = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
private buildSessionHeaderCandidates(token: string): SessionHeaderCandidate[] {
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
cookie: `__Secure-better-auth.session_token=${token}`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
cookie: `better-auth.session_token=${token}`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
cookie: `__Host-better-auth.session_token=${token}`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
headers: {
|
headers: {
|
||||||
authorization: `Bearer ${token}`,
|
authorization: `Bearer ${token}`,
|
||||||
},
|
},
|
||||||
|
},
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
private isExpectedAuthError(message: string): boolean {
|
||||||
|
const normalized = message.toLowerCase();
|
||||||
|
return (
|
||||||
|
normalized.includes("invalid token") ||
|
||||||
|
normalized.includes("token expired") ||
|
||||||
|
normalized.includes("session expired") ||
|
||||||
|
normalized.includes("session not found") ||
|
||||||
|
normalized.includes("invalid session") ||
|
||||||
|
normalized === "unauthorized" ||
|
||||||
|
normalized === "expired"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the OIDC provider (Authentik) is reachable by fetching the discovery URL.
|
||||||
|
* Results are cached for 30 seconds to prevent repeated network calls.
|
||||||
|
*
|
||||||
|
* @returns true if the provider responds with an HTTP 2xx status, false otherwise
|
||||||
|
*/
|
||||||
|
async isOidcProviderReachable(): Promise<boolean> {
|
||||||
|
const now = Date.now();
|
||||||
|
|
||||||
|
// Return cached result if still valid
|
||||||
|
if (now - this.lastHealthCheck < OIDC_HEALTH_CACHE_TTL_MS) {
|
||||||
|
this.logger.debug("OIDC health check: returning cached result");
|
||||||
|
return this.lastHealthResult;
|
||||||
|
}
|
||||||
|
|
||||||
|
const discoveryUrl = `${process.env.OIDC_ISSUER ?? ""}.well-known/openid-configuration`;
|
||||||
|
this.logger.debug(`OIDC health check: fetching ${discoveryUrl}`);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(discoveryUrl, {
|
||||||
|
signal: AbortSignal.timeout(OIDC_HEALTH_TIMEOUT_MS),
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!session) {
|
this.lastHealthCheck = Date.now();
|
||||||
return null;
|
this.lastHealthResult = response.ok;
|
||||||
|
|
||||||
|
if (response.ok) {
|
||||||
|
if (this.consecutiveHealthFailures > 0) {
|
||||||
|
this.logger.log(
|
||||||
|
`OIDC provider recovered after ${String(this.consecutiveHealthFailures)} consecutive failure(s)`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
this.consecutiveHealthFailures = 0;
|
||||||
|
} else {
|
||||||
|
this.consecutiveHealthFailures++;
|
||||||
|
const logLevel =
|
||||||
|
this.consecutiveHealthFailures >= HEALTH_ESCALATION_THRESHOLD ? "error" : "warn";
|
||||||
|
this.logger[logLevel](
|
||||||
|
`OIDC provider returned non-OK status: ${String(response.status)} from ${discoveryUrl}`
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return this.lastHealthResult;
|
||||||
user: session.user as Record<string, unknown>,
|
} catch (error: unknown) {
|
||||||
session: session.session as Record<string, unknown>,
|
this.lastHealthCheck = Date.now();
|
||||||
};
|
this.lastHealthResult = false;
|
||||||
} catch (error) {
|
this.consecutiveHealthFailures++;
|
||||||
this.logger.error(
|
|
||||||
"Session verification failed",
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
error instanceof Error ? error.message : "Unknown error"
|
const logLevel =
|
||||||
);
|
this.consecutiveHealthFailures >= HEALTH_ESCALATION_THRESHOLD ? "error" : "warn";
|
||||||
return null;
|
this.logger[logLevel](`OIDC provider unreachable at ${discoveryUrl}: ${message}`);
|
||||||
|
|
||||||
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get authentication configuration for the frontend.
|
||||||
|
* Returns available auth providers so the UI can render login options dynamically.
|
||||||
|
* When OIDC is enabled, performs a health check to verify the provider is reachable.
|
||||||
|
*/
|
||||||
|
async getAuthConfig(): Promise<AuthConfigResponse> {
|
||||||
|
const providers: AuthProviderConfig[] = [{ id: "email", name: "Email", type: "credentials" }];
|
||||||
|
|
||||||
|
if (isOidcEnabled() && (await this.isOidcProviderReachable())) {
|
||||||
|
providers.push({ id: "authentik", name: "Authentik", type: "oauth" });
|
||||||
|
}
|
||||||
|
|
||||||
|
return { providers };
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,14 +1,13 @@
|
|||||||
import type { ExecutionContext } from "@nestjs/common";
|
import type { ExecutionContext } from "@nestjs/common";
|
||||||
import { createParamDecorator, UnauthorizedException } from "@nestjs/common";
|
import { createParamDecorator, UnauthorizedException } from "@nestjs/common";
|
||||||
import type { AuthUser } from "@mosaic/shared";
|
import type { AuthUser } from "@mosaic/shared";
|
||||||
|
import type { MaybeAuthenticatedRequest } from "../types/better-auth-request.interface";
|
||||||
interface RequestWithUser {
|
|
||||||
user?: AuthUser;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const CurrentUser = createParamDecorator(
|
export const CurrentUser = createParamDecorator(
|
||||||
(_data: unknown, ctx: ExecutionContext): AuthUser => {
|
(_data: unknown, ctx: ExecutionContext): AuthUser => {
|
||||||
const request = ctx.switchToHttp().getRequest<RequestWithUser>();
|
// Use MaybeAuthenticatedRequest because the decorator doesn't know
|
||||||
|
// whether AuthGuard ran — the null check provides defense-in-depth.
|
||||||
|
const request = ctx.switchToHttp().getRequest<MaybeAuthenticatedRequest>();
|
||||||
if (!request.user) {
|
if (!request.user) {
|
||||||
throw new UnauthorizedException("No authenticated user found on request");
|
throw new UnauthorizedException("No authenticated user found on request");
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,30 +1,39 @@
|
|||||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
import { Test, TestingModule } from "@nestjs/testing";
|
|
||||||
import { ExecutionContext, UnauthorizedException } from "@nestjs/common";
|
import { ExecutionContext, UnauthorizedException } from "@nestjs/common";
|
||||||
|
|
||||||
|
// Mock better-auth modules before importing AuthGuard (which imports AuthService)
|
||||||
|
vi.mock("better-auth/node", () => ({
|
||||||
|
toNodeHandler: vi.fn().mockReturnValue(vi.fn()),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth", () => ({
|
||||||
|
betterAuth: vi.fn().mockReturnValue({
|
||||||
|
handler: vi.fn(),
|
||||||
|
api: { getSession: vi.fn() },
|
||||||
|
}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth/adapters/prisma", () => ({
|
||||||
|
prismaAdapter: vi.fn().mockReturnValue({}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth/plugins", () => ({
|
||||||
|
genericOAuth: vi.fn().mockReturnValue({ id: "generic-oauth" }),
|
||||||
|
}));
|
||||||
|
|
||||||
import { AuthGuard } from "./auth.guard";
|
import { AuthGuard } from "./auth.guard";
|
||||||
import { AuthService } from "../auth.service";
|
import type { AuthService } from "../auth.service";
|
||||||
|
|
||||||
describe("AuthGuard", () => {
|
describe("AuthGuard", () => {
|
||||||
let guard: AuthGuard;
|
let guard: AuthGuard;
|
||||||
let authService: AuthService;
|
|
||||||
|
|
||||||
const mockAuthService = {
|
const mockAuthService = {
|
||||||
verifySession: vi.fn(),
|
verifySession: vi.fn(),
|
||||||
};
|
};
|
||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(() => {
|
||||||
const module: TestingModule = await Test.createTestingModule({
|
// Directly construct the guard with the mock to avoid NestJS DI issues
|
||||||
providers: [
|
guard = new AuthGuard(mockAuthService as unknown as AuthService);
|
||||||
AuthGuard,
|
|
||||||
{
|
|
||||||
provide: AuthService,
|
|
||||||
useValue: mockAuthService,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}).compile();
|
|
||||||
|
|
||||||
guard = module.get<AuthGuard>(AuthGuard);
|
|
||||||
authService = module.get<AuthService>(AuthService);
|
|
||||||
|
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
});
|
});
|
||||||
@@ -147,17 +156,134 @@ describe("AuthGuard", () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw UnauthorizedException if session verification fails", async () => {
|
it("should propagate non-auth errors as-is (not wrap as 401)", async () => {
|
||||||
mockAuthService.verifySession.mockRejectedValue(new Error("Verification failed"));
|
const infraError = new Error("connect ECONNREFUSED 127.0.0.1:5432");
|
||||||
|
mockAuthService.verifySession.mockRejectedValue(infraError);
|
||||||
|
|
||||||
const context = createMockExecutionContext({
|
const context = createMockExecutionContext({
|
||||||
authorization: "Bearer error-token",
|
authorization: "Bearer error-token",
|
||||||
});
|
});
|
||||||
|
|
||||||
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
await expect(guard.canActivate(context)).rejects.toThrow(infraError);
|
||||||
await expect(guard.canActivate(context)).rejects.toThrow("Authentication failed");
|
await expect(guard.canActivate(context)).rejects.not.toBeInstanceOf(UnauthorizedException);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should propagate database errors so GlobalExceptionFilter returns 500", async () => {
|
||||||
|
const dbError = new Error("PrismaClientKnownRequestError: Connection refused");
|
||||||
|
mockAuthService.verifySession.mockRejectedValue(dbError);
|
||||||
|
|
||||||
|
const context = createMockExecutionContext({
|
||||||
|
authorization: "Bearer valid-token",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(dbError);
|
||||||
|
await expect(guard.canActivate(context)).rejects.not.toBeInstanceOf(UnauthorizedException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should propagate timeout errors so GlobalExceptionFilter returns 503", async () => {
|
||||||
|
const timeoutError = new Error("Connection timeout after 5000ms");
|
||||||
|
mockAuthService.verifySession.mockRejectedValue(timeoutError);
|
||||||
|
|
||||||
|
const context = createMockExecutionContext({
|
||||||
|
authorization: "Bearer valid-token",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(timeoutError);
|
||||||
|
await expect(guard.canActivate(context)).rejects.not.toBeInstanceOf(UnauthorizedException);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("user data validation", () => {
|
||||||
|
const mockSession = {
|
||||||
|
id: "session-123",
|
||||||
|
token: "session-token",
|
||||||
|
expiresAt: new Date(Date.now() + 86400000),
|
||||||
|
};
|
||||||
|
|
||||||
|
it("should throw UnauthorizedException when user is missing id", async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({
|
||||||
|
user: { email: "a@b.com", name: "Test" },
|
||||||
|
session: mockSession,
|
||||||
|
});
|
||||||
|
|
||||||
|
const context = createMockExecutionContext({
|
||||||
|
authorization: "Bearer valid-token",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(
|
||||||
|
"Invalid user data in session"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw UnauthorizedException when user is missing email", async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({
|
||||||
|
user: { id: "1", name: "Test" },
|
||||||
|
session: mockSession,
|
||||||
|
});
|
||||||
|
|
||||||
|
const context = createMockExecutionContext({
|
||||||
|
authorization: "Bearer valid-token",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(
|
||||||
|
"Invalid user data in session"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw UnauthorizedException when user is missing name", async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({
|
||||||
|
user: { id: "1", email: "a@b.com" },
|
||||||
|
session: mockSession,
|
||||||
|
});
|
||||||
|
|
||||||
|
const context = createMockExecutionContext({
|
||||||
|
authorization: "Bearer valid-token",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(
|
||||||
|
"Invalid user data in session"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw UnauthorizedException when user is a string", async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({
|
||||||
|
user: "not-an-object",
|
||||||
|
session: mockSession,
|
||||||
|
});
|
||||||
|
|
||||||
|
const context = createMockExecutionContext({
|
||||||
|
authorization: "Bearer valid-token",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(
|
||||||
|
"Invalid user data in session"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reject when user is null (typeof null === 'object' causes TypeError on 'in' operator)", async () => {
|
||||||
|
// Note: typeof null === "object" in JS, so the guard's typeof check passes
|
||||||
|
// but "id" in null throws TypeError. The catch block propagates non-auth errors as-is.
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({
|
||||||
|
user: null,
|
||||||
|
session: mockSession,
|
||||||
|
});
|
||||||
|
|
||||||
|
const context = createMockExecutionContext({
|
||||||
|
authorization: "Bearer valid-token",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(TypeError);
|
||||||
|
await expect(guard.canActivate(context)).rejects.not.toBeInstanceOf(
|
||||||
|
UnauthorizedException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("request attachment", () => {
|
||||||
it("should attach user and session to request on success", async () => {
|
it("should attach user and session to request on success", async () => {
|
||||||
mockAuthService.verifySession.mockResolvedValue(mockSessionData);
|
mockAuthService.verifySession.mockResolvedValue(mockSessionData);
|
||||||
|
|
||||||
|
|||||||
@@ -1,23 +1,22 @@
|
|||||||
import { Injectable, CanActivate, ExecutionContext, UnauthorizedException } from "@nestjs/common";
|
import {
|
||||||
|
Injectable,
|
||||||
|
CanActivate,
|
||||||
|
ExecutionContext,
|
||||||
|
UnauthorizedException,
|
||||||
|
Logger,
|
||||||
|
} from "@nestjs/common";
|
||||||
import { AuthService } from "../auth.service";
|
import { AuthService } from "../auth.service";
|
||||||
import type { AuthUser } from "@mosaic/shared";
|
import type { AuthUser } from "@mosaic/shared";
|
||||||
|
import type { MaybeAuthenticatedRequest } from "../types/better-auth-request.interface";
|
||||||
/**
|
|
||||||
* Request type with authentication context
|
|
||||||
*/
|
|
||||||
interface AuthRequest {
|
|
||||||
user?: AuthUser;
|
|
||||||
session?: Record<string, unknown>;
|
|
||||||
headers: Record<string, string | string[] | undefined>;
|
|
||||||
cookies?: Record<string, string>;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class AuthGuard implements CanActivate {
|
export class AuthGuard implements CanActivate {
|
||||||
|
private readonly logger = new Logger(AuthGuard.name);
|
||||||
|
|
||||||
constructor(private readonly authService: AuthService) {}
|
constructor(private readonly authService: AuthService) {}
|
||||||
|
|
||||||
async canActivate(context: ExecutionContext): Promise<boolean> {
|
async canActivate(context: ExecutionContext): Promise<boolean> {
|
||||||
const request = context.switchToHttp().getRequest<AuthRequest>();
|
const request = context.switchToHttp().getRequest<MaybeAuthenticatedRequest>();
|
||||||
|
|
||||||
// Try to get token from either cookie (preferred) or Authorization header
|
// Try to get token from either cookie (preferred) or Authorization header
|
||||||
const token = this.extractToken(request);
|
const token = this.extractToken(request);
|
||||||
@@ -44,18 +43,19 @@ export class AuthGuard implements CanActivate {
|
|||||||
|
|
||||||
return true;
|
return true;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
// Re-throw if it's already an UnauthorizedException
|
|
||||||
if (error instanceof UnauthorizedException) {
|
if (error instanceof UnauthorizedException) {
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
throw new UnauthorizedException("Authentication failed");
|
// Infrastructure errors (DB down, connection refused, timeouts) must propagate
|
||||||
|
// as 500/503 via GlobalExceptionFilter — never mask as 401
|
||||||
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Extract token from cookie (preferred) or Authorization header
|
* Extract token from cookie (preferred) or Authorization header
|
||||||
*/
|
*/
|
||||||
private extractToken(request: AuthRequest): string | undefined {
|
private extractToken(request: MaybeAuthenticatedRequest): string | undefined {
|
||||||
// Try cookie first (BetterAuth default)
|
// Try cookie first (BetterAuth default)
|
||||||
const cookieToken = this.extractTokenFromCookie(request);
|
const cookieToken = this.extractTokenFromCookie(request);
|
||||||
if (cookieToken) {
|
if (cookieToken) {
|
||||||
@@ -67,21 +67,39 @@ export class AuthGuard implements CanActivate {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Extract token from cookie (BetterAuth stores session token in better-auth.session_token cookie)
|
* Extract token from cookie.
|
||||||
|
* BetterAuth may prefix the cookie name with "__Secure-" when running on HTTPS.
|
||||||
*/
|
*/
|
||||||
private extractTokenFromCookie(request: AuthRequest): string | undefined {
|
private extractTokenFromCookie(request: MaybeAuthenticatedRequest): string | undefined {
|
||||||
if (!request.cookies) {
|
// Express types `cookies` as `any`; cast to a known shape for type safety.
|
||||||
|
const cookies = request.cookies as Record<string, string> | undefined;
|
||||||
|
if (!cookies) {
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
// BetterAuth uses 'better-auth.session_token' as the cookie name by default
|
// BetterAuth default cookie name is "better-auth.session_token"
|
||||||
return request.cookies["better-auth.session_token"];
|
// When Secure cookies are enabled, BetterAuth prefixes with "__Secure-".
|
||||||
|
const candidates = [
|
||||||
|
"__Secure-better-auth.session_token",
|
||||||
|
"better-auth.session_token",
|
||||||
|
"__Host-better-auth.session_token",
|
||||||
|
] as const;
|
||||||
|
|
||||||
|
for (const name of candidates) {
|
||||||
|
const token = cookies[name];
|
||||||
|
if (token) {
|
||||||
|
this.logger.debug(`Session cookie found: ${name}`);
|
||||||
|
return token;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Extract token from Authorization header (Bearer token)
|
* Extract token from Authorization header (Bearer token)
|
||||||
*/
|
*/
|
||||||
private extractTokenFromHeader(request: AuthRequest): string | undefined {
|
private extractTokenFromHeader(request: MaybeAuthenticatedRequest): string | undefined {
|
||||||
const authHeader = request.headers.authorization;
|
const authHeader = request.headers.authorization;
|
||||||
if (typeof authHeader !== "string") {
|
if (typeof authHeader !== "string") {
|
||||||
return undefined;
|
return undefined;
|
||||||
|
|||||||
10
apps/api/src/auth/local/dto/local-login.dto.ts
Normal file
10
apps/api/src/auth/local/dto/local-login.dto.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import { IsEmail, IsString, MinLength } from "class-validator";
|
||||||
|
|
||||||
|
export class LocalLoginDto {
|
||||||
|
@IsEmail({}, { message: "email must be a valid email address" })
|
||||||
|
email!: string;
|
||||||
|
|
||||||
|
@IsString({ message: "password must be a string" })
|
||||||
|
@MinLength(1, { message: "password must not be empty" })
|
||||||
|
password!: string;
|
||||||
|
}
|
||||||
20
apps/api/src/auth/local/dto/local-setup.dto.ts
Normal file
20
apps/api/src/auth/local/dto/local-setup.dto.ts
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import { IsEmail, IsString, MinLength, MaxLength } from "class-validator";
|
||||||
|
|
||||||
|
export class LocalSetupDto {
|
||||||
|
@IsEmail({}, { message: "email must be a valid email address" })
|
||||||
|
email!: string;
|
||||||
|
|
||||||
|
@IsString({ message: "name must be a string" })
|
||||||
|
@MinLength(1, { message: "name must not be empty" })
|
||||||
|
@MaxLength(255, { message: "name must not exceed 255 characters" })
|
||||||
|
name!: string;
|
||||||
|
|
||||||
|
@IsString({ message: "password must be a string" })
|
||||||
|
@MinLength(12, { message: "password must be at least 12 characters" })
|
||||||
|
@MaxLength(128, { message: "password must not exceed 128 characters" })
|
||||||
|
password!: string;
|
||||||
|
|
||||||
|
@IsString({ message: "setupToken must be a string" })
|
||||||
|
@MinLength(1, { message: "setupToken must not be empty" })
|
||||||
|
setupToken!: string;
|
||||||
|
}
|
||||||
232
apps/api/src/auth/local/local-auth.controller.spec.ts
Normal file
232
apps/api/src/auth/local/local-auth.controller.spec.ts
Normal file
@@ -0,0 +1,232 @@
|
|||||||
|
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import {
|
||||||
|
NotFoundException,
|
||||||
|
ForbiddenException,
|
||||||
|
UnauthorizedException,
|
||||||
|
ConflictException,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { LocalAuthController } from "./local-auth.controller";
|
||||||
|
import { LocalAuthService } from "./local-auth.service";
|
||||||
|
import { LocalAuthEnabledGuard } from "./local-auth.guard";
|
||||||
|
|
||||||
|
describe("LocalAuthController", () => {
|
||||||
|
let controller: LocalAuthController;
|
||||||
|
let localAuthService: LocalAuthService;
|
||||||
|
|
||||||
|
const mockLocalAuthService = {
|
||||||
|
setup: vi.fn(),
|
||||||
|
login: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockRequest = {
|
||||||
|
headers: { "user-agent": "TestAgent/1.0" },
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
socket: { remoteAddress: "127.0.0.1" },
|
||||||
|
};
|
||||||
|
|
||||||
|
const originalEnv = {
|
||||||
|
ENABLE_LOCAL_AUTH: process.env.ENABLE_LOCAL_AUTH,
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
process.env.ENABLE_LOCAL_AUTH = "true";
|
||||||
|
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
controllers: [LocalAuthController],
|
||||||
|
providers: [
|
||||||
|
{
|
||||||
|
provide: LocalAuthService,
|
||||||
|
useValue: mockLocalAuthService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
})
|
||||||
|
.overrideGuard(LocalAuthEnabledGuard)
|
||||||
|
.useValue({ canActivate: () => true })
|
||||||
|
.compile();
|
||||||
|
|
||||||
|
controller = module.get<LocalAuthController>(LocalAuthController);
|
||||||
|
localAuthService = module.get<LocalAuthService>(LocalAuthService);
|
||||||
|
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks();
|
||||||
|
if (originalEnv.ENABLE_LOCAL_AUTH !== undefined) {
|
||||||
|
process.env.ENABLE_LOCAL_AUTH = originalEnv.ENABLE_LOCAL_AUTH;
|
||||||
|
} else {
|
||||||
|
delete process.env.ENABLE_LOCAL_AUTH;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("setup", () => {
|
||||||
|
const setupDto = {
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Break Glass Admin",
|
||||||
|
password: "securePassword123!",
|
||||||
|
setupToken: "valid-token-123",
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockSetupResult = {
|
||||||
|
user: {
|
||||||
|
id: "user-uuid-123",
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Break Glass Admin",
|
||||||
|
isLocalAuth: true,
|
||||||
|
createdAt: new Date("2026-02-28T00:00:00Z"),
|
||||||
|
},
|
||||||
|
session: {
|
||||||
|
token: "session-token-abc",
|
||||||
|
expiresAt: new Date("2026-03-07T00:00:00Z"),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
it("should create a break-glass user and return user data with session", async () => {
|
||||||
|
mockLocalAuthService.setup.mockResolvedValue(mockSetupResult);
|
||||||
|
|
||||||
|
const result = await controller.setup(setupDto, mockRequest as never);
|
||||||
|
|
||||||
|
expect(result).toEqual({
|
||||||
|
user: mockSetupResult.user,
|
||||||
|
session: mockSetupResult.session,
|
||||||
|
});
|
||||||
|
expect(mockLocalAuthService.setup).toHaveBeenCalledWith(
|
||||||
|
"admin@example.com",
|
||||||
|
"Break Glass Admin",
|
||||||
|
"securePassword123!",
|
||||||
|
"valid-token-123",
|
||||||
|
"127.0.0.1",
|
||||||
|
"TestAgent/1.0"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should extract client IP from x-forwarded-for header", async () => {
|
||||||
|
mockLocalAuthService.setup.mockResolvedValue(mockSetupResult);
|
||||||
|
const reqWithProxy = {
|
||||||
|
...mockRequest,
|
||||||
|
headers: {
|
||||||
|
...mockRequest.headers,
|
||||||
|
"x-forwarded-for": "203.0.113.50, 70.41.3.18",
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
await controller.setup(setupDto, reqWithProxy as never);
|
||||||
|
|
||||||
|
expect(mockLocalAuthService.setup).toHaveBeenCalledWith(
|
||||||
|
expect.any(String) as string,
|
||||||
|
expect.any(String) as string,
|
||||||
|
expect.any(String) as string,
|
||||||
|
expect.any(String) as string,
|
||||||
|
"203.0.113.50",
|
||||||
|
"TestAgent/1.0"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should propagate ForbiddenException from service", async () => {
|
||||||
|
mockLocalAuthService.setup.mockRejectedValue(new ForbiddenException("Invalid setup token"));
|
||||||
|
|
||||||
|
await expect(controller.setup(setupDto, mockRequest as never)).rejects.toThrow(
|
||||||
|
ForbiddenException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should propagate ConflictException from service", async () => {
|
||||||
|
mockLocalAuthService.setup.mockRejectedValue(
|
||||||
|
new ConflictException("A user with this email already exists")
|
||||||
|
);
|
||||||
|
|
||||||
|
await expect(controller.setup(setupDto, mockRequest as never)).rejects.toThrow(
|
||||||
|
ConflictException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("login", () => {
|
||||||
|
const loginDto = {
|
||||||
|
email: "admin@example.com",
|
||||||
|
password: "securePassword123!",
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockLoginResult = {
|
||||||
|
user: {
|
||||||
|
id: "user-uuid-123",
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Break Glass Admin",
|
||||||
|
},
|
||||||
|
session: {
|
||||||
|
token: "session-token-abc",
|
||||||
|
expiresAt: new Date("2026-03-07T00:00:00Z"),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
it("should authenticate and return user data with session", async () => {
|
||||||
|
mockLocalAuthService.login.mockResolvedValue(mockLoginResult);
|
||||||
|
|
||||||
|
const result = await controller.login(loginDto, mockRequest as never);
|
||||||
|
|
||||||
|
expect(result).toEqual({
|
||||||
|
user: mockLoginResult.user,
|
||||||
|
session: mockLoginResult.session,
|
||||||
|
});
|
||||||
|
expect(mockLocalAuthService.login).toHaveBeenCalledWith(
|
||||||
|
"admin@example.com",
|
||||||
|
"securePassword123!",
|
||||||
|
"127.0.0.1",
|
||||||
|
"TestAgent/1.0"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should propagate UnauthorizedException from service", async () => {
|
||||||
|
mockLocalAuthService.login.mockRejectedValue(
|
||||||
|
new UnauthorizedException("Invalid email or password")
|
||||||
|
);
|
||||||
|
|
||||||
|
await expect(controller.login(loginDto, mockRequest as never)).rejects.toThrow(
|
||||||
|
UnauthorizedException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("LocalAuthEnabledGuard", () => {
|
||||||
|
let guard: LocalAuthEnabledGuard;
|
||||||
|
|
||||||
|
const originalEnv = process.env.ENABLE_LOCAL_AUTH;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
guard = new LocalAuthEnabledGuard();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
if (originalEnv !== undefined) {
|
||||||
|
process.env.ENABLE_LOCAL_AUTH = originalEnv;
|
||||||
|
} else {
|
||||||
|
delete process.env.ENABLE_LOCAL_AUTH;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should allow access when ENABLE_LOCAL_AUTH is true", () => {
|
||||||
|
process.env.ENABLE_LOCAL_AUTH = "true";
|
||||||
|
|
||||||
|
expect(guard.canActivate()).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException when ENABLE_LOCAL_AUTH is not set", () => {
|
||||||
|
delete process.env.ENABLE_LOCAL_AUTH;
|
||||||
|
|
||||||
|
expect(() => guard.canActivate()).toThrow(NotFoundException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException when ENABLE_LOCAL_AUTH is false", () => {
|
||||||
|
process.env.ENABLE_LOCAL_AUTH = "false";
|
||||||
|
|
||||||
|
expect(() => guard.canActivate()).toThrow(NotFoundException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw NotFoundException when ENABLE_LOCAL_AUTH is empty", () => {
|
||||||
|
process.env.ENABLE_LOCAL_AUTH = "";
|
||||||
|
|
||||||
|
expect(() => guard.canActivate()).toThrow(NotFoundException);
|
||||||
|
});
|
||||||
|
});
|
||||||
81
apps/api/src/auth/local/local-auth.controller.ts
Normal file
81
apps/api/src/auth/local/local-auth.controller.ts
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
import {
|
||||||
|
Controller,
|
||||||
|
Post,
|
||||||
|
Body,
|
||||||
|
UseGuards,
|
||||||
|
Req,
|
||||||
|
Logger,
|
||||||
|
HttpCode,
|
||||||
|
HttpStatus,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { Throttle } from "@nestjs/throttler";
|
||||||
|
import type { Request as ExpressRequest } from "express";
|
||||||
|
import { SkipCsrf } from "../../common/decorators/skip-csrf.decorator";
|
||||||
|
import { LocalAuthService } from "./local-auth.service";
|
||||||
|
import { LocalAuthEnabledGuard } from "./local-auth.guard";
|
||||||
|
import { LocalLoginDto } from "./dto/local-login.dto";
|
||||||
|
import { LocalSetupDto } from "./dto/local-setup.dto";
|
||||||
|
|
||||||
|
@Controller("auth/local")
|
||||||
|
@UseGuards(LocalAuthEnabledGuard)
|
||||||
|
export class LocalAuthController {
|
||||||
|
private readonly logger = new Logger(LocalAuthController.name);
|
||||||
|
|
||||||
|
constructor(private readonly localAuthService: LocalAuthService) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* First-time break-glass user creation.
|
||||||
|
* Requires BREAKGLASS_SETUP_TOKEN from environment.
|
||||||
|
*/
|
||||||
|
@Post("setup")
|
||||||
|
@SkipCsrf()
|
||||||
|
@Throttle({ strict: { limit: 5, ttl: 60000 } })
|
||||||
|
async setup(@Body() dto: LocalSetupDto, @Req() req: ExpressRequest) {
|
||||||
|
const ipAddress = this.getClientIp(req);
|
||||||
|
const userAgent = req.headers["user-agent"];
|
||||||
|
|
||||||
|
this.logger.log(`Break-glass setup attempt from ${ipAddress}`);
|
||||||
|
|
||||||
|
const result = await this.localAuthService.setup(
|
||||||
|
dto.email,
|
||||||
|
dto.name,
|
||||||
|
dto.password,
|
||||||
|
dto.setupToken,
|
||||||
|
ipAddress,
|
||||||
|
userAgent
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
user: result.user,
|
||||||
|
session: result.session,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Break-glass login with email + password.
|
||||||
|
*/
|
||||||
|
@Post("login")
|
||||||
|
@SkipCsrf()
|
||||||
|
@HttpCode(HttpStatus.OK)
|
||||||
|
@Throttle({ strict: { limit: 10, ttl: 60000 } })
|
||||||
|
async login(@Body() dto: LocalLoginDto, @Req() req: ExpressRequest) {
|
||||||
|
const ipAddress = this.getClientIp(req);
|
||||||
|
const userAgent = req.headers["user-agent"];
|
||||||
|
|
||||||
|
const result = await this.localAuthService.login(dto.email, dto.password, ipAddress, userAgent);
|
||||||
|
|
||||||
|
return {
|
||||||
|
user: result.user,
|
||||||
|
session: result.session,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private getClientIp(req: ExpressRequest): string {
|
||||||
|
const forwardedFor = req.headers["x-forwarded-for"];
|
||||||
|
if (forwardedFor) {
|
||||||
|
const ips = Array.isArray(forwardedFor) ? forwardedFor[0] : forwardedFor;
|
||||||
|
return ips?.split(",")[0]?.trim() ?? "unknown";
|
||||||
|
}
|
||||||
|
return req.ip ?? req.socket.remoteAddress ?? "unknown";
|
||||||
|
}
|
||||||
|
}
|
||||||
15
apps/api/src/auth/local/local-auth.guard.ts
Normal file
15
apps/api/src/auth/local/local-auth.guard.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
import { Injectable, CanActivate, NotFoundException } from "@nestjs/common";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Guard that checks if local authentication is enabled via ENABLE_LOCAL_AUTH env var.
|
||||||
|
* Returns 404 when disabled so endpoints are invisible to callers.
|
||||||
|
*/
|
||||||
|
@Injectable()
|
||||||
|
export class LocalAuthEnabledGuard implements CanActivate {
|
||||||
|
canActivate(): boolean {
|
||||||
|
if (process.env.ENABLE_LOCAL_AUTH !== "true") {
|
||||||
|
throw new NotFoundException();
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
389
apps/api/src/auth/local/local-auth.service.spec.ts
Normal file
389
apps/api/src/auth/local/local-auth.service.spec.ts
Normal file
@@ -0,0 +1,389 @@
|
|||||||
|
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import {
|
||||||
|
ConflictException,
|
||||||
|
ForbiddenException,
|
||||||
|
InternalServerErrorException,
|
||||||
|
UnauthorizedException,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { hash } from "bcryptjs";
|
||||||
|
import { LocalAuthService } from "./local-auth.service";
|
||||||
|
import { PrismaService } from "../../prisma/prisma.service";
|
||||||
|
|
||||||
|
describe("LocalAuthService", () => {
|
||||||
|
let service: LocalAuthService;
|
||||||
|
|
||||||
|
const mockTxSession = {
|
||||||
|
create: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockTxWorkspace = {
|
||||||
|
findFirst: vi.fn(),
|
||||||
|
create: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockTxWorkspaceMember = {
|
||||||
|
create: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockTxUser = {
|
||||||
|
create: vi.fn(),
|
||||||
|
findUnique: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockTx = {
|
||||||
|
user: mockTxUser,
|
||||||
|
workspace: mockTxWorkspace,
|
||||||
|
workspaceMember: mockTxWorkspaceMember,
|
||||||
|
session: mockTxSession,
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockPrismaService = {
|
||||||
|
user: {
|
||||||
|
findUnique: vi.fn(),
|
||||||
|
},
|
||||||
|
session: {
|
||||||
|
create: vi.fn(),
|
||||||
|
},
|
||||||
|
$transaction: vi
|
||||||
|
.fn()
|
||||||
|
.mockImplementation((fn: (tx: typeof mockTx) => Promise<unknown>) => fn(mockTx)),
|
||||||
|
};
|
||||||
|
|
||||||
|
const originalEnv = {
|
||||||
|
BREAKGLASS_SETUP_TOKEN: process.env.BREAKGLASS_SETUP_TOKEN,
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
LocalAuthService,
|
||||||
|
{
|
||||||
|
provide: PrismaService,
|
||||||
|
useValue: mockPrismaService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
service = module.get<LocalAuthService>(LocalAuthService);
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks();
|
||||||
|
if (originalEnv.BREAKGLASS_SETUP_TOKEN !== undefined) {
|
||||||
|
process.env.BREAKGLASS_SETUP_TOKEN = originalEnv.BREAKGLASS_SETUP_TOKEN;
|
||||||
|
} else {
|
||||||
|
delete process.env.BREAKGLASS_SETUP_TOKEN;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("setup", () => {
|
||||||
|
const validSetupArgs = {
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Break Glass Admin",
|
||||||
|
password: "securePassword123!",
|
||||||
|
setupToken: "valid-token-123",
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockCreatedUser = {
|
||||||
|
id: "user-uuid-123",
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Break Glass Admin",
|
||||||
|
isLocalAuth: true,
|
||||||
|
createdAt: new Date("2026-02-28T00:00:00Z"),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockWorkspace = {
|
||||||
|
id: "workspace-uuid-123",
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
process.env.BREAKGLASS_SETUP_TOKEN = "valid-token-123";
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
mockTxUser.create.mockResolvedValue(mockCreatedUser);
|
||||||
|
mockTxWorkspace.findFirst.mockResolvedValue(mockWorkspace);
|
||||||
|
mockTxWorkspaceMember.create.mockResolvedValue({});
|
||||||
|
mockTxSession.create.mockResolvedValue({});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should create a local auth user with hashed password", async () => {
|
||||||
|
const result = await service.setup(
|
||||||
|
validSetupArgs.email,
|
||||||
|
validSetupArgs.name,
|
||||||
|
validSetupArgs.password,
|
||||||
|
validSetupArgs.setupToken
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(result.user).toEqual(mockCreatedUser);
|
||||||
|
expect(result.session.token).toBeDefined();
|
||||||
|
expect(result.session.token.length).toBeGreaterThan(0);
|
||||||
|
expect(result.session.expiresAt).toBeInstanceOf(Date);
|
||||||
|
expect(result.session.expiresAt.getTime()).toBeGreaterThan(Date.now());
|
||||||
|
|
||||||
|
expect(mockTxUser.create).toHaveBeenCalledWith({
|
||||||
|
data: expect.objectContaining({
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Break Glass Admin",
|
||||||
|
isLocalAuth: true,
|
||||||
|
emailVerified: true,
|
||||||
|
passwordHash: expect.any(String) as string,
|
||||||
|
}),
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
email: true,
|
||||||
|
name: true,
|
||||||
|
isLocalAuth: true,
|
||||||
|
createdAt: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should assign OWNER role on default workspace", async () => {
|
||||||
|
await service.setup(
|
||||||
|
validSetupArgs.email,
|
||||||
|
validSetupArgs.name,
|
||||||
|
validSetupArgs.password,
|
||||||
|
validSetupArgs.setupToken
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(mockTxWorkspaceMember.create).toHaveBeenCalledWith({
|
||||||
|
data: {
|
||||||
|
workspaceId: "workspace-uuid-123",
|
||||||
|
userId: "user-uuid-123",
|
||||||
|
role: "OWNER",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should create a new workspace if none exists", async () => {
|
||||||
|
mockTxWorkspace.findFirst.mockResolvedValue(null);
|
||||||
|
mockTxWorkspace.create.mockResolvedValue({ id: "new-workspace-uuid" });
|
||||||
|
|
||||||
|
await service.setup(
|
||||||
|
validSetupArgs.email,
|
||||||
|
validSetupArgs.name,
|
||||||
|
validSetupArgs.password,
|
||||||
|
validSetupArgs.setupToken
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(mockTxWorkspace.create).toHaveBeenCalledWith({
|
||||||
|
data: {
|
||||||
|
name: "Default Workspace",
|
||||||
|
ownerId: "user-uuid-123",
|
||||||
|
settings: {},
|
||||||
|
},
|
||||||
|
select: { id: true },
|
||||||
|
});
|
||||||
|
expect(mockTxWorkspaceMember.create).toHaveBeenCalledWith({
|
||||||
|
data: {
|
||||||
|
workspaceId: "new-workspace-uuid",
|
||||||
|
userId: "user-uuid-123",
|
||||||
|
role: "OWNER",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should create a BetterAuth-compatible session", async () => {
|
||||||
|
await service.setup(
|
||||||
|
validSetupArgs.email,
|
||||||
|
validSetupArgs.name,
|
||||||
|
validSetupArgs.password,
|
||||||
|
validSetupArgs.setupToken,
|
||||||
|
"192.168.1.1",
|
||||||
|
"TestAgent/1.0"
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(mockTxSession.create).toHaveBeenCalledWith({
|
||||||
|
data: {
|
||||||
|
userId: "user-uuid-123",
|
||||||
|
token: expect.any(String) as string,
|
||||||
|
expiresAt: expect.any(Date) as Date,
|
||||||
|
ipAddress: "192.168.1.1",
|
||||||
|
userAgent: "TestAgent/1.0",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reject when BREAKGLASS_SETUP_TOKEN is not set", async () => {
|
||||||
|
delete process.env.BREAKGLASS_SETUP_TOKEN;
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
service.setup(
|
||||||
|
validSetupArgs.email,
|
||||||
|
validSetupArgs.name,
|
||||||
|
validSetupArgs.password,
|
||||||
|
validSetupArgs.setupToken
|
||||||
|
)
|
||||||
|
).rejects.toThrow(ForbiddenException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reject when BREAKGLASS_SETUP_TOKEN is empty", async () => {
|
||||||
|
process.env.BREAKGLASS_SETUP_TOKEN = "";
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
service.setup(
|
||||||
|
validSetupArgs.email,
|
||||||
|
validSetupArgs.name,
|
||||||
|
validSetupArgs.password,
|
||||||
|
validSetupArgs.setupToken
|
||||||
|
)
|
||||||
|
).rejects.toThrow(ForbiddenException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reject when setup token does not match", async () => {
|
||||||
|
await expect(
|
||||||
|
service.setup(
|
||||||
|
validSetupArgs.email,
|
||||||
|
validSetupArgs.name,
|
||||||
|
validSetupArgs.password,
|
||||||
|
"wrong-token"
|
||||||
|
)
|
||||||
|
).rejects.toThrow(ForbiddenException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reject when email already exists", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue({
|
||||||
|
id: "existing-user",
|
||||||
|
email: "admin@example.com",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
service.setup(
|
||||||
|
validSetupArgs.email,
|
||||||
|
validSetupArgs.name,
|
||||||
|
validSetupArgs.password,
|
||||||
|
validSetupArgs.setupToken
|
||||||
|
)
|
||||||
|
).rejects.toThrow(ConflictException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return session token and expiry", async () => {
|
||||||
|
const result = await service.setup(
|
||||||
|
validSetupArgs.email,
|
||||||
|
validSetupArgs.name,
|
||||||
|
validSetupArgs.password,
|
||||||
|
validSetupArgs.setupToken
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(typeof result.session.token).toBe("string");
|
||||||
|
expect(result.session.token.length).toBe(64); // 32 bytes hex
|
||||||
|
expect(result.session.expiresAt).toBeInstanceOf(Date);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("login", () => {
|
||||||
|
const validPasswordHash = "$2a$12$LJ3m4ys3Lz/YgP7xYz5k5uU6b5F6X1234567890abcdefghijkl";
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
// Create a real bcrypt hash for testing
|
||||||
|
const realHash = await hash("securePassword123!", 4); // Low rounds for test speed
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue({
|
||||||
|
id: "user-uuid-123",
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Break Glass Admin",
|
||||||
|
isLocalAuth: true,
|
||||||
|
passwordHash: realHash,
|
||||||
|
deactivatedAt: null,
|
||||||
|
});
|
||||||
|
mockPrismaService.session.create.mockResolvedValue({});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should authenticate a valid local auth user", async () => {
|
||||||
|
const result = await service.login("admin@example.com", "securePassword123!");
|
||||||
|
|
||||||
|
expect(result.user).toEqual({
|
||||||
|
id: "user-uuid-123",
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Break Glass Admin",
|
||||||
|
});
|
||||||
|
expect(result.session.token).toBeDefined();
|
||||||
|
expect(result.session.expiresAt).toBeInstanceOf(Date);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should create a session with ip and user agent", async () => {
|
||||||
|
await service.login("admin@example.com", "securePassword123!", "10.0.0.1", "Mozilla/5.0");
|
||||||
|
|
||||||
|
expect(mockPrismaService.session.create).toHaveBeenCalledWith({
|
||||||
|
data: {
|
||||||
|
userId: "user-uuid-123",
|
||||||
|
token: expect.any(String) as string,
|
||||||
|
expiresAt: expect.any(Date) as Date,
|
||||||
|
ipAddress: "10.0.0.1",
|
||||||
|
userAgent: "Mozilla/5.0",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reject when user does not exist", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await expect(service.login("nonexistent@example.com", "password123456")).rejects.toThrow(
|
||||||
|
UnauthorizedException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reject when user is not a local auth user", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue({
|
||||||
|
id: "user-uuid-123",
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "OIDC User",
|
||||||
|
isLocalAuth: false,
|
||||||
|
passwordHash: null,
|
||||||
|
deactivatedAt: null,
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(service.login("admin@example.com", "password123456")).rejects.toThrow(
|
||||||
|
UnauthorizedException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reject when user is deactivated", async () => {
|
||||||
|
const realHash = await hash("securePassword123!", 4);
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue({
|
||||||
|
id: "user-uuid-123",
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Deactivated User",
|
||||||
|
isLocalAuth: true,
|
||||||
|
passwordHash: realHash,
|
||||||
|
deactivatedAt: new Date("2026-01-01"),
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(service.login("admin@example.com", "securePassword123!")).rejects.toThrow(
|
||||||
|
new UnauthorizedException("Account has been deactivated")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reject when password is incorrect", async () => {
|
||||||
|
await expect(service.login("admin@example.com", "wrongPassword123!")).rejects.toThrow(
|
||||||
|
UnauthorizedException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw InternalServerError when local auth user has no password hash", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue({
|
||||||
|
id: "user-uuid-123",
|
||||||
|
email: "admin@example.com",
|
||||||
|
name: "Broken User",
|
||||||
|
isLocalAuth: true,
|
||||||
|
passwordHash: null,
|
||||||
|
deactivatedAt: null,
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(service.login("admin@example.com", "securePassword123!")).rejects.toThrow(
|
||||||
|
InternalServerErrorException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not reveal whether email exists in error messages", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
try {
|
||||||
|
await service.login("nonexistent@example.com", "password123456");
|
||||||
|
} catch (error) {
|
||||||
|
expect(error).toBeInstanceOf(UnauthorizedException);
|
||||||
|
expect((error as UnauthorizedException).message).toBe("Invalid email or password");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
230
apps/api/src/auth/local/local-auth.service.ts
Normal file
230
apps/api/src/auth/local/local-auth.service.ts
Normal file
@@ -0,0 +1,230 @@
|
|||||||
|
import {
|
||||||
|
Injectable,
|
||||||
|
Logger,
|
||||||
|
ForbiddenException,
|
||||||
|
UnauthorizedException,
|
||||||
|
ConflictException,
|
||||||
|
InternalServerErrorException,
|
||||||
|
} from "@nestjs/common";
|
||||||
|
import { WorkspaceMemberRole } from "@prisma/client";
|
||||||
|
import { hash, compare } from "bcryptjs";
|
||||||
|
import { randomBytes, timingSafeEqual } from "crypto";
|
||||||
|
import { PrismaService } from "../../prisma/prisma.service";
|
||||||
|
|
||||||
|
// bcrypt cost factor; 12 is the common 2020s-era baseline (~250ms per hash).
const BCRYPT_ROUNDS = 12;

/** Session expiry: 7 days (matches BetterAuth config in auth.config.ts) */
const SESSION_EXPIRY_MS = 7 * 24 * 60 * 60 * 1000;

/**
 * Result of the one-time break-glass setup flow: the freshly created
 * local-auth user plus an immediately usable session.
 */
interface SetupResult {
  user: {
    id: string;
    email: string;
    name: string;
    isLocalAuth: boolean;
    createdAt: Date;
  };
  session: {
    token: string;
    expiresAt: Date;
  };
}

/**
 * Result of a break-glass login: a minimal user projection and the
 * session token/expiry the caller should hand to the client.
 */
interface LoginResult {
  user: {
    id: string;
    email: string;
    name: string;
  };
  session: {
    token: string;
    expiresAt: Date;
  };
}
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class LocalAuthService {
|
||||||
|
private readonly logger = new Logger(LocalAuthService.name);
|
||||||
|
|
||||||
|
constructor(private readonly prisma: PrismaService) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* First-time break-glass user creation.
|
||||||
|
* Validates the setup token, creates a local auth user with bcrypt-hashed password,
|
||||||
|
* and assigns OWNER role on the default workspace.
|
||||||
|
*/
|
||||||
|
async setup(
|
||||||
|
email: string,
|
||||||
|
name: string,
|
||||||
|
password: string,
|
||||||
|
setupToken: string,
|
||||||
|
ipAddress?: string,
|
||||||
|
userAgent?: string
|
||||||
|
): Promise<SetupResult> {
|
||||||
|
this.validateSetupToken(setupToken);
|
||||||
|
|
||||||
|
const existing = await this.prisma.user.findUnique({ where: { email } });
|
||||||
|
if (existing) {
|
||||||
|
throw new ConflictException("A user with this email already exists");
|
||||||
|
}
|
||||||
|
|
||||||
|
const passwordHash = await hash(password, BCRYPT_ROUNDS);
|
||||||
|
|
||||||
|
const result = await this.prisma.$transaction(async (tx) => {
|
||||||
|
const user = await tx.user.create({
|
||||||
|
data: {
|
||||||
|
email,
|
||||||
|
name,
|
||||||
|
isLocalAuth: true,
|
||||||
|
passwordHash,
|
||||||
|
emailVerified: true,
|
||||||
|
},
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
email: true,
|
||||||
|
name: true,
|
||||||
|
isLocalAuth: true,
|
||||||
|
createdAt: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Find or create a default workspace and assign OWNER role
|
||||||
|
await this.assignDefaultWorkspace(tx, user.id);
|
||||||
|
|
||||||
|
// Create a BetterAuth-compatible session
|
||||||
|
const session = await this.createSession(tx, user.id, ipAddress, userAgent);
|
||||||
|
|
||||||
|
return { user, session };
|
||||||
|
});
|
||||||
|
|
||||||
|
this.logger.log(`Break-glass user created: ${email}`);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Break-glass login: verify email + password against bcrypt hash.
|
||||||
|
* Only works for users with isLocalAuth=true.
|
||||||
|
*/
|
||||||
|
async login(
|
||||||
|
email: string,
|
||||||
|
password: string,
|
||||||
|
ipAddress?: string,
|
||||||
|
userAgent?: string
|
||||||
|
): Promise<LoginResult> {
|
||||||
|
const user = await this.prisma.user.findUnique({
|
||||||
|
where: { email },
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
email: true,
|
||||||
|
name: true,
|
||||||
|
isLocalAuth: true,
|
||||||
|
passwordHash: true,
|
||||||
|
deactivatedAt: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!user?.isLocalAuth) {
|
||||||
|
throw new UnauthorizedException("Invalid email or password");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (user.deactivatedAt) {
|
||||||
|
throw new UnauthorizedException("Account has been deactivated");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!user.passwordHash) {
|
||||||
|
this.logger.error(`Local auth user ${email} has no password hash`);
|
||||||
|
throw new InternalServerErrorException("Account configuration error");
|
||||||
|
}
|
||||||
|
|
||||||
|
const passwordValid = await compare(password, user.passwordHash);
|
||||||
|
if (!passwordValid) {
|
||||||
|
throw new UnauthorizedException("Invalid email or password");
|
||||||
|
}
|
||||||
|
|
||||||
|
const session = await this.createSession(this.prisma, user.id, ipAddress, userAgent);
|
||||||
|
|
||||||
|
this.logger.log(`Break-glass login: ${email}`);
|
||||||
|
return {
|
||||||
|
user: { id: user.id, email: user.email, name: user.name },
|
||||||
|
session,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate the setup token against the environment variable.
|
||||||
|
*/
|
||||||
|
private validateSetupToken(token: string): void {
|
||||||
|
const expectedToken = process.env.BREAKGLASS_SETUP_TOKEN;
|
||||||
|
|
||||||
|
if (!expectedToken || expectedToken.trim() === "") {
|
||||||
|
throw new ForbiddenException(
|
||||||
|
"Break-glass setup is not configured. Set BREAKGLASS_SETUP_TOKEN environment variable."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const tokenBuffer = Buffer.from(token);
|
||||||
|
const expectedBuffer = Buffer.from(expectedToken);
|
||||||
|
if (
|
||||||
|
tokenBuffer.length !== expectedBuffer.length ||
|
||||||
|
!timingSafeEqual(tokenBuffer, expectedBuffer)
|
||||||
|
) {
|
||||||
|
this.logger.warn("Invalid break-glass setup token attempt");
|
||||||
|
throw new ForbiddenException("Invalid setup token");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find the first workspace or create a default one, then assign OWNER role.
|
||||||
|
*/
|
||||||
|
private async assignDefaultWorkspace(
|
||||||
|
tx: Parameters<Parameters<PrismaService["$transaction"]>[0]>[0],
|
||||||
|
userId: string
|
||||||
|
): Promise<void> {
|
||||||
|
let workspace = await tx.workspace.findFirst({
|
||||||
|
orderBy: { createdAt: "asc" },
|
||||||
|
select: { id: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
workspace ??= await tx.workspace.create({
|
||||||
|
data: {
|
||||||
|
name: "Default Workspace",
|
||||||
|
ownerId: userId,
|
||||||
|
settings: {},
|
||||||
|
},
|
||||||
|
select: { id: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
await tx.workspaceMember.create({
|
||||||
|
data: {
|
||||||
|
workspaceId: workspace.id,
|
||||||
|
userId,
|
||||||
|
role: WorkspaceMemberRole.OWNER,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a BetterAuth-compatible session record.
|
||||||
|
*/
|
||||||
|
private async createSession(
|
||||||
|
tx: { session: { create: typeof PrismaService.prototype.session.create } },
|
||||||
|
userId: string,
|
||||||
|
ipAddress?: string,
|
||||||
|
userAgent?: string
|
||||||
|
): Promise<{ token: string; expiresAt: Date }> {
|
||||||
|
const token = randomBytes(32).toString("hex");
|
||||||
|
const expiresAt = new Date(Date.now() + SESSION_EXPIRY_MS);
|
||||||
|
|
||||||
|
await tx.session.create({
|
||||||
|
data: {
|
||||||
|
userId,
|
||||||
|
token,
|
||||||
|
expiresAt,
|
||||||
|
ipAddress: ipAddress ?? null,
|
||||||
|
userAgent: userAgent ?? null,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return { token, expiresAt };
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,11 +1,14 @@
|
|||||||
/**
|
/**
|
||||||
* BetterAuth Request Type
|
* Unified request types for authentication context.
|
||||||
*
|
*
|
||||||
* BetterAuth expects a Request object compatible with the Fetch API standard.
|
* Replaces the previously scattered interfaces:
|
||||||
* This extends the web standard Request interface with additional properties
|
* - RequestWithSession (auth.controller.ts)
|
||||||
* that may be present in the Express request object at runtime.
|
* - AuthRequest (auth.guard.ts)
|
||||||
|
* - BetterAuthRequest (this file, removed)
|
||||||
|
* - RequestWithUser (current-user.decorator.ts)
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
import type { Request } from "express";
|
||||||
import type { AuthUser } from "@mosaic/shared";
|
import type { AuthUser } from "@mosaic/shared";
|
||||||
|
|
||||||
// Re-export AuthUser for use in other modules
|
// Re-export AuthUser for use in other modules
|
||||||
@@ -22,19 +25,21 @@ export interface RequestSession {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Web standard Request interface extended with Express-specific properties
|
* Request that may or may not have auth data (before guard runs).
|
||||||
* This matches the Fetch API Request specification that BetterAuth expects.
|
* Used by AuthGuard and other middleware that processes requests
|
||||||
|
* before authentication is confirmed.
|
||||||
*/
|
*/
|
||||||
export interface BetterAuthRequest extends Request {
|
export interface MaybeAuthenticatedRequest extends Request {
|
||||||
// Express route parameters
|
|
||||||
params?: Record<string, string>;
|
|
||||||
|
|
||||||
// Express query string parameters
|
|
||||||
query?: Record<string, string | string[]>;
|
|
||||||
|
|
||||||
// Session data attached by AuthGuard after successful authentication
|
|
||||||
session?: RequestSession;
|
|
||||||
|
|
||||||
// Authenticated user attached by AuthGuard
|
|
||||||
user?: AuthUser;
|
user?: AuthUser;
|
||||||
|
session?: Record<string, unknown>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Request with authenticated user attached by AuthGuard.
|
||||||
|
* After AuthGuard runs, user and session are guaranteed present.
|
||||||
|
* Use this type in controllers/decorators that sit behind AuthGuard.
|
||||||
|
*/
|
||||||
|
export interface AuthenticatedRequest extends Request {
|
||||||
|
user: AuthUser;
|
||||||
|
session: RequestSession;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -93,7 +93,10 @@ export class MatrixRoomService {
|
|||||||
select: { matrixRoomId: true },
|
select: { matrixRoomId: true },
|
||||||
});
|
});
|
||||||
|
|
||||||
return workspace?.matrixRoomId ?? null;
|
if (!workspace) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return workspace.matrixRoomId ?? null;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
151
apps/api/src/chat-proxy/chat-proxy.controller.ts
Normal file
151
apps/api/src/chat-proxy/chat-proxy.controller.ts
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
import { Body, Controller, HttpException, Logger, Post, Req, Res, UseGuards } from "@nestjs/common";
|
||||||
|
import type { Response } from "express";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { SkipCsrf } from "../common/decorators/skip-csrf.decorator";
|
||||||
|
import type { MaybeAuthenticatedRequest } from "../auth/types/better-auth-request.interface";
|
||||||
|
import { ChatStreamDto } from "./chat-proxy.dto";
|
||||||
|
import { ChatProxyService } from "./chat-proxy.service";
|
||||||
|
|
||||||
|
@Controller("chat")
|
||||||
|
export class ChatProxyController {
|
||||||
|
private readonly logger = new Logger(ChatProxyController.name);
|
||||||
|
|
||||||
|
constructor(private readonly chatProxyService: ChatProxyService) {}
|
||||||
|
|
||||||
|
// POST /api/chat/guest
|
||||||
|
// Guest chat endpoint - no authentication required
|
||||||
|
// Uses a shared LLM configuration for unauthenticated users
|
||||||
|
@SkipCsrf()
|
||||||
|
@Post("guest")
|
||||||
|
async guestChat(
|
||||||
|
@Body() body: ChatStreamDto,
|
||||||
|
@Req() req: MaybeAuthenticatedRequest,
|
||||||
|
@Res() res: Response
|
||||||
|
): Promise<void> {
|
||||||
|
const abortController = new AbortController();
|
||||||
|
req.once("close", () => {
|
||||||
|
abortController.abort();
|
||||||
|
});
|
||||||
|
|
||||||
|
res.setHeader("Content-Type", "text/event-stream");
|
||||||
|
res.setHeader("Cache-Control", "no-cache");
|
||||||
|
res.setHeader("Connection", "keep-alive");
|
||||||
|
res.setHeader("X-Accel-Buffering", "no");
|
||||||
|
|
||||||
|
try {
|
||||||
|
const upstreamResponse = await this.chatProxyService.proxyGuestChat(
|
||||||
|
body.messages,
|
||||||
|
abortController.signal
|
||||||
|
);
|
||||||
|
|
||||||
|
const upstreamContentType = upstreamResponse.headers.get("content-type");
|
||||||
|
if (upstreamContentType) {
|
||||||
|
res.setHeader("Content-Type", upstreamContentType);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!upstreamResponse.body) {
|
||||||
|
throw new Error("LLM response did not include a stream body");
|
||||||
|
}
|
||||||
|
|
||||||
|
for await (const chunk of upstreamResponse.body as unknown as AsyncIterable<Uint8Array>) {
|
||||||
|
if (res.writableEnded || res.destroyed) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
res.write(Buffer.from(chunk));
|
||||||
|
}
|
||||||
|
} catch (error: unknown) {
|
||||||
|
this.logStreamError(error);
|
||||||
|
|
||||||
|
if (!res.writableEnded && !res.destroyed) {
|
||||||
|
res.write("event: error\n");
|
||||||
|
res.write(`data: ${JSON.stringify({ error: this.toSafeClientMessage(error) })}\n\n`);
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
if (!res.writableEnded && !res.destroyed) {
|
||||||
|
res.end();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// POST /api/chat/stream
|
||||||
|
// Request: { messages: Array<{role, content}> }
|
||||||
|
// Response: SSE stream of chat completion events
|
||||||
|
// Requires authentication - uses user's personal OpenClaw container
|
||||||
|
@Post("stream")
|
||||||
|
@UseGuards(AuthGuard)
|
||||||
|
async streamChat(
|
||||||
|
@Body() body: ChatStreamDto,
|
||||||
|
@Req() req: MaybeAuthenticatedRequest,
|
||||||
|
@Res() res: Response
|
||||||
|
): Promise<void> {
|
||||||
|
const userId = req.user?.id;
|
||||||
|
if (!userId) {
|
||||||
|
this.logger.warn("streamChat called without user ID after AuthGuard");
|
||||||
|
throw new HttpException("Authentication required", 401);
|
||||||
|
}
|
||||||
|
|
||||||
|
const abortController = new AbortController();
|
||||||
|
req.once("close", () => {
|
||||||
|
abortController.abort();
|
||||||
|
});
|
||||||
|
|
||||||
|
res.setHeader("Content-Type", "text/event-stream");
|
||||||
|
res.setHeader("Cache-Control", "no-cache");
|
||||||
|
res.setHeader("Connection", "keep-alive");
|
||||||
|
res.setHeader("X-Accel-Buffering", "no");
|
||||||
|
|
||||||
|
try {
|
||||||
|
const upstreamResponse = await this.chatProxyService.proxyChat(
|
||||||
|
userId,
|
||||||
|
body.messages,
|
||||||
|
abortController.signal
|
||||||
|
);
|
||||||
|
|
||||||
|
const upstreamContentType = upstreamResponse.headers.get("content-type");
|
||||||
|
if (upstreamContentType) {
|
||||||
|
res.setHeader("Content-Type", upstreamContentType);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!upstreamResponse.body) {
|
||||||
|
throw new Error("OpenClaw response did not include a stream body");
|
||||||
|
}
|
||||||
|
|
||||||
|
for await (const chunk of upstreamResponse.body as unknown as AsyncIterable<Uint8Array>) {
|
||||||
|
if (res.writableEnded || res.destroyed) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
res.write(Buffer.from(chunk));
|
||||||
|
}
|
||||||
|
} catch (error: unknown) {
|
||||||
|
this.logStreamError(error);
|
||||||
|
|
||||||
|
if (!res.writableEnded && !res.destroyed) {
|
||||||
|
res.write("event: error\n");
|
||||||
|
res.write(`data: ${JSON.stringify({ error: this.toSafeClientMessage(error) })}\n\n`);
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
if (!res.writableEnded && !res.destroyed) {
|
||||||
|
res.end();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private toSafeClientMessage(error: unknown): string {
|
||||||
|
if (error instanceof HttpException && error.getStatus() < 500) {
|
||||||
|
return "Chat request was rejected";
|
||||||
|
}
|
||||||
|
|
||||||
|
return "Chat stream failed";
|
||||||
|
}
|
||||||
|
|
||||||
|
private logStreamError(error: unknown): void {
|
||||||
|
if (error instanceof Error) {
|
||||||
|
this.logger.warn(`Chat stream failed: ${error.message}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.warn(`Chat stream failed: ${String(error)}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
25
apps/api/src/chat-proxy/chat-proxy.dto.ts
Normal file
25
apps/api/src/chat-proxy/chat-proxy.dto.ts
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
import { Type } from "class-transformer";
|
||||||
|
import { ArrayMinSize, IsArray, IsNotEmpty, IsString, ValidateNested } from "class-validator";
|
||||||
|
|
||||||
|
/** Plain shape of a single chat message (role + content). */
export interface ChatMessage {
  role: string;
  content: string;
}

/**
 * Validated chat message.
 * Note: `role` accepts any non-empty string here; it is not restricted to
 * e.g. "user"/"assistant"/"system" at this layer.
 */
export class ChatMessageDto implements ChatMessage {
  @IsString({ message: "role must be a string" })
  @IsNotEmpty({ message: "role is required" })
  role!: string;

  @IsString({ message: "content must be a string" })
  @IsNotEmpty({ message: "content is required" })
  content!: string;
}

/**
 * Request body for the chat streaming endpoints: a non-empty, ordered
 * conversation history. @Type enables class-transformer to instantiate
 * nested ChatMessageDto items so @ValidateNested can check each one.
 */
export class ChatStreamDto {
  @IsArray({ message: "messages must be an array" })
  @ArrayMinSize(1, { message: "messages must contain at least one message" })
  @ValidateNested({ each: true })
  @Type(() => ChatMessageDto)
  messages!: ChatMessageDto[];
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user