Compare commits
111 Commits
ad428598a9
...
feat/ms19-
| Author | SHA1 | Date | |
|---|---|---|---|
| e41fedb3c2 | |||
| 5ba77d8952 | |||
| 7de0e734b0 | |||
| 6290fc3d53 | |||
| 9f4de1682f | |||
| 374ca7ace3 | |||
| 72c64d2eeb | |||
| 5f6c520a98 | |||
| 9a7673bea2 | |||
| 91934b9933 | |||
| 7f89682946 | |||
| 8b4c565f20 | |||
| d5ecc0b107 | |||
| a81c4a5edd | |||
| ff5a09c3fb | |||
| f93fa60fff | |||
| cc56f2cbe1 | |||
| f9cccd6965 | |||
| 90c3bbccdf | |||
| 79286e98c6 | |||
| cfd1def4a9 | |||
| f435d8e8c6 | |||
| 3d78b09064 | |||
| a7955b9b32 | |||
| 372cc100cc | |||
| 37cf813b88 | |||
| 3d5b50af11 | |||
| f30c2f790c | |||
| 05b1a93ccb | |||
| a78a8b88e1 | |||
| 172ed1d40f | |||
| ee2ddfc8b8 | |||
| 5a6d00a064 | |||
| ffda74ec12 | |||
| f97be2e6a3 | |||
| 97606713b5 | |||
| d0c720e6da | |||
| 64e817cfb8 | |||
| cd5c2218c8 | |||
| f643d2bc04 | |||
| 8957904ea9 | |||
| 458cac7cdd | |||
| 7581d26567 | |||
| 07f5225a76 | |||
| 7c55464d54 | |||
| ea1620fa7a | |||
| d218902cb0 | |||
| b43e860c40 | |||
| 716f230f72 | |||
| a5ed260fbd | |||
| 9b5c15ca56 | |||
| 74c8c376b7 | |||
| 9901fba61e | |||
| 17144b1c42 | |||
| a6f75cd587 | |||
| 06e54328d5 | |||
| 7480deff10 | |||
| 1b66417be5 | |||
| 23d610ba5b | |||
| 25ae14aba1 | |||
| 1425893318 | |||
| bc4c1f9c70 | |||
| d66451cf48 | |||
| c23ebca648 | |||
|
|
eae55bc4a3 | ||
| b5ac2630c1 | |||
| 8424a28faa | |||
| d2cec04cba | |||
| 9ac971e857 | |||
| 0c2a6b14cf | |||
| af299abdaf | |||
| fa9f173f8e | |||
| 7935d86015 | |||
| f43631671f | |||
| 8328f9509b | |||
| f72e8c2da9 | |||
| 1a668627a3 | |||
| bd3625ae1b | |||
| aeac188d40 | |||
| f219dd71a0 | |||
| 2c3c1f67ac | |||
| dedc1af080 | |||
| 3b16b2c743 | |||
|
|
6fd8e85266 | ||
|
|
d3474cdd74 | ||
| 157b702331 | |||
|
|
63c6a129bd | ||
| 4a4aee7b7c | |||
|
|
9d9a01f5f7 | ||
|
|
5bce7dbb05 | ||
|
|
ab902250f8 | ||
|
|
d34f097a5c | ||
|
|
f4ad7eba37 | ||
|
|
4d089cd020 | ||
|
|
3258cd4f4d | ||
| 35dd623ab5 | |||
|
|
758b2a839b | ||
| af113707d9 | |||
|
|
57d0f5d2a3 | ||
| 0a780a5062 | |||
| a1515676db | |||
| 027fee1afa | |||
| abe57621cd | |||
| 7c7ad59002 | |||
| ca430d6fdf | |||
| 18e5f6312b | |||
| d2ed1f2817 | |||
| fb609d40e3 | |||
| 0c93be417a | |||
| b719fa0444 | |||
| d58bf47cd7 |
117
.env.example
117
.env.example
@@ -15,11 +15,19 @@ WEB_PORT=3000
|
|||||||
# ======================
|
# ======================
|
||||||
NEXT_PUBLIC_APP_URL=http://localhost:3000
|
NEXT_PUBLIC_APP_URL=http://localhost:3000
|
||||||
NEXT_PUBLIC_API_URL=http://localhost:3001
|
NEXT_PUBLIC_API_URL=http://localhost:3001
|
||||||
|
# Frontend auth mode:
|
||||||
|
# - real: Normal auth/session flow
|
||||||
|
# - mock: Local-only seeded user for FE development (blocked outside NODE_ENV=development)
|
||||||
|
# Use `mock` locally to continue FE work when auth flow is unstable.
|
||||||
|
# If omitted, web runtime defaults:
|
||||||
|
# - development -> mock
|
||||||
|
# - production -> real
|
||||||
|
NEXT_PUBLIC_AUTH_MODE=real
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# PostgreSQL Database
|
# PostgreSQL Database
|
||||||
# ======================
|
# ======================
|
||||||
# Bundled PostgreSQL (when database profile enabled)
|
# Bundled PostgreSQL
|
||||||
# SECURITY: Change POSTGRES_PASSWORD to a strong random password in production
|
# SECURITY: Change POSTGRES_PASSWORD to a strong random password in production
|
||||||
DATABASE_URL=postgresql://mosaic:REPLACE_WITH_SECURE_PASSWORD@postgres:5432/mosaic
|
DATABASE_URL=postgresql://mosaic:REPLACE_WITH_SECURE_PASSWORD@postgres:5432/mosaic
|
||||||
POSTGRES_USER=mosaic
|
POSTGRES_USER=mosaic
|
||||||
@@ -28,7 +36,7 @@ POSTGRES_DB=mosaic
|
|||||||
POSTGRES_PORT=5432
|
POSTGRES_PORT=5432
|
||||||
|
|
||||||
# External PostgreSQL (managed service)
|
# External PostgreSQL (managed service)
|
||||||
# Disable 'database' profile and point DATABASE_URL to your external instance
|
# To use an external instance, update DATABASE_URL above
|
||||||
# Example: DATABASE_URL=postgresql://user:pass@rds.amazonaws.com:5432/mosaic
|
# Example: DATABASE_URL=postgresql://user:pass@rds.amazonaws.com:5432/mosaic
|
||||||
|
|
||||||
# PostgreSQL Performance Tuning (Optional)
|
# PostgreSQL Performance Tuning (Optional)
|
||||||
@@ -39,7 +47,7 @@ POSTGRES_MAX_CONNECTIONS=100
|
|||||||
# ======================
|
# ======================
|
||||||
# Valkey Cache (Redis-compatible)
|
# Valkey Cache (Redis-compatible)
|
||||||
# ======================
|
# ======================
|
||||||
# Bundled Valkey (when cache profile enabled)
|
# Bundled Valkey
|
||||||
VALKEY_URL=redis://valkey:6379
|
VALKEY_URL=redis://valkey:6379
|
||||||
VALKEY_HOST=valkey
|
VALKEY_HOST=valkey
|
||||||
VALKEY_PORT=6379
|
VALKEY_PORT=6379
|
||||||
@@ -47,7 +55,7 @@ VALKEY_PORT=6379
|
|||||||
VALKEY_MAXMEMORY=256mb
|
VALKEY_MAXMEMORY=256mb
|
||||||
|
|
||||||
# External Redis/Valkey (managed service)
|
# External Redis/Valkey (managed service)
|
||||||
# Disable 'cache' profile and point VALKEY_URL to your external instance
|
# To use an external instance, update VALKEY_URL above
|
||||||
# Example: VALKEY_URL=redis://elasticache.amazonaws.com:6379
|
# Example: VALKEY_URL=redis://elasticache.amazonaws.com:6379
|
||||||
# Example with auth: VALKEY_URL=redis://:password@redis.example.com:6379
|
# Example with auth: VALKEY_URL=redis://:password@redis.example.com:6379
|
||||||
|
|
||||||
@@ -70,9 +78,9 @@ OIDC_ISSUER=https://auth.example.com/application/o/mosaic-stack/
|
|||||||
OIDC_CLIENT_ID=your-client-id-here
|
OIDC_CLIENT_ID=your-client-id-here
|
||||||
OIDC_CLIENT_SECRET=your-client-secret-here
|
OIDC_CLIENT_SECRET=your-client-secret-here
|
||||||
# Redirect URI must match what's configured in Authentik
|
# Redirect URI must match what's configured in Authentik
|
||||||
# Development: http://localhost:3001/auth/callback/authentik
|
# Development: http://localhost:3001/auth/oauth2/callback/authentik
|
||||||
# Production: https://api.mosaicstack.dev/auth/callback/authentik
|
# Production: https://api.mosaicstack.dev/auth/oauth2/callback/authentik
|
||||||
OIDC_REDIRECT_URI=http://localhost:3001/auth/callback/authentik
|
OIDC_REDIRECT_URI=http://localhost:3001/auth/oauth2/callback/authentik
|
||||||
|
|
||||||
# Authentik PostgreSQL Database
|
# Authentik PostgreSQL Database
|
||||||
AUTHENTIK_POSTGRES_USER=authentik
|
AUTHENTIK_POSTGRES_USER=authentik
|
||||||
@@ -116,6 +124,9 @@ JWT_EXPIRATION=24h
|
|||||||
# This is used by BetterAuth for session management and CSRF protection
|
# This is used by BetterAuth for session management and CSRF protection
|
||||||
# Example: openssl rand -base64 32
|
# Example: openssl rand -base64 32
|
||||||
BETTER_AUTH_SECRET=REPLACE_WITH_RANDOM_SECRET_MINIMUM_32_CHARS
|
BETTER_AUTH_SECRET=REPLACE_WITH_RANDOM_SECRET_MINIMUM_32_CHARS
|
||||||
|
# Optional explicit BetterAuth origin for callback/error URL generation.
|
||||||
|
# When empty, backend falls back to NEXT_PUBLIC_API_URL.
|
||||||
|
BETTER_AUTH_URL=
|
||||||
|
|
||||||
# Trusted Origins (comma-separated list of additional trusted origins for CORS and auth)
|
# Trusted Origins (comma-separated list of additional trusted origins for CORS and auth)
|
||||||
# These are added to NEXT_PUBLIC_APP_URL and NEXT_PUBLIC_API_URL automatically
|
# These are added to NEXT_PUBLIC_APP_URL and NEXT_PUBLIC_API_URL automatically
|
||||||
@@ -204,11 +215,9 @@ NODE_ENV=development
|
|||||||
# Used by docker-compose.yml (pulls images) and docker-swarm.yml
|
# Used by docker-compose.yml (pulls images) and docker-swarm.yml
|
||||||
# For local builds, use docker-compose.build.yml instead
|
# For local builds, use docker-compose.build.yml instead
|
||||||
# Options:
|
# Options:
|
||||||
# - dev: Pull development images from registry (default, built from develop branch)
|
# - latest: Pull latest images from registry (default, built from main branch)
|
||||||
# - latest: Pull latest stable images from registry (built from main branch)
|
|
||||||
# - <commit-sha>: Use specific commit SHA tag (e.g., 658ec077)
|
|
||||||
# - <version>: Use specific version tag (e.g., v1.0.0)
|
# - <version>: Use specific version tag (e.g., v1.0.0)
|
||||||
IMAGE_TAG=dev
|
IMAGE_TAG=latest
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Docker Compose Profiles
|
# Docker Compose Profiles
|
||||||
@@ -244,12 +253,16 @@ MOSAIC_API_DOMAIN=api.mosaic.local
|
|||||||
MOSAIC_WEB_DOMAIN=mosaic.local
|
MOSAIC_WEB_DOMAIN=mosaic.local
|
||||||
MOSAIC_AUTH_DOMAIN=auth.mosaic.local
|
MOSAIC_AUTH_DOMAIN=auth.mosaic.local
|
||||||
|
|
||||||
# External Traefik network name (for upstream mode)
|
# External Traefik network name (for upstream mode and swarm)
|
||||||
# Must match the network name of your existing Traefik instance
|
# Must match the network name of your existing Traefik instance
|
||||||
TRAEFIK_NETWORK=traefik-public
|
TRAEFIK_NETWORK=traefik-public
|
||||||
|
TRAEFIK_DOCKER_NETWORK=traefik-public
|
||||||
|
|
||||||
# TLS/SSL Configuration
|
# TLS/SSL Configuration
|
||||||
TRAEFIK_TLS_ENABLED=true
|
TRAEFIK_TLS_ENABLED=true
|
||||||
|
TRAEFIK_ENTRYPOINT=websecure
|
||||||
|
# Cert resolver name (leave empty if TLS is handled externally or using self-signed certs)
|
||||||
|
TRAEFIK_CERTRESOLVER=
|
||||||
# For Let's Encrypt (production):
|
# For Let's Encrypt (production):
|
||||||
TRAEFIK_ACME_EMAIL=admin@example.com
|
TRAEFIK_ACME_EMAIL=admin@example.com
|
||||||
# For self-signed certificates (development), leave TRAEFIK_ACME_EMAIL empty
|
# For self-signed certificates (development), leave TRAEFIK_ACME_EMAIL empty
|
||||||
@@ -285,6 +298,15 @@ GITEA_WEBHOOK_SECRET=REPLACE_WITH_RANDOM_WEBHOOK_SECRET
|
|||||||
# The coordinator service uses this key to authenticate with the API
|
# The coordinator service uses this key to authenticate with the API
|
||||||
COORDINATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
COORDINATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
||||||
|
|
||||||
|
# Anthropic API Key (used by coordinator for issue parsing)
|
||||||
|
# Get your API key from: https://console.anthropic.com/
|
||||||
|
ANTHROPIC_API_KEY=REPLACE_WITH_ANTHROPIC_API_KEY
|
||||||
|
|
||||||
|
# Coordinator tuning
|
||||||
|
COORDINATOR_POLL_INTERVAL=5.0
|
||||||
|
COORDINATOR_MAX_CONCURRENT_AGENTS=10
|
||||||
|
COORDINATOR_ENABLED=true
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Rate Limiting
|
# Rate Limiting
|
||||||
# ======================
|
# ======================
|
||||||
@@ -329,16 +351,34 @@ RATE_LIMIT_STORAGE=redis
|
|||||||
# ======================
|
# ======================
|
||||||
# Matrix bot integration for chat-based control via Matrix protocol
|
# Matrix bot integration for chat-based control via Matrix protocol
|
||||||
# Requires a Matrix account with an access token for the bot user
|
# Requires a Matrix account with an access token for the bot user
|
||||||
# MATRIX_HOMESERVER_URL=https://matrix.example.com
|
# Set these AFTER deploying Synapse and creating the bot account.
|
||||||
# MATRIX_ACCESS_TOKEN=
|
|
||||||
# MATRIX_BOT_USER_ID=@mosaic-bot:example.com
|
|
||||||
# MATRIX_CONTROL_ROOM_ID=!roomid:example.com
|
|
||||||
# MATRIX_WORKSPACE_ID=your-workspace-uuid
|
|
||||||
#
|
#
|
||||||
# SECURITY: MATRIX_WORKSPACE_ID must be a valid workspace UUID from your database.
|
# SECURITY: MATRIX_WORKSPACE_ID must be a valid workspace UUID from your database.
|
||||||
# All Matrix commands will execute within this workspace context for proper
|
# All Matrix commands will execute within this workspace context for proper
|
||||||
# multi-tenant isolation. Each Matrix bot instance should be configured for
|
# multi-tenant isolation. Each Matrix bot instance should be configured for
|
||||||
# a single workspace.
|
# a single workspace.
|
||||||
|
MATRIX_HOMESERVER_URL=http://synapse:8008
|
||||||
|
MATRIX_ACCESS_TOKEN=
|
||||||
|
MATRIX_BOT_USER_ID=@mosaic-bot:matrix.example.com
|
||||||
|
MATRIX_SERVER_NAME=matrix.example.com
|
||||||
|
# MATRIX_CONTROL_ROOM_ID=!roomid:matrix.example.com
|
||||||
|
# MATRIX_WORKSPACE_ID=your-workspace-uuid
|
||||||
|
|
||||||
|
# ======================
|
||||||
|
# Matrix / Synapse Deployment
|
||||||
|
# ======================
|
||||||
|
# Domains for Traefik routing to Matrix services
|
||||||
|
MATRIX_DOMAIN=matrix.example.com
|
||||||
|
ELEMENT_DOMAIN=chat.example.com
|
||||||
|
|
||||||
|
# Synapse database (created automatically by synapse-db-init in the swarm compose)
|
||||||
|
SYNAPSE_POSTGRES_DB=synapse
|
||||||
|
SYNAPSE_POSTGRES_USER=synapse
|
||||||
|
SYNAPSE_POSTGRES_PASSWORD=REPLACE_WITH_SECURE_SYNAPSE_DB_PASSWORD
|
||||||
|
|
||||||
|
# Image tags for Matrix services
|
||||||
|
SYNAPSE_IMAGE_TAG=latest
|
||||||
|
ELEMENT_IMAGE_TAG=latest
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Orchestrator Configuration
|
# Orchestrator Configuration
|
||||||
@@ -350,6 +390,17 @@ RATE_LIMIT_STORAGE=redis
|
|||||||
# Health endpoints (/health/*) remain unauthenticated
|
# Health endpoints (/health/*) remain unauthenticated
|
||||||
ORCHESTRATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
ORCHESTRATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
||||||
|
|
||||||
|
# Runtime safety defaults (recommended for low-memory hosts)
|
||||||
|
MAX_CONCURRENT_AGENTS=2
|
||||||
|
SESSION_CLEANUP_DELAY_MS=30000
|
||||||
|
ORCHESTRATOR_QUEUE_NAME=orchestrator-tasks
|
||||||
|
ORCHESTRATOR_QUEUE_CONCURRENCY=1
|
||||||
|
ORCHESTRATOR_QUEUE_MAX_RETRIES=3
|
||||||
|
ORCHESTRATOR_QUEUE_BASE_DELAY_MS=1000
|
||||||
|
ORCHESTRATOR_QUEUE_MAX_DELAY_MS=60000
|
||||||
|
SANDBOX_DEFAULT_MEMORY_MB=256
|
||||||
|
SANDBOX_DEFAULT_CPU_LIMIT=1.0
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# AI Provider Configuration
|
# AI Provider Configuration
|
||||||
# ======================
|
# ======================
|
||||||
@@ -363,11 +414,10 @@ AI_PROVIDER=ollama
|
|||||||
# For remote Ollama: http://your-ollama-server:11434
|
# For remote Ollama: http://your-ollama-server:11434
|
||||||
OLLAMA_MODEL=llama3.1:latest
|
OLLAMA_MODEL=llama3.1:latest
|
||||||
|
|
||||||
# Claude API Configuration (when AI_PROVIDER=claude)
|
# Claude API Key
|
||||||
# OPTIONAL: Only required if AI_PROVIDER=claude
|
# Required only when AI_PROVIDER=claude.
|
||||||
# Get your API key from: https://console.anthropic.com/
|
# Get your API key from: https://console.anthropic.com/
|
||||||
# Note: Claude Max subscription users should use AI_PROVIDER=ollama instead
|
CLAUDE_API_KEY=REPLACE_WITH_CLAUDE_API_KEY
|
||||||
# CLAUDE_API_KEY=sk-ant-...
|
|
||||||
|
|
||||||
# OpenAI API Configuration (when AI_PROVIDER=openai)
|
# OpenAI API Configuration (when AI_PROVIDER=openai)
|
||||||
# OPTIONAL: Only required if AI_PROVIDER=openai
|
# OPTIONAL: Only required if AI_PROVIDER=openai
|
||||||
@@ -405,6 +455,9 @@ TTS_PREMIUM_URL=http://chatterbox-tts:8881/v1
|
|||||||
TTS_FALLBACK_ENABLED=false
|
TTS_FALLBACK_ENABLED=false
|
||||||
TTS_FALLBACK_URL=http://openedai-speech:8000/v1
|
TTS_FALLBACK_URL=http://openedai-speech:8000/v1
|
||||||
|
|
||||||
|
# Whisper model for Speaches STT engine
|
||||||
|
SPEACHES_WHISPER_MODEL=Systran/faster-whisper-large-v3-turbo
|
||||||
|
|
||||||
# Speech Service Limits
|
# Speech Service Limits
|
||||||
# Maximum upload file size in bytes (default: 25MB)
|
# Maximum upload file size in bytes (default: 25MB)
|
||||||
SPEECH_MAX_UPLOAD_SIZE=25000000
|
SPEECH_MAX_UPLOAD_SIZE=25000000
|
||||||
@@ -439,28 +492,6 @@ MOSAIC_TELEMETRY_INSTANCE_ID=your-instance-uuid-here
|
|||||||
# Useful for development and debugging telemetry payloads
|
# Useful for development and debugging telemetry payloads
|
||||||
MOSAIC_TELEMETRY_DRY_RUN=false
|
MOSAIC_TELEMETRY_DRY_RUN=false
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Matrix Dev Environment (docker-compose.matrix.yml overlay)
|
|
||||||
# ======================
|
|
||||||
# These variables configure the local Matrix dev environment.
|
|
||||||
# Only used when running: docker compose -f docker/docker-compose.yml -f docker/docker-compose.matrix.yml up
|
|
||||||
#
|
|
||||||
# Synapse homeserver
|
|
||||||
# SYNAPSE_CLIENT_PORT=8008
|
|
||||||
# SYNAPSE_FEDERATION_PORT=8448
|
|
||||||
# SYNAPSE_POSTGRES_DB=synapse
|
|
||||||
# SYNAPSE_POSTGRES_USER=synapse
|
|
||||||
# SYNAPSE_POSTGRES_PASSWORD=synapse_dev_password
|
|
||||||
#
|
|
||||||
# Element Web client
|
|
||||||
# ELEMENT_PORT=8501
|
|
||||||
#
|
|
||||||
# Matrix bridge connection (set after running docker/matrix/scripts/setup-bot.sh)
|
|
||||||
# MATRIX_HOMESERVER_URL=http://localhost:8008
|
|
||||||
# MATRIX_ACCESS_TOKEN=<obtained from setup-bot.sh>
|
|
||||||
# MATRIX_BOT_USER_ID=@mosaic-bot:localhost
|
|
||||||
# MATRIX_SERVER_NAME=localhost
|
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Logging & Debugging
|
# Logging & Debugging
|
||||||
# ======================
|
# ======================
|
||||||
|
|||||||
@@ -1,66 +0,0 @@
|
|||||||
# ==============================================
|
|
||||||
# Mosaic Stack Production Environment
|
|
||||||
# ==============================================
|
|
||||||
# Copy to .env and configure for production deployment
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# PostgreSQL Database
|
|
||||||
# ======================
|
|
||||||
# CRITICAL: Use a strong, unique password
|
|
||||||
POSTGRES_USER=mosaic
|
|
||||||
POSTGRES_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
|
|
||||||
POSTGRES_DB=mosaic
|
|
||||||
POSTGRES_SHARED_BUFFERS=256MB
|
|
||||||
POSTGRES_EFFECTIVE_CACHE_SIZE=1GB
|
|
||||||
POSTGRES_MAX_CONNECTIONS=100
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Valkey Cache
|
|
||||||
# ======================
|
|
||||||
VALKEY_MAXMEMORY=256mb
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# API Configuration
|
|
||||||
# ======================
|
|
||||||
API_PORT=3001
|
|
||||||
API_HOST=0.0.0.0
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Web Configuration
|
|
||||||
# ======================
|
|
||||||
WEB_PORT=3000
|
|
||||||
NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Authentication (Authentik OIDC)
|
|
||||||
# ======================
|
|
||||||
OIDC_ISSUER=https://auth.diversecanvas.com/application/o/mosaic-stack/
|
|
||||||
OIDC_CLIENT_ID=your-client-id
|
|
||||||
OIDC_CLIENT_SECRET=your-client-secret
|
|
||||||
OIDC_REDIRECT_URI=https://api.mosaicstack.dev/auth/callback/authentik
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# JWT Configuration
|
|
||||||
# ======================
|
|
||||||
# CRITICAL: Generate a random secret (openssl rand -base64 32)
|
|
||||||
JWT_SECRET=REPLACE_WITH_RANDOM_SECRET
|
|
||||||
JWT_EXPIRATION=24h
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Traefik Integration
|
|
||||||
# ======================
|
|
||||||
# Set to true if using external Traefik
|
|
||||||
TRAEFIK_ENABLE=true
|
|
||||||
TRAEFIK_ENTRYPOINT=websecure
|
|
||||||
TRAEFIK_TLS_ENABLED=true
|
|
||||||
TRAEFIK_DOCKER_NETWORK=traefik-public
|
|
||||||
TRAEFIK_CERTRESOLVER=letsencrypt
|
|
||||||
|
|
||||||
# Domain configuration
|
|
||||||
MOSAIC_API_DOMAIN=api.mosaicstack.dev
|
|
||||||
MOSAIC_WEB_DOMAIN=app.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Optional: Ollama
|
|
||||||
# ======================
|
|
||||||
# OLLAMA_ENDPOINT=http://ollama.diversecanvas.com:11434
|
|
||||||
@@ -1,161 +0,0 @@
|
|||||||
# ==============================================
|
|
||||||
# Mosaic Stack - Docker Swarm Configuration
|
|
||||||
# ==============================================
|
|
||||||
# Copy this file to .env for Docker Swarm deployment
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Application Ports (Internal)
|
|
||||||
# ======================
|
|
||||||
API_PORT=3001
|
|
||||||
API_HOST=0.0.0.0
|
|
||||||
WEB_PORT=3000
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Domain Configuration (Traefik)
|
|
||||||
# ======================
|
|
||||||
# These domains must be configured in your DNS or /etc/hosts
|
|
||||||
MOSAIC_API_DOMAIN=api.mosaicstack.dev
|
|
||||||
MOSAIC_WEB_DOMAIN=mosaic.mosaicstack.dev
|
|
||||||
MOSAIC_AUTH_DOMAIN=auth.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Web Configuration
|
|
||||||
# ======================
|
|
||||||
# Use the Traefik domain for the API URL
|
|
||||||
NEXT_PUBLIC_APP_URL=http://mosaic.mosaicstack.dev
|
|
||||||
NEXT_PUBLIC_API_URL=http://api.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# PostgreSQL Database
|
|
||||||
# ======================
|
|
||||||
DATABASE_URL=postgresql://mosaic:REPLACE_WITH_SECURE_PASSWORD@postgres:5432/mosaic
|
|
||||||
POSTGRES_USER=mosaic
|
|
||||||
POSTGRES_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
|
|
||||||
POSTGRES_DB=mosaic
|
|
||||||
POSTGRES_PORT=5432
|
|
||||||
|
|
||||||
# PostgreSQL Performance Tuning
|
|
||||||
POSTGRES_SHARED_BUFFERS=256MB
|
|
||||||
POSTGRES_EFFECTIVE_CACHE_SIZE=1GB
|
|
||||||
POSTGRES_MAX_CONNECTIONS=100
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Valkey Cache
|
|
||||||
# ======================
|
|
||||||
VALKEY_URL=redis://valkey:6379
|
|
||||||
VALKEY_HOST=valkey
|
|
||||||
VALKEY_PORT=6379
|
|
||||||
VALKEY_MAXMEMORY=256mb
|
|
||||||
|
|
||||||
# Knowledge Module Cache Configuration
|
|
||||||
KNOWLEDGE_CACHE_ENABLED=true
|
|
||||||
KNOWLEDGE_CACHE_TTL=300
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Authentication (Authentik OIDC)
|
|
||||||
# ======================
|
|
||||||
# NOTE: Authentik services are COMMENTED OUT in docker-compose.swarm.yml by default
|
|
||||||
# Uncomment those services if you want to run Authentik internally
|
|
||||||
# Otherwise, use external Authentik by configuring OIDC_* variables below
|
|
||||||
|
|
||||||
# External Authentik Configuration (default)
|
|
||||||
OIDC_ENABLED=true
|
|
||||||
OIDC_ISSUER=https://auth.example.com/application/o/mosaic-stack/
|
|
||||||
OIDC_CLIENT_ID=your-client-id-here
|
|
||||||
OIDC_CLIENT_SECRET=your-client-secret-here
|
|
||||||
OIDC_REDIRECT_URI=https://api.mosaicstack.dev/auth/callback/authentik
|
|
||||||
|
|
||||||
# Internal Authentik Configuration (only needed if uncommenting Authentik services)
|
|
||||||
# Authentik PostgreSQL Database
|
|
||||||
AUTHENTIK_POSTGRES_USER=authentik
|
|
||||||
AUTHENTIK_POSTGRES_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
|
|
||||||
AUTHENTIK_POSTGRES_DB=authentik
|
|
||||||
|
|
||||||
# Authentik Server Configuration
|
|
||||||
AUTHENTIK_SECRET_KEY=REPLACE_WITH_RANDOM_SECRET_MINIMUM_50_CHARS
|
|
||||||
AUTHENTIK_ERROR_REPORTING=false
|
|
||||||
AUTHENTIK_BOOTSTRAP_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
|
|
||||||
AUTHENTIK_BOOTSTRAP_EMAIL=admin@mosaicstack.dev
|
|
||||||
AUTHENTIK_COOKIE_DOMAIN=.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# JWT Configuration
|
|
||||||
# ======================
|
|
||||||
JWT_SECRET=REPLACE_WITH_RANDOM_SECRET_MINIMUM_32_CHARS
|
|
||||||
JWT_EXPIRATION=24h
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Encryption (Credential Security)
|
|
||||||
# ======================
|
|
||||||
# Generate with: openssl rand -hex 32
|
|
||||||
ENCRYPTION_KEY=REPLACE_WITH_64_CHAR_HEX_STRING_GENERATE_WITH_OPENSSL_RAND_HEX_32
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# OpenBao Secrets Management
|
|
||||||
# ======================
|
|
||||||
OPENBAO_ADDR=http://openbao:8200
|
|
||||||
OPENBAO_PORT=8200
|
|
||||||
# For development only - remove in production
|
|
||||||
OPENBAO_DEV_ROOT_TOKEN_ID=root
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Ollama (Optional AI Service)
|
|
||||||
# ======================
|
|
||||||
OLLAMA_ENDPOINT=http://ollama:11434
|
|
||||||
OLLAMA_PORT=11434
|
|
||||||
OLLAMA_EMBEDDING_MODEL=mxbai-embed-large
|
|
||||||
|
|
||||||
# Semantic Search Configuration
|
|
||||||
SEMANTIC_SEARCH_SIMILARITY_THRESHOLD=0.5
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# OpenAI API (Optional)
|
|
||||||
# ======================
|
|
||||||
# OPENAI_API_KEY=sk-...
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Application Environment
|
|
||||||
# ======================
|
|
||||||
NODE_ENV=production
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Gitea Integration (Coordinator)
|
|
||||||
# ======================
|
|
||||||
GITEA_URL=https://git.mosaicstack.dev
|
|
||||||
GITEA_BOT_USERNAME=mosaic
|
|
||||||
GITEA_BOT_TOKEN=REPLACE_WITH_COORDINATOR_BOT_API_TOKEN
|
|
||||||
GITEA_BOT_PASSWORD=REPLACE_WITH_COORDINATOR_BOT_PASSWORD
|
|
||||||
GITEA_REPO_OWNER=mosaic
|
|
||||||
GITEA_REPO_NAME=stack
|
|
||||||
GITEA_WEBHOOK_SECRET=REPLACE_WITH_RANDOM_WEBHOOK_SECRET
|
|
||||||
COORDINATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Coordinator Service
|
|
||||||
# ======================
|
|
||||||
ANTHROPIC_API_KEY=REPLACE_WITH_ANTHROPIC_API_KEY
|
|
||||||
COORDINATOR_POLL_INTERVAL=5.0
|
|
||||||
COORDINATOR_MAX_CONCURRENT_AGENTS=10
|
|
||||||
COORDINATOR_ENABLED=true
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Rate Limiting
|
|
||||||
# ======================
|
|
||||||
RATE_LIMIT_TTL=60
|
|
||||||
RATE_LIMIT_GLOBAL_LIMIT=100
|
|
||||||
RATE_LIMIT_WEBHOOK_LIMIT=60
|
|
||||||
RATE_LIMIT_COORDINATOR_LIMIT=100
|
|
||||||
RATE_LIMIT_HEALTH_LIMIT=300
|
|
||||||
RATE_LIMIT_STORAGE=redis
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Orchestrator Configuration
|
|
||||||
# ======================
|
|
||||||
ORCHESTRATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
|
||||||
CLAUDE_API_KEY=REPLACE_WITH_CLAUDE_API_KEY
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Logging & Debugging
|
|
||||||
# ======================
|
|
||||||
LOG_LEVEL=info
|
|
||||||
DEBUG=false
|
|
||||||
10
.gitignore
vendored
10
.gitignore
vendored
@@ -59,3 +59,13 @@ yarn-error.log*
|
|||||||
|
|
||||||
# Orchestrator reports (generated by QA automation, cleaned up after processing)
|
# Orchestrator reports (generated by QA automation, cleaned up after processing)
|
||||||
docs/reports/qa-automation/
|
docs/reports/qa-automation/
|
||||||
|
|
||||||
|
# Repo-local orchestrator runtime artifacts
|
||||||
|
.mosaic/orchestrator/orchestrator.pid
|
||||||
|
.mosaic/orchestrator/state.json
|
||||||
|
.mosaic/orchestrator/tasks.json
|
||||||
|
.mosaic/orchestrator/matrix_state.json
|
||||||
|
.mosaic/orchestrator/logs/*.log
|
||||||
|
.mosaic/orchestrator/results/*
|
||||||
|
!.mosaic/orchestrator/logs/.gitkeep
|
||||||
|
!.mosaic/orchestrator/results/.gitkeep
|
||||||
|
|||||||
@@ -4,12 +4,12 @@ This repository is attached to the machine-wide Mosaic framework.
|
|||||||
|
|
||||||
## Load Order for Agents
|
## Load Order for Agents
|
||||||
|
|
||||||
1. `~/.mosaic/STANDARDS.md`
|
1. `~/.config/mosaic/STANDARDS.md`
|
||||||
2. `AGENTS.md` (this repository)
|
2. `AGENTS.md` (this repository)
|
||||||
3. `.mosaic/repo-hooks.sh` (repo-specific automation hooks)
|
3. `.mosaic/repo-hooks.sh` (repo-specific automation hooks)
|
||||||
|
|
||||||
## Purpose
|
## Purpose
|
||||||
|
|
||||||
- Keep universal standards in `~/.mosaic`
|
- Keep universal standards in `~/.config/mosaic`
|
||||||
- Keep repo-specific behavior in this repo
|
- Keep repo-specific behavior in this repo
|
||||||
- Avoid copying large runtime configs into each project
|
- Avoid copying large runtime configs into each project
|
||||||
|
|||||||
18
.mosaic/orchestrator/config.json
Normal file
18
.mosaic/orchestrator/config.json
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"enabled": true,
|
||||||
|
"transport": "matrix",
|
||||||
|
"matrix": {
|
||||||
|
"control_room_id": "",
|
||||||
|
"workspace_id": "",
|
||||||
|
"homeserver_url": "",
|
||||||
|
"access_token": "",
|
||||||
|
"bot_user_id": ""
|
||||||
|
},
|
||||||
|
"worker": {
|
||||||
|
"runtime": "codex",
|
||||||
|
"command_template": "bash scripts/agent/orchestrator-worker.sh {task_file}",
|
||||||
|
"timeout_seconds": 7200,
|
||||||
|
"max_attempts": 1
|
||||||
|
},
|
||||||
|
"quality_gates": ["pnpm lint", "pnpm typecheck", "pnpm test"]
|
||||||
|
}
|
||||||
1
.mosaic/orchestrator/logs/.gitkeep
Normal file
1
.mosaic/orchestrator/logs/.gitkeep
Normal file
@@ -0,0 +1 @@
|
|||||||
|
|
||||||
14
.mosaic/orchestrator/mission.json
Normal file
14
.mosaic/orchestrator/mission.json
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"schema_version": 1,
|
||||||
|
"mission_id": "prd-implementation-20260222",
|
||||||
|
"name": "PRD implementation",
|
||||||
|
"description": "",
|
||||||
|
"project_path": "/home/jwoltje/src/mosaic-stack",
|
||||||
|
"created_at": "2026-02-23T03:20:55Z",
|
||||||
|
"status": "active",
|
||||||
|
"task_prefix": "",
|
||||||
|
"quality_gates": "",
|
||||||
|
"milestone_version": "0.0.1",
|
||||||
|
"milestones": [],
|
||||||
|
"sessions": []
|
||||||
|
}
|
||||||
1
.mosaic/orchestrator/results/.gitkeep
Normal file
1
.mosaic/orchestrator/results/.gitkeep
Normal file
@@ -0,0 +1 @@
|
|||||||
|
|
||||||
10
.mosaic/quality-rails.yml
Normal file
10
.mosaic/quality-rails.yml
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
enabled: false
|
||||||
|
template: ""
|
||||||
|
|
||||||
|
# Set enabled: true and choose one template:
|
||||||
|
# - typescript-node
|
||||||
|
# - typescript-nextjs
|
||||||
|
# - monorepo
|
||||||
|
#
|
||||||
|
# Apply manually:
|
||||||
|
# ~/.config/mosaic/bin/mosaic-quality-apply --template <template> --target <repo>
|
||||||
13
.trivyignore
13
.trivyignore
@@ -6,7 +6,7 @@
|
|||||||
# - npm bundled CVEs (5): npm removed from production Node.js images
|
# - npm bundled CVEs (5): npm removed from production Node.js images
|
||||||
# - Node.js 20 → 24 LTS migration (#367): base images updated
|
# - Node.js 20 → 24 LTS migration (#367): base images updated
|
||||||
#
|
#
|
||||||
# REMAINING: OpenBao (5 CVEs) + Next.js bundled tar (3 CVEs)
|
# REMAINING: OpenBao (5 CVEs) + Next.js bundled tar/minimatch (5 CVEs)
|
||||||
# Re-evaluate when upgrading openbao image beyond 2.5.0 or Next.js beyond 16.1.6.
|
# Re-evaluate when upgrading openbao image beyond 2.5.0 or Next.js beyond 16.1.6.
|
||||||
|
|
||||||
# === OpenBao false positives ===
|
# === OpenBao false positives ===
|
||||||
@@ -17,15 +17,18 @@ CVE-2024-9180 # HIGH: privilege escalation (fixed in 2.0.3)
|
|||||||
CVE-2025-59043 # HIGH: DoS via malicious JSON (fixed in 2.4.1)
|
CVE-2025-59043 # HIGH: DoS via malicious JSON (fixed in 2.4.1)
|
||||||
CVE-2025-64761 # HIGH: identity group root escalation (fixed in 2.4.4)
|
CVE-2025-64761 # HIGH: identity group root escalation (fixed in 2.4.4)
|
||||||
|
|
||||||
# === Next.js bundled tar CVEs (upstream — waiting on Next.js release) ===
|
# === Next.js bundled tar/minimatch CVEs (upstream — waiting on Next.js release) ===
|
||||||
# Next.js 16.1.6 bundles tar@7.5.2 in next/dist/compiled/tar/ (pre-compiled).
|
# Next.js 16.1.6 bundles tar@7.5.2 and minimatch@9.0.5 in next/dist/compiled/ (pre-compiled).
|
||||||
# This is NOT a pnpm dependency — it's embedded in the Next.js package itself.
|
# These are NOT pnpm dependencies — they're embedded in the Next.js package itself.
|
||||||
|
# pnpm overrides cannot reach these; only a Next.js upgrade can fix them.
|
||||||
# Affects web image only (orchestrator and API are clean).
|
# Affects web image only (orchestrator and API are clean).
|
||||||
# npm was also removed from all production images, eliminating the npm-bundled copy.
|
# npm was also removed from all production images, eliminating the npm-bundled copy.
|
||||||
# To resolve: upgrade Next.js when a release bundles tar >= 7.5.7.
|
# To resolve: upgrade Next.js when a release bundles tar >= 7.5.8 and minimatch >= 10.2.1.
|
||||||
CVE-2026-23745 # HIGH: tar arbitrary file overwrite via unsanitized linkpaths (fixed in 7.5.3)
|
CVE-2026-23745 # HIGH: tar arbitrary file overwrite via unsanitized linkpaths (fixed in 7.5.3)
|
||||||
CVE-2026-23950 # HIGH: tar arbitrary file overwrite via Unicode path collision (fixed in 7.5.4)
|
CVE-2026-23950 # HIGH: tar arbitrary file overwrite via Unicode path collision (fixed in 7.5.4)
|
||||||
CVE-2026-24842 # HIGH: tar arbitrary file creation via hardlink path traversal (needs tar >= 7.5.7)
|
CVE-2026-24842 # HIGH: tar arbitrary file creation via hardlink path traversal (needs tar >= 7.5.7)
|
||||||
|
CVE-2026-26960 # HIGH: tar arbitrary file read/write via malicious archive hardlink (needs tar >= 7.5.8)
|
||||||
|
CVE-2026-26996 # HIGH: minimatch DoS via specially crafted glob patterns (needs minimatch >= 10.2.1)
|
||||||
|
|
||||||
# === OpenBao Go stdlib (waiting on upstream rebuild) ===
|
# === OpenBao Go stdlib (waiting on upstream rebuild) ===
|
||||||
# OpenBao 2.5.0 compiled with Go 1.25.6, fix needs Go >= 1.25.7.
|
# OpenBao 2.5.0 compiled with Go 1.25.6, fix needs Go >= 1.25.7.
|
||||||
|
|||||||
@@ -86,10 +86,9 @@ install -> [ruff-check, mypy, security-bandit, security-pip-audit, test]
|
|||||||
## Image Tagging
|
## Image Tagging
|
||||||
|
|
||||||
| Condition | Tag | Purpose |
|
| Condition | Tag | Purpose |
|
||||||
| ---------------- | -------------------------- | -------------------------- |
|
| ------------- | -------------------------- | -------------------------- |
|
||||||
| Always | `${CI_COMMIT_SHA:0:8}` | Immutable commit reference |
|
| Always | `${CI_COMMIT_SHA:0:8}` | Immutable commit reference |
|
||||||
| `main` branch | `latest` | Current production release |
|
| `main` branch | `latest` | Current latest build |
|
||||||
| `develop` branch | `dev` | Current development build |
|
|
||||||
| Git tag | tag value (e.g., `v1.0.0`) | Semantic version release |
|
| Git tag | tag value (e.g., `v1.0.0`) | Semantic version release |
|
||||||
|
|
||||||
## Required Secrets
|
## Required Secrets
|
||||||
@@ -138,5 +137,5 @@ Fails on blockers or critical/high severity security findings.
|
|||||||
|
|
||||||
### Pipeline runs Docker builds on pull requests
|
### Pipeline runs Docker builds on pull requests
|
||||||
|
|
||||||
- Docker build steps have `when: branch: [main, develop]` guards
|
- Docker build steps have `when: branch: [main]` guards
|
||||||
- PRs only run quality gates, not Docker builds
|
- PRs only run quality gates, not Docker builds
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ when:
|
|||||||
- "turbo.json"
|
- "turbo.json"
|
||||||
- "package.json"
|
- "package.json"
|
||||||
- ".woodpecker/api.yml"
|
- ".woodpecker/api.yml"
|
||||||
|
- ".trivyignore"
|
||||||
|
|
||||||
variables:
|
variables:
|
||||||
- &node_image "node:24-alpine"
|
- &node_image "node:24-alpine"
|
||||||
@@ -112,7 +113,7 @@ steps:
|
|||||||
ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
|
ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
|
||||||
commands:
|
commands:
|
||||||
- *use_deps
|
- *use_deps
|
||||||
- pnpm --filter "@mosaic/api" exec vitest run --exclude 'src/auth/auth-rls.integration.spec.ts' --exclude 'src/credentials/user-credential.model.spec.ts' --exclude 'src/job-events/job-events.performance.spec.ts' --exclude 'src/knowledge/services/fulltext-search.spec.ts'
|
- pnpm --filter "@mosaic/api" exec vitest run --exclude 'src/auth/auth-rls.integration.spec.ts' --exclude 'src/credentials/user-credential.model.spec.ts' --exclude 'src/job-events/job-events.performance.spec.ts' --exclude 'src/knowledge/services/fulltext-search.spec.ts' --exclude 'src/mosaic-telemetry/mosaic-telemetry.module.spec.ts'
|
||||||
depends_on:
|
depends_on:
|
||||||
- prisma-migrate
|
- prisma-migrate
|
||||||
|
|
||||||
@@ -151,12 +152,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context . --dockerfile apps/api/Dockerfile $DESTINATIONS
|
/kaniko/executor --context . --dockerfile apps/api/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- build
|
- build
|
||||||
@@ -179,7 +178,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -187,7 +186,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-api:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-api:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-api
|
- docker-build-api
|
||||||
@@ -229,7 +228,7 @@ steps:
|
|||||||
}
|
}
|
||||||
link_package "stack-api"
|
link_package "stack-api"
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- security-trivy-api
|
- security-trivy-api
|
||||||
|
|||||||
@@ -92,12 +92,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context apps/coordinator --dockerfile apps/coordinator/Dockerfile $DESTINATIONS
|
/kaniko/executor --context apps/coordinator --dockerfile apps/coordinator/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- ruff-check
|
- ruff-check
|
||||||
@@ -124,7 +122,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -132,7 +130,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-coordinator:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-coordinator:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-coordinator
|
- docker-build-coordinator
|
||||||
@@ -174,7 +172,7 @@ steps:
|
|||||||
}
|
}
|
||||||
link_package "stack-coordinator"
|
link_package "stack-coordinator"
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- security-trivy-coordinator
|
- security-trivy-coordinator
|
||||||
|
|||||||
@@ -36,12 +36,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context docker/postgres --dockerfile docker/postgres/Dockerfile $DESTINATIONS
|
/kaniko/executor --context docker/postgres --dockerfile docker/postgres/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
|
|
||||||
docker-build-openbao:
|
docker-build-openbao:
|
||||||
@@ -61,12 +59,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context docker/openbao --dockerfile docker/openbao/Dockerfile $DESTINATIONS
|
/kaniko/executor --context docker/openbao --dockerfile docker/openbao/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
|
|
||||||
# === Container Security Scans ===
|
# === Container Security Scans ===
|
||||||
@@ -87,7 +83,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -95,7 +91,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-postgres:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-postgres:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-postgres
|
- docker-build-postgres
|
||||||
@@ -116,7 +112,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -124,7 +120,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-openbao:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-openbao:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-openbao
|
- docker-build-openbao
|
||||||
@@ -167,7 +163,7 @@ steps:
|
|||||||
link_package "stack-postgres"
|
link_package "stack-postgres"
|
||||||
link_package "stack-openbao"
|
link_package "stack-openbao"
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- security-trivy-postgres
|
- security-trivy-postgres
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ when:
|
|||||||
- "turbo.json"
|
- "turbo.json"
|
||||||
- "package.json"
|
- "package.json"
|
||||||
- ".woodpecker/orchestrator.yml"
|
- ".woodpecker/orchestrator.yml"
|
||||||
|
- ".trivyignore"
|
||||||
|
|
||||||
variables:
|
variables:
|
||||||
- &node_image "node:24-alpine"
|
- &node_image "node:24-alpine"
|
||||||
@@ -108,12 +109,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context . --dockerfile apps/orchestrator/Dockerfile $DESTINATIONS
|
/kaniko/executor --context . --dockerfile apps/orchestrator/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- build
|
- build
|
||||||
@@ -136,7 +135,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -144,7 +143,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-orchestrator:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-orchestrator:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-orchestrator
|
- docker-build-orchestrator
|
||||||
@@ -186,7 +185,7 @@ steps:
|
|||||||
}
|
}
|
||||||
link_package "stack-orchestrator"
|
link_package "stack-orchestrator"
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- security-trivy-orchestrator
|
- security-trivy-orchestrator
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ when:
|
|||||||
- "turbo.json"
|
- "turbo.json"
|
||||||
- "package.json"
|
- "package.json"
|
||||||
- ".woodpecker/web.yml"
|
- ".woodpecker/web.yml"
|
||||||
|
- ".trivyignore"
|
||||||
|
|
||||||
variables:
|
variables:
|
||||||
- &node_image "node:24-alpine"
|
- &node_image "node:24-alpine"
|
||||||
@@ -119,12 +120,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context . --dockerfile apps/web/Dockerfile --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
|
/kaniko/executor --context . --dockerfile apps/web/Dockerfile --snapshot-mode=redo --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- build
|
- build
|
||||||
@@ -147,7 +146,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -155,7 +154,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-web:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-web:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-web
|
- docker-build-web
|
||||||
@@ -197,7 +196,7 @@ steps:
|
|||||||
}
|
}
|
||||||
link_package "stack-web"
|
link_package "stack-web"
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- security-trivy-web
|
- security-trivy-web
|
||||||
|
|||||||
@@ -25,6 +25,8 @@ Optional:
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
bash scripts/agent/log-limitation.sh "Short Name"
|
bash scripts/agent/log-limitation.sh "Short Name"
|
||||||
|
bash scripts/agent/orchestrator-daemon.sh status
|
||||||
|
bash scripts/agent/orchestrator-events.sh recent --limit 50
|
||||||
```
|
```
|
||||||
|
|
||||||
## Repo Context
|
## Repo Context
|
||||||
|
|||||||
16
CLAUDE.md
16
CLAUDE.md
@@ -1,14 +1,10 @@
|
|||||||
# Compatibility Pointer
|
# CLAUDE Compatibility Pointer
|
||||||
|
|
||||||
This repository uses an agent-neutral Mosaic standards model.
|
This file exists so Claude Code sessions load Mosaic standards.
|
||||||
|
|
||||||
Authoritative repo guidance is in `AGENTS.md`.
|
## MANDATORY — Read Before Any Response
|
||||||
|
|
||||||
Load order for Claude sessions:
|
BEFORE responding to any user message, READ `~/.config/mosaic/AGENTS.md`.
|
||||||
|
|
||||||
1. `SOUL.md`
|
That file is the universal agent configuration. Do NOT respond until you have loaded it.
|
||||||
2. `~/.mosaic/STANDARDS.md`
|
Then read the project-local `AGENTS.md` in this repository for project-specific guidance.
|
||||||
3. `AGENTS.md`
|
|
||||||
4. `.mosaic/repo-hooks.sh`
|
|
||||||
|
|
||||||
If you were started from `CLAUDE.md`, continue by reading `AGENTS.md` now.
|
|
||||||
|
|||||||
11
README.md
11
README.md
@@ -232,7 +232,7 @@ docker compose -f docker-compose.openbao.yml up -d
|
|||||||
sleep 30 # Wait for auto-initialization
|
sleep 30 # Wait for auto-initialization
|
||||||
|
|
||||||
# 5. Deploy swarm stack
|
# 5. Deploy swarm stack
|
||||||
IMAGE_TAG=dev ./scripts/deploy-swarm.sh mosaic
|
IMAGE_TAG=latest ./scripts/deploy-swarm.sh mosaic
|
||||||
|
|
||||||
# 6. Check deployment status
|
# 6. Check deployment status
|
||||||
docker stack services mosaic
|
docker stack services mosaic
|
||||||
@@ -526,10 +526,9 @@ KNOWLEDGE_CACHE_TTL=300 # 5 minutes
|
|||||||
|
|
||||||
### Branch Strategy
|
### Branch Strategy
|
||||||
|
|
||||||
- `main` — Stable releases only
|
- `main` — Trunk branch (all development merges here)
|
||||||
- `develop` — Active development (default working branch)
|
- `feature/*` — Feature branches from main
|
||||||
- `feature/*` — Feature branches from develop
|
- `fix/*` — Bug fix branches from main
|
||||||
- `fix/*` — Bug fix branches
|
|
||||||
|
|
||||||
### Running Locally
|
### Running Locally
|
||||||
|
|
||||||
@@ -739,7 +738,7 @@ See [Type Sharing Strategy](docs/2-development/3-type-sharing/1-strategy.md) for
|
|||||||
4. Run tests: `pnpm test`
|
4. Run tests: `pnpm test`
|
||||||
5. Build: `pnpm build`
|
5. Build: `pnpm build`
|
||||||
6. Commit with conventional format: `feat(#issue): Description`
|
6. Commit with conventional format: `feat(#issue): Description`
|
||||||
7. Push and create a pull request to `develop`
|
7. Push and create a pull request to `main`
|
||||||
|
|
||||||
### Commit Format
|
### Commit Format
|
||||||
|
|
||||||
|
|||||||
2
SOUL.md
2
SOUL.md
@@ -10,7 +10,7 @@ You are Jarvis for the Mosaic Stack repository, running on the current agent run
|
|||||||
- Be calm and clear: keep responses concise, chunked, and PDA-friendly.
|
- Be calm and clear: keep responses concise, chunked, and PDA-friendly.
|
||||||
- Respect canonical sources:
|
- Respect canonical sources:
|
||||||
- Repo operations and conventions: `AGENTS.md`
|
- Repo operations and conventions: `AGENTS.md`
|
||||||
- Machine-wide rails: `~/.mosaic/STANDARDS.md`
|
- Machine-wide rails: `~/.config/mosaic/STANDARDS.md`
|
||||||
- Repo lifecycle hooks: `.mosaic/repo-hooks.sh`
|
- Repo lifecycle hooks: `.mosaic/repo-hooks.sh`
|
||||||
|
|
||||||
## Guardrails
|
## Guardrails
|
||||||
|
|||||||
@@ -1,6 +1,3 @@
|
|||||||
# syntax=docker/dockerfile:1
|
|
||||||
# Enable BuildKit features for cache mounts
|
|
||||||
|
|
||||||
# Base image for all stages
|
# Base image for all stages
|
||||||
# Uses Debian slim (glibc) instead of Alpine (musl) because native Node.js addons
|
# Uses Debian slim (glibc) instead of Alpine (musl) because native Node.js addons
|
||||||
# (matrix-sdk-crypto-nodejs, Prisma engines) require glibc-compatible binaries.
|
# (matrix-sdk-crypto-nodejs, Prisma engines) require glibc-compatible binaries.
|
||||||
@@ -27,9 +24,8 @@ COPY packages/ui/package.json ./packages/ui/
|
|||||||
COPY packages/config/package.json ./packages/config/
|
COPY packages/config/package.json ./packages/config/
|
||||||
COPY apps/api/package.json ./apps/api/
|
COPY apps/api/package.json ./apps/api/
|
||||||
|
|
||||||
# Install dependencies with pnpm store cache
|
# Install dependencies (no cache mount — Kaniko builds are ephemeral in CI)
|
||||||
RUN --mount=type=cache,id=pnpm-store,target=/root/.local/share/pnpm/store \
|
RUN pnpm install --frozen-lockfile
|
||||||
pnpm install --frozen-lockfile
|
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Builder stage
|
# Builder stage
|
||||||
@@ -57,15 +53,14 @@ RUN pnpm turbo build --filter=@mosaic/api --force
|
|||||||
# ======================
|
# ======================
|
||||||
FROM node:24-slim AS production
|
FROM node:24-slim AS production
|
||||||
|
|
||||||
# Remove npm (unused in production — we use pnpm) to reduce attack surface
|
# Install dumb-init for proper signal handling (static binary from GitHub,
|
||||||
RUN rm -rf /usr/local/lib/node_modules/npm /usr/local/bin/npm /usr/local/bin/npx
|
# avoids apt-get which fails under Kaniko with bookworm GPG signature errors)
|
||||||
|
ADD https://github.com/Yelp/dumb-init/releases/download/v1.2.5/dumb-init_1.2.5_x86_64 /usr/local/bin/dumb-init
|
||||||
|
|
||||||
# Install dumb-init for proper signal handling
|
# Single RUN to minimize Kaniko filesystem snapshots (each RUN = full snapshot)
|
||||||
RUN apt-get update && apt-get install -y --no-install-recommends dumb-init \
|
RUN rm -rf /usr/local/lib/node_modules/npm /usr/local/bin/npm /usr/local/bin/npx \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& chmod 755 /usr/local/bin/dumb-init \
|
||||||
|
&& groupadd -g 1001 nodejs && useradd -m -u 1001 -g nodejs nestjs
|
||||||
# Create non-root user
|
|
||||||
RUN groupadd -g 1001 nodejs && useradd -m -u 1001 -g nodejs nestjs
|
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
|
|||||||
@@ -66,6 +66,7 @@
|
|||||||
"marked-gfm-heading-id": "^4.1.3",
|
"marked-gfm-heading-id": "^4.1.3",
|
||||||
"marked-highlight": "^2.2.3",
|
"marked-highlight": "^2.2.3",
|
||||||
"matrix-bot-sdk": "^0.8.0",
|
"matrix-bot-sdk": "^0.8.0",
|
||||||
|
"node-pty": "^1.0.0",
|
||||||
"ollama": "^0.6.3",
|
"ollama": "^0.6.3",
|
||||||
"openai": "^6.17.0",
|
"openai": "^6.17.0",
|
||||||
"reflect-metadata": "^0.2.2",
|
"reflect-metadata": "^0.2.2",
|
||||||
|
|||||||
@@ -0,0 +1,23 @@
|
|||||||
|
-- CreateEnum
|
||||||
|
CREATE TYPE "TerminalSessionStatus" AS ENUM ('ACTIVE', 'CLOSED');
|
||||||
|
|
||||||
|
-- CreateTable
|
||||||
|
CREATE TABLE "terminal_sessions" (
|
||||||
|
"id" UUID NOT NULL,
|
||||||
|
"workspace_id" UUID NOT NULL,
|
||||||
|
"name" TEXT NOT NULL DEFAULT 'Terminal',
|
||||||
|
"status" "TerminalSessionStatus" NOT NULL DEFAULT 'ACTIVE',
|
||||||
|
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"closed_at" TIMESTAMPTZ,
|
||||||
|
|
||||||
|
CONSTRAINT "terminal_sessions_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "terminal_sessions_workspace_id_idx" ON "terminal_sessions"("workspace_id");
|
||||||
|
|
||||||
|
-- CreateIndex
|
||||||
|
CREATE INDEX "terminal_sessions_workspace_id_status_idx" ON "terminal_sessions"("workspace_id", "status");
|
||||||
|
|
||||||
|
-- AddForeignKey
|
||||||
|
ALTER TABLE "terminal_sessions" ADD CONSTRAINT "terminal_sessions_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
@@ -206,6 +206,11 @@ enum CredentialScope {
|
|||||||
SYSTEM
|
SYSTEM
|
||||||
}
|
}
|
||||||
|
|
||||||
|
enum TerminalSessionStatus {
|
||||||
|
ACTIVE
|
||||||
|
CLOSED
|
||||||
|
}
|
||||||
|
|
||||||
// ============================================
|
// ============================================
|
||||||
// MODELS
|
// MODELS
|
||||||
// ============================================
|
// ============================================
|
||||||
@@ -297,6 +302,7 @@ model Workspace {
|
|||||||
federationEventSubscriptions FederationEventSubscription[]
|
federationEventSubscriptions FederationEventSubscription[]
|
||||||
llmUsageLogs LlmUsageLog[]
|
llmUsageLogs LlmUsageLog[]
|
||||||
userCredentials UserCredential[]
|
userCredentials UserCredential[]
|
||||||
|
terminalSessions TerminalSession[]
|
||||||
|
|
||||||
@@index([ownerId])
|
@@index([ownerId])
|
||||||
@@map("workspaces")
|
@@map("workspaces")
|
||||||
@@ -1507,3 +1513,23 @@ model LlmUsageLog {
|
|||||||
@@index([conversationId])
|
@@index([conversationId])
|
||||||
@@map("llm_usage_logs")
|
@@map("llm_usage_logs")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================
|
||||||
|
// TERMINAL MODULE
|
||||||
|
// ============================================
|
||||||
|
|
||||||
|
model TerminalSession {
|
||||||
|
id String @id @default(uuid()) @db.Uuid
|
||||||
|
workspaceId String @map("workspace_id") @db.Uuid
|
||||||
|
name String @default("Terminal")
|
||||||
|
status TerminalSessionStatus @default(ACTIVE)
|
||||||
|
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||||
|
closedAt DateTime? @map("closed_at") @db.Timestamptz
|
||||||
|
|
||||||
|
// Relations
|
||||||
|
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
|
@@index([workspaceId])
|
||||||
|
@@index([workspaceId, status])
|
||||||
|
@@map("terminal_sessions")
|
||||||
|
}
|
||||||
|
|||||||
@@ -65,6 +65,136 @@ async function main() {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// ============================================
|
||||||
|
// WIDGET DEFINITIONS (global, not workspace-scoped)
|
||||||
|
// ============================================
|
||||||
|
const widgetDefs = [
|
||||||
|
{
|
||||||
|
name: "TasksWidget",
|
||||||
|
displayName: "Tasks",
|
||||||
|
description: "View and manage your tasks",
|
||||||
|
component: "TasksWidget",
|
||||||
|
defaultWidth: 2,
|
||||||
|
defaultHeight: 2,
|
||||||
|
minWidth: 1,
|
||||||
|
minHeight: 2,
|
||||||
|
maxWidth: 4,
|
||||||
|
maxHeight: null,
|
||||||
|
configSchema: {},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "CalendarWidget",
|
||||||
|
displayName: "Calendar",
|
||||||
|
description: "View upcoming events and schedule",
|
||||||
|
component: "CalendarWidget",
|
||||||
|
defaultWidth: 2,
|
||||||
|
defaultHeight: 2,
|
||||||
|
minWidth: 2,
|
||||||
|
minHeight: 2,
|
||||||
|
maxWidth: 4,
|
||||||
|
maxHeight: null,
|
||||||
|
configSchema: {},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "QuickCaptureWidget",
|
||||||
|
displayName: "Quick Capture",
|
||||||
|
description: "Quickly capture notes and tasks",
|
||||||
|
component: "QuickCaptureWidget",
|
||||||
|
defaultWidth: 2,
|
||||||
|
defaultHeight: 1,
|
||||||
|
minWidth: 2,
|
||||||
|
minHeight: 1,
|
||||||
|
maxWidth: 4,
|
||||||
|
maxHeight: 2,
|
||||||
|
configSchema: {},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "AgentStatusWidget",
|
||||||
|
displayName: "Agent Status",
|
||||||
|
description: "Monitor agent activity and status",
|
||||||
|
component: "AgentStatusWidget",
|
||||||
|
defaultWidth: 2,
|
||||||
|
defaultHeight: 2,
|
||||||
|
minWidth: 1,
|
||||||
|
minHeight: 2,
|
||||||
|
maxWidth: 3,
|
||||||
|
maxHeight: null,
|
||||||
|
configSchema: {},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "ActiveProjectsWidget",
|
||||||
|
displayName: "Active Projects & Agent Chains",
|
||||||
|
description: "View active projects and running agent sessions",
|
||||||
|
component: "ActiveProjectsWidget",
|
||||||
|
defaultWidth: 2,
|
||||||
|
defaultHeight: 3,
|
||||||
|
minWidth: 2,
|
||||||
|
minHeight: 2,
|
||||||
|
maxWidth: 4,
|
||||||
|
maxHeight: null,
|
||||||
|
configSchema: {},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "TaskProgressWidget",
|
||||||
|
displayName: "Task Progress",
|
||||||
|
description: "Live progress of orchestrator agent tasks",
|
||||||
|
component: "TaskProgressWidget",
|
||||||
|
defaultWidth: 2,
|
||||||
|
defaultHeight: 2,
|
||||||
|
minWidth: 1,
|
||||||
|
minHeight: 2,
|
||||||
|
maxWidth: 3,
|
||||||
|
maxHeight: null,
|
||||||
|
configSchema: {},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "OrchestratorEventsWidget",
|
||||||
|
displayName: "Orchestrator Events",
|
||||||
|
description: "Recent orchestration events with stream/Matrix visibility",
|
||||||
|
component: "OrchestratorEventsWidget",
|
||||||
|
defaultWidth: 2,
|
||||||
|
defaultHeight: 2,
|
||||||
|
minWidth: 1,
|
||||||
|
minHeight: 2,
|
||||||
|
maxWidth: 4,
|
||||||
|
maxHeight: null,
|
||||||
|
configSchema: {},
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const wd of widgetDefs) {
|
||||||
|
await prisma.widgetDefinition.upsert({
|
||||||
|
where: { name: wd.name },
|
||||||
|
update: {
|
||||||
|
displayName: wd.displayName,
|
||||||
|
description: wd.description,
|
||||||
|
component: wd.component,
|
||||||
|
defaultWidth: wd.defaultWidth,
|
||||||
|
defaultHeight: wd.defaultHeight,
|
||||||
|
minWidth: wd.minWidth,
|
||||||
|
minHeight: wd.minHeight,
|
||||||
|
maxWidth: wd.maxWidth,
|
||||||
|
maxHeight: wd.maxHeight,
|
||||||
|
configSchema: wd.configSchema,
|
||||||
|
},
|
||||||
|
create: {
|
||||||
|
name: wd.name,
|
||||||
|
displayName: wd.displayName,
|
||||||
|
description: wd.description,
|
||||||
|
component: wd.component,
|
||||||
|
defaultWidth: wd.defaultWidth,
|
||||||
|
defaultHeight: wd.defaultHeight,
|
||||||
|
minWidth: wd.minWidth,
|
||||||
|
minHeight: wd.minHeight,
|
||||||
|
maxWidth: wd.maxWidth,
|
||||||
|
maxHeight: wd.maxHeight,
|
||||||
|
configSchema: wd.configSchema,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Seeded ${widgetDefs.length} widget definitions`);
|
||||||
|
|
||||||
// Use transaction for atomic seed data reset and creation
|
// Use transaction for atomic seed data reset and creation
|
||||||
await prisma.$transaction(async (tx) => {
|
await prisma.$transaction(async (tx) => {
|
||||||
// Delete existing seed data for idempotency (avoids duplicates on re-run)
|
// Delete existing seed data for idempotency (avoids duplicates on re-run)
|
||||||
|
|||||||
@@ -39,6 +39,8 @@ import { FederationModule } from "./federation/federation.module";
|
|||||||
import { CredentialsModule } from "./credentials/credentials.module";
|
import { CredentialsModule } from "./credentials/credentials.module";
|
||||||
import { MosaicTelemetryModule } from "./mosaic-telemetry";
|
import { MosaicTelemetryModule } from "./mosaic-telemetry";
|
||||||
import { SpeechModule } from "./speech/speech.module";
|
import { SpeechModule } from "./speech/speech.module";
|
||||||
|
import { DashboardModule } from "./dashboard/dashboard.module";
|
||||||
|
import { TerminalModule } from "./terminal/terminal.module";
|
||||||
import { RlsContextInterceptor } from "./common/interceptors/rls-context.interceptor";
|
import { RlsContextInterceptor } from "./common/interceptors/rls-context.interceptor";
|
||||||
|
|
||||||
@Module({
|
@Module({
|
||||||
@@ -101,6 +103,8 @@ import { RlsContextInterceptor } from "./common/interceptors/rls-context.interce
|
|||||||
CredentialsModule,
|
CredentialsModule,
|
||||||
MosaicTelemetryModule,
|
MosaicTelemetryModule,
|
||||||
SpeechModule,
|
SpeechModule,
|
||||||
|
DashboardModule,
|
||||||
|
TerminalModule,
|
||||||
],
|
],
|
||||||
controllers: [AppController, CsrfController],
|
controllers: [AppController, CsrfController],
|
||||||
providers: [
|
providers: [
|
||||||
|
|||||||
@@ -18,7 +18,13 @@ vi.mock("better-auth/adapters/prisma", () => ({
|
|||||||
prismaAdapter: (...args: unknown[]) => mockPrismaAdapter(...args),
|
prismaAdapter: (...args: unknown[]) => mockPrismaAdapter(...args),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
import { isOidcEnabled, validateOidcConfig, createAuth, getTrustedOrigins } from "./auth.config";
|
import {
|
||||||
|
isOidcEnabled,
|
||||||
|
validateOidcConfig,
|
||||||
|
createAuth,
|
||||||
|
getTrustedOrigins,
|
||||||
|
getBetterAuthBaseUrl,
|
||||||
|
} from "./auth.config";
|
||||||
|
|
||||||
describe("auth.config", () => {
|
describe("auth.config", () => {
|
||||||
// Store original env vars to restore after each test
|
// Store original env vars to restore after each test
|
||||||
@@ -32,6 +38,7 @@ describe("auth.config", () => {
|
|||||||
delete process.env.OIDC_CLIENT_SECRET;
|
delete process.env.OIDC_CLIENT_SECRET;
|
||||||
delete process.env.OIDC_REDIRECT_URI;
|
delete process.env.OIDC_REDIRECT_URI;
|
||||||
delete process.env.NODE_ENV;
|
delete process.env.NODE_ENV;
|
||||||
|
delete process.env.BETTER_AUTH_URL;
|
||||||
delete process.env.NEXT_PUBLIC_APP_URL;
|
delete process.env.NEXT_PUBLIC_APP_URL;
|
||||||
delete process.env.NEXT_PUBLIC_API_URL;
|
delete process.env.NEXT_PUBLIC_API_URL;
|
||||||
delete process.env.TRUSTED_ORIGINS;
|
delete process.env.TRUSTED_ORIGINS;
|
||||||
@@ -95,7 +102,7 @@ describe("auth.config", () => {
|
|||||||
it("should throw when OIDC_ISSUER is missing", () => {
|
it("should throw when OIDC_ISSUER is missing", () => {
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/callback/authentik";
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER");
|
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER");
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC authentication is enabled");
|
expect(() => validateOidcConfig()).toThrow("OIDC authentication is enabled");
|
||||||
@@ -104,7 +111,7 @@ describe("auth.config", () => {
|
|||||||
it("should throw when OIDC_CLIENT_ID is missing", () => {
|
it("should throw when OIDC_CLIENT_ID is missing", () => {
|
||||||
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/callback/authentik";
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_CLIENT_ID");
|
expect(() => validateOidcConfig()).toThrow("OIDC_CLIENT_ID");
|
||||||
});
|
});
|
||||||
@@ -112,7 +119,7 @@ describe("auth.config", () => {
|
|||||||
it("should throw when OIDC_CLIENT_SECRET is missing", () => {
|
it("should throw when OIDC_CLIENT_SECRET is missing", () => {
|
||||||
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/callback/authentik";
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_CLIENT_SECRET");
|
expect(() => validateOidcConfig()).toThrow("OIDC_CLIENT_SECRET");
|
||||||
});
|
});
|
||||||
@@ -146,7 +153,7 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ISSUER = " ";
|
process.env.OIDC_ISSUER = " ";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/callback/authentik";
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER");
|
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER");
|
||||||
});
|
});
|
||||||
@@ -155,7 +162,7 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic";
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/callback/authentik";
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER must end with a trailing slash");
|
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER must end with a trailing slash");
|
||||||
expect(() => validateOidcConfig()).toThrow("https://auth.example.com/application/o/mosaic");
|
expect(() => validateOidcConfig()).toThrow("https://auth.example.com/application/o/mosaic");
|
||||||
@@ -165,7 +172,7 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/callback/authentik";
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).not.toThrow();
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
});
|
});
|
||||||
@@ -189,30 +196,30 @@ describe("auth.config", () => {
|
|||||||
expect(() => validateOidcConfig()).toThrow("Parse error:");
|
expect(() => validateOidcConfig()).toThrow("Parse error:");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw when OIDC_REDIRECT_URI path does not start with /auth/callback", () => {
|
it("should throw when OIDC_REDIRECT_URI path does not start with /auth/oauth2/callback", () => {
|
||||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/oauth/callback";
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/oauth/callback";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow(
|
expect(() => validateOidcConfig()).toThrow(
|
||||||
'OIDC_REDIRECT_URI path must start with "/auth/callback"'
|
'OIDC_REDIRECT_URI path must start with "/auth/oauth2/callback"'
|
||||||
);
|
);
|
||||||
expect(() => validateOidcConfig()).toThrow("/oauth/callback");
|
expect(() => validateOidcConfig()).toThrow("/oauth/callback");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should accept a valid OIDC_REDIRECT_URI with /auth/callback path", () => {
|
it("should accept a valid OIDC_REDIRECT_URI with /auth/oauth2/callback path", () => {
|
||||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/callback/authentik";
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).not.toThrow();
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should accept OIDC_REDIRECT_URI with exactly /auth/callback path", () => {
|
it("should accept OIDC_REDIRECT_URI with exactly /auth/oauth2/callback path", () => {
|
||||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/callback";
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).not.toThrow();
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should warn but not throw when using localhost in production", () => {
|
it("should warn but not throw when using localhost in production", () => {
|
||||||
process.env.NODE_ENV = "production";
|
process.env.NODE_ENV = "production";
|
||||||
process.env.OIDC_REDIRECT_URI = "http://localhost:3000/auth/callback/authentik";
|
process.env.OIDC_REDIRECT_URI = "http://localhost:3000/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||||
|
|
||||||
@@ -226,7 +233,7 @@ describe("auth.config", () => {
|
|||||||
|
|
||||||
it("should warn but not throw when using 127.0.0.1 in production", () => {
|
it("should warn but not throw when using 127.0.0.1 in production", () => {
|
||||||
process.env.NODE_ENV = "production";
|
process.env.NODE_ENV = "production";
|
||||||
process.env.OIDC_REDIRECT_URI = "http://127.0.0.1:3000/auth/callback/authentik";
|
process.env.OIDC_REDIRECT_URI = "http://127.0.0.1:3000/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||||
|
|
||||||
@@ -240,7 +247,7 @@ describe("auth.config", () => {
|
|||||||
|
|
||||||
it("should not warn about localhost when not in production", () => {
|
it("should not warn about localhost when not in production", () => {
|
||||||
process.env.NODE_ENV = "development";
|
process.env.NODE_ENV = "development";
|
||||||
process.env.OIDC_REDIRECT_URI = "http://localhost:3000/auth/callback/authentik";
|
process.env.OIDC_REDIRECT_URI = "http://localhost:3000/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||||
|
|
||||||
@@ -265,16 +272,19 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/callback/authentik";
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
const mockPrisma = {} as PrismaClient;
|
const mockPrisma = {} as PrismaClient;
|
||||||
createAuth(mockPrisma);
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
expect(mockGenericOAuth).toHaveBeenCalledOnce();
|
expect(mockGenericOAuth).toHaveBeenCalledOnce();
|
||||||
const callArgs = mockGenericOAuth.mock.calls[0][0] as {
|
const callArgs = mockGenericOAuth.mock.calls[0][0] as {
|
||||||
config: Array<{ pkce?: boolean }>;
|
config: Array<{ pkce?: boolean; redirectURI?: string }>;
|
||||||
};
|
};
|
||||||
expect(callArgs.config[0].pkce).toBe(true);
|
expect(callArgs.config[0].pkce).toBe(true);
|
||||||
|
expect(callArgs.config[0].redirectURI).toBe(
|
||||||
|
"https://app.example.com/auth/oauth2/callback/authentik"
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should not call genericOAuth when OIDC is disabled", () => {
|
it("should not call genericOAuth when OIDC is disabled", () => {
|
||||||
@@ -290,7 +300,7 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ENABLED = "true";
|
process.env.OIDC_ENABLED = "true";
|
||||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/callback/authentik";
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
// OIDC_CLIENT_ID deliberately not set
|
// OIDC_CLIENT_ID deliberately not set
|
||||||
|
|
||||||
// validateOidcConfig will throw first, so we need to bypass it
|
// validateOidcConfig will throw first, so we need to bypass it
|
||||||
@@ -307,7 +317,7 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ENABLED = "true";
|
process.env.OIDC_ENABLED = "true";
|
||||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/callback/authentik";
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
// OIDC_CLIENT_SECRET deliberately not set
|
// OIDC_CLIENT_SECRET deliberately not set
|
||||||
|
|
||||||
const mockPrisma = {} as PrismaClient;
|
const mockPrisma = {} as PrismaClient;
|
||||||
@@ -318,7 +328,7 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ENABLED = "true";
|
process.env.OIDC_ENABLED = "true";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/callback/authentik";
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
// OIDC_ISSUER deliberately not set
|
// OIDC_ISSUER deliberately not set
|
||||||
|
|
||||||
const mockPrisma = {} as PrismaClient;
|
const mockPrisma = {} as PrismaClient;
|
||||||
@@ -354,8 +364,7 @@ describe("auth.config", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it("should parse TRUSTED_ORIGINS comma-separated values", () => {
|
it("should parse TRUSTED_ORIGINS comma-separated values", () => {
|
||||||
process.env.TRUSTED_ORIGINS =
|
process.env.TRUSTED_ORIGINS = "https://app.mosaicstack.dev,https://api.mosaicstack.dev";
|
||||||
"https://app.mosaicstack.dev,https://api.mosaicstack.dev";
|
|
||||||
|
|
||||||
const origins = getTrustedOrigins();
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
@@ -364,8 +373,7 @@ describe("auth.config", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it("should trim whitespace from TRUSTED_ORIGINS entries", () => {
|
it("should trim whitespace from TRUSTED_ORIGINS entries", () => {
|
||||||
process.env.TRUSTED_ORIGINS =
|
process.env.TRUSTED_ORIGINS = " https://app.mosaicstack.dev , https://api.mosaicstack.dev ";
|
||||||
" https://app.mosaicstack.dev , https://api.mosaicstack.dev ";
|
|
||||||
|
|
||||||
const origins = getTrustedOrigins();
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
@@ -516,6 +524,21 @@ describe("auth.config", () => {
|
|||||||
expect(config.session.updateAge).toBe(7200);
|
expect(config.session.updateAge).toBe(7200);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should configure BetterAuth database ID generation as UUID", () => {
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
database: {
|
||||||
|
generateId: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.database.generateId).toBe("uuid");
|
||||||
|
});
|
||||||
|
|
||||||
it("should set httpOnly cookie attribute to true", () => {
|
it("should set httpOnly cookie attribute to true", () => {
|
||||||
const mockPrisma = {} as PrismaClient;
|
const mockPrisma = {} as PrismaClient;
|
||||||
createAuth(mockPrisma);
|
createAuth(mockPrisma);
|
||||||
@@ -552,6 +575,7 @@ describe("auth.config", () => {
|
|||||||
|
|
||||||
it("should set secure cookie attribute to true in production", () => {
|
it("should set secure cookie attribute to true in production", () => {
|
||||||
process.env.NODE_ENV = "production";
|
process.env.NODE_ENV = "production";
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://api.example.com";
|
||||||
const mockPrisma = {} as PrismaClient;
|
const mockPrisma = {} as PrismaClient;
|
||||||
createAuth(mockPrisma);
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
@@ -624,4 +648,69 @@ describe("auth.config", () => {
|
|||||||
expect(config.advanced.defaultCookieAttributes.domain).toBeUndefined();
|
expect(config.advanced.defaultCookieAttributes.domain).toBeUndefined();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe("getBetterAuthBaseUrl", () => {
|
||||||
|
it("should prefer BETTER_AUTH_URL when set", () => {
|
||||||
|
process.env.BETTER_AUTH_URL = "https://auth-base.example.com";
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://api.example.com";
|
||||||
|
|
||||||
|
expect(getBetterAuthBaseUrl()).toBe("https://auth-base.example.com");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should fall back to NEXT_PUBLIC_API_URL when BETTER_AUTH_URL is not set", () => {
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://api.example.com";
|
||||||
|
|
||||||
|
expect(getBetterAuthBaseUrl()).toBe("https://api.example.com");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw when base URL is invalid", () => {
|
||||||
|
process.env.BETTER_AUTH_URL = "not-a-url";
|
||||||
|
|
||||||
|
expect(() => getBetterAuthBaseUrl()).toThrow("BetterAuth base URL must be a valid URL");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw when base URL is missing in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
|
||||||
|
expect(() => getBetterAuthBaseUrl()).toThrow("Missing BetterAuth base URL in production");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw when base URL is not https in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
process.env.BETTER_AUTH_URL = "http://api.example.com";
|
||||||
|
|
||||||
|
expect(() => getBetterAuthBaseUrl()).toThrow(
|
||||||
|
"BetterAuth base URL must use https in production"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("createAuth - baseURL wiring", () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
mockBetterAuth.mockClear();
|
||||||
|
mockPrismaAdapter.mockClear();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should pass BETTER_AUTH_URL into BetterAuth config", () => {
|
||||||
|
process.env.BETTER_AUTH_URL = "https://api.mosaicstack.dev";
|
||||||
|
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as { baseURL?: string };
|
||||||
|
expect(config.baseURL).toBe("https://api.mosaicstack.dev");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should pass NEXT_PUBLIC_API_URL into BetterAuth config when BETTER_AUTH_URL is absent", () => {
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://api.fallback.dev";
|
||||||
|
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as { baseURL?: string };
|
||||||
|
expect(config.baseURL).toBe("https://api.fallback.dev");
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -13,6 +13,41 @@ const REQUIRED_OIDC_ENV_VARS = [
|
|||||||
"OIDC_REDIRECT_URI",
|
"OIDC_REDIRECT_URI",
|
||||||
] as const;
|
] as const;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve BetterAuth base URL from explicit auth URL or API URL.
|
||||||
|
* BetterAuth uses this to generate absolute callback/error URLs.
|
||||||
|
*/
|
||||||
|
export function getBetterAuthBaseUrl(): string | undefined {
|
||||||
|
const configured = process.env.BETTER_AUTH_URL ?? process.env.NEXT_PUBLIC_API_URL;
|
||||||
|
|
||||||
|
if (!configured || configured.trim() === "") {
|
||||||
|
if (process.env.NODE_ENV === "production") {
|
||||||
|
throw new Error(
|
||||||
|
"Missing BetterAuth base URL in production. Set BETTER_AUTH_URL (preferred) or NEXT_PUBLIC_API_URL."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
let parsed: URL;
|
||||||
|
try {
|
||||||
|
parsed = new URL(configured);
|
||||||
|
} catch (urlError: unknown) {
|
||||||
|
const detail = urlError instanceof Error ? urlError.message : String(urlError);
|
||||||
|
throw new Error(
|
||||||
|
`BetterAuth base URL must be a valid URL. Current value: "${configured}". Parse error: ${detail}.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.NODE_ENV === "production" && parsed.protocol !== "https:") {
|
||||||
|
throw new Error(
|
||||||
|
`BetterAuth base URL must use https in production. Current value: "${configured}".`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return parsed.origin;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Check if OIDC authentication is enabled via environment variable
|
* Check if OIDC authentication is enabled via environment variable
|
||||||
*/
|
*/
|
||||||
@@ -58,17 +93,17 @@ export function validateOidcConfig(): void {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Additional validation: OIDC_REDIRECT_URI must be a valid URL with /auth/callback path
|
// Additional validation: OIDC_REDIRECT_URI must be a valid URL with /auth/oauth2/callback path
|
||||||
validateRedirectUri();
|
validateRedirectUri();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Validates the OIDC_REDIRECT_URI environment variable.
|
* Validates the OIDC_REDIRECT_URI environment variable.
|
||||||
* - Must be a parseable URL
|
* - Must be a parseable URL
|
||||||
* - Path must start with /auth/callback
|
* - Path must start with /auth/oauth2/callback
|
||||||
* - Warns (but does not throw) if using localhost in production
|
* - Warns (but does not throw) if using localhost in production
|
||||||
*
|
*
|
||||||
* @throws Error if URL is invalid or path does not start with /auth/callback
|
* @throws Error if URL is invalid or path does not start with /auth/oauth2/callback
|
||||||
*/
|
*/
|
||||||
function validateRedirectUri(): void {
|
function validateRedirectUri(): void {
|
||||||
const redirectUri = process.env.OIDC_REDIRECT_URI;
|
const redirectUri = process.env.OIDC_REDIRECT_URI;
|
||||||
@@ -85,14 +120,14 @@ function validateRedirectUri(): void {
|
|||||||
throw new Error(
|
throw new Error(
|
||||||
`OIDC_REDIRECT_URI must be a valid URL. Current value: "${redirectUri}". ` +
|
`OIDC_REDIRECT_URI must be a valid URL. Current value: "${redirectUri}". ` +
|
||||||
`Parse error: ${detail}. ` +
|
`Parse error: ${detail}. ` +
|
||||||
`Example: "https://app.example.com/auth/callback/authentik".`
|
`Example: "https://api.example.com/auth/oauth2/callback/authentik".`
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!parsed.pathname.startsWith("/auth/callback")) {
|
if (!parsed.pathname.startsWith("/auth/oauth2/callback")) {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
`OIDC_REDIRECT_URI path must start with "/auth/callback". Current path: "${parsed.pathname}". ` +
|
`OIDC_REDIRECT_URI path must start with "/auth/oauth2/callback". Current path: "${parsed.pathname}". ` +
|
||||||
`Example: "https://app.example.com/auth/callback/authentik".`
|
`Example: "https://api.example.com/auth/oauth2/callback/authentik".`
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -119,6 +154,7 @@ function getOidcPlugins(): ReturnType<typeof genericOAuth>[] {
|
|||||||
const clientId = process.env.OIDC_CLIENT_ID;
|
const clientId = process.env.OIDC_CLIENT_ID;
|
||||||
const clientSecret = process.env.OIDC_CLIENT_SECRET;
|
const clientSecret = process.env.OIDC_CLIENT_SECRET;
|
||||||
const issuer = process.env.OIDC_ISSUER;
|
const issuer = process.env.OIDC_ISSUER;
|
||||||
|
const redirectUri = process.env.OIDC_REDIRECT_URI;
|
||||||
|
|
||||||
if (!clientId) {
|
if (!clientId) {
|
||||||
throw new Error("OIDC_CLIENT_ID is required when OIDC is enabled but was not set.");
|
throw new Error("OIDC_CLIENT_ID is required when OIDC is enabled but was not set.");
|
||||||
@@ -129,6 +165,9 @@ function getOidcPlugins(): ReturnType<typeof genericOAuth>[] {
|
|||||||
if (!issuer) {
|
if (!issuer) {
|
||||||
throw new Error("OIDC_ISSUER is required when OIDC is enabled but was not set.");
|
throw new Error("OIDC_ISSUER is required when OIDC is enabled but was not set.");
|
||||||
}
|
}
|
||||||
|
if (!redirectUri) {
|
||||||
|
throw new Error("OIDC_REDIRECT_URI is required when OIDC is enabled but was not set.");
|
||||||
|
}
|
||||||
|
|
||||||
return [
|
return [
|
||||||
genericOAuth({
|
genericOAuth({
|
||||||
@@ -138,6 +177,7 @@ function getOidcPlugins(): ReturnType<typeof genericOAuth>[] {
|
|||||||
clientId,
|
clientId,
|
||||||
clientSecret,
|
clientSecret,
|
||||||
discoveryUrl: `${issuer}.well-known/openid-configuration`,
|
discoveryUrl: `${issuer}.well-known/openid-configuration`,
|
||||||
|
redirectURI: redirectUri,
|
||||||
pkce: true,
|
pkce: true,
|
||||||
scopes: ["openid", "profile", "email"],
|
scopes: ["openid", "profile", "email"],
|
||||||
},
|
},
|
||||||
@@ -202,7 +242,10 @@ export function createAuth(prisma: PrismaClient) {
|
|||||||
// Validate OIDC configuration at startup - fail fast if misconfigured
|
// Validate OIDC configuration at startup - fail fast if misconfigured
|
||||||
validateOidcConfig();
|
validateOidcConfig();
|
||||||
|
|
||||||
|
const baseURL = getBetterAuthBaseUrl();
|
||||||
|
|
||||||
return betterAuth({
|
return betterAuth({
|
||||||
|
baseURL,
|
||||||
basePath: "/auth",
|
basePath: "/auth",
|
||||||
database: prismaAdapter(prisma, {
|
database: prismaAdapter(prisma, {
|
||||||
provider: "postgresql",
|
provider: "postgresql",
|
||||||
@@ -211,11 +254,19 @@ export function createAuth(prisma: PrismaClient) {
|
|||||||
enabled: true,
|
enabled: true,
|
||||||
},
|
},
|
||||||
plugins: [...getOidcPlugins()],
|
plugins: [...getOidcPlugins()],
|
||||||
|
logger: {
|
||||||
|
disabled: false,
|
||||||
|
level: "error",
|
||||||
|
},
|
||||||
session: {
|
session: {
|
||||||
expiresIn: 60 * 60 * 24 * 7, // 7 days absolute max
|
expiresIn: 60 * 60 * 24 * 7, // 7 days absolute max
|
||||||
updateAge: 60 * 60 * 2, // 2 hours — minimum session age before BetterAuth refreshes the expiry on next request
|
updateAge: 60 * 60 * 2, // 2 hours — minimum session age before BetterAuth refreshes the expiry on next request
|
||||||
},
|
},
|
||||||
advanced: {
|
advanced: {
|
||||||
|
database: {
|
||||||
|
// BetterAuth's default ID generator emits opaque strings; our auth tables use UUID PKs.
|
||||||
|
generateId: "uuid",
|
||||||
|
},
|
||||||
defaultCookieAttributes: {
|
defaultCookieAttributes: {
|
||||||
httpOnly: true,
|
httpOnly: true,
|
||||||
secure: process.env.NODE_ENV === "production",
|
secure: process.env.NODE_ENV === "production",
|
||||||
|
|||||||
@@ -102,11 +102,46 @@ describe("AuthController", () => {
|
|||||||
expect(err).toBeInstanceOf(HttpException);
|
expect(err).toBeInstanceOf(HttpException);
|
||||||
expect((err as HttpException).getStatus()).toBe(HttpStatus.INTERNAL_SERVER_ERROR);
|
expect((err as HttpException).getStatus()).toBe(HttpStatus.INTERNAL_SERVER_ERROR);
|
||||||
expect((err as HttpException).getResponse()).toBe(
|
expect((err as HttpException).getResponse()).toBe(
|
||||||
"Unable to complete authentication. Please try again in a moment.",
|
"Unable to complete authentication. Please try again in a moment."
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should preserve better-call status and body for handler APIError", async () => {
|
||||||
|
const apiError = {
|
||||||
|
statusCode: HttpStatus.BAD_REQUEST,
|
||||||
|
message: "Invalid OAuth configuration",
|
||||||
|
body: {
|
||||||
|
message: "Invalid OAuth configuration",
|
||||||
|
code: "INVALID_OAUTH_CONFIGURATION",
|
||||||
|
},
|
||||||
|
};
|
||||||
|
mockNodeHandler.mockRejectedValueOnce(apiError);
|
||||||
|
|
||||||
|
const mockRequest = {
|
||||||
|
method: "POST",
|
||||||
|
url: "/auth/sign-in/oauth2",
|
||||||
|
headers: {},
|
||||||
|
ip: "192.168.1.10",
|
||||||
|
socket: { remoteAddress: "192.168.1.10" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
expect.unreachable("Expected HttpException to be thrown");
|
||||||
|
} catch (err) {
|
||||||
|
expect(err).toBeInstanceOf(HttpException);
|
||||||
|
expect((err as HttpException).getStatus()).toBe(HttpStatus.BAD_REQUEST);
|
||||||
|
expect((err as HttpException).getResponse()).toMatchObject({
|
||||||
|
message: "Invalid OAuth configuration",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
it("should log warning and not throw when handler throws after headers sent", async () => {
|
it("should log warning and not throw when handler throws after headers sent", async () => {
|
||||||
const handlerError = new Error("Stream interrupted");
|
const handlerError = new Error("Stream interrupted");
|
||||||
mockNodeHandler.mockRejectedValueOnce(handlerError);
|
mockNodeHandler.mockRejectedValueOnce(handlerError);
|
||||||
@@ -142,9 +177,7 @@ describe("AuthController", () => {
|
|||||||
headersSent: false,
|
headersSent: false,
|
||||||
} as unknown as ExpressResponse;
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
await expect(controller.handleAuth(mockRequest, mockResponse)).rejects.toThrow(
|
await expect(controller.handleAuth(mockRequest, mockResponse)).rejects.toThrow(HttpException);
|
||||||
HttpException,
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -187,7 +220,7 @@ describe("AuthController", () => {
|
|||||||
OIDC_CLIENT_SECRET: "test-client-secret",
|
OIDC_CLIENT_SECRET: "test-client-secret",
|
||||||
OIDC_CLIENT_ID: "test-client-id",
|
OIDC_CLIENT_ID: "test-client-id",
|
||||||
OIDC_ISSUER: "https://auth.test.com/",
|
OIDC_ISSUER: "https://auth.test.com/",
|
||||||
OIDC_REDIRECT_URI: "https://app.test.com/auth/callback/authentik",
|
OIDC_REDIRECT_URI: "https://app.test.com/auth/oauth2/callback/authentik",
|
||||||
BETTER_AUTH_SECRET: "test-better-auth-secret",
|
BETTER_AUTH_SECRET: "test-better-auth-secret",
|
||||||
JWT_SECRET: "test-jwt-secret",
|
JWT_SECRET: "test-jwt-secret",
|
||||||
CSRF_SECRET: "test-csrf-secret",
|
CSRF_SECRET: "test-csrf-secret",
|
||||||
@@ -296,11 +329,9 @@ describe("AuthController", () => {
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
expect(() => controller.getSession(mockRequest as never)).toThrow(UnauthorizedException);
|
||||||
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
||||||
UnauthorizedException,
|
"Missing authentication context"
|
||||||
);
|
|
||||||
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
|
||||||
"Missing authentication context",
|
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -313,22 +344,18 @@ describe("AuthController", () => {
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
expect(() => controller.getSession(mockRequest as never)).toThrow(UnauthorizedException);
|
||||||
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
||||||
UnauthorizedException,
|
"Missing authentication context"
|
||||||
);
|
|
||||||
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
|
||||||
"Missing authentication context",
|
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw UnauthorizedException when both req.user and req.session are undefined", () => {
|
it("should throw UnauthorizedException when both req.user and req.session are undefined", () => {
|
||||||
const mockRequest = {};
|
const mockRequest = {};
|
||||||
|
|
||||||
|
expect(() => controller.getSession(mockRequest as never)).toThrow(UnauthorizedException);
|
||||||
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
||||||
UnauthorizedException,
|
"Missing authentication context"
|
||||||
);
|
|
||||||
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
|
||||||
"Missing authentication context",
|
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -401,9 +428,7 @@ describe("AuthController", () => {
|
|||||||
|
|
||||||
await controller.handleAuth(mockRequest, mockResponse);
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
|
||||||
expect(debugSpy).toHaveBeenCalledWith(
|
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("203.0.113.50"));
|
||||||
expect.stringContaining("203.0.113.50"),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should extract first IP from X-Forwarded-For with comma-separated IPs", async () => {
|
it("should extract first IP from X-Forwarded-For with comma-separated IPs", async () => {
|
||||||
@@ -423,13 +448,9 @@ describe("AuthController", () => {
|
|||||||
|
|
||||||
await controller.handleAuth(mockRequest, mockResponse);
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
|
||||||
expect(debugSpy).toHaveBeenCalledWith(
|
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("203.0.113.50"));
|
||||||
expect.stringContaining("203.0.113.50"),
|
|
||||||
);
|
|
||||||
// Ensure it does NOT contain the second IP in the extracted position
|
// Ensure it does NOT contain the second IP in the extracted position
|
||||||
expect(debugSpy).toHaveBeenCalledWith(
|
expect(debugSpy).toHaveBeenCalledWith(expect.not.stringContaining("70.41.3.18"));
|
||||||
expect.not.stringContaining("70.41.3.18"),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should extract first IP from X-Forwarded-For as array", async () => {
|
it("should extract first IP from X-Forwarded-For as array", async () => {
|
||||||
@@ -449,9 +470,7 @@ describe("AuthController", () => {
|
|||||||
|
|
||||||
await controller.handleAuth(mockRequest, mockResponse);
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
|
||||||
expect(debugSpy).toHaveBeenCalledWith(
|
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("203.0.113.50"));
|
||||||
expect.stringContaining("203.0.113.50"),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should fallback to req.ip when no X-Forwarded-For header", async () => {
|
it("should fallback to req.ip when no X-Forwarded-For header", async () => {
|
||||||
@@ -471,9 +490,7 @@ describe("AuthController", () => {
|
|||||||
|
|
||||||
await controller.handleAuth(mockRequest, mockResponse);
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
|
||||||
expect(debugSpy).toHaveBeenCalledWith(
|
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("192.168.1.100"));
|
||||||
expect.stringContaining("192.168.1.100"),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -123,6 +123,14 @@ export class AuthController {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
await handler(req, res);
|
await handler(req, res);
|
||||||
|
|
||||||
|
// BetterAuth writes responses directly — catch silent 500s that bypass NestJS error handling
|
||||||
|
if (res.statusCode >= 500) {
|
||||||
|
this.logger.error(
|
||||||
|
`BetterAuth returned ${String(res.statusCode)} for ${req.method} ${req.url} from ${clientIp}` +
|
||||||
|
` — check container stdout for '# SERVER_ERROR' details`
|
||||||
|
);
|
||||||
|
}
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
const message = error instanceof Error ? error.message : String(error);
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
const stack = error instanceof Error ? error.stack : undefined;
|
const stack = error instanceof Error ? error.stack : undefined;
|
||||||
@@ -133,6 +141,11 @@ export class AuthController {
|
|||||||
);
|
);
|
||||||
|
|
||||||
if (!res.headersSent) {
|
if (!res.headersSent) {
|
||||||
|
const mappedError = this.mapToHttpException(error);
|
||||||
|
if (mappedError) {
|
||||||
|
throw mappedError;
|
||||||
|
}
|
||||||
|
|
||||||
throw new HttpException(
|
throw new HttpException(
|
||||||
"Unable to complete authentication. Please try again in a moment.",
|
"Unable to complete authentication. Please try again in a moment.",
|
||||||
HttpStatus.INTERNAL_SERVER_ERROR
|
HttpStatus.INTERNAL_SERVER_ERROR
|
||||||
@@ -159,4 +172,45 @@ export class AuthController {
|
|||||||
// Fall back to direct IP
|
// Fall back to direct IP
|
||||||
return req.ip ?? req.socket.remoteAddress ?? "unknown";
|
return req.ip ?? req.socket.remoteAddress ?? "unknown";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Preserve known HTTP errors from BetterAuth/better-call instead of converting
|
||||||
|
* every failure into a generic 500.
|
||||||
|
*/
|
||||||
|
private mapToHttpException(error: unknown): HttpException | null {
|
||||||
|
if (error instanceof HttpException) {
|
||||||
|
return error;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!error || typeof error !== "object") {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const statusCode = "statusCode" in error ? error.statusCode : undefined;
|
||||||
|
if (!this.isHttpStatus(statusCode)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const responseBody = "body" in error && error.body !== undefined ? error.body : undefined;
|
||||||
|
if (
|
||||||
|
responseBody !== undefined &&
|
||||||
|
responseBody !== null &&
|
||||||
|
(typeof responseBody === "string" || typeof responseBody === "object")
|
||||||
|
) {
|
||||||
|
return new HttpException(responseBody, statusCode);
|
||||||
|
}
|
||||||
|
|
||||||
|
const message =
|
||||||
|
"message" in error && typeof error.message === "string" && error.message.length > 0
|
||||||
|
? error.message
|
||||||
|
: "Authentication request failed";
|
||||||
|
return new HttpException(message, statusCode);
|
||||||
|
}
|
||||||
|
|
||||||
|
private isHttpStatus(value: unknown): value is number {
|
||||||
|
if (typeof value !== "number" || !Number.isInteger(value)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return value >= 400 && value <= 599;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -410,7 +410,7 @@ describe("AuthService", () => {
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
it("should return session data for valid token", async () => {
|
it("should validate session token using secure BetterAuth cookie header", async () => {
|
||||||
const auth = service.getAuth();
|
const auth = service.getAuth();
|
||||||
const mockGetSession = vi.fn().mockResolvedValue(mockSessionData);
|
const mockGetSession = vi.fn().mockResolvedValue(mockSessionData);
|
||||||
auth.api = { getSession: mockGetSession } as any;
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
@@ -418,7 +418,58 @@ describe("AuthService", () => {
|
|||||||
const result = await service.verifySession("valid-token");
|
const result = await service.verifySession("valid-token");
|
||||||
|
|
||||||
expect(result).toEqual(mockSessionData);
|
expect(result).toEqual(mockSessionData);
|
||||||
|
expect(mockGetSession).toHaveBeenCalledTimes(1);
|
||||||
expect(mockGetSession).toHaveBeenCalledWith({
|
expect(mockGetSession).toHaveBeenCalledWith({
|
||||||
|
headers: {
|
||||||
|
cookie: "__Secure-better-auth.session_token=valid-token",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should preserve raw cookie token value without URL re-encoding", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockResolvedValue(mockSessionData);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("tok/with+=chars=");
|
||||||
|
|
||||||
|
expect(result).toEqual(mockSessionData);
|
||||||
|
expect(mockGetSession).toHaveBeenCalledWith({
|
||||||
|
headers: {
|
||||||
|
cookie: "__Secure-better-auth.session_token=tok/with+=chars=",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should fall back to Authorization header when cookie-based lookups miss", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi
|
||||||
|
.fn()
|
||||||
|
.mockResolvedValueOnce(null)
|
||||||
|
.mockResolvedValueOnce(null)
|
||||||
|
.mockResolvedValueOnce(null)
|
||||||
|
.mockResolvedValueOnce(mockSessionData);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("valid-token");
|
||||||
|
|
||||||
|
expect(result).toEqual(mockSessionData);
|
||||||
|
expect(mockGetSession).toHaveBeenNthCalledWith(1, {
|
||||||
|
headers: {
|
||||||
|
cookie: "__Secure-better-auth.session_token=valid-token",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
expect(mockGetSession).toHaveBeenNthCalledWith(2, {
|
||||||
|
headers: {
|
||||||
|
cookie: "better-auth.session_token=valid-token",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
expect(mockGetSession).toHaveBeenNthCalledWith(3, {
|
||||||
|
headers: {
|
||||||
|
cookie: "__Host-better-auth.session_token=valid-token",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
expect(mockGetSession).toHaveBeenNthCalledWith(4, {
|
||||||
headers: {
|
headers: {
|
||||||
authorization: "Bearer valid-token",
|
authorization: "Bearer valid-token",
|
||||||
},
|
},
|
||||||
@@ -517,14 +568,10 @@ describe("AuthService", () => {
|
|||||||
|
|
||||||
it("should re-throw 'certificate has expired' as infrastructure error (not auth)", async () => {
|
it("should re-throw 'certificate has expired' as infrastructure error (not auth)", async () => {
|
||||||
const auth = service.getAuth();
|
const auth = service.getAuth();
|
||||||
const mockGetSession = vi
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("certificate has expired"));
|
||||||
.fn()
|
|
||||||
.mockRejectedValue(new Error("certificate has expired"));
|
|
||||||
auth.api = { getSession: mockGetSession } as any;
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
await expect(service.verifySession("any-token")).rejects.toThrow(
|
await expect(service.verifySession("any-token")).rejects.toThrow("certificate has expired");
|
||||||
"certificate has expired"
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should re-throw 'Unauthorized: Access denied for user' as infrastructure error (not auth)", async () => {
|
it("should re-throw 'Unauthorized: Access denied for user' as infrastructure error (not auth)", async () => {
|
||||||
|
|||||||
@@ -21,6 +21,10 @@ interface VerifiedSession {
|
|||||||
session: Record<string, unknown>;
|
session: Record<string, unknown>;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
interface SessionHeaderCandidate {
|
||||||
|
headers: Record<string, string>;
|
||||||
|
}
|
||||||
|
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class AuthService {
|
export class AuthService {
|
||||||
private readonly logger = new Logger(AuthService.name);
|
private readonly logger = new Logger(AuthService.name);
|
||||||
@@ -103,16 +107,15 @@ export class AuthService {
|
|||||||
* Only known-safe auth errors return null; everything else propagates as 500.
|
* Only known-safe auth errors return null; everything else propagates as 500.
|
||||||
*/
|
*/
|
||||||
async verifySession(token: string): Promise<VerifiedSession | null> {
|
async verifySession(token: string): Promise<VerifiedSession | null> {
|
||||||
|
let sawNonError = false;
|
||||||
|
|
||||||
|
for (const candidate of this.buildSessionHeaderCandidates(token)) {
|
||||||
try {
|
try {
|
||||||
// TODO(#411): BetterAuth getSession returns opaque types — replace when upstream exports typed interfaces
|
// TODO(#411): BetterAuth getSession returns opaque types — replace when upstream exports typed interfaces
|
||||||
const session = await this.auth.api.getSession({
|
const session = await this.auth.api.getSession(candidate);
|
||||||
headers: {
|
|
||||||
authorization: `Bearer ${token}`,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!session) {
|
if (!session) {
|
||||||
return null;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@@ -120,19 +123,11 @@ export class AuthService {
|
|||||||
session: session.session as Record<string, unknown>,
|
session: session.session as Record<string, unknown>,
|
||||||
};
|
};
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
// Only known-safe auth errors return null
|
|
||||||
if (error instanceof Error) {
|
if (error instanceof Error) {
|
||||||
const msg = error.message.toLowerCase();
|
if (this.isExpectedAuthError(error.message)) {
|
||||||
const isExpectedAuthError =
|
continue;
|
||||||
msg.includes("invalid token") ||
|
}
|
||||||
msg.includes("token expired") ||
|
|
||||||
msg.includes("session expired") ||
|
|
||||||
msg.includes("session not found") ||
|
|
||||||
msg.includes("invalid session") ||
|
|
||||||
msg === "unauthorized" ||
|
|
||||||
msg === "expired";
|
|
||||||
|
|
||||||
if (!isExpectedAuthError) {
|
|
||||||
// Infrastructure or unexpected — propagate as 500
|
// Infrastructure or unexpected — propagate as 500
|
||||||
const safeMessage = (error.stack ?? error.message).replace(
|
const safeMessage = (error.stack ?? error.message).replace(
|
||||||
/Bearer\s+\S+/gi,
|
/Bearer\s+\S+/gi,
|
||||||
@@ -141,14 +136,55 @@ export class AuthService {
|
|||||||
this.logger.error("Session verification failed due to unexpected error", safeMessage);
|
this.logger.error("Session verification failed due to unexpected error", safeMessage);
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
|
||||||
// Non-Error thrown values — log for observability, treat as auth failure
|
// Non-Error thrown values — log once for observability, treat as auth failure
|
||||||
if (!(error instanceof Error)) {
|
if (!sawNonError) {
|
||||||
const errorDetail = typeof error === "string" ? error : JSON.stringify(error);
|
const errorDetail = typeof error === "string" ? error : JSON.stringify(error);
|
||||||
this.logger.warn("Session verification received non-Error thrown value", errorDetail);
|
this.logger.warn("Session verification received non-Error thrown value", errorDetail);
|
||||||
|
sawNonError = true;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private buildSessionHeaderCandidates(token: string): SessionHeaderCandidate[] {
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
cookie: `__Secure-better-auth.session_token=${token}`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
cookie: `better-auth.session_token=${token}`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
cookie: `__Host-better-auth.session_token=${token}`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
authorization: `Bearer ${token}`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
private isExpectedAuthError(message: string): boolean {
|
||||||
|
const normalized = message.toLowerCase();
|
||||||
|
return (
|
||||||
|
normalized.includes("invalid token") ||
|
||||||
|
normalized.includes("token expired") ||
|
||||||
|
normalized.includes("session expired") ||
|
||||||
|
normalized.includes("session not found") ||
|
||||||
|
normalized.includes("invalid session") ||
|
||||||
|
normalized === "unauthorized" ||
|
||||||
|
normalized === "expired"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -1,10 +1,18 @@
|
|||||||
import { Injectable, CanActivate, ExecutionContext, UnauthorizedException } from "@nestjs/common";
|
import {
|
||||||
|
Injectable,
|
||||||
|
CanActivate,
|
||||||
|
ExecutionContext,
|
||||||
|
UnauthorizedException,
|
||||||
|
Logger,
|
||||||
|
} from "@nestjs/common";
|
||||||
import { AuthService } from "../auth.service";
|
import { AuthService } from "../auth.service";
|
||||||
import type { AuthUser } from "@mosaic/shared";
|
import type { AuthUser } from "@mosaic/shared";
|
||||||
import type { MaybeAuthenticatedRequest } from "../types/better-auth-request.interface";
|
import type { MaybeAuthenticatedRequest } from "../types/better-auth-request.interface";
|
||||||
|
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class AuthGuard implements CanActivate {
|
export class AuthGuard implements CanActivate {
|
||||||
|
private readonly logger = new Logger(AuthGuard.name);
|
||||||
|
|
||||||
constructor(private readonly authService: AuthService) {}
|
constructor(private readonly authService: AuthService) {}
|
||||||
|
|
||||||
async canActivate(context: ExecutionContext): Promise<boolean> {
|
async canActivate(context: ExecutionContext): Promise<boolean> {
|
||||||
@@ -59,7 +67,8 @@ export class AuthGuard implements CanActivate {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Extract token from cookie (BetterAuth stores session token in better-auth.session_token cookie)
|
* Extract token from cookie.
|
||||||
|
* BetterAuth may prefix the cookie name with "__Secure-" when running on HTTPS.
|
||||||
*/
|
*/
|
||||||
private extractTokenFromCookie(request: MaybeAuthenticatedRequest): string | undefined {
|
private extractTokenFromCookie(request: MaybeAuthenticatedRequest): string | undefined {
|
||||||
// Express types `cookies` as `any`; cast to a known shape for type safety.
|
// Express types `cookies` as `any`; cast to a known shape for type safety.
|
||||||
@@ -68,8 +77,23 @@ export class AuthGuard implements CanActivate {
|
|||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
// BetterAuth uses 'better-auth.session_token' as the cookie name by default
|
// BetterAuth default cookie name is "better-auth.session_token"
|
||||||
return cookies["better-auth.session_token"];
|
// When Secure cookies are enabled, BetterAuth prefixes with "__Secure-".
|
||||||
|
const candidates = [
|
||||||
|
"__Secure-better-auth.session_token",
|
||||||
|
"better-auth.session_token",
|
||||||
|
"__Host-better-auth.session_token",
|
||||||
|
] as const;
|
||||||
|
|
||||||
|
for (const name of candidates) {
|
||||||
|
const token = cookies[name];
|
||||||
|
if (token) {
|
||||||
|
this.logger.debug(`Session cookie found: ${name}`);
|
||||||
|
return token;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ interface AuthenticatedRequest extends Request {
|
|||||||
user?: AuthenticatedUser;
|
user?: AuthenticatedUser;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Controller("api/v1/csrf")
|
@Controller("v1/csrf")
|
||||||
export class CsrfController {
|
export class CsrfController {
|
||||||
constructor(private readonly csrfService: CsrfService) {}
|
constructor(private readonly csrfService: CsrfService) {}
|
||||||
|
|
||||||
|
|||||||
@@ -174,17 +174,19 @@ describe("CsrfGuard", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
describe("Session binding validation", () => {
|
describe("Session binding validation", () => {
|
||||||
it("should reject when user is not authenticated", () => {
|
it("should allow when user context is not yet available (global guard ordering)", () => {
|
||||||
|
// CsrfGuard runs as APP_GUARD before per-controller AuthGuard,
|
||||||
|
// so request.user may not be populated. Double-submit cookie match
|
||||||
|
// is sufficient protection in this case.
|
||||||
const token = generateValidToken("user-123");
|
const token = generateValidToken("user-123");
|
||||||
const context = createContext(
|
const context = createContext(
|
||||||
"POST",
|
"POST",
|
||||||
{ "csrf-token": token },
|
{ "csrf-token": token },
|
||||||
{ "x-csrf-token": token },
|
{ "x-csrf-token": token },
|
||||||
false
|
false
|
||||||
// No userId - unauthenticated
|
// No userId - AuthGuard hasn't run yet
|
||||||
);
|
);
|
||||||
expect(() => guard.canActivate(context)).toThrow(ForbiddenException);
|
expect(guard.canActivate(context)).toBe(true);
|
||||||
expect(() => guard.canActivate(context)).toThrow("CSRF validation requires authentication");
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should reject token from different session", () => {
|
it("should reject token from different session", () => {
|
||||||
|
|||||||
@@ -89,20 +89,12 @@ export class CsrfGuard implements CanActivate {
|
|||||||
throw new ForbiddenException("CSRF token mismatch");
|
throw new ForbiddenException("CSRF token mismatch");
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate session binding via HMAC
|
// Validate session binding via HMAC when user context is available.
|
||||||
|
// CsrfGuard is a global guard (APP_GUARD) that runs before per-controller
|
||||||
|
// AuthGuard, so request.user may not be populated yet. In that case, the
|
||||||
|
// double-submit cookie match above is sufficient CSRF protection.
|
||||||
const userId = request.user?.id;
|
const userId = request.user?.id;
|
||||||
if (!userId) {
|
if (userId) {
|
||||||
this.logger.warn({
|
|
||||||
event: "CSRF_NO_USER_CONTEXT",
|
|
||||||
method: request.method,
|
|
||||||
path: request.path,
|
|
||||||
securityEvent: true,
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
});
|
|
||||||
|
|
||||||
throw new ForbiddenException("CSRF validation requires authentication");
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!this.csrfService.validateToken(cookieToken, userId)) {
|
if (!this.csrfService.validateToken(cookieToken, userId)) {
|
||||||
this.logger.warn({
|
this.logger.warn({
|
||||||
event: "CSRF_SESSION_BINDING_INVALID",
|
event: "CSRF_SESSION_BINDING_INVALID",
|
||||||
@@ -114,6 +106,14 @@ export class CsrfGuard implements CanActivate {
|
|||||||
|
|
||||||
throw new ForbiddenException("CSRF token not bound to session");
|
throw new ForbiddenException("CSRF token not bound to session");
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
this.logger.debug({
|
||||||
|
event: "CSRF_SKIP_SESSION_BINDING",
|
||||||
|
method: request.method,
|
||||||
|
path: request.path,
|
||||||
|
reason: "User context not yet available (global guard runs before AuthGuard)",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -137,13 +137,13 @@ describe("RLS Context Integration", () => {
|
|||||||
queries: ["findMany"],
|
queries: ["findMany"],
|
||||||
});
|
});
|
||||||
|
|
||||||
// Verify SET LOCAL was called
|
// Verify transaction-local set_config calls were made
|
||||||
expect(mockTransactionClient.$executeRaw).toHaveBeenCalledWith(
|
expect(mockTransactionClient.$executeRaw).toHaveBeenCalledWith(
|
||||||
expect.arrayContaining(["SET LOCAL app.current_user_id = ", ""]),
|
expect.arrayContaining(["SELECT set_config('app.current_user_id', ", ", true)"]),
|
||||||
userId
|
userId
|
||||||
);
|
);
|
||||||
expect(mockTransactionClient.$executeRaw).toHaveBeenCalledWith(
|
expect(mockTransactionClient.$executeRaw).toHaveBeenCalledWith(
|
||||||
expect.arrayContaining(["SET LOCAL app.current_workspace_id = ", ""]),
|
expect.arrayContaining(["SELECT set_config('app.current_workspace_id', ", ", true)"]),
|
||||||
workspaceId
|
workspaceId
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -80,7 +80,7 @@ describe("RlsContextInterceptor", () => {
|
|||||||
|
|
||||||
expect(result).toEqual({ data: "test response" });
|
expect(result).toEqual({ data: "test response" });
|
||||||
expect(mockTransactionClient.$executeRaw).toHaveBeenCalledWith(
|
expect(mockTransactionClient.$executeRaw).toHaveBeenCalledWith(
|
||||||
expect.arrayContaining(["SET LOCAL app.current_user_id = ", ""]),
|
expect.arrayContaining(["SELECT set_config('app.current_user_id', ", ", true)"]),
|
||||||
userId
|
userId
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
@@ -111,13 +111,13 @@ describe("RlsContextInterceptor", () => {
|
|||||||
// Check that user context was set
|
// Check that user context was set
|
||||||
expect(mockTransactionClient.$executeRaw).toHaveBeenNthCalledWith(
|
expect(mockTransactionClient.$executeRaw).toHaveBeenNthCalledWith(
|
||||||
1,
|
1,
|
||||||
expect.arrayContaining(["SET LOCAL app.current_user_id = ", ""]),
|
expect.arrayContaining(["SELECT set_config('app.current_user_id', ", ", true)"]),
|
||||||
userId
|
userId
|
||||||
);
|
);
|
||||||
// Check that workspace context was set
|
// Check that workspace context was set
|
||||||
expect(mockTransactionClient.$executeRaw).toHaveBeenNthCalledWith(
|
expect(mockTransactionClient.$executeRaw).toHaveBeenNthCalledWith(
|
||||||
2,
|
2,
|
||||||
expect.arrayContaining(["SET LOCAL app.current_workspace_id = ", ""]),
|
expect.arrayContaining(["SELECT set_config('app.current_workspace_id', ", ", true)"]),
|
||||||
workspaceId
|
workspaceId
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -100,12 +100,12 @@ export class RlsContextInterceptor implements NestInterceptor {
|
|||||||
this.prisma
|
this.prisma
|
||||||
.$transaction(
|
.$transaction(
|
||||||
async (tx) => {
|
async (tx) => {
|
||||||
// Set user context (always present for authenticated requests)
|
// Use set_config(..., true) so values are transaction-local and parameterized safely.
|
||||||
await tx.$executeRaw`SET LOCAL app.current_user_id = ${userId}`;
|
// Direct SET LOCAL with bind parameters produces invalid SQL on PostgreSQL.
|
||||||
|
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${userId}, true)`;
|
||||||
|
|
||||||
// Set workspace context (if present)
|
|
||||||
if (workspaceId) {
|
if (workspaceId) {
|
||||||
await tx.$executeRaw`SET LOCAL app.current_workspace_id = ${workspaceId}`;
|
await tx.$executeRaw`SELECT set_config('app.current_workspace_id', ${workspaceId}, true)`;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Propagate the transaction client via AsyncLocalStorage
|
// Propagate the transaction client via AsyncLocalStorage
|
||||||
|
|||||||
143
apps/api/src/dashboard/dashboard.controller.spec.ts
Normal file
143
apps/api/src/dashboard/dashboard.controller.spec.ts
Normal file
@@ -0,0 +1,143 @@
|
|||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { DashboardController } from "./dashboard.controller";
|
||||||
|
import { DashboardService } from "./dashboard.service";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { WorkspaceGuard } from "../common/guards/workspace.guard";
|
||||||
|
import { PermissionGuard } from "../common/guards/permission.guard";
|
||||||
|
import type { DashboardSummaryDto } from "./dto";
|
||||||
|
|
||||||
|
describe("DashboardController", () => {
  let controller: DashboardController;
  let service: DashboardService;

  // Workspace id used throughout; any well-formed UUID works here.
  const mockWorkspaceId = "550e8400-e29b-41d4-a716-446655440001";

  // Fixture populating every field of DashboardSummaryDto with non-empty data,
  // so a full pass-through from service to controller response is exercised.
  const mockSummary: DashboardSummaryDto = {
    metrics: {
      activeAgents: 3,
      tasksCompleted: 12,
      totalTasks: 25,
      tasksInProgress: 5,
      activeProjects: 4,
      errorRate: 2.5,
    },
    recentActivity: [
      {
        id: "550e8400-e29b-41d4-a716-446655440010",
        action: "CREATED",
        entityType: "TASK",
        entityId: "550e8400-e29b-41d4-a716-446655440011",
        details: { title: "New task" },
        userId: "550e8400-e29b-41d4-a716-446655440002",
        createdAt: "2026-02-22T12:00:00.000Z",
      },
    ],
    activeJobs: [
      {
        id: "550e8400-e29b-41d4-a716-446655440020",
        type: "code-task",
        status: "RUNNING",
        progressPercent: 45,
        createdAt: "2026-02-22T11:00:00.000Z",
        updatedAt: "2026-02-22T11:30:00.000Z",
        steps: [
          {
            id: "550e8400-e29b-41d4-a716-446655440030",
            name: "Setup",
            status: "COMPLETED",
            phase: "SETUP",
          },
        ],
      },
    ],
    tokenBudget: [
      {
        model: "agent-1",
        used: 5000,
        limit: 10000,
      },
    ],
  };

  // Service double — only getSummary is called by this controller.
  const mockDashboardService = {
    getSummary: vi.fn(),
  };

  // Guard doubles that always allow, so tests exercise only controller logic.
  const mockAuthGuard = {
    canActivate: vi.fn(() => true),
  };

  const mockWorkspaceGuard = {
    canActivate: vi.fn(() => true),
  };

  const mockPermissionGuard = {
    canActivate: vi.fn(() => true),
  };

  beforeEach(async () => {
    // Build a minimal Nest testing module with the real controller, the
    // service double, and all three guards overridden to no-ops.
    const module: TestingModule = await Test.createTestingModule({
      controllers: [DashboardController],
      providers: [
        {
          provide: DashboardService,
          useValue: mockDashboardService,
        },
      ],
    })
      .overrideGuard(AuthGuard)
      .useValue(mockAuthGuard)
      .overrideGuard(WorkspaceGuard)
      .useValue(mockWorkspaceGuard)
      .overrideGuard(PermissionGuard)
      .useValue(mockPermissionGuard)
      .compile();

    controller = module.get<DashboardController>(DashboardController);
    service = module.get<DashboardService>(DashboardService);

    // Drop call history accumulated during module setup; clearAllMocks keeps
    // mock implementations (e.g. the guards' `() => true`) intact.
    vi.clearAllMocks();
  });

  it("should be defined", () => {
    expect(controller).toBeDefined();
  });

  describe("getSummary", () => {
    it("should return dashboard summary for workspace", async () => {
      mockDashboardService.getSummary.mockResolvedValue(mockSummary);

      const result = await controller.getSummary(mockWorkspaceId);

      // Controller must pass the summary through unchanged and forward
      // the workspace id to the service verbatim.
      expect(result).toEqual(mockSummary);
      expect(service.getSummary).toHaveBeenCalledWith(mockWorkspaceId);
    });

    it("should return empty arrays when no data exists", async () => {
      // Zeroed/empty summary: verifies empty collections survive the
      // controller untouched (no accidental defaults or filtering).
      const emptySummary: DashboardSummaryDto = {
        metrics: {
          activeAgents: 0,
          tasksCompleted: 0,
          totalTasks: 0,
          tasksInProgress: 0,
          activeProjects: 0,
          errorRate: 0,
        },
        recentActivity: [],
        activeJobs: [],
        tokenBudget: [],
      };

      mockDashboardService.getSummary.mockResolvedValue(emptySummary);

      const result = await controller.getSummary(mockWorkspaceId);

      expect(result).toEqual(emptySummary);
      expect(result.metrics.errorRate).toBe(0);
      expect(result.recentActivity).toHaveLength(0);
      expect(result.activeJobs).toHaveLength(0);
      expect(result.tokenBudget).toHaveLength(0);
    });
  });
});
|
||||||
35
apps/api/src/dashboard/dashboard.controller.ts
Normal file
35
apps/api/src/dashboard/dashboard.controller.ts
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
import { Controller, Get, UseGuards, BadRequestException } from "@nestjs/common";
|
||||||
|
import { DashboardService } from "./dashboard.service";
|
||||||
|
import { AuthGuard } from "../auth/guards/auth.guard";
|
||||||
|
import { WorkspaceGuard, PermissionGuard } from "../common/guards";
|
||||||
|
import { Workspace, Permission, RequirePermission } from "../common/decorators";
|
||||||
|
import type { DashboardSummaryDto } from "./dto";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Controller for dashboard endpoints.
|
||||||
|
* Returns aggregated summary data for the workspace dashboard.
|
||||||
|
*
|
||||||
|
* Guards are applied in order:
|
||||||
|
* 1. AuthGuard - Verifies user authentication
|
||||||
|
* 2. WorkspaceGuard - Validates workspace access and sets RLS context
|
||||||
|
* 3. PermissionGuard - Checks role-based permissions
|
||||||
|
*/
|
||||||
|
@Controller("dashboard")
|
||||||
|
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
|
||||||
|
export class DashboardController {
|
||||||
|
constructor(private readonly dashboardService: DashboardService) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET /api/dashboard/summary
|
||||||
|
* Returns aggregated metrics, recent activity, active jobs, and token budgets
|
||||||
|
* Requires: Any workspace member (including GUEST)
|
||||||
|
*/
|
||||||
|
@Get("summary")
|
||||||
|
@RequirePermission(Permission.WORKSPACE_ANY)
|
||||||
|
async getSummary(@Workspace() workspaceId: string | undefined): Promise<DashboardSummaryDto> {
|
||||||
|
if (!workspaceId) {
|
||||||
|
throw new BadRequestException("Workspace context required");
|
||||||
|
}
|
||||||
|
return this.dashboardService.getSummary(workspaceId);
|
||||||
|
}
|
||||||
|
}
|
||||||
13
apps/api/src/dashboard/dashboard.module.ts
Normal file
13
apps/api/src/dashboard/dashboard.module.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
import { Module } from "@nestjs/common";
|
||||||
|
import { DashboardController } from "./dashboard.controller";
|
||||||
|
import { DashboardService } from "./dashboard.service";
|
||||||
|
import { PrismaModule } from "../prisma/prisma.module";
|
||||||
|
import { AuthModule } from "../auth/auth.module";
|
||||||
|
|
||||||
|
/**
 * Dashboard feature module.
 *
 * Wires the DashboardController/DashboardService pair. PrismaModule
 * provides database access for the aggregation queries; AuthModule is
 * imported so the controller's auth guards can resolve their
 * dependencies (NOTE(review): presumed — confirm against AuthModule's
 * exports). DashboardService is exported for reuse by other modules.
 */
@Module({
  imports: [PrismaModule, AuthModule],
  controllers: [DashboardController],
  providers: [DashboardService],
  exports: [DashboardService],
})
export class DashboardModule {}
|
||||||
187
apps/api/src/dashboard/dashboard.service.ts
Normal file
187
apps/api/src/dashboard/dashboard.service.ts
Normal file
@@ -0,0 +1,187 @@
|
|||||||
|
import { Injectable } from "@nestjs/common";
|
||||||
|
import { AgentStatus, ProjectStatus, RunnerJobStatus, TaskStatus } from "@prisma/client";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import type {
|
||||||
|
DashboardSummaryDto,
|
||||||
|
ActiveJobDto,
|
||||||
|
RecentActivityDto,
|
||||||
|
TokenBudgetEntryDto,
|
||||||
|
} from "./dto";
|
||||||
|
|
||||||
|
/**
 * Service for aggregating dashboard summary data.
 * Executes all queries in parallel to minimize latency.
 */
@Injectable()
export class DashboardService {
  constructor(private readonly prisma: PrismaService) {}

  /**
   * Get aggregated dashboard summary for a workspace.
   *
   * Runs ten independent Prisma queries concurrently via Promise.all.
   * The array destructuring below is POSITIONAL: the order of names must
   * match the order of queries in the Promise.all array exactly.
   *
   * @param workspaceId - UUID of the workspace to aggregate; every query
   *                      below is scoped by it.
   * @returns Metrics, recent activity (last 10 entries), active jobs with
   *          their steps, and open token budgets.
   */
  async getSummary(workspaceId: string): Promise<DashboardSummaryDto> {
    const now = new Date();
    // 24h lookback window used for the error-rate calculation.
    const oneDayAgo = new Date(now.getTime() - 24 * 60 * 60 * 1000);

    // Execute all queries in parallel
    const [
      activeAgents,
      tasksCompleted,
      totalTasks,
      tasksInProgress,
      activeProjects,
      failedJobsLast24h,
      totalJobsLast24h,
      recentActivityRows,
      activeJobRows,
      tokenBudgetRows,
    ] = await Promise.all([
      // Active agents: IDLE, WORKING, WAITING
      this.prisma.agent.count({
        where: {
          workspaceId,
          status: { in: [AgentStatus.IDLE, AgentStatus.WORKING, AgentStatus.WAITING] },
        },
      }),

      // Tasks completed (all-time, not windowed)
      this.prisma.task.count({
        where: {
          workspaceId,
          status: TaskStatus.COMPLETED,
        },
      }),

      // Total tasks
      this.prisma.task.count({
        where: { workspaceId },
      }),

      // Tasks in progress
      this.prisma.task.count({
        where: {
          workspaceId,
          status: TaskStatus.IN_PROGRESS,
        },
      }),

      // Active projects
      this.prisma.project.count({
        where: {
          workspaceId,
          status: ProjectStatus.ACTIVE,
        },
      }),

      // Failed jobs in last 24h (for error rate)
      this.prisma.runnerJob.count({
        where: {
          workspaceId,
          status: RunnerJobStatus.FAILED,
          createdAt: { gte: oneDayAgo },
        },
      }),

      // Total jobs in last 24h (for error rate)
      this.prisma.runnerJob.count({
        where: {
          workspaceId,
          createdAt: { gte: oneDayAgo },
        },
      }),

      // Recent activity: last 10 entries
      this.prisma.activityLog.findMany({
        where: { workspaceId },
        orderBy: { createdAt: "desc" },
        take: 10,
      }),

      // Active jobs: PENDING, QUEUED, RUNNING with steps
      // NOTE(review): no `take` limit here — a workspace with many active
      // jobs returns all of them; confirm this is intended.
      this.prisma.runnerJob.findMany({
        where: {
          workspaceId,
          status: {
            in: [RunnerJobStatus.PENDING, RunnerJobStatus.QUEUED, RunnerJobStatus.RUNNING],
          },
        },
        include: {
          steps: {
            select: {
              id: true,
              name: true,
              status: true,
              phase: true,
            },
            orderBy: { ordinal: "asc" },
          },
        },
        orderBy: { createdAt: "desc" },
      }),

      // Token budgets for workspace (active, not yet completed)
      this.prisma.tokenBudget.findMany({
        where: {
          workspaceId,
          completedAt: null,
        },
        select: {
          agentId: true,
          totalTokensUsed: true,
          allocatedTokens: true,
        },
      }),
    ]);

    // Compute error rate as a percentage; guard against divide-by-zero
    // when no jobs ran in the window.
    const errorRate = totalJobsLast24h > 0 ? (failedJobsLast24h / totalJobsLast24h) * 100 : 0;

    // Map recent activity rows to DTOs (dates serialized as ISO strings).
    const recentActivity: RecentActivityDto[] = recentActivityRows.map((row) => ({
      id: row.id,
      action: row.action,
      entityType: row.entityType,
      entityId: row.entityId,
      details: row.details as Record<string, unknown> | null,
      userId: row.userId,
      createdAt: row.createdAt.toISOString(),
    }));

    // Map active jobs (RunnerJob lacks updatedAt; use startedAt or createdAt as proxy)
    const activeJobs: ActiveJobDto[] = activeJobRows.map((row) => ({
      id: row.id,
      type: row.type,
      status: row.status,
      progressPercent: row.progressPercent,
      createdAt: row.createdAt.toISOString(),
      updatedAt: (row.startedAt ?? row.createdAt).toISOString(),
      steps: row.steps.map((step) => ({
        id: step.id,
        name: step.name,
        status: step.status,
        phase: step.phase,
      })),
    }));

    // Map token budget entries.
    // NOTE(review): the DTO field is named `model` but is populated with
    // the budget's agentId — confirm the intended semantics / naming.
    const tokenBudget: TokenBudgetEntryDto[] = tokenBudgetRows.map((row) => ({
      model: row.agentId,
      used: row.totalTokensUsed,
      limit: row.allocatedTokens,
    }));

    return {
      metrics: {
        activeAgents,
        tasksCompleted,
        totalTasks,
        tasksInProgress,
        activeProjects,
        // Round to two decimal places for presentation.
        errorRate: Math.round(errorRate * 100) / 100,
      },
      recentActivity,
      activeJobs,
      tokenBudget,
    };
  }
}
|
||||||
53
apps/api/src/dashboard/dto/dashboard-summary.dto.ts
Normal file
53
apps/api/src/dashboard/dto/dashboard-summary.dto.ts
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
/**
|
||||||
|
* Dashboard Summary DTO
|
||||||
|
* Defines the response shape for the dashboard summary endpoint.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
 * Headline metric counters for the dashboard, computed by
 * DashboardService.getSummary.
 */
export class DashboardMetricsDto {
  // Agents in an active state (IDLE, WORKING or WAITING).
  activeAgents!: number;
  // Tasks with status COMPLETED (all-time, per workspace).
  tasksCompleted!: number;
  // All tasks in the workspace, regardless of status.
  totalTasks!: number;
  // Tasks currently IN_PROGRESS.
  tasksInProgress!: number;
  // Projects with status ACTIVE.
  activeProjects!: number;
  // Failed runner jobs as a percentage of all jobs in the last 24h,
  // rounded to two decimal places.
  errorRate!: number;
}
|
||||||
|
|
||||||
|
/**
 * One activity-log entry; DashboardSummaryDto.recentActivity holds the
 * latest entries newest-first.
 */
export class RecentActivityDto {
  id!: string;
  // Verb describing what happened, e.g. "CREATED".
  action!: string;
  // Kind of entity acted on, e.g. "TASK".
  entityType!: string;
  entityId!: string;
  // Free-form extra payload carried by the activity log; null when absent.
  details!: Record<string, unknown> | null;
  userId!: string;
  // ISO-8601 timestamp (serialized via Date.toISOString()).
  createdAt!: string;
}
|
||||||
|
|
||||||
|
/** One step of an active runner job, ordered by the step's ordinal. */
export class ActiveJobStepDto {
  id!: string;
  name!: string;
  status!: string;
  phase!: string;
}
|
||||||
|
|
||||||
|
/** A runner job in a non-terminal state (PENDING, QUEUED or RUNNING). */
export class ActiveJobDto {
  id!: string;
  type!: string;
  status!: string;
  progressPercent!: number;
  // ISO-8601 timestamp.
  createdAt!: string;
  // Derived from startedAt (falling back to createdAt when the job has not
  // started) — the RunnerJob model has no updatedAt column.
  updatedAt!: string;
  steps!: ActiveJobStepDto[];
}
|
||||||
|
|
||||||
|
/**
 * Token usage vs. allocation for one open budget.
 * NOTE(review): `model` is populated with the budget's agentId by
 * DashboardService — confirm whether the field name is intentional.
 */
export class TokenBudgetEntryDto {
  model!: string;
  used!: number;
  limit!: number;
}
|
||||||
|
|
||||||
|
/** Response shape of GET /api/dashboard/summary. */
export class DashboardSummaryDto {
  metrics!: DashboardMetricsDto;
  recentActivity!: RecentActivityDto[];
  activeJobs!: ActiveJobDto[];
  tokenBudget!: TokenBudgetEntryDto[];
}
|
||||||
1
apps/api/src/dashboard/dto/index.ts
Normal file
1
apps/api/src/dashboard/dto/index.ts
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export * from "./dashboard-summary.dto";
|
||||||
@@ -12,7 +12,7 @@ import type { AuthenticatedRequest } from "../common/types/user.types";
|
|||||||
import type { CommandMessageDetails, CommandResponse } from "./types/message.types";
|
import type { CommandMessageDetails, CommandResponse } from "./types/message.types";
|
||||||
import type { FederationMessageStatus } from "@prisma/client";
|
import type { FederationMessageStatus } from "@prisma/client";
|
||||||
|
|
||||||
@Controller("api/v1/federation")
|
@Controller("v1/federation")
|
||||||
export class CommandController {
|
export class CommandController {
|
||||||
private readonly logger = new Logger(CommandController.name);
|
private readonly logger = new Logger(CommandController.name);
|
||||||
|
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ import {
|
|||||||
IncomingEventAckDto,
|
IncomingEventAckDto,
|
||||||
} from "./dto/event.dto";
|
} from "./dto/event.dto";
|
||||||
|
|
||||||
@Controller("api/v1/federation")
|
@Controller("v1/federation")
|
||||||
export class EventController {
|
export class EventController {
|
||||||
private readonly logger = new Logger(EventController.name);
|
private readonly logger = new Logger(EventController.name);
|
||||||
|
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ import {
|
|||||||
ValidateFederatedTokenDto,
|
ValidateFederatedTokenDto,
|
||||||
} from "./dto/federated-auth.dto";
|
} from "./dto/federated-auth.dto";
|
||||||
|
|
||||||
@Controller("api/v1/federation/auth")
|
@Controller("v1/federation/auth")
|
||||||
export class FederationAuthController {
|
export class FederationAuthController {
|
||||||
private readonly logger = new Logger(FederationAuthController.name);
|
private readonly logger = new Logger(FederationAuthController.name);
|
||||||
|
|
||||||
|
|||||||
@@ -27,7 +27,7 @@ import {
|
|||||||
} from "./dto/connection.dto";
|
} from "./dto/connection.dto";
|
||||||
import { FederationConnectionStatus } from "@prisma/client";
|
import { FederationConnectionStatus } from "@prisma/client";
|
||||||
|
|
||||||
@Controller("api/v1/federation")
|
@Controller("v1/federation")
|
||||||
export class FederationController {
|
export class FederationController {
|
||||||
private readonly logger = new Logger(FederationController.name);
|
private readonly logger = new Logger(FederationController.name);
|
||||||
|
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ import type { AuthenticatedRequest } from "../common/types/user.types";
|
|||||||
import type { QueryMessageDetails, QueryResponse } from "./types/message.types";
|
import type { QueryMessageDetails, QueryResponse } from "./types/message.types";
|
||||||
import type { FederationMessageStatus } from "@prisma/client";
|
import type { FederationMessageStatus } from "@prisma/client";
|
||||||
|
|
||||||
@Controller("api/v1/federation")
|
@Controller("v1/federation")
|
||||||
export class QueryController {
|
export class QueryController {
|
||||||
private readonly logger = new Logger(QueryController.name);
|
private readonly logger = new Logger(QueryController.name);
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import { NestFactory } from "@nestjs/core";
|
import { NestFactory } from "@nestjs/core";
|
||||||
import { ValidationPipe } from "@nestjs/common";
|
import { RequestMethod, ValidationPipe } from "@nestjs/common";
|
||||||
import cookieParser from "cookie-parser";
|
import cookieParser from "cookie-parser";
|
||||||
import { AppModule } from "./app.module";
|
import { AppModule } from "./app.module";
|
||||||
import { getTrustedOrigins } from "./auth/auth.config";
|
import { getTrustedOrigins } from "./auth/auth.config";
|
||||||
@@ -47,10 +47,22 @@ async function bootstrap() {
|
|||||||
|
|
||||||
app.useGlobalFilters(new GlobalExceptionFilter());
|
app.useGlobalFilters(new GlobalExceptionFilter());
|
||||||
|
|
||||||
|
// Set global API prefix — all routes get /api/* except auth and health
|
||||||
|
// Auth routes are excluded because BetterAuth expects /auth/* paths
|
||||||
|
// Health is excluded because Docker healthchecks hit /health directly
|
||||||
|
app.setGlobalPrefix("api", {
|
||||||
|
exclude: [
|
||||||
|
{ path: "health", method: RequestMethod.GET },
|
||||||
|
{ path: "auth/(.*)", method: RequestMethod.ALL },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
// Configure CORS for cookie-based authentication
|
// Configure CORS for cookie-based authentication
|
||||||
// Origin list is shared with BetterAuth trustedOrigins via getTrustedOrigins()
|
// Origin list is shared with BetterAuth trustedOrigins via getTrustedOrigins()
|
||||||
|
const trustedOrigins = getTrustedOrigins();
|
||||||
|
console.log(`[CORS] Trusted origins: ${JSON.stringify(trustedOrigins)}`);
|
||||||
app.enableCors({
|
app.enableCors({
|
||||||
origin: getTrustedOrigins(),
|
origin: trustedOrigins,
|
||||||
credentials: true, // Required for cookie-based authentication
|
credentials: true, // Required for cookie-based authentication
|
||||||
methods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
|
methods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
|
||||||
allowedHeaders: ["Content-Type", "Authorization", "Cookie", "X-CSRF-Token", "X-Workspace-Id"],
|
allowedHeaders: ["Content-Type", "Authorization", "Cookie", "X-CSRF-Token", "X-Workspace-Id"],
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { RunnerJobsService } from "./runner-jobs.service";
|
|||||||
import { PrismaModule } from "../prisma/prisma.module";
|
import { PrismaModule } from "../prisma/prisma.module";
|
||||||
import { BullMqModule } from "../bullmq/bullmq.module";
|
import { BullMqModule } from "../bullmq/bullmq.module";
|
||||||
import { AuthModule } from "../auth/auth.module";
|
import { AuthModule } from "../auth/auth.module";
|
||||||
|
import { WebSocketModule } from "../websocket/websocket.module";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Runner Jobs Module
|
* Runner Jobs Module
|
||||||
@@ -12,7 +13,7 @@ import { AuthModule } from "../auth/auth.module";
|
|||||||
* for asynchronous job processing.
|
* for asynchronous job processing.
|
||||||
*/
|
*/
|
||||||
@Module({
|
@Module({
|
||||||
imports: [PrismaModule, BullMqModule, AuthModule],
|
imports: [PrismaModule, BullMqModule, AuthModule, WebSocketModule],
|
||||||
controllers: [RunnerJobsController],
|
controllers: [RunnerJobsController],
|
||||||
providers: [RunnerJobsService],
|
providers: [RunnerJobsService],
|
||||||
exports: [RunnerJobsService],
|
exports: [RunnerJobsService],
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { Test, TestingModule } from "@nestjs/testing";
|
|||||||
import { RunnerJobsService } from "./runner-jobs.service";
|
import { RunnerJobsService } from "./runner-jobs.service";
|
||||||
import { PrismaService } from "../prisma/prisma.service";
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
import { BullMqService } from "../bullmq/bullmq.service";
|
import { BullMqService } from "../bullmq/bullmq.service";
|
||||||
|
import { WebSocketGateway } from "../websocket/websocket.gateway";
|
||||||
import { RunnerJobStatus } from "@prisma/client";
|
import { RunnerJobStatus } from "@prisma/client";
|
||||||
import { ConflictException, BadRequestException } from "@nestjs/common";
|
import { ConflictException, BadRequestException } from "@nestjs/common";
|
||||||
|
|
||||||
@@ -19,6 +20,12 @@ describe("RunnerJobsService - Concurrency", () => {
|
|||||||
getQueue: vi.fn(),
|
getQueue: vi.fn(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const mockWebSocketGateway = {
|
||||||
|
emitJobCreated: vi.fn(),
|
||||||
|
emitJobStatusChanged: vi.fn(),
|
||||||
|
emitJobProgress: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
const module: TestingModule = await Test.createTestingModule({
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
providers: [
|
providers: [
|
||||||
@@ -37,6 +44,10 @@ describe("RunnerJobsService - Concurrency", () => {
|
|||||||
provide: BullMqService,
|
provide: BullMqService,
|
||||||
useValue: mockBullMqService,
|
useValue: mockBullMqService,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
provide: WebSocketGateway,
|
||||||
|
useValue: mockWebSocketGateway,
|
||||||
|
},
|
||||||
],
|
],
|
||||||
}).compile();
|
}).compile();
|
||||||
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { Test, TestingModule } from "@nestjs/testing";
|
|||||||
import { RunnerJobsService } from "./runner-jobs.service";
|
import { RunnerJobsService } from "./runner-jobs.service";
|
||||||
import { PrismaService } from "../prisma/prisma.service";
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
import { BullMqService } from "../bullmq/bullmq.service";
|
import { BullMqService } from "../bullmq/bullmq.service";
|
||||||
|
import { WebSocketGateway } from "../websocket/websocket.gateway";
|
||||||
import { RunnerJobStatus } from "@prisma/client";
|
import { RunnerJobStatus } from "@prisma/client";
|
||||||
import { NotFoundException, BadRequestException } from "@nestjs/common";
|
import { NotFoundException, BadRequestException } from "@nestjs/common";
|
||||||
import { CreateJobDto, QueryJobsDto } from "./dto";
|
import { CreateJobDto, QueryJobsDto } from "./dto";
|
||||||
@@ -32,6 +33,12 @@ describe("RunnerJobsService", () => {
|
|||||||
getQueue: vi.fn(),
|
getQueue: vi.fn(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const mockWebSocketGateway = {
|
||||||
|
emitJobCreated: vi.fn(),
|
||||||
|
emitJobStatusChanged: vi.fn(),
|
||||||
|
emitJobProgress: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
const module: TestingModule = await Test.createTestingModule({
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
providers: [
|
providers: [
|
||||||
@@ -44,6 +51,10 @@ describe("RunnerJobsService", () => {
|
|||||||
provide: BullMqService,
|
provide: BullMqService,
|
||||||
useValue: mockBullMqService,
|
useValue: mockBullMqService,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
provide: WebSocketGateway,
|
||||||
|
useValue: mockWebSocketGateway,
|
||||||
|
},
|
||||||
],
|
],
|
||||||
}).compile();
|
}).compile();
|
||||||
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { Prisma, RunnerJobStatus } from "@prisma/client";
|
|||||||
import { Response } from "express";
|
import { Response } from "express";
|
||||||
import { PrismaService } from "../prisma/prisma.service";
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
import { BullMqService } from "../bullmq/bullmq.service";
|
import { BullMqService } from "../bullmq/bullmq.service";
|
||||||
|
import { WebSocketGateway } from "../websocket/websocket.gateway";
|
||||||
import { QUEUE_NAMES } from "../bullmq/queues";
|
import { QUEUE_NAMES } from "../bullmq/queues";
|
||||||
import { ConcurrentUpdateException } from "../common/exceptions/concurrent-update.exception";
|
import { ConcurrentUpdateException } from "../common/exceptions/concurrent-update.exception";
|
||||||
import type { CreateJobDto, QueryJobsDto } from "./dto";
|
import type { CreateJobDto, QueryJobsDto } from "./dto";
|
||||||
@@ -14,7 +15,8 @@ import type { CreateJobDto, QueryJobsDto } from "./dto";
|
|||||||
export class RunnerJobsService {
|
export class RunnerJobsService {
|
||||||
constructor(
|
constructor(
|
||||||
private readonly prisma: PrismaService,
|
private readonly prisma: PrismaService,
|
||||||
private readonly bullMq: BullMqService
|
private readonly bullMq: BullMqService,
|
||||||
|
private readonly wsGateway: WebSocketGateway
|
||||||
) {}
|
) {}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -56,6 +58,8 @@ export class RunnerJobsService {
|
|||||||
{ priority }
|
{ priority }
|
||||||
);
|
);
|
||||||
|
|
||||||
|
this.wsGateway.emitJobCreated(workspaceId, job);
|
||||||
|
|
||||||
return job;
|
return job;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -194,6 +198,13 @@ export class RunnerJobsService {
|
|||||||
throw new NotFoundException(`RunnerJob with ID ${id} not found after cancel`);
|
throw new NotFoundException(`RunnerJob with ID ${id} not found after cancel`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
this.wsGateway.emitJobStatusChanged(workspaceId, id, {
|
||||||
|
id,
|
||||||
|
workspaceId,
|
||||||
|
status: job.status,
|
||||||
|
previousStatus: existingJob.status,
|
||||||
|
});
|
||||||
|
|
||||||
return job;
|
return job;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -248,6 +259,8 @@ export class RunnerJobsService {
|
|||||||
{ priority: existingJob.priority }
|
{ priority: existingJob.priority }
|
||||||
);
|
);
|
||||||
|
|
||||||
|
this.wsGateway.emitJobCreated(workspaceId, newJob);
|
||||||
|
|
||||||
return newJob;
|
return newJob;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -530,6 +543,13 @@ export class RunnerJobsService {
|
|||||||
throw new NotFoundException(`RunnerJob with ID ${id} not found after update`);
|
throw new NotFoundException(`RunnerJob with ID ${id} not found after update`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
this.wsGateway.emitJobStatusChanged(workspaceId, id, {
|
||||||
|
id,
|
||||||
|
workspaceId,
|
||||||
|
status: updatedJob.status,
|
||||||
|
previousStatus: existingJob.status,
|
||||||
|
});
|
||||||
|
|
||||||
return updatedJob;
|
return updatedJob;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -606,6 +626,12 @@ export class RunnerJobsService {
|
|||||||
throw new NotFoundException(`RunnerJob with ID ${id} not found after update`);
|
throw new NotFoundException(`RunnerJob with ID ${id} not found after update`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
this.wsGateway.emitJobProgress(workspaceId, id, {
|
||||||
|
id,
|
||||||
|
workspaceId,
|
||||||
|
progressPercent: updatedJob.progressPercent,
|
||||||
|
});
|
||||||
|
|
||||||
return updatedJob;
|
return updatedJob;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -50,6 +50,8 @@ describe("TelemetryInterceptor", () => {
|
|||||||
getResponse: vi.fn().mockReturnValue({
|
getResponse: vi.fn().mockReturnValue({
|
||||||
statusCode: 200,
|
statusCode: 200,
|
||||||
setHeader: vi.fn(),
|
setHeader: vi.fn(),
|
||||||
|
headersSent: false,
|
||||||
|
writableEnded: false,
|
||||||
}),
|
}),
|
||||||
}),
|
}),
|
||||||
getClass: vi.fn().mockReturnValue({ name: "TestController" }),
|
getClass: vi.fn().mockReturnValue({ name: "TestController" }),
|
||||||
@@ -101,6 +103,35 @@ describe("TelemetryInterceptor", () => {
|
|||||||
expect(mockResponse.setHeader).toHaveBeenCalledWith("x-trace-id", "test-trace-id");
|
expect(mockResponse.setHeader).toHaveBeenCalledWith("x-trace-id", "test-trace-id");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should not set trace header when response is already committed", async () => {
|
||||||
|
const committedResponseContext = {
|
||||||
|
...mockContext,
|
||||||
|
switchToHttp: vi.fn().mockReturnValue({
|
||||||
|
getRequest: vi.fn().mockReturnValue({
|
||||||
|
method: "GET",
|
||||||
|
url: "/api/test",
|
||||||
|
path: "/api/test",
|
||||||
|
}),
|
||||||
|
getResponse: vi.fn().mockReturnValue({
|
||||||
|
statusCode: 200,
|
||||||
|
setHeader: vi.fn(),
|
||||||
|
headersSent: true,
|
||||||
|
writableEnded: true,
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
} as unknown as ExecutionContext;
|
||||||
|
|
||||||
|
mockHandler = {
|
||||||
|
handle: vi.fn().mockReturnValue(of({ data: "test" })),
|
||||||
|
} as unknown as CallHandler;
|
||||||
|
|
||||||
|
const committedResponse = committedResponseContext.switchToHttp().getResponse();
|
||||||
|
|
||||||
|
await lastValueFrom(interceptor.intercept(committedResponseContext, mockHandler));
|
||||||
|
|
||||||
|
expect(committedResponse.setHeader).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
it("should record exception on error", async () => {
|
it("should record exception on error", async () => {
|
||||||
const error = new Error("Test error");
|
const error = new Error("Test error");
|
||||||
mockHandler = {
|
mockHandler = {
|
||||||
|
|||||||
@@ -88,7 +88,7 @@ export class TelemetryInterceptor implements NestInterceptor {
|
|||||||
|
|
||||||
// Add trace context to response headers for distributed tracing
|
// Add trace context to response headers for distributed tracing
|
||||||
const spanContext = span.spanContext();
|
const spanContext = span.spanContext();
|
||||||
if (spanContext.traceId) {
|
if (spanContext.traceId && !response.headersSent && !response.writableEnded) {
|
||||||
response.setHeader("x-trace-id", spanContext.traceId);
|
response.setHeader("x-trace-id", spanContext.traceId);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
53
apps/api/src/terminal/terminal-session.dto.ts
Normal file
53
apps/api/src/terminal/terminal-session.dto.ts
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
/**
|
||||||
|
* Terminal Session DTOs
|
||||||
|
*
|
||||||
|
* Data Transfer Objects for terminal session persistence endpoints.
|
||||||
|
* Validated using class-validator decorators.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { IsString, IsOptional, MaxLength, IsEnum, IsUUID } from "class-validator";
|
||||||
|
import { TerminalSessionStatus } from "@prisma/client";
|
||||||
|
|
||||||
|
/**
 * DTO for creating a new terminal session record.
 */
export class CreateTerminalSessionDto {
  // Workspace the session belongs to; must be a valid UUID.
  @IsString()
  @IsUUID()
  workspaceId!: string;

  // Optional human-readable label, capped at 128 characters.
  // Presumably the service applies a default name when omitted — confirm.
  @IsOptional()
  @IsString()
  @MaxLength(128)
  name?: string;
}
|
||||||
|
|
||||||
|
/**
 * DTO for querying terminal sessions by workspace.
 */
export class FindTerminalSessionsByWorkspaceDto {
  // Workspace to list sessions for; must be a valid UUID.
  @IsString()
  @IsUUID()
  workspaceId!: string;
}
|
||||||
|
|
||||||
|
/**
 * Response shape for a terminal session.
 */
export class TerminalSessionResponseDto {
  id!: string;
  workspaceId!: string;
  name!: string;
  status!: TerminalSessionStatus;
  createdAt!: Date;
  // Set when the session is closed; null while still active.
  closedAt!: Date | null;
}
|
||||||
|
|
||||||
|
/**
 * DTO for filtering terminal sessions by status.
 */
export class TerminalSessionStatusFilterDto {
  // Optional: restrict results to sessions in this status.
  @IsOptional()
  @IsEnum(TerminalSessionStatus)
  status?: TerminalSessionStatus;
}
|
||||||
229
apps/api/src/terminal/terminal-session.service.spec.ts
Normal file
229
apps/api/src/terminal/terminal-session.service.spec.ts
Normal file
@@ -0,0 +1,229 @@
|
|||||||
|
/**
|
||||||
|
* TerminalSessionService Tests
|
||||||
|
*
|
||||||
|
* Unit tests for database-backed terminal session CRUD:
|
||||||
|
* create, findByWorkspace, close, and findById.
|
||||||
|
* PrismaService is mocked to isolate the service logic.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
import { NotFoundException } from "@nestjs/common";
|
||||||
|
import { TerminalSessionStatus } from "@prisma/client";
|
||||||
|
import type { TerminalSession } from "@prisma/client";
|
||||||
|
import { TerminalSessionService } from "./terminal-session.service";
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Helpers
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
function makeSession(overrides: Partial<TerminalSession> = {}): TerminalSession {
|
||||||
|
return {
|
||||||
|
id: "session-uuid-1",
|
||||||
|
workspaceId: "workspace-uuid-1",
|
||||||
|
name: "Terminal",
|
||||||
|
status: TerminalSessionStatus.ACTIVE,
|
||||||
|
createdAt: new Date("2026-02-25T00:00:00Z"),
|
||||||
|
closedAt: null,
|
||||||
|
...overrides,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Mock PrismaService
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
function makeMockPrisma() {
|
||||||
|
return {
|
||||||
|
terminalSession: {
|
||||||
|
create: vi.fn(),
|
||||||
|
findMany: vi.fn(),
|
||||||
|
findUnique: vi.fn(),
|
||||||
|
update: vi.fn(),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Tests
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("TerminalSessionService", () => {
  let service: TerminalSessionService;
  // Loosely typed so the vi.fn() mock helpers are reachable in assertions.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  let mockPrisma: any;

  beforeEach(() => {
    // Fresh mocks and a fresh service instance for every test.
    vi.clearAllMocks();
    mockPrisma = makeMockPrisma();
    service = new TerminalSessionService(mockPrisma);
  });

  // ==========================================
  // create
  // ==========================================
  describe("create", () => {
    it("should call prisma.terminalSession.create with workspaceId only when no name provided", async () => {
      const session = makeSession();
      mockPrisma.terminalSession.create.mockResolvedValueOnce(session);

      const result = await service.create("workspace-uuid-1");

      // `name` must be omitted entirely so the schema-level default can apply.
      expect(mockPrisma.terminalSession.create).toHaveBeenCalledWith({
        data: { workspaceId: "workspace-uuid-1" },
      });
      expect(result).toEqual(session);
    });

    it("should include name in create data when name is provided", async () => {
      const session = makeSession({ name: "My Terminal" });
      mockPrisma.terminalSession.create.mockResolvedValueOnce(session);

      const result = await service.create("workspace-uuid-1", "My Terminal");

      expect(mockPrisma.terminalSession.create).toHaveBeenCalledWith({
        data: { workspaceId: "workspace-uuid-1", name: "My Terminal" },
      });
      expect(result).toEqual(session);
    });

    it("should return the created session", async () => {
      const session = makeSession();
      mockPrisma.terminalSession.create.mockResolvedValueOnce(session);

      const result = await service.create("workspace-uuid-1");

      expect(result.id).toBe("session-uuid-1");
      expect(result.status).toBe(TerminalSessionStatus.ACTIVE);
    });
  });

  // ==========================================
  // findByWorkspace
  // ==========================================
  describe("findByWorkspace", () => {
    it("should query for ACTIVE sessions in the given workspace, ordered by createdAt desc", async () => {
      const sessions = [makeSession(), makeSession({ id: "session-uuid-2" })];
      mockPrisma.terminalSession.findMany.mockResolvedValueOnce(sessions);

      const result = await service.findByWorkspace("workspace-uuid-1");

      expect(mockPrisma.terminalSession.findMany).toHaveBeenCalledWith({
        where: {
          workspaceId: "workspace-uuid-1",
          status: TerminalSessionStatus.ACTIVE,
        },
        orderBy: { createdAt: "desc" },
      });
      expect(result).toHaveLength(2);
    });

    it("should return an empty array when no active sessions exist", async () => {
      mockPrisma.terminalSession.findMany.mockResolvedValueOnce([]);

      const result = await service.findByWorkspace("workspace-uuid-empty");

      expect(result).toEqual([]);
    });

    it("should not include CLOSED sessions", async () => {
      // The where clause enforces ACTIVE status — verify it is present
      mockPrisma.terminalSession.findMany.mockResolvedValueOnce([]);

      await service.findByWorkspace("workspace-uuid-1");

      // Inspect the raw call args rather than the result, since the mock
      // returns whatever it is told regardless of the filter.
      const callArgs = mockPrisma.terminalSession.findMany.mock.calls[0][0] as {
        where: { status: TerminalSessionStatus };
      };
      expect(callArgs.where.status).toBe(TerminalSessionStatus.ACTIVE);
    });
  });

  // ==========================================
  // close
  // ==========================================
  describe("close", () => {
    it("should set status to CLOSED and set closedAt when session exists", async () => {
      const existingSession = makeSession();
      const closedSession = makeSession({
        status: TerminalSessionStatus.CLOSED,
        closedAt: new Date("2026-02-25T01:00:00Z"),
      });

      mockPrisma.terminalSession.findUnique.mockResolvedValueOnce(existingSession);
      mockPrisma.terminalSession.update.mockResolvedValueOnce(closedSession);

      const result = await service.close("session-uuid-1");

      // Existence is checked before any update is attempted.
      expect(mockPrisma.terminalSession.findUnique).toHaveBeenCalledWith({
        where: { id: "session-uuid-1" },
      });
      expect(mockPrisma.terminalSession.update).toHaveBeenCalledWith({
        where: { id: "session-uuid-1" },
        data: {
          status: TerminalSessionStatus.CLOSED,
          closedAt: expect.any(Date),
        },
      });
      expect(result.status).toBe(TerminalSessionStatus.CLOSED);
    });

    it("should throw NotFoundException when session does not exist", async () => {
      mockPrisma.terminalSession.findUnique.mockResolvedValueOnce(null);

      await expect(service.close("nonexistent-id")).rejects.toThrow(NotFoundException);
      // No update may be issued for a missing session.
      expect(mockPrisma.terminalSession.update).not.toHaveBeenCalled();
    });

    it("should include a non-null closedAt timestamp on close", async () => {
      const existingSession = makeSession();
      const closedSession = makeSession({
        status: TerminalSessionStatus.CLOSED,
        closedAt: new Date(),
      });

      mockPrisma.terminalSession.findUnique.mockResolvedValueOnce(existingSession);
      mockPrisma.terminalSession.update.mockResolvedValueOnce(closedSession);

      const result = await service.close("session-uuid-1");

      expect(result.closedAt).not.toBeNull();
    });
  });

  // ==========================================
  // findById
  // ==========================================
  describe("findById", () => {
    it("should return the session when it exists", async () => {
      const session = makeSession();
      mockPrisma.terminalSession.findUnique.mockResolvedValueOnce(session);

      const result = await service.findById("session-uuid-1");

      expect(mockPrisma.terminalSession.findUnique).toHaveBeenCalledWith({
        where: { id: "session-uuid-1" },
      });
      expect(result).toEqual(session);
    });

    it("should return null when session does not exist", async () => {
      mockPrisma.terminalSession.findUnique.mockResolvedValueOnce(null);

      const result = await service.findById("no-such-id");

      expect(result).toBeNull();
    });

    it("should find CLOSED sessions as well as ACTIVE ones", async () => {
      // findById does not filter on status, unlike findByWorkspace.
      const closedSession = makeSession({
        status: TerminalSessionStatus.CLOSED,
        closedAt: new Date(),
      });
      mockPrisma.terminalSession.findUnique.mockResolvedValueOnce(closedSession);

      const result = await service.findById("session-uuid-1");

      expect(result?.status).toBe(TerminalSessionStatus.CLOSED);
    });
  });
});
|
||||||
96
apps/api/src/terminal/terminal-session.service.ts
Normal file
96
apps/api/src/terminal/terminal-session.service.ts
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
/**
|
||||||
|
* TerminalSessionService
|
||||||
|
*
|
||||||
|
* Manages database persistence for terminal sessions.
|
||||||
|
* Provides CRUD operations on the TerminalSession model,
|
||||||
|
* enabling session tracking, recovery, and workspace-level listing.
|
||||||
|
*
|
||||||
|
* Session lifecycle:
|
||||||
|
* - create: record a new terminal session with ACTIVE status
|
||||||
|
* - findByWorkspace: return all ACTIVE sessions for a workspace
|
||||||
|
* - close: mark a session as CLOSED, set closedAt timestamp
|
||||||
|
* - findById: retrieve a single session by ID
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Injectable, NotFoundException, Logger } from "@nestjs/common";
|
||||||
|
import { TerminalSessionStatus } from "@prisma/client";
|
||||||
|
import type { TerminalSession } from "@prisma/client";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class TerminalSessionService {
|
||||||
|
private readonly logger = new Logger(TerminalSessionService.name);
|
||||||
|
|
||||||
|
constructor(private readonly prisma: PrismaService) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new terminal session record in the database.
|
||||||
|
*
|
||||||
|
* @param workspaceId - The workspace this session belongs to
|
||||||
|
* @param name - Optional display name for the session (defaults to "Terminal")
|
||||||
|
* @returns The created TerminalSession record
|
||||||
|
*/
|
||||||
|
async create(workspaceId: string, name?: string): Promise<TerminalSession> {
|
||||||
|
this.logger.log(
|
||||||
|
`Creating terminal session for workspace ${workspaceId}${name !== undefined ? ` (name: ${name})` : ""}`
|
||||||
|
);
|
||||||
|
|
||||||
|
const data: { workspaceId: string; name?: string } = { workspaceId };
|
||||||
|
if (name !== undefined) {
|
||||||
|
data.name = name;
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.prisma.terminalSession.create({ data });
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find all ACTIVE terminal sessions for a workspace.
|
||||||
|
*
|
||||||
|
* @param workspaceId - The workspace to query
|
||||||
|
* @returns Array of active TerminalSession records, ordered by creation time (newest first)
|
||||||
|
*/
|
||||||
|
async findByWorkspace(workspaceId: string): Promise<TerminalSession[]> {
|
||||||
|
return this.prisma.terminalSession.findMany({
|
||||||
|
where: {
|
||||||
|
workspaceId,
|
||||||
|
status: TerminalSessionStatus.ACTIVE,
|
||||||
|
},
|
||||||
|
orderBy: { createdAt: "desc" },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Close a terminal session by setting its status to CLOSED and recording closedAt.
|
||||||
|
*
|
||||||
|
* @param id - The session ID to close
|
||||||
|
* @returns The updated TerminalSession record
|
||||||
|
* @throws NotFoundException if the session does not exist
|
||||||
|
*/
|
||||||
|
async close(id: string): Promise<TerminalSession> {
|
||||||
|
const existing = await this.prisma.terminalSession.findUnique({ where: { id } });
|
||||||
|
|
||||||
|
if (!existing) {
|
||||||
|
throw new NotFoundException(`Terminal session ${id} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.log(`Closing terminal session ${id} (workspace: ${existing.workspaceId})`);
|
||||||
|
|
||||||
|
return this.prisma.terminalSession.update({
|
||||||
|
where: { id },
|
||||||
|
data: {
|
||||||
|
status: TerminalSessionStatus.CLOSED,
|
||||||
|
closedAt: new Date(),
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find a terminal session by ID.
|
||||||
|
*
|
||||||
|
* @param id - The session ID to retrieve
|
||||||
|
* @returns The TerminalSession record, or null if not found
|
||||||
|
*/
|
||||||
|
async findById(id: string): Promise<TerminalSession | null> {
|
||||||
|
return this.prisma.terminalSession.findUnique({ where: { id } });
|
||||||
|
}
|
||||||
|
}
|
||||||
89
apps/api/src/terminal/terminal.dto.ts
Normal file
89
apps/api/src/terminal/terminal.dto.ts
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
/**
|
||||||
|
* Terminal DTOs
|
||||||
|
*
|
||||||
|
* Data Transfer Objects for terminal WebSocket events.
|
||||||
|
* Validated using class-validator decorators.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import {
|
||||||
|
IsString,
|
||||||
|
IsOptional,
|
||||||
|
IsNumber,
|
||||||
|
IsInt,
|
||||||
|
Min,
|
||||||
|
Max,
|
||||||
|
MinLength,
|
||||||
|
MaxLength,
|
||||||
|
} from "class-validator";
|
||||||
|
|
||||||
|
/**
 * DTO for creating a new terminal PTY session.
 * All fields are optional.
 */
export class CreateTerminalDto {
  // Optional display name for the session.
  @IsOptional()
  @IsString()
  @MaxLength(128)
  name?: string;

  // Initial terminal width in columns (1–500).
  @IsOptional()
  @IsInt()
  @Min(1)
  @Max(500)
  cols?: number;

  // Initial terminal height in rows (1–200).
  @IsOptional()
  @IsInt()
  @Min(1)
  @Max(200)
  rows?: number;

  // Working directory for the new PTY.
  @IsOptional()
  @IsString()
  @MaxLength(4096)
  cwd?: string;
}
|
||||||
|
|
||||||
|
/**
 * DTO for sending input data to a terminal PTY session.
 */
export class TerminalInputDto {
  // ID of the target PTY session (1–64 chars).
  @IsString()
  @MinLength(1)
  @MaxLength(64)
  sessionId!: string;

  // Keystrokes / raw text to write to the PTY.
  // NOTE(review): no MaxLength bound here — confirm that unbounded
  // payloads (e.g. large pastes) are intended to pass validation.
  @IsString()
  data!: string;
}
|
||||||
|
|
||||||
|
/**
 * DTO for resizing a terminal PTY session.
 */
export class TerminalResizeDto {
  // ID of the PTY session to resize (1–64 chars).
  @IsString()
  @MinLength(1)
  @MaxLength(64)
  sessionId!: string;

  // New width in columns (1–500).
  // NOTE(review): @IsNumber is redundant next to @IsInt (IsInt already
  // rejects non-numeric values); CreateTerminalDto uses @IsInt alone.
  @IsNumber()
  @IsInt()
  @Min(1)
  @Max(500)
  cols!: number;

  // New height in rows (1–200).
  @IsNumber()
  @IsInt()
  @Min(1)
  @Max(200)
  rows!: number;
}
|
||||||
|
|
||||||
|
/**
 * DTO for closing a terminal PTY session.
 */
export class CloseTerminalDto {
  // ID of the PTY session to close (1–64 chars).
  @IsString()
  @MinLength(1)
  @MaxLength(64)
  sessionId!: string;
}
|
||||||
501
apps/api/src/terminal/terminal.gateway.spec.ts
Normal file
501
apps/api/src/terminal/terminal.gateway.spec.ts
Normal file
@@ -0,0 +1,501 @@
|
|||||||
|
/**
|
||||||
|
* TerminalGateway Tests
|
||||||
|
*
|
||||||
|
* Unit tests for WebSocket terminal gateway:
|
||||||
|
* - Authentication on connection
|
||||||
|
* - terminal:create event handling
|
||||||
|
* - terminal:input event handling
|
||||||
|
* - terminal:resize event handling
|
||||||
|
* - terminal:close event handling
|
||||||
|
* - disconnect cleanup
|
||||||
|
* - Error paths
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, expect, beforeEach, vi, afterEach } from "vitest";
|
||||||
|
import type { Socket } from "socket.io";
|
||||||
|
import { TerminalGateway } from "./terminal.gateway";
|
||||||
|
import { TerminalService } from "./terminal.service";
|
||||||
|
import { AuthService } from "../auth/auth.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Mocks
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
// Mock node-pty globally so TerminalService doesn't fail to import.
// The stub returns an inert PTY handle: all methods are spies and pid is fixed.
vi.mock("node-pty", () => ({
  spawn: vi.fn(() => ({
    onData: vi.fn(),
    onExit: vi.fn(),
    write: vi.fn(),
    resize: vi.fn(),
    kill: vi.fn(),
    pid: 1000,
  })),
}));
|
||||||
|
|
||||||
|
/**
 * Socket.IO socket augmented with the auth context the gateway stores
 * on `socket.data` after a successful connection handshake.
 */
interface AuthenticatedSocket extends Socket {
  data: {
    // Set once the session token has been verified.
    userId?: string;
    // Set once workspace membership has been confirmed.
    workspaceId?: string;
  };
}
|
||||||
|
|
||||||
|
function createMockSocket(id = "test-socket-id"): AuthenticatedSocket {
|
||||||
|
return {
|
||||||
|
id,
|
||||||
|
emit: vi.fn(),
|
||||||
|
join: vi.fn(),
|
||||||
|
leave: vi.fn(),
|
||||||
|
disconnect: vi.fn(),
|
||||||
|
data: {},
|
||||||
|
handshake: {
|
||||||
|
auth: { token: "valid-token" },
|
||||||
|
query: {},
|
||||||
|
headers: {},
|
||||||
|
},
|
||||||
|
} as unknown as AuthenticatedSocket;
|
||||||
|
}
|
||||||
|
|
||||||
|
function createMockAuthService() {
|
||||||
|
return {
|
||||||
|
verifySession: vi.fn().mockResolvedValue({
|
||||||
|
user: { id: "user-123" },
|
||||||
|
session: { id: "session-123" },
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function createMockPrismaService() {
|
||||||
|
return {
|
||||||
|
workspaceMember: {
|
||||||
|
findFirst: vi.fn().mockResolvedValue({
|
||||||
|
userId: "user-123",
|
||||||
|
workspaceId: "workspace-456",
|
||||||
|
role: "MEMBER",
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function createMockTerminalService() {
|
||||||
|
return {
|
||||||
|
createSession: vi.fn().mockReturnValue({
|
||||||
|
sessionId: "session-uuid-1",
|
||||||
|
name: undefined,
|
||||||
|
cols: 80,
|
||||||
|
rows: 24,
|
||||||
|
}),
|
||||||
|
writeToSession: vi.fn(),
|
||||||
|
resizeSession: vi.fn(),
|
||||||
|
closeSession: vi.fn().mockReturnValue(true),
|
||||||
|
closeWorkspaceSessions: vi.fn(),
|
||||||
|
sessionBelongsToWorkspace: vi.fn().mockReturnValue(true),
|
||||||
|
getWorkspaceSessionCount: vi.fn().mockReturnValue(0),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Tests
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("TerminalGateway", () => {
|
||||||
|
let gateway: TerminalGateway;
|
||||||
|
let mockAuthService: ReturnType<typeof createMockAuthService>;
|
||||||
|
let mockPrismaService: ReturnType<typeof createMockPrismaService>;
|
||||||
|
let mockTerminalService: ReturnType<typeof createMockTerminalService>;
|
||||||
|
let mockClient: AuthenticatedSocket;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
mockAuthService = createMockAuthService();
|
||||||
|
mockPrismaService = createMockPrismaService();
|
||||||
|
mockTerminalService = createMockTerminalService();
|
||||||
|
mockClient = createMockSocket();
|
||||||
|
|
||||||
|
gateway = new TerminalGateway(
|
||||||
|
mockAuthService as unknown as AuthService,
|
||||||
|
mockPrismaService as unknown as PrismaService,
|
||||||
|
mockTerminalService as unknown as TerminalService
|
||||||
|
);
|
||||||
|
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// handleConnection (authentication)
|
||||||
|
// ==========================================
|
||||||
|
describe("handleConnection", () => {
|
||||||
|
it("should authenticate client and join workspace room on valid token", async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({
|
||||||
|
user: { id: "user-123" },
|
||||||
|
});
|
||||||
|
mockPrismaService.workspaceMember.findFirst.mockResolvedValue({
|
||||||
|
userId: "user-123",
|
||||||
|
workspaceId: "workspace-456",
|
||||||
|
role: "MEMBER",
|
||||||
|
});
|
||||||
|
|
||||||
|
await gateway.handleConnection(mockClient);
|
||||||
|
|
||||||
|
expect(mockAuthService.verifySession).toHaveBeenCalledWith("valid-token");
|
||||||
|
expect(mockClient.data.userId).toBe("user-123");
|
||||||
|
expect(mockClient.data.workspaceId).toBe("workspace-456");
|
||||||
|
expect(mockClient.join).toHaveBeenCalledWith("terminal:workspace-456");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should disconnect and emit error if no token provided", async () => {
|
||||||
|
const clientNoToken = createMockSocket("no-token");
|
||||||
|
clientNoToken.handshake = {
|
||||||
|
auth: {},
|
||||||
|
query: {},
|
||||||
|
headers: {},
|
||||||
|
} as typeof clientNoToken.handshake;
|
||||||
|
|
||||||
|
await gateway.handleConnection(clientNoToken);
|
||||||
|
|
||||||
|
expect(clientNoToken.disconnect).toHaveBeenCalled();
|
||||||
|
expect(clientNoToken.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.stringContaining("no token") })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should disconnect and emit error if token is invalid", async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await gateway.handleConnection(mockClient);
|
||||||
|
|
||||||
|
expect(mockClient.disconnect).toHaveBeenCalled();
|
||||||
|
expect(mockClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.stringContaining("invalid") })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should disconnect and emit error if no workspace access", async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({ user: { id: "user-123" } });
|
||||||
|
mockPrismaService.workspaceMember.findFirst.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await gateway.handleConnection(mockClient);
|
||||||
|
|
||||||
|
expect(mockClient.disconnect).toHaveBeenCalled();
|
||||||
|
expect(mockClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.stringContaining("workspace") })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should disconnect and emit error if auth throws", async () => {
|
||||||
|
mockAuthService.verifySession.mockRejectedValue(new Error("Auth service down"));
|
||||||
|
|
||||||
|
await gateway.handleConnection(mockClient);
|
||||||
|
|
||||||
|
expect(mockClient.disconnect).toHaveBeenCalled();
|
||||||
|
expect(mockClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.any(String) })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should extract token from handshake.query as fallback", async () => {
|
||||||
|
const clientQueryToken = createMockSocket("query-token-client");
|
||||||
|
clientQueryToken.handshake = {
|
||||||
|
auth: {},
|
||||||
|
query: { token: "query-token" },
|
||||||
|
headers: {},
|
||||||
|
} as typeof clientQueryToken.handshake;
|
||||||
|
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({ user: { id: "user-123" } });
|
||||||
|
mockPrismaService.workspaceMember.findFirst.mockResolvedValue({
|
||||||
|
userId: "user-123",
|
||||||
|
workspaceId: "workspace-456",
|
||||||
|
role: "MEMBER",
|
||||||
|
});
|
||||||
|
|
||||||
|
await gateway.handleConnection(clientQueryToken);
|
||||||
|
|
||||||
|
expect(mockAuthService.verifySession).toHaveBeenCalledWith("query-token");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should extract token from Authorization header as last fallback", async () => {
|
||||||
|
const clientHeaderToken = createMockSocket("header-token-client");
|
||||||
|
clientHeaderToken.handshake = {
|
||||||
|
auth: {},
|
||||||
|
query: {},
|
||||||
|
headers: { authorization: "Bearer header-token" },
|
||||||
|
} as typeof clientHeaderToken.handshake;
|
||||||
|
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({ user: { id: "user-123" } });
|
||||||
|
mockPrismaService.workspaceMember.findFirst.mockResolvedValue({
|
||||||
|
userId: "user-123",
|
||||||
|
workspaceId: "workspace-456",
|
||||||
|
role: "MEMBER",
|
||||||
|
});
|
||||||
|
|
||||||
|
await gateway.handleConnection(clientHeaderToken);
|
||||||
|
|
||||||
|
expect(mockAuthService.verifySession).toHaveBeenCalledWith("header-token");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// handleDisconnect
|
||||||
|
// ==========================================
|
||||||
|
describe("handleDisconnect", () => {
|
||||||
|
it("should close all workspace sessions on disconnect", async () => {
|
||||||
|
await gateway.handleConnection(mockClient);
|
||||||
|
vi.clearAllMocks();
|
||||||
|
|
||||||
|
gateway.handleDisconnect(mockClient);
|
||||||
|
|
||||||
|
expect(mockTerminalService.closeWorkspaceSessions).toHaveBeenCalledWith("workspace-456");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not throw for unauthenticated client disconnect", () => {
|
||||||
|
const unauthClient = createMockSocket("unauth-disconnect");
|
||||||
|
|
||||||
|
expect(() => gateway.handleDisconnect(unauthClient)).not.toThrow();
|
||||||
|
expect(mockTerminalService.closeWorkspaceSessions).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// handleCreate (terminal:create)
|
||||||
|
// ==========================================
|
||||||
|
describe("handleCreate", () => {
|
||||||
|
beforeEach(async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({ user: { id: "user-123" } });
|
||||||
|
mockPrismaService.workspaceMember.findFirst.mockResolvedValue({
|
||||||
|
userId: "user-123",
|
||||||
|
workspaceId: "workspace-456",
|
||||||
|
role: "MEMBER",
|
||||||
|
});
|
||||||
|
await gateway.handleConnection(mockClient);
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should create a PTY session and emit terminal:created", async () => {
|
||||||
|
mockTerminalService.createSession.mockReturnValue({
|
||||||
|
sessionId: "new-session-id",
|
||||||
|
cols: 80,
|
||||||
|
rows: 24,
|
||||||
|
});
|
||||||
|
|
||||||
|
await gateway.handleCreate(mockClient, {});
|
||||||
|
|
||||||
|
expect(mockTerminalService.createSession).toHaveBeenCalled();
|
||||||
|
expect(mockClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:created",
|
||||||
|
expect.objectContaining({ sessionId: "new-session-id" })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should pass cols, rows, cwd, name to service", async () => {
|
||||||
|
await gateway.handleCreate(mockClient, {
|
||||||
|
cols: 132,
|
||||||
|
rows: 50,
|
||||||
|
cwd: "/home/user",
|
||||||
|
name: "my-shell",
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(mockTerminalService.createSession).toHaveBeenCalledWith(
|
||||||
|
expect.anything(),
|
||||||
|
expect.objectContaining({ cols: 132, rows: 50, cwd: "/home/user", name: "my-shell" })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should emit terminal:error if not authenticated", async () => {
|
||||||
|
const unauthClient = createMockSocket("unauth");
|
||||||
|
|
||||||
|
await gateway.handleCreate(unauthClient, {});
|
||||||
|
|
||||||
|
expect(unauthClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.stringContaining("authenticated") })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should emit terminal:error if service throws (session limit)", async () => {
|
||||||
|
mockTerminalService.createSession.mockImplementation(() => {
|
||||||
|
throw new Error("Workspace has reached the maximum of 10 concurrent terminal sessions");
|
||||||
|
});
|
||||||
|
|
||||||
|
await gateway.handleCreate(mockClient, {});
|
||||||
|
|
||||||
|
expect(mockClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.stringContaining("maximum") })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should emit terminal:error for invalid payload (negative cols)", async () => {
|
||||||
|
await gateway.handleCreate(mockClient, { cols: -1 });
|
||||||
|
|
||||||
|
expect(mockClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.stringContaining("Invalid payload") })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// handleInput (terminal:input)
|
||||||
|
// ==========================================
|
||||||
|
describe("handleInput", () => {
|
||||||
|
beforeEach(async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({ user: { id: "user-123" } });
|
||||||
|
mockPrismaService.workspaceMember.findFirst.mockResolvedValue({
|
||||||
|
userId: "user-123",
|
||||||
|
workspaceId: "workspace-456",
|
||||||
|
role: "MEMBER",
|
||||||
|
});
|
||||||
|
await gateway.handleConnection(mockClient);
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should write data to the PTY session", async () => {
|
||||||
|
mockTerminalService.sessionBelongsToWorkspace.mockReturnValue(true);
|
||||||
|
|
||||||
|
await gateway.handleInput(mockClient, { sessionId: "sess-1", data: "ls\n" });
|
||||||
|
|
||||||
|
expect(mockTerminalService.writeToSession).toHaveBeenCalledWith("sess-1", "ls\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should emit terminal:error if session does not belong to workspace", async () => {
|
||||||
|
mockTerminalService.sessionBelongsToWorkspace.mockReturnValue(false);
|
||||||
|
|
||||||
|
await gateway.handleInput(mockClient, { sessionId: "alien-sess", data: "data" });
|
||||||
|
|
||||||
|
expect(mockClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.stringContaining("not found") })
|
||||||
|
);
|
||||||
|
expect(mockTerminalService.writeToSession).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should emit terminal:error if not authenticated", async () => {
|
||||||
|
const unauthClient = createMockSocket("unauth");
|
||||||
|
|
||||||
|
await gateway.handleInput(unauthClient, { sessionId: "sess-1", data: "x" });
|
||||||
|
|
||||||
|
expect(unauthClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.stringContaining("authenticated") })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should emit terminal:error for invalid payload (missing sessionId)", async () => {
|
||||||
|
await gateway.handleInput(mockClient, { data: "some input" });
|
||||||
|
|
||||||
|
expect(mockClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.stringContaining("Invalid payload") })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// handleResize (terminal:resize)
|
||||||
|
// ==========================================
|
||||||
|
describe("handleResize", () => {
|
||||||
|
beforeEach(async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({ user: { id: "user-123" } });
|
||||||
|
mockPrismaService.workspaceMember.findFirst.mockResolvedValue({
|
||||||
|
userId: "user-123",
|
||||||
|
workspaceId: "workspace-456",
|
||||||
|
role: "MEMBER",
|
||||||
|
});
|
||||||
|
await gateway.handleConnection(mockClient);
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should resize the PTY session", async () => {
|
||||||
|
mockTerminalService.sessionBelongsToWorkspace.mockReturnValue(true);
|
||||||
|
|
||||||
|
await gateway.handleResize(mockClient, { sessionId: "sess-1", cols: 120, rows: 40 });
|
||||||
|
|
||||||
|
expect(mockTerminalService.resizeSession).toHaveBeenCalledWith("sess-1", 120, 40);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should emit terminal:error if session does not belong to workspace", async () => {
|
||||||
|
mockTerminalService.sessionBelongsToWorkspace.mockReturnValue(false);
|
||||||
|
|
||||||
|
await gateway.handleResize(mockClient, { sessionId: "alien-sess", cols: 80, rows: 24 });
|
||||||
|
|
||||||
|
expect(mockClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.stringContaining("not found") })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should emit terminal:error for invalid payload (cols too large)", async () => {
|
||||||
|
await gateway.handleResize(mockClient, { sessionId: "sess-1", cols: 9999, rows: 24 });
|
||||||
|
|
||||||
|
expect(mockClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.stringContaining("Invalid payload") })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// handleClose (terminal:close)
|
||||||
|
// ==========================================
|
||||||
|
describe("handleClose", () => {
|
||||||
|
beforeEach(async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({ user: { id: "user-123" } });
|
||||||
|
mockPrismaService.workspaceMember.findFirst.mockResolvedValue({
|
||||||
|
userId: "user-123",
|
||||||
|
workspaceId: "workspace-456",
|
||||||
|
role: "MEMBER",
|
||||||
|
});
|
||||||
|
await gateway.handleConnection(mockClient);
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should close an existing PTY session", async () => {
|
||||||
|
mockTerminalService.sessionBelongsToWorkspace.mockReturnValue(true);
|
||||||
|
mockTerminalService.closeSession.mockReturnValue(true);
|
||||||
|
|
||||||
|
await gateway.handleClose(mockClient, { sessionId: "sess-1" });
|
||||||
|
|
||||||
|
expect(mockTerminalService.closeSession).toHaveBeenCalledWith("sess-1");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should emit terminal:error if session does not belong to workspace", async () => {
|
||||||
|
mockTerminalService.sessionBelongsToWorkspace.mockReturnValue(false);
|
||||||
|
|
||||||
|
await gateway.handleClose(mockClient, { sessionId: "alien-sess" });
|
||||||
|
|
||||||
|
expect(mockClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.stringContaining("not found") })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should emit terminal:error if closeSession returns false (session gone)", async () => {
|
||||||
|
mockTerminalService.sessionBelongsToWorkspace.mockReturnValue(true);
|
||||||
|
mockTerminalService.closeSession.mockReturnValue(false);
|
||||||
|
|
||||||
|
await gateway.handleClose(mockClient, { sessionId: "gone-sess" });
|
||||||
|
|
||||||
|
expect(mockClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.stringContaining("not found") })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should emit terminal:error for invalid payload (missing sessionId)", async () => {
|
||||||
|
await gateway.handleClose(mockClient, {});
|
||||||
|
|
||||||
|
expect(mockClient.emit).toHaveBeenCalledWith(
|
||||||
|
"terminal:error",
|
||||||
|
expect.objectContaining({ message: expect.stringContaining("Invalid payload") })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
423
apps/api/src/terminal/terminal.gateway.ts
Normal file
423
apps/api/src/terminal/terminal.gateway.ts
Normal file
@@ -0,0 +1,423 @@
|
|||||||
|
/**
|
||||||
|
* TerminalGateway
|
||||||
|
*
|
||||||
|
* WebSocket gateway for real-time PTY terminal sessions.
|
||||||
|
* Uses the `/terminal` namespace to keep terminal traffic separate
|
||||||
|
* from the main WebSocket gateway.
|
||||||
|
*
|
||||||
|
* Protocol:
|
||||||
|
* 1. Client connects with auth token in handshake
|
||||||
|
* 2. Client emits `terminal:create` to spawn a new PTY session
|
||||||
|
* 3. Server emits `terminal:created` with { sessionId }
|
||||||
|
* 4. Client emits `terminal:input` with { sessionId, data } to send keystrokes
|
||||||
|
* 5. Server emits `terminal:output` with { sessionId, data } for stdout/stderr
|
||||||
|
* 6. Client emits `terminal:resize` with { sessionId, cols, rows } on window resize
|
||||||
|
* 7. Client emits `terminal:close` with { sessionId } to terminate the PTY
|
||||||
|
* 8. Server emits `terminal:exit` with { sessionId, exitCode, signal } on PTY exit
|
||||||
|
*
|
||||||
|
* Authentication:
|
||||||
|
* - Same pattern as websocket.gateway.ts and speech.gateway.ts
|
||||||
|
* - Token extracted from handshake.auth.token / query.token / Authorization header
|
||||||
|
*
|
||||||
|
* Workspace isolation:
|
||||||
|
* - Clients join room `terminal:{workspaceId}` on connect
|
||||||
|
* - Sessions are scoped to workspace; cross-workspace access is denied
|
||||||
|
*/
|
||||||
|
|
||||||
|
import {
|
||||||
|
WebSocketGateway as WSGateway,
|
||||||
|
WebSocketServer,
|
||||||
|
SubscribeMessage,
|
||||||
|
OnGatewayConnection,
|
||||||
|
OnGatewayDisconnect,
|
||||||
|
} from "@nestjs/websockets";
|
||||||
|
import { Logger } from "@nestjs/common";
|
||||||
|
import { Server, Socket } from "socket.io";
|
||||||
|
import { AuthService } from "../auth/auth.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
import { TerminalService } from "./terminal.service";
|
||||||
|
import {
|
||||||
|
CreateTerminalDto,
|
||||||
|
TerminalInputDto,
|
||||||
|
TerminalResizeDto,
|
||||||
|
CloseTerminalDto,
|
||||||
|
} from "./terminal.dto";
|
||||||
|
import { validate } from "class-validator";
|
||||||
|
import { plainToInstance } from "class-transformer";
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Types
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/**
 * Socket augmented with per-connection auth state. Both fields are
 * populated by handleConnection after successful token verification and
 * workspace-membership lookup; they remain undefined on unauthenticated
 * sockets (the timeout watchdog and the per-event guards rely on that).
 */
interface AuthenticatedSocket extends Socket {
  data: {
    // Set once the handshake token has been verified.
    userId?: string;
    // Workspace resolved from the user's first membership (findFirst).
    workspaceId?: string;
  };
}
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Gateway
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
@WSGateway({
|
||||||
|
namespace: "/terminal",
|
||||||
|
cors: {
|
||||||
|
origin: process.env.WEB_URL ?? "http://localhost:3000",
|
||||||
|
credentials: true,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
export class TerminalGateway implements OnGatewayConnection, OnGatewayDisconnect {
|
||||||
|
@WebSocketServer()
|
||||||
|
server!: Server;
|
||||||
|
|
||||||
|
private readonly logger = new Logger(TerminalGateway.name);
|
||||||
|
private readonly CONNECTION_TIMEOUT_MS = 5000;
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
private readonly authService: AuthService,
|
||||||
|
private readonly prisma: PrismaService,
|
||||||
|
private readonly terminalService: TerminalService
|
||||||
|
) {}
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Connection lifecycle
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Authenticate client on connection using handshake token.
|
||||||
|
* Validates workspace membership and joins the workspace-scoped room.
|
||||||
|
*/
|
||||||
|
async handleConnection(client: Socket): Promise<void> {
|
||||||
|
const authenticatedClient = client as AuthenticatedSocket;
|
||||||
|
|
||||||
|
const timeoutId = setTimeout(() => {
|
||||||
|
if (!authenticatedClient.data.userId) {
|
||||||
|
this.logger.warn(
|
||||||
|
`Terminal client ${authenticatedClient.id} timed out during authentication`
|
||||||
|
);
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: "Authentication timed out.",
|
||||||
|
});
|
||||||
|
authenticatedClient.disconnect();
|
||||||
|
}
|
||||||
|
}, this.CONNECTION_TIMEOUT_MS);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const token = this.extractTokenFromHandshake(authenticatedClient);
|
||||||
|
|
||||||
|
if (!token) {
|
||||||
|
this.logger.warn(`Terminal client ${authenticatedClient.id} connected without token`);
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: "Authentication failed: no token provided.",
|
||||||
|
});
|
||||||
|
authenticatedClient.disconnect();
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const sessionData = await this.authService.verifySession(token);
|
||||||
|
|
||||||
|
if (!sessionData) {
|
||||||
|
this.logger.warn(`Terminal client ${authenticatedClient.id} has invalid token`);
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: "Authentication failed: invalid or expired token.",
|
||||||
|
});
|
||||||
|
authenticatedClient.disconnect();
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const user = sessionData.user as { id: string };
|
||||||
|
const userId = user.id;
|
||||||
|
|
||||||
|
const workspaceMembership = await this.prisma.workspaceMember.findFirst({
|
||||||
|
where: { userId },
|
||||||
|
select: { workspaceId: true, userId: true, role: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!workspaceMembership) {
|
||||||
|
this.logger.warn(`Terminal user ${userId} has no workspace access`);
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: "Authentication failed: no workspace access.",
|
||||||
|
});
|
||||||
|
authenticatedClient.disconnect();
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
authenticatedClient.data.userId = userId;
|
||||||
|
authenticatedClient.data.workspaceId = workspaceMembership.workspaceId;
|
||||||
|
|
||||||
|
// Join workspace-scoped terminal room
|
||||||
|
const room = this.getWorkspaceRoom(workspaceMembership.workspaceId);
|
||||||
|
await authenticatedClient.join(room);
|
||||||
|
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
this.logger.log(
|
||||||
|
`Terminal client ${authenticatedClient.id} connected (user: ${userId}, workspace: ${workspaceMembership.workspaceId})`
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
this.logger.error(
|
||||||
|
`Authentication failed for terminal client ${authenticatedClient.id}:`,
|
||||||
|
error instanceof Error ? error.message : "Unknown error"
|
||||||
|
);
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: "Authentication failed: an unexpected error occurred.",
|
||||||
|
});
|
||||||
|
authenticatedClient.disconnect();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean up all PTY sessions for this client's workspace on disconnect.
|
||||||
|
*/
|
||||||
|
handleDisconnect(client: Socket): void {
|
||||||
|
const authenticatedClient = client as AuthenticatedSocket;
|
||||||
|
const { workspaceId, userId } = authenticatedClient.data;
|
||||||
|
|
||||||
|
if (workspaceId) {
|
||||||
|
this.terminalService.closeWorkspaceSessions(workspaceId);
|
||||||
|
|
||||||
|
const room = this.getWorkspaceRoom(workspaceId);
|
||||||
|
void authenticatedClient.leave(room);
|
||||||
|
this.logger.log(
|
||||||
|
`Terminal client ${authenticatedClient.id} disconnected (user: ${userId ?? "unknown"}, workspace: ${workspaceId})`
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
this.logger.debug(`Terminal client ${authenticatedClient.id} disconnected (unauthenticated)`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Terminal events
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Spawn a new PTY session for the connected client.
|
||||||
|
*
|
||||||
|
* Emits `terminal:created` with { sessionId, name, cols, rows } on success.
|
||||||
|
* Emits `terminal:error` on failure.
|
||||||
|
*/
|
||||||
|
@SubscribeMessage("terminal:create")
|
||||||
|
async handleCreate(client: Socket, payload: unknown): Promise<void> {
|
||||||
|
const authenticatedClient = client as AuthenticatedSocket;
|
||||||
|
const { userId, workspaceId } = authenticatedClient.data;
|
||||||
|
|
||||||
|
if (!userId || !workspaceId) {
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: "Not authenticated. Connect with a valid token.",
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate DTO
|
||||||
|
const dto = plainToInstance(CreateTerminalDto, payload ?? {});
|
||||||
|
const errors = await validate(dto);
|
||||||
|
if (errors.length > 0) {
|
||||||
|
const messages = errors.map((e) => Object.values(e.constraints ?? {}).join(", ")).join("; ");
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: `Invalid payload: ${messages}`,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = this.terminalService.createSession(authenticatedClient, {
|
||||||
|
workspaceId,
|
||||||
|
socketId: authenticatedClient.id,
|
||||||
|
...(dto.name !== undefined ? { name: dto.name } : {}),
|
||||||
|
...(dto.cols !== undefined ? { cols: dto.cols } : {}),
|
||||||
|
...(dto.rows !== undefined ? { rows: dto.rows } : {}),
|
||||||
|
...(dto.cwd !== undefined ? { cwd: dto.cwd } : {}),
|
||||||
|
});
|
||||||
|
|
||||||
|
authenticatedClient.emit("terminal:created", {
|
||||||
|
sessionId: result.sessionId,
|
||||||
|
name: result.name,
|
||||||
|
cols: result.cols,
|
||||||
|
rows: result.rows,
|
||||||
|
});
|
||||||
|
|
||||||
|
this.logger.log(
|
||||||
|
`Terminal session ${result.sessionId} created for client ${authenticatedClient.id} (workspace: ${workspaceId})`
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
|
this.logger.error(
|
||||||
|
`Failed to create terminal session for client ${authenticatedClient.id}: ${message}`
|
||||||
|
);
|
||||||
|
authenticatedClient.emit("terminal:error", { message });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Write input data to an existing PTY session.
|
||||||
|
*
|
||||||
|
* Emits `terminal:error` if the session is not found or unauthorized.
|
||||||
|
*/
|
||||||
|
@SubscribeMessage("terminal:input")
|
||||||
|
async handleInput(client: Socket, payload: unknown): Promise<void> {
|
||||||
|
const authenticatedClient = client as AuthenticatedSocket;
|
||||||
|
const { userId, workspaceId } = authenticatedClient.data;
|
||||||
|
|
||||||
|
if (!userId || !workspaceId) {
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: "Not authenticated. Connect with a valid token.",
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const dto = plainToInstance(TerminalInputDto, payload ?? {});
|
||||||
|
const errors = await validate(dto);
|
||||||
|
if (errors.length > 0) {
|
||||||
|
const messages = errors.map((e) => Object.values(e.constraints ?? {}).join(", ")).join("; ");
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: `Invalid payload: ${messages}`,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.terminalService.sessionBelongsToWorkspace(dto.sessionId, workspaceId)) {
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: `Terminal session ${dto.sessionId} not found or unauthorized.`,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
this.terminalService.writeToSession(dto.sessionId, dto.data);
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
|
this.logger.warn(`Failed to write to terminal session ${dto.sessionId}: ${message}`);
|
||||||
|
authenticatedClient.emit("terminal:error", { message });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resize an existing PTY session.
|
||||||
|
*
|
||||||
|
* Emits `terminal:error` if the session is not found or unauthorized.
|
||||||
|
*/
|
||||||
|
@SubscribeMessage("terminal:resize")
|
||||||
|
async handleResize(client: Socket, payload: unknown): Promise<void> {
|
||||||
|
const authenticatedClient = client as AuthenticatedSocket;
|
||||||
|
const { userId, workspaceId } = authenticatedClient.data;
|
||||||
|
|
||||||
|
if (!userId || !workspaceId) {
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: "Not authenticated. Connect with a valid token.",
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const dto = plainToInstance(TerminalResizeDto, payload ?? {});
|
||||||
|
const errors = await validate(dto);
|
||||||
|
if (errors.length > 0) {
|
||||||
|
const messages = errors.map((e) => Object.values(e.constraints ?? {}).join(", ")).join("; ");
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: `Invalid payload: ${messages}`,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.terminalService.sessionBelongsToWorkspace(dto.sessionId, workspaceId)) {
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: `Terminal session ${dto.sessionId} not found or unauthorized.`,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
this.terminalService.resizeSession(dto.sessionId, dto.cols, dto.rows);
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
|
this.logger.warn(`Failed to resize terminal session ${dto.sessionId}: ${message}`);
|
||||||
|
authenticatedClient.emit("terminal:error", { message });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Kill and close an existing PTY session.
|
||||||
|
*
|
||||||
|
* Emits `terminal:error` if the session is not found or unauthorized.
|
||||||
|
*/
|
||||||
|
@SubscribeMessage("terminal:close")
|
||||||
|
async handleClose(client: Socket, payload: unknown): Promise<void> {
|
||||||
|
const authenticatedClient = client as AuthenticatedSocket;
|
||||||
|
const { userId, workspaceId } = authenticatedClient.data;
|
||||||
|
|
||||||
|
if (!userId || !workspaceId) {
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: "Not authenticated. Connect with a valid token.",
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const dto = plainToInstance(CloseTerminalDto, payload ?? {});
|
||||||
|
const errors = await validate(dto);
|
||||||
|
if (errors.length > 0) {
|
||||||
|
const messages = errors.map((e) => Object.values(e.constraints ?? {}).join(", ")).join("; ");
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: `Invalid payload: ${messages}`,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.terminalService.sessionBelongsToWorkspace(dto.sessionId, workspaceId)) {
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: `Terminal session ${dto.sessionId} not found or unauthorized.`,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const closed = this.terminalService.closeSession(dto.sessionId);
|
||||||
|
if (!closed) {
|
||||||
|
authenticatedClient.emit("terminal:error", {
|
||||||
|
message: `Terminal session ${dto.sessionId} not found.`,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.log(`Terminal session ${dto.sessionId} closed by client ${authenticatedClient.id}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Private helpers
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract authentication token from Socket.IO handshake.
|
||||||
|
* Checks auth.token, query.token, and Authorization header (in that order).
|
||||||
|
*/
|
||||||
|
private extractTokenFromHandshake(client: Socket): string | undefined {
|
||||||
|
const authToken = client.handshake.auth.token as unknown;
|
||||||
|
if (typeof authToken === "string" && authToken.length > 0) {
|
||||||
|
return authToken;
|
||||||
|
}
|
||||||
|
|
||||||
|
const queryToken = client.handshake.query.token as unknown;
|
||||||
|
if (typeof queryToken === "string" && queryToken.length > 0) {
|
||||||
|
return queryToken;
|
||||||
|
}
|
||||||
|
|
||||||
|
const authHeader = client.handshake.headers.authorization as unknown;
|
||||||
|
if (typeof authHeader === "string") {
|
||||||
|
const parts = authHeader.split(" ");
|
||||||
|
const [type, token] = parts;
|
||||||
|
if (type === "Bearer" && token) {
|
||||||
|
return token;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the workspace-scoped room name for the terminal namespace.
|
||||||
|
*/
|
||||||
|
private getWorkspaceRoom(workspaceId: string): string {
|
||||||
|
return `terminal:${workspaceId}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
31
apps/api/src/terminal/terminal.module.ts
Normal file
31
apps/api/src/terminal/terminal.module.ts
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
/**
|
||||||
|
* TerminalModule
|
||||||
|
*
|
||||||
|
* NestJS module for WebSocket-based terminal sessions via node-pty.
|
||||||
|
*
|
||||||
|
* Imports:
|
||||||
|
* - AuthModule for WebSocket authentication (verifySession)
|
||||||
|
* - PrismaModule for workspace membership queries and session persistence
|
||||||
|
*
|
||||||
|
* Providers:
|
||||||
|
* - TerminalService: manages PTY session lifecycle (in-memory)
|
||||||
|
* - TerminalSessionService: persists session records to the database
|
||||||
|
* - TerminalGateway: WebSocket gateway on /terminal namespace
|
||||||
|
*
|
||||||
|
 * Exports:
 * - TerminalSessionService, so other modules can consume persisted
 *   terminal session records.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Module } from "@nestjs/common";
|
||||||
|
import { TerminalGateway } from "./terminal.gateway";
|
||||||
|
import { TerminalService } from "./terminal.service";
|
||||||
|
import { TerminalSessionService } from "./terminal-session.service";
|
||||||
|
import { AuthModule } from "../auth/auth.module";
|
||||||
|
import { PrismaModule } from "../prisma/prisma.module";
|
||||||
|
|
||||||
|
@Module({
  imports: [AuthModule, PrismaModule],
  providers: [TerminalGateway, TerminalService, TerminalSessionService],
  // TerminalSessionService is the only provider made available outside
  // this module; the gateway and PTY manager stay internal.
  exports: [TerminalSessionService],
})
export class TerminalModule {}
|
||||||
337
apps/api/src/terminal/terminal.service.spec.ts
Normal file
337
apps/api/src/terminal/terminal.service.spec.ts
Normal file
@@ -0,0 +1,337 @@
|
|||||||
|
/**
|
||||||
|
* TerminalService Tests
|
||||||
|
*
|
||||||
|
* Unit tests for PTY session management: create, write, resize, close,
|
||||||
|
* workspace cleanup, and access control.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, expect, beforeEach, vi, afterEach } from "vitest";
|
||||||
|
import type { Socket } from "socket.io";
|
||||||
|
import { TerminalService, MAX_SESSIONS_PER_WORKSPACE } from "./terminal.service";
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Mocks
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
// Mock node-pty before importing service
|
||||||
|
// Shared fake PTY handle returned by the mocked spawn(); tests inspect these
// spies to verify how TerminalService drives the pseudo-terminal.
const mockPtyProcess = {
  onData: vi.fn(),   // stdout/stderr listener registration
  onExit: vi.fn(),   // exit listener registration
  write: vi.fn(),
  resize: vi.fn(),
  kill: vi.fn(),
  pid: 12345,
};

// Replace node-pty so no real pseudo-terminal is ever spawned in tests.
// (vi.mock factories are hoisted; the referenced variable uses the `mock`
// prefix, which vitest permits inside hoisted factories.)
vi.mock("node-pty", () => ({
  spawn: vi.fn(() => mockPtyProcess),
}));
|
||||||
|
|
||||||
|
function createMockSocket(id = "socket-1"): Socket {
|
||||||
|
return {
|
||||||
|
id,
|
||||||
|
emit: vi.fn(),
|
||||||
|
join: vi.fn(),
|
||||||
|
leave: vi.fn(),
|
||||||
|
disconnect: vi.fn(),
|
||||||
|
data: {},
|
||||||
|
} as unknown as Socket;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Tests
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("TerminalService", () => {
|
||||||
|
let service: TerminalService;
|
||||||
|
let mockSocket: Socket;
|
||||||
|
|
||||||
|
// Fresh service and socket per test; the PTY listener registrations are
// reset to no-ops so individual tests can opt in to capturing the callbacks.
beforeEach(() => {
  vi.clearAllMocks();
  // Reset mock implementations
  mockPtyProcess.onData.mockImplementation((_cb: (data: string) => void) => {});
  mockPtyProcess.onExit.mockImplementation(
    (_cb: (e: { exitCode: number; signal?: number }) => void) => {}
  );
  service = new TerminalService();
  mockSocket = createMockSocket();
});

afterEach(() => {
  vi.clearAllMocks();
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// createSession
|
||||||
|
// ==========================================
|
||||||
|
describe("createSession", () => {
|
||||||
|
it("should create a PTY session and return sessionId", () => {
|
||||||
|
const result = service.createSession(mockSocket, {
|
||||||
|
workspaceId: "ws-1",
|
||||||
|
socketId: "socket-1",
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.sessionId).toBeDefined();
|
||||||
|
expect(typeof result.sessionId).toBe("string");
|
||||||
|
expect(result.cols).toBe(80);
|
||||||
|
expect(result.rows).toBe(24);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use provided cols and rows", () => {
|
||||||
|
const result = service.createSession(mockSocket, {
|
||||||
|
workspaceId: "ws-1",
|
||||||
|
socketId: "socket-1",
|
||||||
|
cols: 120,
|
||||||
|
rows: 40,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.cols).toBe(120);
|
||||||
|
expect(result.rows).toBe(40);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return the provided session name", () => {
|
||||||
|
const result = service.createSession(mockSocket, {
|
||||||
|
workspaceId: "ws-1",
|
||||||
|
socketId: "socket-1",
|
||||||
|
name: "my-terminal",
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.name).toBe("my-terminal");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should wire PTY onData to emit terminal:output", () => {
|
||||||
|
let dataCallback: ((data: string) => void) | undefined;
|
||||||
|
mockPtyProcess.onData.mockImplementation((cb: (data: string) => void) => {
|
||||||
|
dataCallback = cb;
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = service.createSession(mockSocket, {
|
||||||
|
workspaceId: "ws-1",
|
||||||
|
socketId: "socket-1",
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(dataCallback).toBeDefined();
|
||||||
|
dataCallback!("hello world");
|
||||||
|
|
||||||
|
expect(mockSocket.emit).toHaveBeenCalledWith("terminal:output", {
|
||||||
|
sessionId: result.sessionId,
|
||||||
|
data: "hello world",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should wire PTY onExit to emit terminal:exit and cleanup", () => {
|
||||||
|
let exitCallback: ((e: { exitCode: number; signal?: number }) => void) | undefined;
|
||||||
|
mockPtyProcess.onExit.mockImplementation(
|
||||||
|
(cb: (e: { exitCode: number; signal?: number }) => void) => {
|
||||||
|
exitCallback = cb;
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
const result = service.createSession(mockSocket, {
|
||||||
|
workspaceId: "ws-1",
|
||||||
|
socketId: "socket-1",
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(exitCallback).toBeDefined();
|
||||||
|
exitCallback!({ exitCode: 0 });
|
||||||
|
|
||||||
|
expect(mockSocket.emit).toHaveBeenCalledWith("terminal:exit", {
|
||||||
|
sessionId: result.sessionId,
|
||||||
|
exitCode: 0,
|
||||||
|
signal: undefined,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Session should be cleaned up
|
||||||
|
expect(service.sessionBelongsToWorkspace(result.sessionId, "ws-1")).toBe(false);
|
||||||
|
expect(service.getWorkspaceSessionCount("ws-1")).toBe(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw when workspace session limit is reached", () => {
|
||||||
|
const limit = MAX_SESSIONS_PER_WORKSPACE;
|
||||||
|
|
||||||
|
for (let i = 0; i < limit; i++) {
|
||||||
|
service.createSession(createMockSocket(`socket-${String(i)}`), {
|
||||||
|
workspaceId: "ws-limit",
|
||||||
|
socketId: `socket-${String(i)}`,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
expect(() =>
|
||||||
|
service.createSession(createMockSocket("socket-overflow"), {
|
||||||
|
workspaceId: "ws-limit",
|
||||||
|
socketId: "socket-overflow",
|
||||||
|
})
|
||||||
|
).toThrow(/maximum/i);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should allow sessions in different workspaces independently", () => {
|
||||||
|
service.createSession(mockSocket, { workspaceId: "ws-a", socketId: "s1" });
|
||||||
|
service.createSession(createMockSocket("s2"), { workspaceId: "ws-b", socketId: "s2" });
|
||||||
|
|
||||||
|
expect(service.getWorkspaceSessionCount("ws-a")).toBe(1);
|
||||||
|
expect(service.getWorkspaceSessionCount("ws-b")).toBe(1);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// writeToSession
|
||||||
|
// ==========================================
|
||||||
|
describe("writeToSession", () => {
|
||||||
|
it("should write data to PTY", () => {
|
||||||
|
const result = service.createSession(mockSocket, {
|
||||||
|
workspaceId: "ws-1",
|
||||||
|
socketId: "socket-1",
|
||||||
|
});
|
||||||
|
|
||||||
|
service.writeToSession(result.sessionId, "ls -la\n");
|
||||||
|
|
||||||
|
expect(mockPtyProcess.write).toHaveBeenCalledWith("ls -la\n");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw for unknown sessionId", () => {
|
||||||
|
expect(() => service.writeToSession("nonexistent-id", "data")).toThrow(/not found/i);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// resizeSession
|
||||||
|
// ==========================================
|
||||||
|
describe("resizeSession", () => {
|
||||||
|
it("should resize PTY dimensions", () => {
|
||||||
|
const result = service.createSession(mockSocket, {
|
||||||
|
workspaceId: "ws-1",
|
||||||
|
socketId: "socket-1",
|
||||||
|
});
|
||||||
|
|
||||||
|
service.resizeSession(result.sessionId, 132, 50);
|
||||||
|
|
||||||
|
expect(mockPtyProcess.resize).toHaveBeenCalledWith(132, 50);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw for unknown sessionId", () => {
|
||||||
|
expect(() => service.resizeSession("nonexistent-id", 80, 24)).toThrow(/not found/i);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// closeSession
|
||||||
|
// ==========================================
|
||||||
|
describe("closeSession", () => {
|
||||||
|
it("should kill PTY and return true for existing session", () => {
|
||||||
|
const result = service.createSession(mockSocket, {
|
||||||
|
workspaceId: "ws-1",
|
||||||
|
socketId: "socket-1",
|
||||||
|
});
|
||||||
|
|
||||||
|
const closed = service.closeSession(result.sessionId);
|
||||||
|
|
||||||
|
expect(closed).toBe(true);
|
||||||
|
expect(mockPtyProcess.kill).toHaveBeenCalled();
|
||||||
|
expect(service.sessionBelongsToWorkspace(result.sessionId, "ws-1")).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return false for nonexistent sessionId", () => {
|
||||||
|
const closed = service.closeSession("does-not-exist");
|
||||||
|
expect(closed).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should clean up workspace tracking after close", () => {
|
||||||
|
const result = service.createSession(mockSocket, {
|
||||||
|
workspaceId: "ws-1",
|
||||||
|
socketId: "socket-1",
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(service.getWorkspaceSessionCount("ws-1")).toBe(1);
|
||||||
|
service.closeSession(result.sessionId);
|
||||||
|
expect(service.getWorkspaceSessionCount("ws-1")).toBe(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not throw if PTY kill throws", () => {
|
||||||
|
mockPtyProcess.kill.mockImplementationOnce(() => {
|
||||||
|
throw new Error("PTY already dead");
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = service.createSession(mockSocket, {
|
||||||
|
workspaceId: "ws-1",
|
||||||
|
socketId: "socket-1",
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(() => service.closeSession(result.sessionId)).not.toThrow();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// closeWorkspaceSessions
|
||||||
|
// ==========================================
|
||||||
|
// closeWorkspaceSessions: verifies bulk close is scoped to one workspace
// and is a no-op (not an error) for unknown workspaces.
describe("closeWorkspaceSessions", () => {
  it("should kill all sessions for a workspace", () => {
    service.createSession(mockSocket, { workspaceId: "ws-1", socketId: "s1" });
    service.createSession(createMockSocket("s2"), { workspaceId: "ws-1", socketId: "s2" });

    expect(service.getWorkspaceSessionCount("ws-1")).toBe(2);

    service.closeWorkspaceSessions("ws-1");

    expect(service.getWorkspaceSessionCount("ws-1")).toBe(0);
    // One kill per session that belonged to the workspace.
    expect(mockPtyProcess.kill).toHaveBeenCalledTimes(2);
  });

  it("should not affect sessions in other workspaces", () => {
    service.createSession(mockSocket, { workspaceId: "ws-1", socketId: "s1" });
    service.createSession(createMockSocket("s2"), { workspaceId: "ws-2", socketId: "s2" });

    service.closeWorkspaceSessions("ws-1");

    expect(service.getWorkspaceSessionCount("ws-1")).toBe(0);
    expect(service.getWorkspaceSessionCount("ws-2")).toBe(1);
  });

  it("should not throw for workspaces with no sessions", () => {
    expect(() => service.closeWorkspaceSessions("ws-nonexistent")).not.toThrow();
  });
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// sessionBelongsToWorkspace
|
||||||
|
// ==========================================
|
||||||
|
// sessionBelongsToWorkspace: the access-control predicate used by the
// gateway — true only for a live session created under that workspace.
describe("sessionBelongsToWorkspace", () => {
  it("should return true for a session belonging to the workspace", () => {
    const result = service.createSession(mockSocket, {
      workspaceId: "ws-1",
      socketId: "socket-1",
    });

    expect(service.sessionBelongsToWorkspace(result.sessionId, "ws-1")).toBe(true);
  });

  it("should return false for a session in a different workspace", () => {
    const result = service.createSession(mockSocket, {
      workspaceId: "ws-1",
      socketId: "socket-1",
    });

    expect(service.sessionBelongsToWorkspace(result.sessionId, "ws-2")).toBe(false);
  });

  it("should return false for a nonexistent sessionId", () => {
    expect(service.sessionBelongsToWorkspace("no-such-id", "ws-1")).toBe(false);
  });
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// getWorkspaceSessionCount
|
||||||
|
// ==========================================
|
||||||
|
// getWorkspaceSessionCount: the counter backing the per-workspace limit.
describe("getWorkspaceSessionCount", () => {
  it("should return 0 for workspace with no sessions", () => {
    expect(service.getWorkspaceSessionCount("empty-ws")).toBe(0);
  });

  it("should track session count accurately", () => {
    service.createSession(mockSocket, { workspaceId: "ws-count", socketId: "s1" });
    expect(service.getWorkspaceSessionCount("ws-count")).toBe(1);

    service.createSession(createMockSocket("s2"), { workspaceId: "ws-count", socketId: "s2" });
    expect(service.getWorkspaceSessionCount("ws-count")).toBe(2);
  });
});
|
||||||
|
});
|
||||||
251
apps/api/src/terminal/terminal.service.ts
Normal file
251
apps/api/src/terminal/terminal.service.ts
Normal file
@@ -0,0 +1,251 @@
|
|||||||
|
/**
|
||||||
|
* TerminalService
|
||||||
|
*
|
||||||
|
* Manages PTY (pseudo-terminal) sessions for workspace users.
|
||||||
|
* Spawns real shell processes via node-pty, streams I/O to connected sockets,
|
||||||
|
* and enforces per-workspace session limits.
|
||||||
|
*
|
||||||
|
* Session lifecycle:
|
||||||
|
* - createSession: spawn a new PTY, wire onData/onExit, return sessionId
|
||||||
|
* - writeToSession: send input data to PTY stdin
|
||||||
|
* - resizeSession: resize PTY dimensions (cols x rows)
|
||||||
|
* - closeSession: kill PTY process, emit terminal:exit, cleanup
|
||||||
|
* - closeWorkspaceSessions: kill all sessions for a workspace (on disconnect)
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Injectable, Logger } from "@nestjs/common";
|
||||||
|
import * as pty from "node-pty";
|
||||||
|
import type { Socket } from "socket.io";
|
||||||
|
import { randomUUID } from "node:crypto";
|
||||||
|
|
||||||
|
/** Maximum concurrent PTY sessions per workspace */
|
||||||
|
export const MAX_SESSIONS_PER_WORKSPACE = parseInt(
|
||||||
|
process.env.TERMINAL_MAX_SESSIONS_PER_WORKSPACE ?? "10",
|
||||||
|
10
|
||||||
|
);
|
||||||
|
|
||||||
|
/** Default PTY dimensions */
|
||||||
|
const DEFAULT_COLS = 80;
|
||||||
|
const DEFAULT_ROWS = 24;
|
||||||
|
|
||||||
|
/** In-memory record of one live PTY session. */
export interface TerminalSession {
  /** Unique session id (UUID) handed back to the client. */
  sessionId: string;
  /** Workspace the session is scoped to (used for limits and access checks). */
  workspaceId: string;
  /** Handle to the spawned node-pty process. */
  pty: pty.IPty;
  /** Optional human-readable label supplied at creation time. */
  name?: string;
  /** When the session was created. */
  createdAt: Date;
}

/** Input for TerminalService.createSession. */
export interface CreateSessionOptions {
  /** Optional session label, echoed back in the result. */
  name?: string;
  /** Initial terminal width in columns (defaults to 80 when omitted). */
  cols?: number;
  /** Initial terminal height in rows (defaults to 24 when omitted). */
  rows?: number;
  /** Working directory for the shell (defaults to process.cwd() when omitted). */
  cwd?: string;
  workspaceId: string;
  /** Socket id — used for log correlation only. */
  socketId: string;
}

/** Returned to the caller after a session is successfully created. */
export interface SessionCreatedResult {
  sessionId: string;
  name?: string;
  /** Actual dimensions the PTY was spawned with. */
  cols: number;
  rows: number;
}
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class TerminalService {
|
||||||
|
private readonly logger = new Logger(TerminalService.name);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Map of sessionId -> TerminalSession
|
||||||
|
*/
|
||||||
|
private readonly sessions = new Map<string, TerminalSession>();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Map of workspaceId -> Set<sessionId> for fast per-workspace lookups
|
||||||
|
*/
|
||||||
|
private readonly workspaceSessions = new Map<string, Set<string>>();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new PTY session for the given workspace and socket.
|
||||||
|
* Wires PTY onData -> emit terminal:output and onExit -> emit terminal:exit.
|
||||||
|
*
|
||||||
|
* @throws Error if workspace session limit is exceeded
|
||||||
|
*/
|
||||||
|
createSession(socket: Socket, options: CreateSessionOptions): SessionCreatedResult {
|
||||||
|
const { workspaceId, name, cwd, socketId } = options;
|
||||||
|
const cols = options.cols ?? DEFAULT_COLS;
|
||||||
|
const rows = options.rows ?? DEFAULT_ROWS;
|
||||||
|
|
||||||
|
// Enforce per-workspace session limit
|
||||||
|
const workspaceSessionIds = this.workspaceSessions.get(workspaceId) ?? new Set<string>();
|
||||||
|
if (workspaceSessionIds.size >= MAX_SESSIONS_PER_WORKSPACE) {
|
||||||
|
throw new Error(
|
||||||
|
`Workspace ${workspaceId} has reached the maximum of ${String(MAX_SESSIONS_PER_WORKSPACE)} concurrent terminal sessions`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const sessionId = randomUUID();
|
||||||
|
const shell = process.env.SHELL ?? "/bin/bash";
|
||||||
|
|
||||||
|
this.logger.log(
|
||||||
|
`Spawning PTY session ${sessionId} for workspace ${workspaceId} (socket: ${socketId}, shell: ${shell}, ${String(cols)}x${String(rows)})`
|
||||||
|
);
|
||||||
|
|
||||||
|
const ptyProcess = pty.spawn(shell, [], {
|
||||||
|
name: "xterm-256color",
|
||||||
|
cols,
|
||||||
|
rows,
|
||||||
|
cwd: cwd ?? process.cwd(),
|
||||||
|
env: process.env as Record<string, string>,
|
||||||
|
});
|
||||||
|
|
||||||
|
const session: TerminalSession = {
|
||||||
|
sessionId,
|
||||||
|
workspaceId,
|
||||||
|
pty: ptyProcess,
|
||||||
|
...(name !== undefined ? { name } : {}),
|
||||||
|
createdAt: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
this.sessions.set(sessionId, session);
|
||||||
|
|
||||||
|
// Track by workspace
|
||||||
|
if (!this.workspaceSessions.has(workspaceId)) {
|
||||||
|
this.workspaceSessions.set(workspaceId, new Set());
|
||||||
|
}
|
||||||
|
const wsSet = this.workspaceSessions.get(workspaceId);
|
||||||
|
if (wsSet) {
|
||||||
|
wsSet.add(sessionId);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wire PTY stdout/stderr -> terminal:output
|
||||||
|
ptyProcess.onData((data: string) => {
|
||||||
|
socket.emit("terminal:output", { sessionId, data });
|
||||||
|
});
|
||||||
|
|
||||||
|
// Wire PTY exit -> terminal:exit, cleanup
|
||||||
|
ptyProcess.onExit(({ exitCode, signal }) => {
|
||||||
|
this.logger.log(
|
||||||
|
`PTY session ${sessionId} exited (exitCode: ${String(exitCode)}, signal: ${String(signal ?? "none")})`
|
||||||
|
);
|
||||||
|
socket.emit("terminal:exit", { sessionId, exitCode, signal });
|
||||||
|
this.cleanupSession(sessionId, workspaceId);
|
||||||
|
});
|
||||||
|
|
||||||
|
return { sessionId, ...(name !== undefined ? { name } : {}), cols, rows };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Write input data to a PTY session's stdin.
|
||||||
|
*
|
||||||
|
* @throws Error if session not found
|
||||||
|
*/
|
||||||
|
writeToSession(sessionId: string, data: string): void {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
if (!session) {
|
||||||
|
throw new Error(`Terminal session ${sessionId} not found`);
|
||||||
|
}
|
||||||
|
session.pty.write(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resize a PTY session's terminal dimensions.
|
||||||
|
*
|
||||||
|
* @throws Error if session not found
|
||||||
|
*/
|
||||||
|
resizeSession(sessionId: string, cols: number, rows: number): void {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
if (!session) {
|
||||||
|
throw new Error(`Terminal session ${sessionId} not found`);
|
||||||
|
}
|
||||||
|
session.pty.resize(cols, rows);
|
||||||
|
this.logger.debug(`Resized PTY session ${sessionId} to ${String(cols)}x${String(rows)}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Kill and clean up a specific PTY session.
|
||||||
|
* Returns true if the session existed, false if it was already gone.
|
||||||
|
*/
|
||||||
|
closeSession(sessionId: string): boolean {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
if (!session) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.log(`Closing PTY session ${sessionId} for workspace ${session.workspaceId}`);
|
||||||
|
|
||||||
|
try {
|
||||||
|
session.pty.kill();
|
||||||
|
} catch (error) {
|
||||||
|
this.logger.warn(
|
||||||
|
`Error killing PTY session ${sessionId}: ${error instanceof Error ? error.message : String(error)}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.cleanupSession(sessionId, session.workspaceId);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Close all PTY sessions for a workspace (called on client disconnect).
|
||||||
|
*/
|
||||||
|
closeWorkspaceSessions(workspaceId: string): void {
|
||||||
|
const sessionIds = this.workspaceSessions.get(workspaceId);
|
||||||
|
if (!sessionIds || sessionIds.size === 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.log(
|
||||||
|
`Closing ${String(sessionIds.size)} PTY session(s) for workspace ${workspaceId} (disconnect)`
|
||||||
|
);
|
||||||
|
|
||||||
|
// Copy to array to avoid mutation during iteration
|
||||||
|
const ids = Array.from(sessionIds);
|
||||||
|
for (const sessionId of ids) {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
if (session) {
|
||||||
|
try {
|
||||||
|
session.pty.kill();
|
||||||
|
} catch (error) {
|
||||||
|
this.logger.warn(
|
||||||
|
`Error killing PTY session ${sessionId} on disconnect: ${error instanceof Error ? error.message : String(error)}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
this.cleanupSession(sessionId, workspaceId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the number of active sessions for a workspace.
|
||||||
|
*/
|
||||||
|
getWorkspaceSessionCount(workspaceId: string): number {
|
||||||
|
return this.workspaceSessions.get(workspaceId)?.size ?? 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a session belongs to a given workspace.
|
||||||
|
* Used for access control in the gateway.
|
||||||
|
*/
|
||||||
|
sessionBelongsToWorkspace(sessionId: string, workspaceId: string): boolean {
|
||||||
|
const session = this.sessions.get(sessionId);
|
||||||
|
return session?.workspaceId === workspaceId;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Internal cleanup: remove session from tracking maps.
|
||||||
|
* Does NOT kill the PTY (caller is responsible).
|
||||||
|
*/
|
||||||
|
private cleanupSession(sessionId: string, workspaceId: string): void {
|
||||||
|
this.sessions.delete(sessionId);
|
||||||
|
|
||||||
|
const workspaceSessionIds = this.workspaceSessions.get(workspaceId);
|
||||||
|
if (workspaceSessionIds) {
|
||||||
|
workspaceSessionIds.delete(sessionId);
|
||||||
|
if (workspaceSessionIds.size === 0) {
|
||||||
|
this.workspaceSessions.delete(workspaceId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,14 +1,10 @@
|
|||||||
# Multi-stage build for mosaic-coordinator
|
# Multi-stage build for mosaic-coordinator
|
||||||
FROM python:3.11-slim AS builder
|
# Builder uses the full Python image which already includes gcc/g++/make,
|
||||||
|
# avoiding a 336 MB build-essential install that exceeds Kaniko disk budget.
|
||||||
|
FROM python:3.11 AS builder
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
# Install build dependencies
|
|
||||||
RUN apt-get update && \
|
|
||||||
apt-get install -y --no-install-recommends \
|
|
||||||
build-essential \
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
# Copy dependency files and private registry config
|
# Copy dependency files and private registry config
|
||||||
COPY pyproject.toml .
|
COPY pyproject.toml .
|
||||||
COPY pip.conf /etc/pip.conf
|
COPY pip.conf /etc/pip.conf
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
# Orchestrator Configuration
|
# Orchestrator Configuration
|
||||||
ORCHESTRATOR_PORT=3001
|
ORCHESTRATOR_PORT=3001
|
||||||
NODE_ENV=development
|
NODE_ENV=development
|
||||||
|
# AI provider for orchestrator agents: ollama, claude, openai
|
||||||
|
AI_PROVIDER=ollama
|
||||||
|
|
||||||
# Valkey
|
# Valkey
|
||||||
VALKEY_HOST=localhost
|
VALKEY_HOST=localhost
|
||||||
@@ -8,6 +10,7 @@ VALKEY_PORT=6379
|
|||||||
VALKEY_URL=redis://localhost:6379
|
VALKEY_URL=redis://localhost:6379
|
||||||
|
|
||||||
# Claude API
|
# Claude API
|
||||||
|
# Required only when AI_PROVIDER=claude.
|
||||||
CLAUDE_API_KEY=your-api-key-here
|
CLAUDE_API_KEY=your-api-key-here
|
||||||
|
|
||||||
# Docker
|
# Docker
|
||||||
|
|||||||
@@ -1,6 +1,3 @@
|
|||||||
# syntax=docker/dockerfile:1
|
|
||||||
# Enable BuildKit features for cache mounts
|
|
||||||
|
|
||||||
# Base image for all stages
|
# Base image for all stages
|
||||||
# Uses Debian slim (glibc) instead of Alpine (musl) for native addon compatibility.
|
# Uses Debian slim (glibc) instead of Alpine (musl) for native addon compatibility.
|
||||||
FROM node:24-slim AS base
|
FROM node:24-slim AS base
|
||||||
@@ -26,9 +23,8 @@ COPY packages/config/package.json ./packages/config/
|
|||||||
COPY apps/orchestrator/package.json ./apps/orchestrator/
|
COPY apps/orchestrator/package.json ./apps/orchestrator/
|
||||||
|
|
||||||
# Install ALL dependencies (not just production)
|
# Install ALL dependencies (not just production)
|
||||||
# This ensures NestJS packages and other required deps are available
|
# No cache mount — Kaniko builds are ephemeral in CI
|
||||||
RUN --mount=type=cache,id=pnpm-store,target=/root/.local/share/pnpm/store \
|
RUN pnpm install --frozen-lockfile
|
||||||
pnpm install --frozen-lockfile
|
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Builder stage
|
# Builder stage
|
||||||
@@ -69,15 +65,14 @@ LABEL org.opencontainers.image.vendor="Mosaic Stack"
|
|||||||
LABEL org.opencontainers.image.title="Mosaic Orchestrator"
|
LABEL org.opencontainers.image.title="Mosaic Orchestrator"
|
||||||
LABEL org.opencontainers.image.description="Agent orchestration service for Mosaic Stack"
|
LABEL org.opencontainers.image.description="Agent orchestration service for Mosaic Stack"
|
||||||
|
|
||||||
# Remove npm (unused in production — we use pnpm) to reduce attack surface
|
# Install dumb-init for proper signal handling (static binary from GitHub,
|
||||||
RUN rm -rf /usr/local/lib/node_modules/npm /usr/local/bin/npm /usr/local/bin/npx
|
# avoids apt-get which fails under Kaniko with bookworm GPG signature errors)
|
||||||
|
ADD https://github.com/Yelp/dumb-init/releases/download/v1.2.5/dumb-init_1.2.5_x86_64 /usr/local/bin/dumb-init
|
||||||
|
|
||||||
# Install wget and dumb-init
|
# Single RUN to minimize Kaniko filesystem snapshots (each RUN = full snapshot)
|
||||||
RUN apt-get update && apt-get install -y --no-install-recommends wget dumb-init \
|
RUN rm -rf /usr/local/lib/node_modules/npm /usr/local/bin/npm /usr/local/bin/npx \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& chmod 755 /usr/local/bin/dumb-init \
|
||||||
|
&& groupadd -g 1001 nodejs && useradd -m -u 1001 -g nodejs nestjs
|
||||||
# Create non-root user
|
|
||||||
RUN groupadd -g 1001 nodejs && useradd -m -u 1001 -g nodejs nestjs
|
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
@@ -105,7 +100,7 @@ EXPOSE 3001
|
|||||||
|
|
||||||
# Health check
|
# Health check
|
||||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
|
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
|
||||||
CMD wget --no-verbose --tries=1 --spider http://localhost:3001/health || exit 1
|
CMD node -e "require('http').get('http://localhost:3001/health', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})"
|
||||||
|
|
||||||
# Use dumb-init to handle signals properly
|
# Use dumb-init to handle signals properly
|
||||||
ENTRYPOINT ["dumb-init", "--"]
|
ENTRYPOINT ["dumb-init", "--"]
|
||||||
|
|||||||
@@ -46,11 +46,21 @@ Monitored via `apps/web/` (Agent Dashboard).
|
|||||||
### Agents
|
### Agents
|
||||||
|
|
||||||
| Method | Path | Description |
|
| Method | Path | Description |
|
||||||
| ------ | ------------------------- | ---------------------- |
|
| ------ | ------------------------- | ------------------------- |
|
||||||
| POST | `/agents/spawn` | Spawn a new agent |
|
| POST | `/agents/spawn` | Spawn a new agent |
|
||||||
| GET | `/agents/:agentId/status` | Get agent status |
|
| GET | `/agents/:agentId/status` | Get agent status |
|
||||||
| POST | `/agents/:agentId/kill` | Kill a single agent |
|
| POST | `/agents/:agentId/kill` | Kill a single agent |
|
||||||
| POST | `/agents/kill-all` | Kill all active agents |
|
| POST | `/agents/kill-all` | Kill all active agents |
|
||||||
|
| GET | `/agents/events` | SSE lifecycle/task events |
|
||||||
|
| GET | `/agents/events/recent` | Recent events (polling) |
|
||||||
|
|
||||||
|
### Queue
|
||||||
|
|
||||||
|
| Method | Path | Description |
|
||||||
|
| ------ | --------------- | ---------------------------- |
|
||||||
|
| GET | `/queue/stats` | Queue depth and worker stats |
|
||||||
|
| POST | `/queue/pause` | Pause queue processing |
|
||||||
|
| POST | `/queue/resume` | Resume queue processing |
|
||||||
|
|
||||||
#### POST /agents/spawn
|
#### POST /agents/spawn
|
||||||
|
|
||||||
@@ -177,13 +187,17 @@ pnpm --filter @mosaic/orchestrator lint
|
|||||||
Environment variables loaded via `@nestjs/config`. Key variables:
|
Environment variables loaded via `@nestjs/config`. Key variables:
|
||||||
|
|
||||||
| Variable | Description |
|
| Variable | Description |
|
||||||
| ------------------- | -------------------------------------- |
|
| -------------------------------- | ------------------------------------------------------------ |
|
||||||
| `ORCHESTRATOR_PORT` | HTTP port (default: 3001) |
|
| `ORCHESTRATOR_PORT` | HTTP port (default: 3001) |
|
||||||
| `CLAUDE_API_KEY` | Claude API key for agents |
|
| `AI_PROVIDER` | LLM provider for orchestrator (`ollama`, `claude`, `openai`) |
|
||||||
|
| `CLAUDE_API_KEY` | Required only when `AI_PROVIDER=claude` |
|
||||||
| `VALKEY_HOST` | Valkey/Redis host (default: localhost) |
|
| `VALKEY_HOST` | Valkey/Redis host (default: localhost) |
|
||||||
| `VALKEY_PORT` | Valkey/Redis port (default: 6379) |
|
| `VALKEY_PORT` | Valkey/Redis port (default: 6379) |
|
||||||
| `COORDINATOR_URL` | Quality Coordinator base URL |
|
| `COORDINATOR_URL` | Quality Coordinator base URL |
|
||||||
| `SANDBOX_ENABLED` | Enable Docker sandbox (true/false) |
|
| `SANDBOX_ENABLED` | Enable Docker sandbox (true/false) |
|
||||||
|
| `MAX_CONCURRENT_AGENTS` | Maximum concurrent in-memory sessions (default: 2) |
|
||||||
|
| `ORCHESTRATOR_QUEUE_CONCURRENCY` | BullMQ worker concurrency (default: 1) |
|
||||||
|
| `SANDBOX_DEFAULT_MEMORY_MB` | Sandbox memory limit in MB (default: 256) |
|
||||||
|
|
||||||
## Related Documentation
|
## Related Documentation
|
||||||
|
|
||||||
|
|||||||
@@ -192,7 +192,8 @@ LABEL com.mosaic.security.non-root=true
|
|||||||
|
|
||||||
Sensitive configuration is passed via environment variables:
|
Sensitive configuration is passed via environment variables:
|
||||||
|
|
||||||
- `CLAUDE_API_KEY`: Claude API credentials
|
- `AI_PROVIDER`: Orchestrator LLM provider
|
||||||
|
- `CLAUDE_API_KEY`: Claude credentials (required only for `AI_PROVIDER=claude`)
|
||||||
- `VALKEY_URL`: Cache connection string
|
- `VALKEY_URL`: Cache connection string
|
||||||
|
|
||||||
**Best Practices:**
|
**Best Practices:**
|
||||||
|
|||||||
89
apps/orchestrator/src/api/agents/agent-events.service.ts
Normal file
89
apps/orchestrator/src/api/agents/agent-events.service.ts
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
import { Injectable, Logger, OnModuleInit } from "@nestjs/common";
|
||||||
|
import { randomUUID } from "crypto";
|
||||||
|
import { ValkeyService } from "../../valkey/valkey.service";
|
||||||
|
import type { EventHandler, OrchestratorEvent } from "../../valkey/types";
|
||||||
|
|
||||||
|
/** Function returned by subscribe(); invoking it removes the handler. */
type UnsubscribeFn = () => void;

/** Upper bound on the in-memory buffer of recently seen events. */
const MAX_RECENT_EVENTS = 500;
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class AgentEventsService implements OnModuleInit {
|
||||||
|
private readonly logger = new Logger(AgentEventsService.name);
|
||||||
|
private readonly subscribers = new Map<string, EventHandler>();
|
||||||
|
private readonly recentEvents: OrchestratorEvent[] = [];
|
||||||
|
private connected = false;
|
||||||
|
|
||||||
|
constructor(private readonly valkeyService: ValkeyService) {}
|
||||||
|
|
||||||
|
async onModuleInit(): Promise<void> {
|
||||||
|
if (this.connected) return;
|
||||||
|
|
||||||
|
await this.valkeyService.subscribeToEvents(
|
||||||
|
(event) => {
|
||||||
|
this.appendRecentEvent(event);
|
||||||
|
this.subscribers.forEach((handler) => {
|
||||||
|
void handler(event);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
(error, _raw, channel) => {
|
||||||
|
this.logger.warn(`Event stream parse/validation warning on ${channel}: ${error.message}`);
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
this.connected = true;
|
||||||
|
this.logger.log("Agent event stream subscription active");
|
||||||
|
}
|
||||||
|
|
||||||
|
subscribe(handler: EventHandler): UnsubscribeFn {
|
||||||
|
const id = randomUUID();
|
||||||
|
this.subscribers.set(id, handler);
|
||||||
|
return () => {
|
||||||
|
this.subscribers.delete(id);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async getInitialSnapshot(): Promise<{
|
||||||
|
type: "stream.snapshot";
|
||||||
|
timestamp: string;
|
||||||
|
agents: number;
|
||||||
|
tasks: number;
|
||||||
|
}> {
|
||||||
|
const [agents, tasks] = await Promise.all([
|
||||||
|
this.valkeyService.listAgents(),
|
||||||
|
this.valkeyService.listTasks(),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
type: "stream.snapshot",
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
agents: agents.length,
|
||||||
|
tasks: tasks.length,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
createHeartbeat(): OrchestratorEvent {
|
||||||
|
return {
|
||||||
|
type: "task.processing",
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
data: {
|
||||||
|
heartbeat: true,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
getRecentEvents(limit = 100): OrchestratorEvent[] {
|
||||||
|
const safeLimit = Math.min(Math.max(Math.floor(limit), 1), MAX_RECENT_EVENTS);
|
||||||
|
if (safeLimit >= this.recentEvents.length) {
|
||||||
|
return [...this.recentEvents];
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.recentEvents.slice(-safeLimit);
|
||||||
|
}
|
||||||
|
|
||||||
|
private appendRecentEvent(event: OrchestratorEvent): void {
|
||||||
|
this.recentEvents.push(event);
|
||||||
|
if (this.recentEvents.length > MAX_RECENT_EVENTS) {
|
||||||
|
this.recentEvents.shift();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -4,6 +4,7 @@ import { QueueService } from "../../queue/queue.service";
|
|||||||
import { AgentSpawnerService } from "../../spawner/agent-spawner.service";
|
import { AgentSpawnerService } from "../../spawner/agent-spawner.service";
|
||||||
import { AgentLifecycleService } from "../../spawner/agent-lifecycle.service";
|
import { AgentLifecycleService } from "../../spawner/agent-lifecycle.service";
|
||||||
import { KillswitchService } from "../../killswitch/killswitch.service";
|
import { KillswitchService } from "../../killswitch/killswitch.service";
|
||||||
|
import { AgentEventsService } from "./agent-events.service";
|
||||||
import type { KillAllResult } from "../../killswitch/killswitch.service";
|
import type { KillAllResult } from "../../killswitch/killswitch.service";
|
||||||
|
|
||||||
describe("AgentsController - Killswitch Endpoints", () => {
|
describe("AgentsController - Killswitch Endpoints", () => {
|
||||||
@@ -20,6 +21,12 @@ describe("AgentsController - Killswitch Endpoints", () => {
|
|||||||
};
|
};
|
||||||
let mockLifecycleService: {
|
let mockLifecycleService: {
|
||||||
getAgentLifecycleState: ReturnType<typeof vi.fn>;
|
getAgentLifecycleState: ReturnType<typeof vi.fn>;
|
||||||
|
registerSpawnedAgent: ReturnType<typeof vi.fn>;
|
||||||
|
};
|
||||||
|
let mockEventsService: {
|
||||||
|
subscribe: ReturnType<typeof vi.fn>;
|
||||||
|
getInitialSnapshot: ReturnType<typeof vi.fn>;
|
||||||
|
createHeartbeat: ReturnType<typeof vi.fn>;
|
||||||
};
|
};
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
@@ -38,13 +45,30 @@ describe("AgentsController - Killswitch Endpoints", () => {
|
|||||||
|
|
||||||
mockLifecycleService = {
|
mockLifecycleService = {
|
||||||
getAgentLifecycleState: vi.fn(),
|
getAgentLifecycleState: vi.fn(),
|
||||||
|
registerSpawnedAgent: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
mockEventsService = {
|
||||||
|
subscribe: vi.fn().mockReturnValue(() => {}),
|
||||||
|
getInitialSnapshot: vi.fn().mockResolvedValue({
|
||||||
|
type: "stream.snapshot",
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
agents: 0,
|
||||||
|
tasks: 0,
|
||||||
|
}),
|
||||||
|
createHeartbeat: vi.fn().mockReturnValue({
|
||||||
|
type: "task.processing",
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
data: { heartbeat: true },
|
||||||
|
}),
|
||||||
};
|
};
|
||||||
|
|
||||||
controller = new AgentsController(
|
controller = new AgentsController(
|
||||||
mockQueueService as unknown as QueueService,
|
mockQueueService as unknown as QueueService,
|
||||||
mockSpawnerService as unknown as AgentSpawnerService,
|
mockSpawnerService as unknown as AgentSpawnerService,
|
||||||
mockLifecycleService as unknown as AgentLifecycleService,
|
mockLifecycleService as unknown as AgentLifecycleService,
|
||||||
mockKillswitchService as unknown as KillswitchService
|
mockKillswitchService as unknown as KillswitchService,
|
||||||
|
mockEventsService as unknown as AgentEventsService
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { QueueService } from "../../queue/queue.service";
|
|||||||
import { AgentSpawnerService } from "../../spawner/agent-spawner.service";
|
import { AgentSpawnerService } from "../../spawner/agent-spawner.service";
|
||||||
import { AgentLifecycleService } from "../../spawner/agent-lifecycle.service";
|
import { AgentLifecycleService } from "../../spawner/agent-lifecycle.service";
|
||||||
import { KillswitchService } from "../../killswitch/killswitch.service";
|
import { KillswitchService } from "../../killswitch/killswitch.service";
|
||||||
|
import { AgentEventsService } from "./agent-events.service";
|
||||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||||
|
|
||||||
describe("AgentsController", () => {
|
describe("AgentsController", () => {
|
||||||
@@ -17,11 +18,18 @@ describe("AgentsController", () => {
|
|||||||
};
|
};
|
||||||
let lifecycleService: {
|
let lifecycleService: {
|
||||||
getAgentLifecycleState: ReturnType<typeof vi.fn>;
|
getAgentLifecycleState: ReturnType<typeof vi.fn>;
|
||||||
|
registerSpawnedAgent: ReturnType<typeof vi.fn>;
|
||||||
};
|
};
|
||||||
let killswitchService: {
|
let killswitchService: {
|
||||||
killAgent: ReturnType<typeof vi.fn>;
|
killAgent: ReturnType<typeof vi.fn>;
|
||||||
killAllAgents: ReturnType<typeof vi.fn>;
|
killAllAgents: ReturnType<typeof vi.fn>;
|
||||||
};
|
};
|
||||||
|
let eventsService: {
|
||||||
|
subscribe: ReturnType<typeof vi.fn>;
|
||||||
|
getInitialSnapshot: ReturnType<typeof vi.fn>;
|
||||||
|
createHeartbeat: ReturnType<typeof vi.fn>;
|
||||||
|
getRecentEvents: ReturnType<typeof vi.fn>;
|
||||||
|
};
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
// Create mock services
|
// Create mock services
|
||||||
@@ -37,6 +45,7 @@ describe("AgentsController", () => {
|
|||||||
|
|
||||||
lifecycleService = {
|
lifecycleService = {
|
||||||
getAgentLifecycleState: vi.fn(),
|
getAgentLifecycleState: vi.fn(),
|
||||||
|
registerSpawnedAgent: vi.fn().mockResolvedValue(undefined),
|
||||||
};
|
};
|
||||||
|
|
||||||
killswitchService = {
|
killswitchService = {
|
||||||
@@ -44,12 +53,29 @@ describe("AgentsController", () => {
|
|||||||
killAllAgents: vi.fn(),
|
killAllAgents: vi.fn(),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
eventsService = {
|
||||||
|
subscribe: vi.fn().mockReturnValue(() => {}),
|
||||||
|
getInitialSnapshot: vi.fn().mockResolvedValue({
|
||||||
|
type: "stream.snapshot",
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
agents: 0,
|
||||||
|
tasks: 0,
|
||||||
|
}),
|
||||||
|
createHeartbeat: vi.fn().mockReturnValue({
|
||||||
|
type: "task.processing",
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
data: { heartbeat: true },
|
||||||
|
}),
|
||||||
|
getRecentEvents: vi.fn().mockReturnValue([]),
|
||||||
|
};
|
||||||
|
|
||||||
// Create controller with mocked services
|
// Create controller with mocked services
|
||||||
controller = new AgentsController(
|
controller = new AgentsController(
|
||||||
queueService as unknown as QueueService,
|
queueService as unknown as QueueService,
|
||||||
spawnerService as unknown as AgentSpawnerService,
|
spawnerService as unknown as AgentSpawnerService,
|
||||||
lifecycleService as unknown as AgentLifecycleService,
|
lifecycleService as unknown as AgentLifecycleService,
|
||||||
killswitchService as unknown as KillswitchService
|
killswitchService as unknown as KillswitchService,
|
||||||
|
eventsService as unknown as AgentEventsService
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -195,6 +221,10 @@ describe("AgentsController", () => {
|
|||||||
expect(queueService.addTask).toHaveBeenCalledWith(validRequest.taskId, validRequest.context, {
|
expect(queueService.addTask).toHaveBeenCalledWith(validRequest.taskId, validRequest.context, {
|
||||||
priority: 5,
|
priority: 5,
|
||||||
});
|
});
|
||||||
|
expect(lifecycleService.registerSpawnedAgent).toHaveBeenCalledWith(
|
||||||
|
agentId,
|
||||||
|
validRequest.taskId
|
||||||
|
);
|
||||||
expect(result).toEqual({
|
expect(result).toEqual({
|
||||||
agentId,
|
agentId,
|
||||||
status: "spawning",
|
status: "spawning",
|
||||||
@@ -334,4 +364,39 @@ describe("AgentsController", () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe("getRecentEvents", () => {
|
||||||
|
it("should return recent events with default limit", () => {
|
||||||
|
eventsService.getRecentEvents.mockReturnValue([
|
||||||
|
{
|
||||||
|
type: "task.completed",
|
||||||
|
timestamp: "2026-02-17T15:00:00.000Z",
|
||||||
|
taskId: "task-123",
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
|
||||||
|
const result = controller.getRecentEvents();
|
||||||
|
|
||||||
|
expect(eventsService.getRecentEvents).toHaveBeenCalledWith(100);
|
||||||
|
expect(result).toEqual({
|
||||||
|
events: [
|
||||||
|
{
|
||||||
|
type: "task.completed",
|
||||||
|
timestamp: "2026-02-17T15:00:00.000Z",
|
||||||
|
taskId: "task-123",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should parse and pass custom limit", () => {
|
||||||
|
controller.getRecentEvents("25");
|
||||||
|
expect(eventsService.getRecentEvents).toHaveBeenCalledWith(25);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should fallback to default when limit is invalid", () => {
|
||||||
|
controller.getRecentEvents("invalid");
|
||||||
|
expect(eventsService.getRecentEvents).toHaveBeenCalledWith(100);
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -11,8 +11,12 @@ import {
|
|||||||
HttpCode,
|
HttpCode,
|
||||||
UseGuards,
|
UseGuards,
|
||||||
ParseUUIDPipe,
|
ParseUUIDPipe,
|
||||||
|
Sse,
|
||||||
|
MessageEvent,
|
||||||
|
Query,
|
||||||
} from "@nestjs/common";
|
} from "@nestjs/common";
|
||||||
import { Throttle } from "@nestjs/throttler";
|
import { Throttle } from "@nestjs/throttler";
|
||||||
|
import { Observable } from "rxjs";
|
||||||
import { QueueService } from "../../queue/queue.service";
|
import { QueueService } from "../../queue/queue.service";
|
||||||
import { AgentSpawnerService } from "../../spawner/agent-spawner.service";
|
import { AgentSpawnerService } from "../../spawner/agent-spawner.service";
|
||||||
import { AgentLifecycleService } from "../../spawner/agent-lifecycle.service";
|
import { AgentLifecycleService } from "../../spawner/agent-lifecycle.service";
|
||||||
@@ -20,6 +24,7 @@ import { KillswitchService } from "../../killswitch/killswitch.service";
|
|||||||
import { SpawnAgentDto, SpawnAgentResponseDto } from "./dto/spawn-agent.dto";
|
import { SpawnAgentDto, SpawnAgentResponseDto } from "./dto/spawn-agent.dto";
|
||||||
import { OrchestratorApiKeyGuard } from "../../common/guards/api-key.guard";
|
import { OrchestratorApiKeyGuard } from "../../common/guards/api-key.guard";
|
||||||
import { OrchestratorThrottlerGuard } from "../../common/guards/throttler.guard";
|
import { OrchestratorThrottlerGuard } from "../../common/guards/throttler.guard";
|
||||||
|
import { AgentEventsService } from "./agent-events.service";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Controller for agent management endpoints
|
* Controller for agent management endpoints
|
||||||
@@ -41,7 +46,8 @@ export class AgentsController {
|
|||||||
private readonly queueService: QueueService,
|
private readonly queueService: QueueService,
|
||||||
private readonly spawnerService: AgentSpawnerService,
|
private readonly spawnerService: AgentSpawnerService,
|
||||||
private readonly lifecycleService: AgentLifecycleService,
|
private readonly lifecycleService: AgentLifecycleService,
|
||||||
private readonly killswitchService: KillswitchService
|
private readonly killswitchService: KillswitchService,
|
||||||
|
private readonly eventsService: AgentEventsService
|
||||||
) {}
|
) {}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -67,6 +73,9 @@ export class AgentsController {
|
|||||||
context: dto.context,
|
context: dto.context,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Persist initial lifecycle state in Valkey.
|
||||||
|
await this.lifecycleService.registerSpawnedAgent(spawnResponse.agentId, dto.taskId);
|
||||||
|
|
||||||
// Queue task in Valkey
|
// Queue task in Valkey
|
||||||
await this.queueService.addTask(dto.taskId, dto.context, {
|
await this.queueService.addTask(dto.taskId, dto.context, {
|
||||||
priority: 5, // Default priority
|
priority: 5, // Default priority
|
||||||
@@ -85,6 +94,55 @@ export class AgentsController {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stream orchestrator events as server-sent events (SSE)
|
||||||
|
*/
|
||||||
|
@Sse("events")
|
||||||
|
@Throttle({ status: { limit: 200, ttl: 60000 } })
|
||||||
|
streamEvents(): Observable<MessageEvent> {
|
||||||
|
return new Observable<MessageEvent>((subscriber) => {
|
||||||
|
let isClosed = false;
|
||||||
|
|
||||||
|
const unsubscribe = this.eventsService.subscribe((event) => {
|
||||||
|
if (!isClosed) {
|
||||||
|
subscriber.next({ data: event });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
void this.eventsService.getInitialSnapshot().then((snapshot) => {
|
||||||
|
if (!isClosed) {
|
||||||
|
subscriber.next({ data: snapshot });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
const heartbeat = setInterval(() => {
|
||||||
|
if (!isClosed) {
|
||||||
|
subscriber.next({ data: this.eventsService.createHeartbeat() });
|
||||||
|
}
|
||||||
|
}, 15000);
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
isClosed = true;
|
||||||
|
clearInterval(heartbeat);
|
||||||
|
unsubscribe();
|
||||||
|
};
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return recent orchestrator events for non-streaming consumers.
|
||||||
|
*/
|
||||||
|
@Get("events/recent")
|
||||||
|
@Throttle({ status: { limit: 200, ttl: 60000 } })
|
||||||
|
getRecentEvents(@Query("limit") limit?: string): {
|
||||||
|
events: ReturnType<AgentEventsService["getRecentEvents"]>;
|
||||||
|
} {
|
||||||
|
const parsedLimit = Number.parseInt(limit ?? "100", 10);
|
||||||
|
return {
|
||||||
|
events: this.eventsService.getRecentEvents(Number.isNaN(parsedLimit) ? 100 : parsedLimit),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* List all agents
|
* List all agents
|
||||||
* @returns Array of all agent sessions with their status
|
* @returns Array of all agent sessions with their status
|
||||||
|
|||||||
@@ -5,10 +5,11 @@ import { SpawnerModule } from "../../spawner/spawner.module";
|
|||||||
import { KillswitchModule } from "../../killswitch/killswitch.module";
|
import { KillswitchModule } from "../../killswitch/killswitch.module";
|
||||||
import { ValkeyModule } from "../../valkey/valkey.module";
|
import { ValkeyModule } from "../../valkey/valkey.module";
|
||||||
import { OrchestratorApiKeyGuard } from "../../common/guards/api-key.guard";
|
import { OrchestratorApiKeyGuard } from "../../common/guards/api-key.guard";
|
||||||
|
import { AgentEventsService } from "./agent-events.service";
|
||||||
|
|
||||||
@Module({
|
@Module({
|
||||||
imports: [QueueModule, SpawnerModule, KillswitchModule, ValkeyModule],
|
imports: [QueueModule, SpawnerModule, KillswitchModule, ValkeyModule],
|
||||||
controllers: [AgentsController],
|
controllers: [AgentsController],
|
||||||
providers: [OrchestratorApiKeyGuard],
|
providers: [OrchestratorApiKeyGuard, AgentEventsService],
|
||||||
})
|
})
|
||||||
export class AgentsModule {}
|
export class AgentsModule {}
|
||||||
|
|||||||
11
apps/orchestrator/src/api/queue/queue-api.module.ts
Normal file
11
apps/orchestrator/src/api/queue/queue-api.module.ts
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
import { Module } from "@nestjs/common";
|
||||||
|
import { QueueController } from "./queue.controller";
|
||||||
|
import { QueueModule } from "../../queue/queue.module";
|
||||||
|
import { OrchestratorApiKeyGuard } from "../../common/guards/api-key.guard";
|
||||||
|
|
||||||
|
@Module({
|
||||||
|
imports: [QueueModule],
|
||||||
|
controllers: [QueueController],
|
||||||
|
providers: [OrchestratorApiKeyGuard],
|
||||||
|
})
|
||||||
|
export class QueueApiModule {}
|
||||||
65
apps/orchestrator/src/api/queue/queue.controller.spec.ts
Normal file
65
apps/orchestrator/src/api/queue/queue.controller.spec.ts
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||||
|
import { QueueController } from "./queue.controller";
|
||||||
|
import { QueueService } from "../../queue/queue.service";
|
||||||
|
|
||||||
|
describe("QueueController", () => {
|
||||||
|
let controller: QueueController;
|
||||||
|
let queueService: {
|
||||||
|
getStats: ReturnType<typeof vi.fn>;
|
||||||
|
pause: ReturnType<typeof vi.fn>;
|
||||||
|
resume: ReturnType<typeof vi.fn>;
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
queueService = {
|
||||||
|
getStats: vi.fn(),
|
||||||
|
pause: vi.fn(),
|
||||||
|
resume: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
controller = new QueueController(queueService as unknown as QueueService);
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return queue stats", async () => {
|
||||||
|
queueService.getStats.mockResolvedValue({
|
||||||
|
pending: 5,
|
||||||
|
active: 1,
|
||||||
|
completed: 10,
|
||||||
|
failed: 2,
|
||||||
|
delayed: 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await controller.getStats();
|
||||||
|
|
||||||
|
expect(queueService.getStats).toHaveBeenCalledOnce();
|
||||||
|
expect(result).toEqual({
|
||||||
|
pending: 5,
|
||||||
|
active: 1,
|
||||||
|
completed: 10,
|
||||||
|
failed: 2,
|
||||||
|
delayed: 0,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should pause queue processing", async () => {
|
||||||
|
queueService.pause.mockResolvedValue(undefined);
|
||||||
|
|
||||||
|
const result = await controller.pause();
|
||||||
|
|
||||||
|
expect(queueService.pause).toHaveBeenCalledOnce();
|
||||||
|
expect(result).toEqual({ message: "Queue processing paused" });
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should resume queue processing", async () => {
|
||||||
|
queueService.resume.mockResolvedValue(undefined);
|
||||||
|
|
||||||
|
const result = await controller.resume();
|
||||||
|
|
||||||
|
expect(queueService.resume).toHaveBeenCalledOnce();
|
||||||
|
expect(result).toEqual({ message: "Queue processing resumed" });
|
||||||
|
});
|
||||||
|
});
|
||||||
39
apps/orchestrator/src/api/queue/queue.controller.ts
Normal file
39
apps/orchestrator/src/api/queue/queue.controller.ts
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
import { Controller, Get, HttpCode, Post, UseGuards } from "@nestjs/common";
|
||||||
|
import { Throttle } from "@nestjs/throttler";
|
||||||
|
import { QueueService } from "../../queue/queue.service";
|
||||||
|
import { OrchestratorApiKeyGuard } from "../../common/guards/api-key.guard";
|
||||||
|
import { OrchestratorThrottlerGuard } from "../../common/guards/throttler.guard";
|
||||||
|
|
||||||
|
@Controller("queue")
|
||||||
|
@UseGuards(OrchestratorApiKeyGuard, OrchestratorThrottlerGuard)
|
||||||
|
export class QueueController {
|
||||||
|
constructor(private readonly queueService: QueueService) {}
|
||||||
|
|
||||||
|
@Get("stats")
|
||||||
|
@Throttle({ status: { limit: 200, ttl: 60000 } })
|
||||||
|
async getStats(): Promise<{
|
||||||
|
pending: number;
|
||||||
|
active: number;
|
||||||
|
completed: number;
|
||||||
|
failed: number;
|
||||||
|
delayed: number;
|
||||||
|
}> {
|
||||||
|
return this.queueService.getStats();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Post("pause")
|
||||||
|
@Throttle({ strict: { limit: 10, ttl: 60000 } })
|
||||||
|
@HttpCode(200)
|
||||||
|
async pause(): Promise<{ message: string }> {
|
||||||
|
await this.queueService.pause();
|
||||||
|
return { message: "Queue processing paused" };
|
||||||
|
}
|
||||||
|
|
||||||
|
@Post("resume")
|
||||||
|
@Throttle({ strict: { limit: 10, ttl: 60000 } })
|
||||||
|
@HttpCode(200)
|
||||||
|
async resume(): Promise<{ message: string }> {
|
||||||
|
await this.queueService.resume();
|
||||||
|
return { message: "Queue processing resumed" };
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,9 +1,10 @@
|
|||||||
import { Module } from "@nestjs/common";
|
import { Module } from "@nestjs/common";
|
||||||
import { ConfigModule } from "@nestjs/config";
|
import { ConfigModule, ConfigService } from "@nestjs/config";
|
||||||
import { BullModule } from "@nestjs/bullmq";
|
import { BullModule } from "@nestjs/bullmq";
|
||||||
import { ThrottlerModule } from "@nestjs/throttler";
|
import { ThrottlerModule } from "@nestjs/throttler";
|
||||||
import { HealthModule } from "./api/health/health.module";
|
import { HealthModule } from "./api/health/health.module";
|
||||||
import { AgentsModule } from "./api/agents/agents.module";
|
import { AgentsModule } from "./api/agents/agents.module";
|
||||||
|
import { QueueApiModule } from "./api/queue/queue-api.module";
|
||||||
import { CoordinatorModule } from "./coordinator/coordinator.module";
|
import { CoordinatorModule } from "./coordinator/coordinator.module";
|
||||||
import { BudgetModule } from "./budget/budget.module";
|
import { BudgetModule } from "./budget/budget.module";
|
||||||
import { CIModule } from "./ci";
|
import { CIModule } from "./ci";
|
||||||
@@ -21,12 +22,16 @@ import { orchestratorConfig } from "./config/orchestrator.config";
|
|||||||
isGlobal: true,
|
isGlobal: true,
|
||||||
load: [orchestratorConfig],
|
load: [orchestratorConfig],
|
||||||
}),
|
}),
|
||||||
BullModule.forRoot({
|
BullModule.forRootAsync({
|
||||||
|
inject: [ConfigService],
|
||||||
|
useFactory: (configService: ConfigService) => ({
|
||||||
connection: {
|
connection: {
|
||||||
host: process.env.VALKEY_HOST ?? "localhost",
|
host: configService.get<string>("orchestrator.valkey.host", "localhost"),
|
||||||
port: parseInt(process.env.VALKEY_PORT ?? "6379"),
|
port: configService.get<number>("orchestrator.valkey.port", 6379),
|
||||||
|
password: configService.get<string>("orchestrator.valkey.password"),
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
|
}),
|
||||||
ThrottlerModule.forRoot([
|
ThrottlerModule.forRoot([
|
||||||
{
|
{
|
||||||
name: "default",
|
name: "default",
|
||||||
@@ -46,6 +51,7 @@ import { orchestratorConfig } from "./config/orchestrator.config";
|
|||||||
]),
|
]),
|
||||||
HealthModule,
|
HealthModule,
|
||||||
AgentsModule,
|
AgentsModule,
|
||||||
|
QueueApiModule,
|
||||||
CoordinatorModule,
|
CoordinatorModule,
|
||||||
BudgetModule,
|
BudgetModule,
|
||||||
CIModule,
|
CIModule,
|
||||||
|
|||||||
@@ -120,6 +120,42 @@ describe("orchestratorConfig", () => {
|
|||||||
expect(config.valkey.port).toBe(6379);
|
expect(config.valkey.port).toBe(6379);
|
||||||
expect(config.valkey.url).toBe("redis://localhost:6379");
|
expect(config.valkey.url).toBe("redis://localhost:6379");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should derive valkey host and port from VALKEY_URL when VALKEY_HOST/VALKEY_PORT are not set", () => {
|
||||||
|
delete process.env.VALKEY_HOST;
|
||||||
|
delete process.env.VALKEY_PORT;
|
||||||
|
process.env.VALKEY_URL = "redis://valkey:6380";
|
||||||
|
|
||||||
|
const config = orchestratorConfig();
|
||||||
|
|
||||||
|
expect(config.valkey.host).toBe("valkey");
|
||||||
|
expect(config.valkey.port).toBe(6380);
|
||||||
|
expect(config.valkey.url).toBe("redis://valkey:6380");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should derive valkey password from VALKEY_URL when VALKEY_PASSWORD is not set", () => {
|
||||||
|
delete process.env.VALKEY_PASSWORD;
|
||||||
|
delete process.env.VALKEY_HOST;
|
||||||
|
delete process.env.VALKEY_PORT;
|
||||||
|
process.env.VALKEY_URL = "redis://:url-secret@valkey:6379";
|
||||||
|
|
||||||
|
const config = orchestratorConfig();
|
||||||
|
|
||||||
|
expect(config.valkey.password).toBe("url-secret");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should prefer explicit valkey env vars over VALKEY_URL values", () => {
|
||||||
|
process.env.VALKEY_HOST = "explicit-host";
|
||||||
|
process.env.VALKEY_PORT = "6390";
|
||||||
|
process.env.VALKEY_PASSWORD = "explicit-password";
|
||||||
|
process.env.VALKEY_URL = "redis://:url-secret@valkey:6380";
|
||||||
|
|
||||||
|
const config = orchestratorConfig();
|
||||||
|
|
||||||
|
expect(config.valkey.host).toBe("explicit-host");
|
||||||
|
expect(config.valkey.port).toBe(6390);
|
||||||
|
expect(config.valkey.password).toBe("explicit-password");
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("valkey timeout config (SEC-ORCH-28)", () => {
|
describe("valkey timeout config (SEC-ORCH-28)", () => {
|
||||||
@@ -157,12 +193,12 @@ describe("orchestratorConfig", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
describe("spawner config", () => {
|
describe("spawner config", () => {
|
||||||
it("should use default maxConcurrentAgents of 20 when not set", () => {
|
it("should use default maxConcurrentAgents of 2 when not set", () => {
|
||||||
delete process.env.MAX_CONCURRENT_AGENTS;
|
delete process.env.MAX_CONCURRENT_AGENTS;
|
||||||
|
|
||||||
const config = orchestratorConfig();
|
const config = orchestratorConfig();
|
||||||
|
|
||||||
expect(config.spawner.maxConcurrentAgents).toBe(20);
|
expect(config.spawner.maxConcurrentAgents).toBe(2);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should use provided maxConcurrentAgents when MAX_CONCURRENT_AGENTS is set", () => {
|
it("should use provided maxConcurrentAgents when MAX_CONCURRENT_AGENTS is set", () => {
|
||||||
@@ -181,4 +217,30 @@ describe("orchestratorConfig", () => {
|
|||||||
expect(config.spawner.maxConcurrentAgents).toBe(10);
|
expect(config.spawner.maxConcurrentAgents).toBe(10);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe("AI provider config", () => {
|
||||||
|
it("should default aiProvider to ollama when unset", () => {
|
||||||
|
delete process.env.AI_PROVIDER;
|
||||||
|
|
||||||
|
const config = orchestratorConfig();
|
||||||
|
|
||||||
|
expect(config.aiProvider).toBe("ollama");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should normalize AI provider to lowercase", () => {
|
||||||
|
process.env.AI_PROVIDER = " cLaUdE ";
|
||||||
|
|
||||||
|
const config = orchestratorConfig();
|
||||||
|
|
||||||
|
expect(config.aiProvider).toBe("claude");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should fallback unsupported AI provider to ollama", () => {
|
||||||
|
process.env.AI_PROVIDER = "bad-provider";
|
||||||
|
|
||||||
|
const config = orchestratorConfig();
|
||||||
|
|
||||||
|
expect(config.aiProvider).toBe("ollama");
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,19 +1,53 @@
|
|||||||
import { registerAs } from "@nestjs/config";
|
import { registerAs } from "@nestjs/config";
|
||||||
|
|
||||||
export const orchestratorConfig = registerAs("orchestrator", () => ({
|
const normalizeAiProvider = (): "ollama" | "claude" | "openai" => {
|
||||||
|
const provider = process.env.AI_PROVIDER?.trim().toLowerCase();
|
||||||
|
|
||||||
|
if (!provider) {
|
||||||
|
return "ollama";
|
||||||
|
}
|
||||||
|
|
||||||
|
if (provider !== "ollama" && provider !== "claude" && provider !== "openai") {
|
||||||
|
return "ollama";
|
||||||
|
}
|
||||||
|
|
||||||
|
return provider;
|
||||||
|
};
|
||||||
|
|
||||||
|
const parseValkeyUrl = (url: string): { host?: string; port?: number; password?: string } => {
|
||||||
|
try {
|
||||||
|
const parsed = new URL(url);
|
||||||
|
const port = parsed.port ? parseInt(parsed.port, 10) : undefined;
|
||||||
|
|
||||||
|
return {
|
||||||
|
host: parsed.hostname || undefined,
|
||||||
|
port: Number.isNaN(port) ? undefined : port,
|
||||||
|
password: parsed.password ? decodeURIComponent(parsed.password) : undefined,
|
||||||
|
};
|
||||||
|
} catch {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export const orchestratorConfig = registerAs("orchestrator", () => {
|
||||||
|
const valkeyUrl = process.env.VALKEY_URL ?? "redis://localhost:6379";
|
||||||
|
const parsedValkeyUrl = parseValkeyUrl(valkeyUrl);
|
||||||
|
|
||||||
|
return {
|
||||||
host: process.env.HOST ?? process.env.BIND_ADDRESS ?? "127.0.0.1",
|
host: process.env.HOST ?? process.env.BIND_ADDRESS ?? "127.0.0.1",
|
||||||
port: parseInt(process.env.ORCHESTRATOR_PORT ?? "3001", 10),
|
port: parseInt(process.env.ORCHESTRATOR_PORT ?? "3001", 10),
|
||||||
valkey: {
|
valkey: {
|
||||||
host: process.env.VALKEY_HOST ?? "localhost",
|
host: process.env.VALKEY_HOST ?? parsedValkeyUrl.host ?? "localhost",
|
||||||
port: parseInt(process.env.VALKEY_PORT ?? "6379", 10),
|
port: parseInt(process.env.VALKEY_PORT ?? String(parsedValkeyUrl.port ?? 6379), 10),
|
||||||
password: process.env.VALKEY_PASSWORD,
|
password: process.env.VALKEY_PASSWORD ?? parsedValkeyUrl.password,
|
||||||
url: process.env.VALKEY_URL ?? "redis://localhost:6379",
|
url: valkeyUrl,
|
||||||
connectTimeout: parseInt(process.env.VALKEY_CONNECT_TIMEOUT_MS ?? "5000", 10),
|
connectTimeout: parseInt(process.env.VALKEY_CONNECT_TIMEOUT_MS ?? "5000", 10),
|
||||||
commandTimeout: parseInt(process.env.VALKEY_COMMAND_TIMEOUT_MS ?? "3000", 10),
|
commandTimeout: parseInt(process.env.VALKEY_COMMAND_TIMEOUT_MS ?? "3000", 10),
|
||||||
},
|
},
|
||||||
claude: {
|
claude: {
|
||||||
apiKey: process.env.CLAUDE_API_KEY,
|
apiKey: process.env.CLAUDE_API_KEY,
|
||||||
},
|
},
|
||||||
|
aiProvider: normalizeAiProvider(),
|
||||||
docker: {
|
docker: {
|
||||||
socketPath: process.env.DOCKER_SOCKET ?? "/var/run/docker.sock",
|
socketPath: process.env.DOCKER_SOCKET ?? "/var/run/docker.sock",
|
||||||
},
|
},
|
||||||
@@ -27,7 +61,7 @@ export const orchestratorConfig = registerAs("orchestrator", () => ({
|
|||||||
sandbox: {
|
sandbox: {
|
||||||
enabled: process.env.SANDBOX_ENABLED !== "false",
|
enabled: process.env.SANDBOX_ENABLED !== "false",
|
||||||
defaultImage: process.env.SANDBOX_DEFAULT_IMAGE ?? "node:20-alpine",
|
defaultImage: process.env.SANDBOX_DEFAULT_IMAGE ?? "node:20-alpine",
|
||||||
defaultMemoryMB: parseInt(process.env.SANDBOX_DEFAULT_MEMORY_MB ?? "512", 10),
|
defaultMemoryMB: parseInt(process.env.SANDBOX_DEFAULT_MEMORY_MB ?? "256", 10),
|
||||||
defaultCpuLimit: parseFloat(process.env.SANDBOX_DEFAULT_CPU_LIMIT ?? "1.0"),
|
defaultCpuLimit: parseFloat(process.env.SANDBOX_DEFAULT_CPU_LIMIT ?? "1.0"),
|
||||||
networkMode: process.env.SANDBOX_NETWORK_MODE ?? "none",
|
networkMode: process.env.SANDBOX_NETWORK_MODE ?? "none",
|
||||||
},
|
},
|
||||||
@@ -41,9 +75,15 @@ export const orchestratorConfig = registerAs("orchestrator", () => ({
|
|||||||
enabled: process.env.YOLO_MODE === "true",
|
enabled: process.env.YOLO_MODE === "true",
|
||||||
},
|
},
|
||||||
spawner: {
|
spawner: {
|
||||||
maxConcurrentAgents: parseInt(process.env.MAX_CONCURRENT_AGENTS ?? "20", 10),
|
maxConcurrentAgents: parseInt(process.env.MAX_CONCURRENT_AGENTS ?? "2", 10),
|
||||||
|
sessionCleanupDelayMs: parseInt(process.env.SESSION_CLEANUP_DELAY_MS ?? "30000", 10),
|
||||||
},
|
},
|
||||||
queue: {
|
queue: {
|
||||||
|
name: process.env.ORCHESTRATOR_QUEUE_NAME ?? "orchestrator-tasks",
|
||||||
|
maxRetries: parseInt(process.env.ORCHESTRATOR_QUEUE_MAX_RETRIES ?? "3", 10),
|
||||||
|
baseDelay: parseInt(process.env.ORCHESTRATOR_QUEUE_BASE_DELAY_MS ?? "1000", 10),
|
||||||
|
maxDelay: parseInt(process.env.ORCHESTRATOR_QUEUE_MAX_DELAY_MS ?? "60000", 10),
|
||||||
|
concurrency: parseInt(process.env.ORCHESTRATOR_QUEUE_CONCURRENCY ?? "1", 10),
|
||||||
completedRetentionCount: parseInt(process.env.QUEUE_COMPLETED_RETENTION_COUNT ?? "100", 10),
|
completedRetentionCount: parseInt(process.env.QUEUE_COMPLETED_RETENTION_COUNT ?? "100", 10),
|
||||||
completedRetentionAgeSeconds: parseInt(
|
completedRetentionAgeSeconds: parseInt(
|
||||||
process.env.QUEUE_COMPLETED_RETENTION_AGE_S ?? "3600",
|
process.env.QUEUE_COMPLETED_RETENTION_AGE_S ?? "3600",
|
||||||
@@ -52,4 +92,5 @@ export const orchestratorConfig = registerAs("orchestrator", () => ({
|
|||||||
failedRetentionCount: parseInt(process.env.QUEUE_FAILED_RETENTION_COUNT ?? "1000", 10),
|
failedRetentionCount: parseInt(process.env.QUEUE_FAILED_RETENTION_COUNT ?? "1000", 10),
|
||||||
failedRetentionAgeSeconds: parseInt(process.env.QUEUE_FAILED_RETENTION_AGE_S ?? "86400", 10),
|
failedRetentionAgeSeconds: parseInt(process.env.QUEUE_FAILED_RETENTION_AGE_S ?? "86400", 10),
|
||||||
},
|
},
|
||||||
}));
|
};
|
||||||
|
});
|
||||||
|
|||||||
@@ -2,9 +2,10 @@ import { Module } from "@nestjs/common";
|
|||||||
import { ConfigModule } from "@nestjs/config";
|
import { ConfigModule } from "@nestjs/config";
|
||||||
import { QueueService } from "./queue.service";
|
import { QueueService } from "./queue.service";
|
||||||
import { ValkeyModule } from "../valkey/valkey.module";
|
import { ValkeyModule } from "../valkey/valkey.module";
|
||||||
|
import { SpawnerModule } from "../spawner/spawner.module";
|
||||||
|
|
||||||
@Module({
|
@Module({
|
||||||
imports: [ConfigModule, ValkeyModule],
|
imports: [ConfigModule, ValkeyModule, SpawnerModule],
|
||||||
providers: [QueueService],
|
providers: [QueueService],
|
||||||
exports: [QueueService],
|
exports: [QueueService],
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -991,12 +991,17 @@ describe("QueueService", () => {
|
|||||||
success: true,
|
success: true,
|
||||||
metadata: { attempt: 1 },
|
metadata: { attempt: 1 },
|
||||||
});
|
});
|
||||||
expect(mockValkeyService.updateTaskStatus).toHaveBeenCalledWith("task-123", "executing");
|
expect(mockValkeyService.updateTaskStatus).toHaveBeenCalledWith(
|
||||||
|
"task-123",
|
||||||
|
"executing",
|
||||||
|
undefined
|
||||||
|
);
|
||||||
expect(mockValkeyService.publishEvent).toHaveBeenCalledWith({
|
expect(mockValkeyService.publishEvent).toHaveBeenCalledWith({
|
||||||
type: "task.processing",
|
type: "task.executing",
|
||||||
timestamp: expect.any(String),
|
timestamp: expect.any(String),
|
||||||
taskId: "task-123",
|
taskId: "task-123",
|
||||||
data: { attempt: 1 },
|
agentId: undefined,
|
||||||
|
data: { attempt: 1, dispatchedByQueue: true },
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,9 @@
|
|||||||
import { Injectable, OnModuleDestroy, OnModuleInit } from "@nestjs/common";
|
import { Injectable, OnModuleDestroy, OnModuleInit, Optional, Logger } from "@nestjs/common";
|
||||||
import { ConfigService } from "@nestjs/config";
|
import { ConfigService } from "@nestjs/config";
|
||||||
import { Queue, Worker, Job } from "bullmq";
|
import { Queue, Worker, Job } from "bullmq";
|
||||||
import { ValkeyService } from "../valkey/valkey.service";
|
import { ValkeyService } from "../valkey/valkey.service";
|
||||||
|
import { AgentSpawnerService } from "../spawner/agent-spawner.service";
|
||||||
|
import { AgentLifecycleService } from "../spawner/agent-lifecycle.service";
|
||||||
import type { TaskContext } from "../valkey/types";
|
import type { TaskContext } from "../valkey/types";
|
||||||
import type {
|
import type {
|
||||||
QueuedTask,
|
QueuedTask,
|
||||||
@@ -16,6 +18,7 @@ import type {
|
|||||||
*/
|
*/
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class QueueService implements OnModuleInit, OnModuleDestroy {
|
export class QueueService implements OnModuleInit, OnModuleDestroy {
|
||||||
|
private readonly logger = new Logger(QueueService.name);
|
||||||
private queue!: Queue<QueuedTask>;
|
private queue!: Queue<QueuedTask>;
|
||||||
private worker!: Worker<QueuedTask, TaskProcessingResult>;
|
private worker!: Worker<QueuedTask, TaskProcessingResult>;
|
||||||
private readonly queueName: string;
|
private readonly queueName: string;
|
||||||
@@ -23,7 +26,9 @@ export class QueueService implements OnModuleInit, OnModuleDestroy {
|
|||||||
|
|
||||||
constructor(
|
constructor(
|
||||||
private readonly valkeyService: ValkeyService,
|
private readonly valkeyService: ValkeyService,
|
||||||
private readonly configService: ConfigService
|
private readonly configService: ConfigService,
|
||||||
|
@Optional() private readonly spawnerService?: AgentSpawnerService,
|
||||||
|
@Optional() private readonly lifecycleService?: AgentLifecycleService
|
||||||
) {
|
) {
|
||||||
this.queueName = this.configService.get<string>(
|
this.queueName = this.configService.get<string>(
|
||||||
"orchestrator.queue.name",
|
"orchestrator.queue.name",
|
||||||
@@ -132,6 +137,16 @@ export class QueueService implements OnModuleInit, OnModuleDestroy {
|
|||||||
context,
|
context,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Ensure task state exists before queue lifecycle updates.
|
||||||
|
const getTaskState = (this.valkeyService as Partial<ValkeyService>).getTaskState;
|
||||||
|
const createTask = (this.valkeyService as Partial<ValkeyService>).createTask;
|
||||||
|
if (typeof getTaskState === "function" && typeof createTask === "function") {
|
||||||
|
const existingTask = await getTaskState.call(this.valkeyService, taskId);
|
||||||
|
if (!existingTask) {
|
||||||
|
await createTask.call(this.valkeyService, taskId, context);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Add to BullMQ queue
|
// Add to BullMQ queue
|
||||||
await this.queue.add(taskId, queuedTask, {
|
await this.queue.add(taskId, queuedTask, {
|
||||||
priority: 10 - priority + 1, // BullMQ: lower number = higher priority, so invert
|
priority: 10 - priority + 1, // BullMQ: lower number = higher priority, so invert
|
||||||
@@ -214,23 +229,35 @@ export class QueueService implements OnModuleInit, OnModuleDestroy {
|
|||||||
const { taskId } = job.data;
|
const { taskId } = job.data;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
const session = this.spawnerService?.findAgentSessionByTaskId(taskId);
|
||||||
|
const agentId = session?.agentId;
|
||||||
|
|
||||||
|
if (agentId) {
|
||||||
|
if (this.lifecycleService) {
|
||||||
|
await this.lifecycleService.transitionToRunning(agentId);
|
||||||
|
}
|
||||||
|
this.spawnerService?.setSessionState(agentId, "running");
|
||||||
|
}
|
||||||
|
|
||||||
// Update task state to executing
|
// Update task state to executing
|
||||||
await this.valkeyService.updateTaskStatus(taskId, "executing");
|
await this.valkeyService.updateTaskStatus(taskId, "executing", agentId);
|
||||||
|
|
||||||
// Publish event
|
// Publish event
|
||||||
await this.valkeyService.publishEvent({
|
await this.valkeyService.publishEvent({
|
||||||
type: "task.processing",
|
type: "task.executing",
|
||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
taskId,
|
taskId,
|
||||||
data: { attempt: job.attemptsMade + 1 },
|
agentId,
|
||||||
|
data: {
|
||||||
|
attempt: job.attemptsMade + 1,
|
||||||
|
dispatchedByQueue: true,
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
// Task processing will be handled by agent spawner
|
|
||||||
// For now, just mark as processing
|
|
||||||
return {
|
return {
|
||||||
success: true,
|
success: true,
|
||||||
metadata: {
|
metadata: {
|
||||||
attempt: job.attemptsMade + 1,
|
attempt: job.attemptsMade + 1,
|
||||||
|
...(agentId && { agentId }),
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -270,6 +297,14 @@ export class QueueService implements OnModuleInit, OnModuleDestroy {
|
|||||||
* Handle task failure
|
* Handle task failure
|
||||||
*/
|
*/
|
||||||
private async handleTaskFailure(taskId: string, error: Error): Promise<void> {
|
private async handleTaskFailure(taskId: string, error: Error): Promise<void> {
|
||||||
|
const session = this.spawnerService?.findAgentSessionByTaskId(taskId);
|
||||||
|
if (session) {
|
||||||
|
this.spawnerService?.setSessionState(session.agentId, "failed", error.message, new Date());
|
||||||
|
if (this.lifecycleService) {
|
||||||
|
await this.lifecycleService.transitionToFailed(session.agentId, error.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
await this.valkeyService.updateTaskStatus(taskId, "failed", undefined, error.message);
|
await this.valkeyService.updateTaskStatus(taskId, "failed", undefined, error.message);
|
||||||
|
|
||||||
await this.valkeyService.publishEvent({
|
await this.valkeyService.publishEvent({
|
||||||
@@ -284,12 +319,25 @@ export class QueueService implements OnModuleInit, OnModuleDestroy {
|
|||||||
* Handle task completion
|
* Handle task completion
|
||||||
*/
|
*/
|
||||||
private async handleTaskCompletion(taskId: string): Promise<void> {
|
private async handleTaskCompletion(taskId: string): Promise<void> {
|
||||||
|
const session = this.spawnerService?.findAgentSessionByTaskId(taskId);
|
||||||
|
if (session) {
|
||||||
|
this.spawnerService?.setSessionState(session.agentId, "completed", undefined, new Date());
|
||||||
|
if (this.lifecycleService) {
|
||||||
|
await this.lifecycleService.transitionToCompleted(session.agentId);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
this.logger.warn(
|
||||||
|
`Queue completed task ${taskId} but no session was found; using queue-only completion state`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
await this.valkeyService.updateTaskStatus(taskId, "completed");
|
await this.valkeyService.updateTaskStatus(taskId, "completed");
|
||||||
|
|
||||||
await this.valkeyService.publishEvent({
|
await this.valkeyService.publishEvent({
|
||||||
type: "task.completed",
|
type: "task.completed",
|
||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
taskId,
|
taskId,
|
||||||
|
...(session && { agentId: session.agentId }),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -37,6 +37,24 @@ export class AgentLifecycleService {
|
|||||||
this.logger.log("AgentLifecycleService initialized");
|
this.logger.log("AgentLifecycleService initialized");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a newly spawned agent in persistent state and emit spawned event.
|
||||||
|
*/
|
||||||
|
async registerSpawnedAgent(agentId: string, taskId: string): Promise<AgentState> {
|
||||||
|
await this.valkeyService.createAgent(agentId, taskId);
|
||||||
|
const createdState = await this.getAgentState(agentId);
|
||||||
|
|
||||||
|
const event: AgentEvent = {
|
||||||
|
type: "agent.spawned",
|
||||||
|
agentId,
|
||||||
|
taskId,
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
};
|
||||||
|
await this.valkeyService.publishEvent(event);
|
||||||
|
|
||||||
|
return createdState;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Acquire a per-agent mutex to serialize state transitions.
|
* Acquire a per-agent mutex to serialize state transitions.
|
||||||
* Uses promise chaining: each caller chains onto the previous lock,
|
* Uses promise chaining: each caller chains onto the previous lock,
|
||||||
|
|||||||
@@ -12,6 +12,9 @@ describe("AgentSpawnerService", () => {
|
|||||||
// Create mock ConfigService
|
// Create mock ConfigService
|
||||||
mockConfigService = {
|
mockConfigService = {
|
||||||
get: vi.fn((key: string) => {
|
get: vi.fn((key: string) => {
|
||||||
|
if (key === "orchestrator.aiProvider") {
|
||||||
|
return "ollama";
|
||||||
|
}
|
||||||
if (key === "orchestrator.claude.apiKey") {
|
if (key === "orchestrator.claude.apiKey") {
|
||||||
return "test-api-key";
|
return "test-api-key";
|
||||||
}
|
}
|
||||||
@@ -31,19 +34,80 @@ describe("AgentSpawnerService", () => {
|
|||||||
expect(service).toBeDefined();
|
expect(service).toBeDefined();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should initialize with Claude API key from config", () => {
|
it("should initialize with default AI provider when API key is omitted", () => {
|
||||||
|
const noClaudeConfigService = {
|
||||||
|
get: vi.fn((key: string) => {
|
||||||
|
if (key === "orchestrator.aiProvider") {
|
||||||
|
return "ollama";
|
||||||
|
}
|
||||||
|
if (key === "orchestrator.spawner.maxConcurrentAgents") {
|
||||||
|
return 20;
|
||||||
|
}
|
||||||
|
if (key === "orchestrator.spawner.sessionCleanupDelayMs") {
|
||||||
|
return 30000;
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}),
|
||||||
|
} as unknown as ConfigService;
|
||||||
|
|
||||||
|
const serviceNoKey = new AgentSpawnerService(noClaudeConfigService);
|
||||||
|
expect(serviceNoKey).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should initialize with Claude provider when key is present", () => {
|
||||||
expect(mockConfigService.get).toHaveBeenCalledWith("orchestrator.claude.apiKey");
|
expect(mockConfigService.get).toHaveBeenCalledWith("orchestrator.claude.apiKey");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw error if Claude API key is missing", () => {
|
it("should initialize with CLAUDE provider when API key is present", () => {
|
||||||
|
const claudeConfigService = {
|
||||||
|
get: vi.fn((key: string) => {
|
||||||
|
if (key === "orchestrator.aiProvider") {
|
||||||
|
return "claude";
|
||||||
|
}
|
||||||
|
if (key === "orchestrator.claude.apiKey") {
|
||||||
|
return "test-api-key";
|
||||||
|
}
|
||||||
|
if (key === "orchestrator.spawner.maxConcurrentAgents") {
|
||||||
|
return 20;
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}),
|
||||||
|
} as unknown as ConfigService;
|
||||||
|
|
||||||
|
const claudeService = new AgentSpawnerService(claudeConfigService);
|
||||||
|
expect(claudeService).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw error if Claude API key is missing when provider is claude", () => {
|
||||||
const badConfigService = {
|
const badConfigService = {
|
||||||
get: vi.fn(() => undefined),
|
get: vi.fn((key: string) => {
|
||||||
|
if (key === "orchestrator.aiProvider") {
|
||||||
|
return "claude";
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}),
|
||||||
} as unknown as ConfigService;
|
} as unknown as ConfigService;
|
||||||
|
|
||||||
expect(() => new AgentSpawnerService(badConfigService)).toThrow(
|
expect(() => new AgentSpawnerService(badConfigService)).toThrow(
|
||||||
"CLAUDE_API_KEY is not configured"
|
"CLAUDE_API_KEY is required when AI_PROVIDER is set to 'claude'"
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should still initialize when CLAUDE_API_KEY is missing for non-Claude provider", () => {
|
||||||
|
const nonClaudeConfigService = {
|
||||||
|
get: vi.fn((key: string) => {
|
||||||
|
if (key === "orchestrator.aiProvider") {
|
||||||
|
return "ollama";
|
||||||
|
}
|
||||||
|
if (key === "orchestrator.spawner.maxConcurrentAgents") {
|
||||||
|
return 20;
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}),
|
||||||
|
} as unknown as ConfigService;
|
||||||
|
|
||||||
|
expect(() => new AgentSpawnerService(nonClaudeConfigService)).not.toThrow();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("spawnAgent", () => {
|
describe("spawnAgent", () => {
|
||||||
|
|||||||
@@ -14,6 +14,8 @@ import {
|
|||||||
* This allows time for status queries before the session is removed
|
* This allows time for status queries before the session is removed
|
||||||
*/
|
*/
|
||||||
const DEFAULT_SESSION_CLEANUP_DELAY_MS = 30000; // 30 seconds
|
const DEFAULT_SESSION_CLEANUP_DELAY_MS = 30000; // 30 seconds
|
||||||
|
const SUPPORTED_AI_PROVIDERS = ["ollama", "claude", "openai"] as const;
|
||||||
|
type SupportedAiProvider = (typeof SUPPORTED_AI_PROVIDERS)[number];
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Service responsible for spawning Claude agents using Anthropic SDK
|
* Service responsible for spawning Claude agents using Anthropic SDK
|
||||||
@@ -21,22 +23,38 @@ const DEFAULT_SESSION_CLEANUP_DELAY_MS = 30000; // 30 seconds
|
|||||||
@Injectable()
|
@Injectable()
|
||||||
export class AgentSpawnerService implements OnModuleDestroy {
|
export class AgentSpawnerService implements OnModuleDestroy {
|
||||||
private readonly logger = new Logger(AgentSpawnerService.name);
|
private readonly logger = new Logger(AgentSpawnerService.name);
|
||||||
private readonly anthropic: Anthropic;
|
private readonly anthropic: Anthropic | undefined;
|
||||||
|
private readonly aiProvider: SupportedAiProvider;
|
||||||
private readonly sessions = new Map<string, AgentSession>();
|
private readonly sessions = new Map<string, AgentSession>();
|
||||||
private readonly maxConcurrentAgents: number;
|
private readonly maxConcurrentAgents: number;
|
||||||
private readonly sessionCleanupDelayMs: number;
|
private readonly sessionCleanupDelayMs: number;
|
||||||
private readonly cleanupTimers = new Map<string, NodeJS.Timeout>();
|
private readonly cleanupTimers = new Map<string, NodeJS.Timeout>();
|
||||||
|
|
||||||
constructor(private readonly configService: ConfigService) {
|
constructor(private readonly configService: ConfigService) {
|
||||||
|
const configuredProvider = this.configService.get<string>("orchestrator.aiProvider");
|
||||||
|
this.aiProvider = this.normalizeAiProvider(configuredProvider);
|
||||||
|
|
||||||
|
this.logger.log(`AgentSpawnerService resolved AI provider: ${this.aiProvider}`);
|
||||||
|
|
||||||
const apiKey = this.configService.get<string>("orchestrator.claude.apiKey");
|
const apiKey = this.configService.get<string>("orchestrator.claude.apiKey");
|
||||||
|
|
||||||
|
if (this.aiProvider === "claude") {
|
||||||
if (!apiKey) {
|
if (!apiKey) {
|
||||||
throw new Error("CLAUDE_API_KEY is not configured");
|
throw new Error("CLAUDE_API_KEY is required when AI_PROVIDER is set to 'claude'");
|
||||||
}
|
}
|
||||||
|
|
||||||
this.anthropic = new Anthropic({
|
this.logger.log("CLAUDE_API_KEY is configured. Initializing Anthropic client.");
|
||||||
apiKey,
|
this.anthropic = new Anthropic({ apiKey });
|
||||||
});
|
} else {
|
||||||
|
if (apiKey) {
|
||||||
|
this.logger.debug(
|
||||||
|
`CLAUDE_API_KEY is set but ignored because AI provider is '${this.aiProvider}'`
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
this.logger.log(`CLAUDE_API_KEY not required for AI provider '${this.aiProvider}'.`);
|
||||||
|
}
|
||||||
|
this.anthropic = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
// Default to 20 if not configured
|
// Default to 20 if not configured
|
||||||
this.maxConcurrentAgents =
|
this.maxConcurrentAgents =
|
||||||
@@ -48,10 +66,27 @@ export class AgentSpawnerService implements OnModuleDestroy {
|
|||||||
DEFAULT_SESSION_CLEANUP_DELAY_MS;
|
DEFAULT_SESSION_CLEANUP_DELAY_MS;
|
||||||
|
|
||||||
this.logger.log(
|
this.logger.log(
|
||||||
`AgentSpawnerService initialized with Claude SDK (max concurrent agents: ${String(this.maxConcurrentAgents)}, cleanup delay: ${String(this.sessionCleanupDelayMs)}ms)`
|
`AgentSpawnerService initialized with ${this.aiProvider} AI provider (max concurrent agents: ${String(
|
||||||
|
this.maxConcurrentAgents
|
||||||
|
)}, cleanup delay: ${String(this.sessionCleanupDelayMs)}ms)`
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private normalizeAiProvider(provider?: string): SupportedAiProvider {
|
||||||
|
const normalizedProvider = provider?.trim().toLowerCase();
|
||||||
|
|
||||||
|
if (!normalizedProvider) {
|
||||||
|
return "ollama";
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!SUPPORTED_AI_PROVIDERS.includes(normalizedProvider as SupportedAiProvider)) {
|
||||||
|
this.logger.warn(`Unsupported AI provider '${normalizedProvider}'. Defaulting to 'ollama'.`);
|
||||||
|
return "ollama";
|
||||||
|
}
|
||||||
|
|
||||||
|
return normalizedProvider as SupportedAiProvider;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Clean up all pending cleanup timers on module destroy
|
* Clean up all pending cleanup timers on module destroy
|
||||||
*/
|
*/
|
||||||
@@ -116,6 +151,33 @@ export class AgentSpawnerService implements OnModuleDestroy {
|
|||||||
return this.sessions.get(agentId);
|
return this.sessions.get(agentId);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find an active session by task ID.
|
||||||
|
*/
|
||||||
|
findAgentSessionByTaskId(taskId: string): AgentSession | undefined {
|
||||||
|
return Array.from(this.sessions.values()).find((session) => session.taskId === taskId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update in-memory session state for visibility in list/status endpoints.
|
||||||
|
*/
|
||||||
|
setSessionState(
|
||||||
|
agentId: string,
|
||||||
|
state: AgentSession["state"],
|
||||||
|
error?: string,
|
||||||
|
completedAt?: Date
|
||||||
|
): void {
|
||||||
|
const session = this.sessions.get(agentId);
|
||||||
|
if (!session) return;
|
||||||
|
|
||||||
|
session.state = state;
|
||||||
|
session.error = error;
|
||||||
|
if (completedAt) {
|
||||||
|
session.completedAt = completedAt;
|
||||||
|
}
|
||||||
|
this.sessions.set(agentId, session);
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* List all agent sessions
|
* List all agent sessions
|
||||||
* @returns Array of all agent sessions
|
* @returns Array of all agent sessions
|
||||||
|
|||||||
@@ -1,6 +1,3 @@
|
|||||||
# syntax=docker/dockerfile:1
|
|
||||||
# Enable BuildKit features for cache mounts
|
|
||||||
|
|
||||||
# Base image for all stages
|
# Base image for all stages
|
||||||
# Uses Debian slim (glibc) for consistency with API/orchestrator and to prevent
|
# Uses Debian slim (glibc) for consistency with API/orchestrator and to prevent
|
||||||
# future native addon compatibility issues with Alpine's musl libc.
|
# future native addon compatibility issues with Alpine's musl libc.
|
||||||
@@ -27,9 +24,22 @@ COPY packages/ui/package.json ./packages/ui/
|
|||||||
COPY packages/config/package.json ./packages/config/
|
COPY packages/config/package.json ./packages/config/
|
||||||
COPY apps/web/package.json ./apps/web/
|
COPY apps/web/package.json ./apps/web/
|
||||||
|
|
||||||
# Install dependencies with pnpm store cache
|
# Install dependencies (no cache mount — Kaniko builds are ephemeral in CI)
|
||||||
RUN --mount=type=cache,id=pnpm-store,target=/root/.local/share/pnpm/store \
|
RUN pnpm install --frozen-lockfile
|
||||||
pnpm install --frozen-lockfile
|
|
||||||
|
# ======================
|
||||||
|
# Production dependencies stage
|
||||||
|
# ======================
|
||||||
|
FROM base AS prod-deps
|
||||||
|
|
||||||
|
# Copy all package.json files for workspace resolution
|
||||||
|
COPY packages/shared/package.json ./packages/shared/
|
||||||
|
COPY packages/ui/package.json ./packages/ui/
|
||||||
|
COPY packages/config/package.json ./packages/config/
|
||||||
|
COPY apps/web/package.json ./apps/web/
|
||||||
|
|
||||||
|
# Install production dependencies only
|
||||||
|
RUN pnpm install --frozen-lockfile --prod
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Builder stage
|
# Builder stage
|
||||||
@@ -79,23 +89,19 @@ RUN mkdir -p ./apps/web/public
|
|||||||
# ======================
|
# ======================
|
||||||
FROM node:24-slim AS production
|
FROM node:24-slim AS production
|
||||||
|
|
||||||
# Remove npm (unused in production — we use pnpm) to reduce attack surface
|
# Install dumb-init for proper signal handling (static binary from GitHub,
|
||||||
RUN rm -rf /usr/local/lib/node_modules/npm /usr/local/bin/npm /usr/local/bin/npx
|
# avoids apt-get which fails under Kaniko with bookworm GPG signature errors)
|
||||||
|
ADD https://github.com/Yelp/dumb-init/releases/download/v1.2.5/dumb-init_1.2.5_x86_64 /usr/local/bin/dumb-init
|
||||||
|
|
||||||
# Install pnpm (needed for pnpm start command)
|
# Single RUN to minimize Kaniko filesystem snapshots (each RUN = full snapshot)
|
||||||
RUN corepack enable && corepack prepare pnpm@10.27.0 --activate
|
RUN rm -rf /usr/local/lib/node_modules/npm /usr/local/bin/npm /usr/local/bin/npx \
|
||||||
|
&& chmod 755 /usr/local/bin/dumb-init \
|
||||||
# Install dumb-init for proper signal handling
|
&& groupadd -g 1001 nodejs && useradd -m -u 1001 -g nodejs nextjs
|
||||||
RUN apt-get update && apt-get install -y --no-install-recommends dumb-init \
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
# Create non-root user
|
|
||||||
RUN groupadd -g 1001 nodejs && useradd -m -u 1001 -g nodejs nextjs
|
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
# Copy node_modules from builder (includes all dependencies in pnpm store)
|
# Copy node_modules from builder (includes all dependencies in pnpm store)
|
||||||
COPY --from=builder --chown=nextjs:nodejs /app/node_modules ./node_modules
|
COPY --from=prod-deps --chown=nextjs:nodejs /app/node_modules ./node_modules
|
||||||
|
|
||||||
# Copy built packages (includes dist/ directories)
|
# Copy built packages (includes dist/ directories)
|
||||||
COPY --from=builder --chown=nextjs:nodejs /app/packages ./packages
|
COPY --from=builder --chown=nextjs:nodejs /app/packages ./packages
|
||||||
@@ -106,7 +112,7 @@ COPY --from=builder --chown=nextjs:nodejs /app/apps/web/public ./apps/web/public
|
|||||||
COPY --from=builder --chown=nextjs:nodejs /app/apps/web/next.config.ts ./apps/web/
|
COPY --from=builder --chown=nextjs:nodejs /app/apps/web/next.config.ts ./apps/web/
|
||||||
COPY --from=builder --chown=nextjs:nodejs /app/apps/web/package.json ./apps/web/
|
COPY --from=builder --chown=nextjs:nodejs /app/apps/web/package.json ./apps/web/
|
||||||
# Copy app's node_modules which contains symlinks to root node_modules
|
# Copy app's node_modules which contains symlinks to root node_modules
|
||||||
COPY --from=builder --chown=nextjs:nodejs /app/apps/web/node_modules ./apps/web/node_modules
|
COPY --from=prod-deps --chown=nextjs:nodejs /app/apps/web/node_modules ./apps/web/node_modules
|
||||||
|
|
||||||
# Set working directory to web app
|
# Set working directory to web app
|
||||||
WORKDIR /app/apps/web
|
WORKDIR /app/apps/web
|
||||||
@@ -120,6 +126,7 @@ EXPOSE ${PORT:-3000}
|
|||||||
# Environment variables
|
# Environment variables
|
||||||
ENV NODE_ENV=production
|
ENV NODE_ENV=production
|
||||||
ENV HOSTNAME="0.0.0.0"
|
ENV HOSTNAME="0.0.0.0"
|
||||||
|
ENV PATH="/app/apps/web/node_modules/.bin:${PATH}"
|
||||||
|
|
||||||
# Health check uses PORT env var (set by docker-compose or defaults to 3000)
|
# Health check uses PORT env var (set by docker-compose or defaults to 3000)
|
||||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
|
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
|
||||||
@@ -129,4 +136,4 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
|
|||||||
ENTRYPOINT ["dumb-init", "--"]
|
ENTRYPOINT ["dumb-init", "--"]
|
||||||
|
|
||||||
# Start the application
|
# Start the application
|
||||||
CMD ["pnpm", "start"]
|
CMD ["next", "start"]
|
||||||
|
|||||||
@@ -1,5 +1,16 @@
|
|||||||
import type { NextConfig } from "next";
|
import type { NextConfig } from "next";
|
||||||
|
|
||||||
|
const defaultAuthMode = process.env.NODE_ENV === "development" ? "mock" : "real";
|
||||||
|
const authMode = (process.env.NEXT_PUBLIC_AUTH_MODE ?? defaultAuthMode).toLowerCase();
|
||||||
|
|
||||||
|
if (!["real", "mock"].includes(authMode)) {
|
||||||
|
throw new Error(`Invalid NEXT_PUBLIC_AUTH_MODE "${authMode}". Expected one of: real, mock.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (authMode === "mock" && process.env.NODE_ENV !== "development") {
|
||||||
|
throw new Error("NEXT_PUBLIC_AUTH_MODE=mock is only allowed for local development.");
|
||||||
|
}
|
||||||
|
|
||||||
const nextConfig: NextConfig = {
|
const nextConfig: NextConfig = {
|
||||||
transpilePackages: ["@mosaic/ui", "@mosaic/shared"],
|
transpilePackages: ["@mosaic/ui", "@mosaic/shared"],
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -18,15 +18,27 @@
|
|||||||
"@dnd-kit/core": "^6.3.1",
|
"@dnd-kit/core": "^6.3.1",
|
||||||
"@dnd-kit/sortable": "^9.0.0",
|
"@dnd-kit/sortable": "^9.0.0",
|
||||||
"@dnd-kit/utilities": "^3.2.2",
|
"@dnd-kit/utilities": "^3.2.2",
|
||||||
|
"@hello-pangea/dnd": "^18.0.1",
|
||||||
"@mosaic/shared": "workspace:*",
|
"@mosaic/shared": "workspace:*",
|
||||||
"@mosaic/ui": "workspace:*",
|
"@mosaic/ui": "workspace:*",
|
||||||
"@tanstack/react-query": "^5.90.20",
|
"@tanstack/react-query": "^5.90.20",
|
||||||
|
"@tiptap/extension-code-block-lowlight": "^3.20.0",
|
||||||
|
"@tiptap/extension-link": "^3.20.0",
|
||||||
|
"@tiptap/extension-placeholder": "^3.20.0",
|
||||||
|
"@tiptap/extension-table": "^3.20.0",
|
||||||
|
"@tiptap/extension-table-cell": "^3.20.0",
|
||||||
|
"@tiptap/extension-table-header": "^3.20.0",
|
||||||
|
"@tiptap/extension-table-row": "^3.20.0",
|
||||||
|
"@tiptap/pm": "^3.20.0",
|
||||||
|
"@tiptap/react": "^3.20.0",
|
||||||
|
"@tiptap/starter-kit": "^3.20.0",
|
||||||
"@types/dompurify": "^3.2.0",
|
"@types/dompurify": "^3.2.0",
|
||||||
"@xyflow/react": "^12.5.3",
|
"@xyflow/react": "^12.5.3",
|
||||||
"better-auth": "^1.4.17",
|
"better-auth": "^1.4.17",
|
||||||
"date-fns": "^4.1.0",
|
"date-fns": "^4.1.0",
|
||||||
"dompurify": "^3.3.1",
|
"dompurify": "^3.3.1",
|
||||||
"elkjs": "^0.9.3",
|
"elkjs": "^0.9.3",
|
||||||
|
"lowlight": "^3.3.0",
|
||||||
"lucide-react": "^0.563.0",
|
"lucide-react": "^0.563.0",
|
||||||
"mermaid": "^11.4.1",
|
"mermaid": "^11.4.1",
|
||||||
"next": "^16.1.6",
|
"next": "^16.1.6",
|
||||||
@@ -34,7 +46,8 @@
|
|||||||
"react-dom": "^19.0.0",
|
"react-dom": "^19.0.0",
|
||||||
"react-grid-layout": "^2.2.2",
|
"react-grid-layout": "^2.2.2",
|
||||||
"recharts": "^3.7.0",
|
"recharts": "^3.7.0",
|
||||||
"socket.io-client": "^4.8.3"
|
"socket.io-client": "^4.8.3",
|
||||||
|
"tiptap-markdown": "^0.9.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@mosaic/config": "workspace:*",
|
"@mosaic/config": "workspace:*",
|
||||||
@@ -47,7 +60,10 @@
|
|||||||
"@types/react-grid-layout": "^2.1.0",
|
"@types/react-grid-layout": "^2.1.0",
|
||||||
"@vitejs/plugin-react": "^4.3.4",
|
"@vitejs/plugin-react": "^4.3.4",
|
||||||
"@vitest/coverage-v8": "^3.2.4",
|
"@vitest/coverage-v8": "^3.2.4",
|
||||||
|
"autoprefixer": "^10.4.24",
|
||||||
"jsdom": "^26.0.0",
|
"jsdom": "^26.0.0",
|
||||||
|
"postcss": "^8.5.6",
|
||||||
|
"tailwindcss": "^3.4.19",
|
||||||
"typescript": "^5.8.2",
|
"typescript": "^5.8.2",
|
||||||
"vitest": "^3.0.8"
|
"vitest": "^3.0.8"
|
||||||
}
|
}
|
||||||
|
|||||||
8
apps/web/postcss.config.mjs
Normal file
8
apps/web/postcss.config.mjs
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
const config = {
|
||||||
|
plugins: {
|
||||||
|
tailwindcss: {},
|
||||||
|
autoprefixer: {},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
export default config;
|
||||||
87
apps/web/src/app/(auth)/login/page.mock-mode.test.tsx
Normal file
87
apps/web/src/app/(auth)/login/page.mock-mode.test.tsx
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach, type Mock } from "vitest";
|
||||||
|
import { render, screen, waitFor } from "@testing-library/react";
|
||||||
|
import userEvent from "@testing-library/user-event";
|
||||||
|
import LoginPage from "./page";
|
||||||
|
|
||||||
|
const { mockPush, mockReplace, mockSearchParams, authState } = vi.hoisted(() => ({
|
||||||
|
mockPush: vi.fn(),
|
||||||
|
mockReplace: vi.fn(),
|
||||||
|
mockSearchParams: new URLSearchParams(),
|
||||||
|
authState: {
|
||||||
|
isAuthenticated: false,
|
||||||
|
refreshSession: vi.fn(),
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
const { mockFetchWithRetry } = vi.hoisted(() => ({
|
||||||
|
mockFetchWithRetry: vi.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("next/navigation", () => ({
|
||||||
|
useRouter: (): { push: Mock; replace: Mock } => ({
|
||||||
|
push: mockPush,
|
||||||
|
replace: mockReplace,
|
||||||
|
}),
|
||||||
|
useSearchParams: (): URLSearchParams => mockSearchParams,
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("@/lib/config", () => ({
|
||||||
|
API_BASE_URL: "http://localhost:3001",
|
||||||
|
IS_MOCK_AUTH_MODE: true,
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("@/lib/auth-client", () => ({
|
||||||
|
signIn: {
|
||||||
|
oauth2: vi.fn(),
|
||||||
|
email: vi.fn(),
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("@/lib/auth/auth-context", () => ({
|
||||||
|
useAuth: (): { isAuthenticated: boolean; refreshSession: Mock } => ({
|
||||||
|
isAuthenticated: authState.isAuthenticated,
|
||||||
|
refreshSession: authState.refreshSession,
|
||||||
|
}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("@/lib/auth/fetch-with-retry", () => ({
|
||||||
|
fetchWithRetry: mockFetchWithRetry,
|
||||||
|
}));
|
||||||
|
|
||||||
|
describe("LoginPage (mock auth mode)", (): void => {
|
||||||
|
beforeEach((): void => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
mockSearchParams.delete("error");
|
||||||
|
authState.isAuthenticated = false;
|
||||||
|
authState.refreshSession.mockResolvedValue(undefined);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should render mock auth controls", (): void => {
|
||||||
|
render(<LoginPage />);
|
||||||
|
|
||||||
|
expect(screen.getByText(/local mock auth mode is active/i)).toBeInTheDocument();
|
||||||
|
expect(screen.getByTestId("mock-auth-login")).toBeInTheDocument();
|
||||||
|
expect(mockFetchWithRetry).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should continue with mock session and navigate to tasks", async (): Promise<void> => {
|
||||||
|
const user = userEvent.setup();
|
||||||
|
render(<LoginPage />);
|
||||||
|
|
||||||
|
await user.click(screen.getByTestId("mock-auth-login"));
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(authState.refreshSession).toHaveBeenCalledTimes(1);
|
||||||
|
expect(mockPush).toHaveBeenCalledWith("/tasks");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should auto-redirect authenticated mock users to tasks", async (): Promise<void> => {
|
||||||
|
authState.isAuthenticated = true;
|
||||||
|
render(<LoginPage />);
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(mockReplace).toHaveBeenCalledWith("/tasks");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -16,6 +16,11 @@ const { mockOAuth2, mockSignInEmail, mockPush, mockReplace, mockSearchParams } =
|
|||||||
mockSearchParams: new URLSearchParams(),
|
mockSearchParams: new URLSearchParams(),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
|
const { mockRefreshSession, mockIsAuthenticated } = vi.hoisted(() => ({
|
||||||
|
mockRefreshSession: vi.fn(),
|
||||||
|
mockIsAuthenticated: false,
|
||||||
|
}));
|
||||||
|
|
||||||
vi.mock("next/navigation", () => ({
|
vi.mock("next/navigation", () => ({
|
||||||
useRouter: (): { push: Mock; replace: Mock } => ({
|
useRouter: (): { push: Mock; replace: Mock } => ({
|
||||||
push: mockPush,
|
push: mockPush,
|
||||||
@@ -33,6 +38,14 @@ vi.mock("@/lib/auth-client", () => ({
|
|||||||
|
|
||||||
vi.mock("@/lib/config", () => ({
|
vi.mock("@/lib/config", () => ({
|
||||||
API_BASE_URL: "http://localhost:3001",
|
API_BASE_URL: "http://localhost:3001",
|
||||||
|
IS_MOCK_AUTH_MODE: false,
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("@/lib/auth/auth-context", () => ({
|
||||||
|
useAuth: (): { isAuthenticated: boolean; refreshSession: Mock } => ({
|
||||||
|
isAuthenticated: mockIsAuthenticated,
|
||||||
|
refreshSession: mockRefreshSession,
|
||||||
|
}),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// Mock fetchWithRetry to behave like fetch for test purposes
|
// Mock fetchWithRetry to behave like fetch for test purposes
|
||||||
@@ -91,6 +104,7 @@ describe("LoginPage", (): void => {
|
|||||||
mockSearchParams.delete("error");
|
mockSearchParams.delete("error");
|
||||||
// Default: OAuth2 returns a resolved promise (fire-and-forget redirect)
|
// Default: OAuth2 returns a resolved promise (fire-and-forget redirect)
|
||||||
mockOAuth2.mockResolvedValue(undefined);
|
mockOAuth2.mockResolvedValue(undefined);
|
||||||
|
mockRefreshSession.mockResolvedValue(undefined);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("renders loading state initially", (): void => {
|
it("renders loading state initially", (): void => {
|
||||||
@@ -104,19 +118,28 @@ describe("LoginPage", (): void => {
|
|||||||
expect(screen.getByText("Loading authentication options")).toBeInTheDocument();
|
expect(screen.getByText("Loading authentication options")).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("renders the page heading and description", (): void => {
|
it("renders the page heading and description", async (): Promise<void> => {
|
||||||
mockFetchConfig(EMAIL_ONLY_CONFIG);
|
mockFetchConfig(EMAIL_ONLY_CONFIG);
|
||||||
|
|
||||||
render(<LoginPage />);
|
render(<LoginPage />);
|
||||||
|
|
||||||
expect(screen.getByRole("heading", { level: 1 })).toHaveTextContent("Welcome to Mosaic Stack");
|
await waitFor((): void => {
|
||||||
expect(screen.getByText(/Your personal assistant platform/i)).toBeInTheDocument();
|
expect(screen.getByLabelText(/email/i)).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("has proper layout styling", (): void => {
|
expect(screen.getByRole("heading", { level: 1 })).toHaveTextContent("Command Center");
|
||||||
|
expect(screen.getByText(/Sign in to your orchestration platform/i)).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("has proper layout styling", async (): Promise<void> => {
|
||||||
mockFetchConfig(EMAIL_ONLY_CONFIG);
|
mockFetchConfig(EMAIL_ONLY_CONFIG);
|
||||||
|
|
||||||
const { container } = render(<LoginPage />);
|
const { container } = render(<LoginPage />);
|
||||||
|
|
||||||
|
await waitFor((): void => {
|
||||||
|
expect(screen.getByLabelText(/email/i)).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
const main = container.querySelector("main");
|
const main = container.querySelector("main");
|
||||||
expect(main).toHaveClass("flex", "min-h-screen");
|
expect(main).toHaveClass("flex", "min-h-screen");
|
||||||
});
|
});
|
||||||
@@ -163,7 +186,7 @@ describe("LoginPage", (): void => {
|
|||||||
expect(screen.getByRole("button", { name: /continue with authentik/i })).toBeInTheDocument();
|
expect(screen.getByRole("button", { name: /continue with authentik/i })).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
expect(screen.getByText(/or continue with email/i)).toBeInTheDocument();
|
expect(screen.getByText(/or continue with/i)).toBeInTheDocument();
|
||||||
expect(screen.getByLabelText(/email/i)).toBeInTheDocument();
|
expect(screen.getByLabelText(/email/i)).toBeInTheDocument();
|
||||||
expect(screen.getByLabelText(/password/i)).toBeInTheDocument();
|
expect(screen.getByLabelText(/password/i)).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
@@ -177,7 +200,11 @@ describe("LoginPage", (): void => {
|
|||||||
expect(screen.getByRole("button", { name: /continue with authentik/i })).toBeInTheDocument();
|
expect(screen.getByRole("button", { name: /continue with authentik/i })).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
expect(screen.queryByText(/or continue with email/i)).not.toBeInTheDocument();
|
// The divider element should not appear (no credentials provider)
|
||||||
|
const dividerTexts = screen.queryAllByText(/or continue with/i);
|
||||||
|
// OAuthButton text contains "Continue with" so filter for the divider specifically
|
||||||
|
const dividerOnly = dividerTexts.filter((el) => el.textContent === "or continue with");
|
||||||
|
expect(dividerOnly).toHaveLength(0);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("shows error state with retry button on fetch failure instead of silent fallback", async (): Promise<void> => {
|
it("shows error state with retry button on fetch failure instead of silent fallback", async (): Promise<void> => {
|
||||||
@@ -192,7 +219,6 @@ describe("LoginPage", (): void => {
|
|||||||
// Should NOT silently fall back to email form
|
// Should NOT silently fall back to email form
|
||||||
expect(screen.queryByLabelText(/email/i)).not.toBeInTheDocument();
|
expect(screen.queryByLabelText(/email/i)).not.toBeInTheDocument();
|
||||||
expect(screen.queryByLabelText(/password/i)).not.toBeInTheDocument();
|
expect(screen.queryByLabelText(/password/i)).not.toBeInTheDocument();
|
||||||
expect(screen.queryByRole("button", { name: /continue with/i })).not.toBeInTheDocument();
|
|
||||||
|
|
||||||
// Should show the error banner with helpful message
|
// Should show the error banner with helpful message
|
||||||
expect(
|
expect(
|
||||||
@@ -267,7 +293,7 @@ describe("LoginPage", (): void => {
|
|||||||
|
|
||||||
expect(mockOAuth2).toHaveBeenCalledWith({
|
expect(mockOAuth2).toHaveBeenCalledWith({
|
||||||
providerId: "authentik",
|
providerId: "authentik",
|
||||||
callbackURL: "/",
|
callbackURL: "http://localhost:3000/",
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -430,40 +456,58 @@ describe("LoginPage", (): void => {
|
|||||||
/* ------------------------------------------------------------------ */
|
/* ------------------------------------------------------------------ */
|
||||||
|
|
||||||
describe("responsive layout", (): void => {
|
describe("responsive layout", (): void => {
|
||||||
it("applies mobile-first padding to main element", (): void => {
|
it("applies AuthShell layout classes to main element", async (): Promise<void> => {
|
||||||
mockFetchConfig(EMAIL_ONLY_CONFIG);
|
mockFetchConfig(EMAIL_ONLY_CONFIG);
|
||||||
|
|
||||||
const { container } = render(<LoginPage />);
|
const { container } = render(<LoginPage />);
|
||||||
const main = container.querySelector("main");
|
|
||||||
|
|
||||||
expect(main).toHaveClass("p-4", "sm:p-8");
|
await waitFor((): void => {
|
||||||
|
expect(screen.getByLabelText(/email/i)).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("applies responsive text size to heading", (): void => {
|
const main = container.querySelector("main");
|
||||||
|
expect(main).toHaveClass("min-h-screen", "items-center", "justify-center");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("applies responsive text size to heading", async (): Promise<void> => {
|
||||||
mockFetchConfig(EMAIL_ONLY_CONFIG);
|
mockFetchConfig(EMAIL_ONLY_CONFIG);
|
||||||
|
|
||||||
render(<LoginPage />);
|
render(<LoginPage />);
|
||||||
|
|
||||||
|
await waitFor((): void => {
|
||||||
|
expect(screen.getByLabelText(/email/i)).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
const heading = screen.getByRole("heading", { level: 1 });
|
const heading = screen.getByRole("heading", { level: 1 });
|
||||||
expect(heading).toHaveClass("text-2xl", "sm:text-4xl");
|
expect(heading).toHaveClass("text-xl", "sm:text-2xl");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("applies responsive padding to card container", (): void => {
|
it("AuthCard applies card styling with padding", async (): Promise<void> => {
|
||||||
mockFetchConfig(EMAIL_ONLY_CONFIG);
|
mockFetchConfig(EMAIL_ONLY_CONFIG);
|
||||||
|
|
||||||
const { container } = render(<LoginPage />);
|
const { container } = render(<LoginPage />);
|
||||||
const card = container.querySelector(".bg-white");
|
|
||||||
|
|
||||||
expect(card).toHaveClass("p-4", "sm:p-8");
|
await waitFor((): void => {
|
||||||
|
expect(screen.getByLabelText(/email/i)).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("card container has full width with max-width constraint", (): void => {
|
// AuthCard uses rounded-b-2xl and p-6 sm:p-10
|
||||||
|
const card = container.querySelector(".rounded-b-2xl");
|
||||||
|
expect(card).toHaveClass("p-6", "sm:p-10");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("AuthShell constrains card width", async (): Promise<void> => {
|
||||||
mockFetchConfig(EMAIL_ONLY_CONFIG);
|
mockFetchConfig(EMAIL_ONLY_CONFIG);
|
||||||
|
|
||||||
const { container } = render(<LoginPage />);
|
const { container } = render(<LoginPage />);
|
||||||
const wrapper = container.querySelector(".max-w-md");
|
|
||||||
|
|
||||||
expect(wrapper).toHaveClass("w-full", "max-w-md");
|
await waitFor((): void => {
|
||||||
|
expect(screen.getByLabelText(/email/i)).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
// AuthShell wraps children in max-w-[27rem]
|
||||||
|
const wrapper = container.querySelector(".max-w-\\[27rem\\]");
|
||||||
|
expect(wrapper).toHaveClass("w-full");
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -539,7 +583,9 @@ describe("LoginPage", (): void => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// LoginForm auto-focuses the email input on mount
|
// LoginForm auto-focuses the email input on mount
|
||||||
|
await waitFor((): void => {
|
||||||
expect(screen.getByLabelText(/email/i)).toHaveFocus();
|
expect(screen.getByLabelText(/email/i)).toHaveFocus();
|
||||||
|
});
|
||||||
|
|
||||||
// Tab forward through form: email -> password -> submit
|
// Tab forward through form: email -> password -> submit
|
||||||
await user.tab();
|
await user.tab();
|
||||||
|
|||||||
@@ -5,10 +5,12 @@ import type { ReactElement } from "react";
|
|||||||
import { useRouter, useSearchParams } from "next/navigation";
|
import { useRouter, useSearchParams } from "next/navigation";
|
||||||
import { Loader2 } from "lucide-react";
|
import { Loader2 } from "lucide-react";
|
||||||
import type { AuthConfigResponse, AuthProviderConfig } from "@mosaic/shared";
|
import type { AuthConfigResponse, AuthProviderConfig } from "@mosaic/shared";
|
||||||
import { API_BASE_URL } from "@/lib/config";
|
import { AuthShell, AuthCard, AuthBrand, AuthStatusPill } from "@mosaic/ui";
|
||||||
|
import { API_BASE_URL, IS_MOCK_AUTH_MODE } from "@/lib/config";
|
||||||
import { signIn } from "@/lib/auth-client";
|
import { signIn } from "@/lib/auth-client";
|
||||||
import { fetchWithRetry } from "@/lib/auth/fetch-with-retry";
|
import { fetchWithRetry } from "@/lib/auth/fetch-with-retry";
|
||||||
import { parseAuthError } from "@/lib/auth/auth-errors";
|
import { parseAuthError } from "@/lib/auth/auth-errors";
|
||||||
|
import { useAuth } from "@/lib/auth/auth-context";
|
||||||
import { OAuthButton } from "@/components/auth/OAuthButton";
|
import { OAuthButton } from "@/components/auth/OAuthButton";
|
||||||
import { LoginForm } from "@/components/auth/LoginForm";
|
import { LoginForm } from "@/components/auth/LoginForm";
|
||||||
import { AuthDivider } from "@/components/auth/AuthDivider";
|
import { AuthDivider } from "@/components/auth/AuthDivider";
|
||||||
@@ -18,23 +20,21 @@ export default function LoginPage(): ReactElement {
|
|||||||
return (
|
return (
|
||||||
<Suspense
|
<Suspense
|
||||||
fallback={
|
fallback={
|
||||||
<main className="flex min-h-screen flex-col items-center justify-center p-4 sm:p-8 bg-gray-50">
|
<AuthShell>
|
||||||
<div className="w-full max-w-md space-y-8">
|
<AuthCard>
|
||||||
<div className="text-center">
|
<div className="flex flex-col items-center gap-6">
|
||||||
<h1 className="text-2xl sm:text-4xl font-bold mb-4">Welcome to Mosaic Stack</h1>
|
<AuthBrand />
|
||||||
</div>
|
|
||||||
<div className="bg-white p-4 sm:p-8 rounded-lg shadow-md">
|
|
||||||
<div
|
<div
|
||||||
className="flex items-center justify-center py-8"
|
className="flex items-center justify-center py-8"
|
||||||
role="status"
|
role="status"
|
||||||
aria-label="Loading authentication options"
|
aria-label="Loading authentication options"
|
||||||
>
|
>
|
||||||
<Loader2 className="h-8 w-8 animate-spin text-blue-500" aria-hidden="true" />
|
<Loader2 className="h-8 w-8 animate-spin text-[#56a0ff]" aria-hidden="true" />
|
||||||
<span className="sr-only">Loading authentication options</span>
|
<span className="sr-only">Loading authentication options</span>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</AuthCard>
|
||||||
</main>
|
</AuthShell>
|
||||||
}
|
}
|
||||||
>
|
>
|
||||||
<LoginPageContent />
|
<LoginPageContent />
|
||||||
@@ -45,6 +45,7 @@ export default function LoginPage(): ReactElement {
|
|||||||
function LoginPageContent(): ReactElement {
|
function LoginPageContent(): ReactElement {
|
||||||
const router = useRouter();
|
const router = useRouter();
|
||||||
const searchParams = useSearchParams();
|
const searchParams = useSearchParams();
|
||||||
|
const { isAuthenticated, refreshSession } = useAuth();
|
||||||
const [config, setConfig] = useState<AuthConfigResponse | null | undefined>(undefined);
|
const [config, setConfig] = useState<AuthConfigResponse | null | undefined>(undefined);
|
||||||
const [loadingConfig, setLoadingConfig] = useState(true);
|
const [loadingConfig, setLoadingConfig] = useState(true);
|
||||||
const [retryCount, setRetryCount] = useState(0);
|
const [retryCount, setRetryCount] = useState(0);
|
||||||
@@ -68,6 +69,18 @@ function LoginPageContent(): ReactElement {
|
|||||||
}, [searchParams, router]);
|
}, [searchParams, router]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
|
if (IS_MOCK_AUTH_MODE && isAuthenticated) {
|
||||||
|
router.replace("/tasks");
|
||||||
|
}
|
||||||
|
}, [isAuthenticated, router]);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (IS_MOCK_AUTH_MODE) {
|
||||||
|
setConfig({ providers: [] });
|
||||||
|
setLoadingConfig(false);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
let cancelled = false;
|
let cancelled = false;
|
||||||
|
|
||||||
async function fetchConfig(): Promise<void> {
|
async function fetchConfig(): Promise<void> {
|
||||||
@@ -113,7 +126,28 @@ function LoginPageContent(): ReactElement {
|
|||||||
const handleOAuthLogin = useCallback((providerId: string): void => {
|
const handleOAuthLogin = useCallback((providerId: string): void => {
|
||||||
setOauthLoading(providerId);
|
setOauthLoading(providerId);
|
||||||
setError(null);
|
setError(null);
|
||||||
signIn.oauth2({ providerId, callbackURL: "/" }).catch((err: unknown) => {
|
const callbackURL =
|
||||||
|
typeof window !== "undefined" ? new URL("/", window.location.origin).toString() : "/";
|
||||||
|
signIn
|
||||||
|
.oauth2({ providerId, callbackURL })
|
||||||
|
.then((result) => {
|
||||||
|
// BetterAuth returns Data | Error union — check for error or missing redirect URL
|
||||||
|
const hasError = "error" in result && result.error;
|
||||||
|
const hasUrl = "data" in result && result.data?.url;
|
||||||
|
if (hasError || !hasUrl) {
|
||||||
|
const errObj = hasError ? result.error : null;
|
||||||
|
const message =
|
||||||
|
errObj && typeof errObj === "object" && "message" in errObj
|
||||||
|
? String(errObj.message)
|
||||||
|
: "no redirect URL";
|
||||||
|
console.error(`[Auth] OAuth sign-in failed for ${providerId}:`, message);
|
||||||
|
setError("Unable to connect to the sign-in provider. Please try again in a moment.");
|
||||||
|
setOauthLoading(null);
|
||||||
|
}
|
||||||
|
// If data.url exists, BetterAuth's client will redirect the browser automatically.
|
||||||
|
// No need to reset loading — the page is navigating away.
|
||||||
|
})
|
||||||
|
.catch((err: unknown) => {
|
||||||
const message = err instanceof Error ? err.message : String(err);
|
const message = err instanceof Error ? err.message : String(err);
|
||||||
console.error(`[Auth] OAuth sign-in initiation failed for ${providerId}:`, message);
|
console.error(`[Auth] OAuth sign-in initiation failed for ${providerId}:`, message);
|
||||||
setError("Unable to connect to the sign-in provider. Please try again in a moment.");
|
setError("Unable to connect to the sign-in provider. Please try again in a moment.");
|
||||||
@@ -156,18 +190,64 @@ function LoginPageContent(): ReactElement {
|
|||||||
setRetryCount((c) => c + 1);
|
setRetryCount((c) => c + 1);
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
|
const handleMockLogin = useCallback(async (): Promise<void> => {
|
||||||
|
setError(null);
|
||||||
|
try {
|
||||||
|
await refreshSession();
|
||||||
|
router.push("/tasks");
|
||||||
|
} catch (err: unknown) {
|
||||||
|
const parsed = parseAuthError(err);
|
||||||
|
setError(parsed.message);
|
||||||
|
}
|
||||||
|
}, [refreshSession, router]);
|
||||||
|
|
||||||
|
if (IS_MOCK_AUTH_MODE) {
|
||||||
return (
|
return (
|
||||||
<main className="flex min-h-screen flex-col items-center justify-center p-4 sm:p-8 bg-gray-50">
|
<AuthShell>
|
||||||
<div className="w-full max-w-md space-y-8">
|
<AuthCard>
|
||||||
|
<div className="flex flex-col items-center gap-6">
|
||||||
|
<AuthBrand />
|
||||||
<div className="text-center">
|
<div className="text-center">
|
||||||
<h1 className="text-2xl sm:text-4xl font-bold mb-4">Welcome to Mosaic Stack</h1>
|
<h1 className="text-xl font-bold tracking-tight sm:text-2xl">Command Center</h1>
|
||||||
<p className="text-base sm:text-lg text-gray-600">
|
<p className="mt-1 text-sm text-[#5a6a87] dark:text-[#8f9db7]">
|
||||||
Your personal assistant platform. Organize tasks, events, and projects with a
|
Local mock auth mode is active
|
||||||
PDA-friendly approach.
|
|
||||||
</p>
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div className="bg-white p-4 sm:p-8 rounded-lg shadow-md">
|
<div className="mt-6 space-y-4">
|
||||||
|
<AuthStatusPill label="Mock mode" tone="warning" className="w-full justify-center" />
|
||||||
|
{error && <AuthErrorBanner message={error} />}
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => {
|
||||||
|
void handleMockLogin();
|
||||||
|
}}
|
||||||
|
className="w-full inline-flex items-center justify-center gap-2 rounded-lg px-4 py-3 text-sm font-semibold text-white bg-[linear-gradient(135deg,#2f80ff,#8b5cf6)] transition-all duration-200 focus:outline-none focus:ring-2 focus:ring-[#56a0ff]/60 hover:-translate-y-0.5 hover:shadow-[0_10px_30px_rgba(47,128,255,0.38)]"
|
||||||
|
data-testid="mock-auth-login"
|
||||||
|
>
|
||||||
|
Continue with Mock Session
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</AuthCard>
|
||||||
|
</AuthShell>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<AuthShell>
|
||||||
|
<AuthCard>
|
||||||
|
<div className="flex flex-col items-center gap-6">
|
||||||
|
<AuthBrand />
|
||||||
|
<div className="text-center">
|
||||||
|
<h1 className="text-xl font-bold tracking-tight sm:text-2xl">Command Center</h1>
|
||||||
|
<p className="mt-1 text-sm text-[#5a6a87] dark:text-[#8f9db7]">
|
||||||
|
Sign in to your orchestration platform
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="mt-6">
|
||||||
{loadingConfig ? (
|
{loadingConfig ? (
|
||||||
<div
|
<div
|
||||||
className="flex items-center justify-center py-8"
|
className="flex items-center justify-center py-8"
|
||||||
@@ -175,7 +255,7 @@ function LoginPageContent(): ReactElement {
|
|||||||
role="status"
|
role="status"
|
||||||
aria-label="Loading authentication options"
|
aria-label="Loading authentication options"
|
||||||
>
|
>
|
||||||
<Loader2 className="h-8 w-8 animate-spin text-blue-500" aria-hidden="true" />
|
<Loader2 className="h-8 w-8 animate-spin text-[#56a0ff]" aria-hidden="true" />
|
||||||
<span className="sr-only">Loading authentication options</span>
|
<span className="sr-only">Loading authentication options</span>
|
||||||
</div>
|
</div>
|
||||||
) : config === null ? (
|
) : config === null ? (
|
||||||
@@ -185,34 +265,49 @@ function LoginPageContent(): ReactElement {
|
|||||||
<button
|
<button
|
||||||
type="button"
|
type="button"
|
||||||
onClick={handleRetry}
|
onClick={handleRetry}
|
||||||
className="px-4 py-2 text-sm font-medium text-white bg-blue-600 rounded-md hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-blue-500 focus:ring-offset-2"
|
className="inline-flex items-center justify-center gap-2 rounded-lg px-4 py-2.5 text-sm font-semibold text-white bg-[linear-gradient(135deg,#2f80ff,#8b5cf6)] transition-all duration-200 focus:outline-none focus:ring-2 focus:ring-[#56a0ff]/60 hover:-translate-y-0.5 hover:shadow-[0_10px_30px_rgba(47,128,255,0.38)]"
|
||||||
>
|
>
|
||||||
Try again
|
Try again
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
) : (
|
) : (
|
||||||
<>
|
<div className="space-y-0">
|
||||||
{urlError && (
|
{urlError && (
|
||||||
|
<div className="mb-4">
|
||||||
<AuthErrorBanner
|
<AuthErrorBanner
|
||||||
message={urlError}
|
message={urlError}
|
||||||
onDismiss={(): void => {
|
onDismiss={(): void => {
|
||||||
setUrlError(null);
|
setUrlError(null);
|
||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{error && !hasCredentials && (
|
{error && !hasCredentials && (
|
||||||
|
<div className="mb-4">
|
||||||
<AuthErrorBanner
|
<AuthErrorBanner
|
||||||
message={error}
|
message={error}
|
||||||
onDismiss={(): void => {
|
onDismiss={(): void => {
|
||||||
setError(null);
|
setError(null);
|
||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{hasOAuth &&
|
{hasCredentials && (
|
||||||
oauthProviders.map((provider) => (
|
<LoginForm
|
||||||
|
onSubmit={handleCredentialsLogin}
|
||||||
|
isLoading={credentialsLoading}
|
||||||
|
error={error}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{hasOAuth && hasCredentials && <AuthDivider />}
|
||||||
|
|
||||||
|
{hasOAuth && (
|
||||||
|
<div className="space-y-2">
|
||||||
|
{oauthProviders.map((provider) => (
|
||||||
<OAuthButton
|
<OAuthButton
|
||||||
key={provider.id}
|
key={provider.id}
|
||||||
providerName={provider.name}
|
providerName={provider.name}
|
||||||
@@ -224,20 +319,16 @@ function LoginPageContent(): ReactElement {
|
|||||||
disabled={oauthLoading !== null && oauthLoading !== provider.id}
|
disabled={oauthLoading !== null && oauthLoading !== provider.id}
|
||||||
/>
|
/>
|
||||||
))}
|
))}
|
||||||
|
</div>
|
||||||
{hasOAuth && hasCredentials && <AuthDivider />}
|
|
||||||
|
|
||||||
{hasCredentials && (
|
|
||||||
<LoginForm
|
|
||||||
onSubmit={handleCredentialsLogin}
|
|
||||||
isLoading={credentialsLoading}
|
|
||||||
error={error}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
</>
|
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
</main>
|
|
||||||
|
<div className="mt-6 flex justify-center">
|
||||||
|
<AuthStatusPill label="Mosaic v0.1" tone="neutral" />
|
||||||
|
</div>
|
||||||
|
</AuthCard>
|
||||||
|
</AuthShell>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
import { describe, it, expect, vi } from "vitest";
|
import { describe, it, expect, vi, beforeEach } from "vitest";
|
||||||
import { render, screen, waitFor } from "@testing-library/react";
|
import { render, screen, waitFor } from "@testing-library/react";
|
||||||
|
import type { Event } from "@mosaic/shared";
|
||||||
import CalendarPage from "./page";
|
import CalendarPage from "./page";
|
||||||
|
|
||||||
// Mock the Calendar component
|
// Mock the Calendar component
|
||||||
@@ -15,15 +16,94 @@ vi.mock("@/components/calendar/Calendar", () => ({
|
|||||||
),
|
),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
|
// Mock MosaicSpinner
|
||||||
|
vi.mock("@/components/ui/MosaicSpinner", () => ({
|
||||||
|
MosaicSpinner: ({ label }: { label?: string }): React.JSX.Element => (
|
||||||
|
<div data-testid="mosaic-spinner">{label ?? "Loading..."}</div>
|
||||||
|
),
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Mock useWorkspaceId
|
||||||
|
const mockUseWorkspaceId = vi.fn<() => string | null>();
|
||||||
|
vi.mock("@/lib/hooks", () => ({
|
||||||
|
useWorkspaceId: (): string | null => mockUseWorkspaceId(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Mock fetchEvents
|
||||||
|
const mockFetchEvents = vi.fn<() => Promise<Event[]>>();
|
||||||
|
vi.mock("@/lib/api/events", () => ({
|
||||||
|
fetchEvents: (...args: unknown[]): Promise<Event[]> => mockFetchEvents(...(args as [])),
|
||||||
|
}));
|
||||||
|
|
||||||
|
const fakeEvents: Event[] = [
|
||||||
|
{
|
||||||
|
id: "event-1",
|
||||||
|
title: "Team standup",
|
||||||
|
description: "Daily standup meeting",
|
||||||
|
startTime: new Date("2026-02-20T09:00:00Z"),
|
||||||
|
endTime: new Date("2026-02-20T09:30:00Z"),
|
||||||
|
allDay: false,
|
||||||
|
location: null,
|
||||||
|
recurrence: null,
|
||||||
|
creatorId: "user-1",
|
||||||
|
projectId: null,
|
||||||
|
workspaceId: "ws-1",
|
||||||
|
metadata: {},
|
||||||
|
createdAt: new Date("2026-01-28"),
|
||||||
|
updatedAt: new Date("2026-01-28"),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: "event-2",
|
||||||
|
title: "Sprint planning",
|
||||||
|
description: "Bi-weekly sprint planning",
|
||||||
|
startTime: new Date("2026-02-21T14:00:00Z"),
|
||||||
|
endTime: new Date("2026-02-21T15:00:00Z"),
|
||||||
|
allDay: false,
|
||||||
|
location: null,
|
||||||
|
recurrence: null,
|
||||||
|
creatorId: "user-1",
|
||||||
|
projectId: null,
|
||||||
|
workspaceId: "ws-1",
|
||||||
|
metadata: {},
|
||||||
|
createdAt: new Date("2026-01-28"),
|
||||||
|
updatedAt: new Date("2026-01-28"),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: "event-3",
|
||||||
|
title: "All-day workshop",
|
||||||
|
description: null,
|
||||||
|
startTime: new Date("2026-02-22T00:00:00Z"),
|
||||||
|
endTime: null,
|
||||||
|
allDay: true,
|
||||||
|
location: "Conference Room A",
|
||||||
|
recurrence: null,
|
||||||
|
creatorId: "user-1",
|
||||||
|
projectId: null,
|
||||||
|
workspaceId: "ws-1",
|
||||||
|
metadata: {},
|
||||||
|
createdAt: new Date("2026-01-28"),
|
||||||
|
updatedAt: new Date("2026-01-28"),
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
describe("CalendarPage", (): void => {
|
describe("CalendarPage", (): void => {
|
||||||
|
beforeEach((): void => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
mockUseWorkspaceId.mockReturnValue("ws-1");
|
||||||
|
mockFetchEvents.mockResolvedValue(fakeEvents);
|
||||||
|
});
|
||||||
|
|
||||||
it("should render the page title", (): void => {
|
it("should render the page title", (): void => {
|
||||||
render(<CalendarPage />);
|
render(<CalendarPage />);
|
||||||
expect(screen.getByRole("heading", { level: 1 })).toHaveTextContent("Calendar");
|
expect(screen.getByRole("heading", { level: 1 })).toHaveTextContent("Calendar");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should show loading state initially", (): void => {
|
it("should show loading state initially", (): void => {
|
||||||
|
// Never resolve so we stay in loading state
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||||
|
mockFetchEvents.mockReturnValue(new Promise<Event[]>(() => {}));
|
||||||
render(<CalendarPage />);
|
render(<CalendarPage />);
|
||||||
expect(screen.getByTestId("calendar")).toHaveTextContent("Loading");
|
expect(screen.getByTestId("mosaic-spinner")).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should render the Calendar with events after loading", async (): Promise<void> => {
|
it("should render the Calendar with events after loading", async (): Promise<void> => {
|
||||||
@@ -43,4 +123,31 @@ describe("CalendarPage", (): void => {
|
|||||||
render(<CalendarPage />);
|
render(<CalendarPage />);
|
||||||
expect(screen.getByText("View your schedule at a glance")).toBeInTheDocument();
|
expect(screen.getByText("View your schedule at a glance")).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should show empty state when no events exist", async (): Promise<void> => {
|
||||||
|
mockFetchEvents.mockResolvedValue([]);
|
||||||
|
render(<CalendarPage />);
|
||||||
|
await waitFor((): void => {
|
||||||
|
expect(screen.getByText("No events scheduled")).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should show error state on API failure", async (): Promise<void> => {
|
||||||
|
mockFetchEvents.mockRejectedValue(new Error("Network error"));
|
||||||
|
render(<CalendarPage />);
|
||||||
|
await waitFor((): void => {
|
||||||
|
expect(screen.getByText("Network error")).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
expect(screen.getByRole("button", { name: /try again/i })).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not fetch when workspace ID is not available", async (): Promise<void> => {
|
||||||
|
mockUseWorkspaceId.mockReturnValue(null);
|
||||||
|
render(<CalendarPage />);
|
||||||
|
|
||||||
|
// Wait a tick to ensure useEffect ran
|
||||||
|
await waitFor((): void => {
|
||||||
|
expect(mockFetchEvents).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -3,57 +3,161 @@
|
|||||||
import { useState, useEffect } from "react";
|
import { useState, useEffect } from "react";
|
||||||
import type { ReactElement } from "react";
|
import type { ReactElement } from "react";
|
||||||
import { Calendar } from "@/components/calendar/Calendar";
|
import { Calendar } from "@/components/calendar/Calendar";
|
||||||
import { mockEvents } from "@/lib/api/events";
|
import { fetchEvents } from "@/lib/api/events";
|
||||||
|
import { MosaicSpinner } from "@/components/ui/MosaicSpinner";
|
||||||
|
import { useWorkspaceId } from "@/lib/hooks";
|
||||||
import type { Event } from "@mosaic/shared";
|
import type { Event } from "@mosaic/shared";
|
||||||
|
|
||||||
export default function CalendarPage(): ReactElement {
|
export default function CalendarPage(): ReactElement {
|
||||||
|
const workspaceId = useWorkspaceId();
|
||||||
const [events, setEvents] = useState<Event[]>([]);
|
const [events, setEvents] = useState<Event[]>([]);
|
||||||
const [isLoading, setIsLoading] = useState(true);
|
const [isLoading, setIsLoading] = useState(true);
|
||||||
const [error, setError] = useState<string | null>(null);
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
void loadEvents();
|
if (!workspaceId) {
|
||||||
}, []);
|
setIsLoading(false);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const wsId = workspaceId;
|
||||||
|
let cancelled = false;
|
||||||
|
setError(null);
|
||||||
|
setIsLoading(true);
|
||||||
|
|
||||||
async function loadEvents(): Promise<void> {
|
async function loadEvents(): Promise<void> {
|
||||||
setIsLoading(true);
|
|
||||||
setError(null);
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// TODO: Replace with real API call when backend is ready
|
const data = await fetchEvents(wsId);
|
||||||
// const data = await fetchEvents();
|
if (!cancelled) {
|
||||||
await new Promise((resolve) => setTimeout(resolve, 300));
|
setEvents(data);
|
||||||
setEvents(mockEvents);
|
}
|
||||||
} catch (err) {
|
} catch (err: unknown) {
|
||||||
|
console.error("[Calendar] Failed to fetch events:", err);
|
||||||
|
if (!cancelled) {
|
||||||
setError(
|
setError(
|
||||||
err instanceof Error
|
err instanceof Error
|
||||||
? err.message
|
? err.message
|
||||||
: "We had trouble loading your calendar. Please try again when you're ready."
|
: "We had trouble loading your calendar. Please try again when you're ready."
|
||||||
);
|
);
|
||||||
|
}
|
||||||
} finally {
|
} finally {
|
||||||
|
if (!cancelled) {
|
||||||
setIsLoading(false);
|
setIsLoading(false);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void loadEvents();
|
||||||
|
|
||||||
|
return (): void => {
|
||||||
|
cancelled = true;
|
||||||
|
};
|
||||||
|
}, [workspaceId]);
|
||||||
|
|
||||||
|
function handleRetry(): void {
|
||||||
|
if (!workspaceId) return;
|
||||||
|
|
||||||
|
const wsId = workspaceId;
|
||||||
|
setError(null);
|
||||||
|
setIsLoading(true);
|
||||||
|
|
||||||
|
fetchEvents(wsId)
|
||||||
|
.then((data) => {
|
||||||
|
setEvents(data);
|
||||||
|
})
|
||||||
|
.catch((err: unknown) => {
|
||||||
|
console.error("[Calendar] Retry failed:", err);
|
||||||
|
setError(
|
||||||
|
err instanceof Error
|
||||||
|
? err.message
|
||||||
|
: "We had trouble loading your calendar. Please try again when you're ready."
|
||||||
|
);
|
||||||
|
})
|
||||||
|
.finally(() => {
|
||||||
|
setIsLoading(false);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isLoading) {
|
||||||
|
return (
|
||||||
|
<main className="container mx-auto px-4 py-8">
|
||||||
|
<div className="mb-8">
|
||||||
|
<h1 className="text-3xl font-bold" style={{ color: "var(--text)" }}>
|
||||||
|
Calendar
|
||||||
|
</h1>
|
||||||
|
<p style={{ color: "var(--text-muted)" }} className="mt-2">
|
||||||
|
View your schedule at a glance
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div className="flex justify-center py-16">
|
||||||
|
<MosaicSpinner label="Loading calendar..." />
|
||||||
|
</div>
|
||||||
|
</main>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (error !== null) {
|
||||||
|
return (
|
||||||
|
<main className="container mx-auto px-4 py-8">
|
||||||
|
<div className="mb-8">
|
||||||
|
<h1 className="text-3xl font-bold" style={{ color: "var(--text)" }}>
|
||||||
|
Calendar
|
||||||
|
</h1>
|
||||||
|
<p style={{ color: "var(--text-muted)" }} className="mt-2">
|
||||||
|
View your schedule at a glance
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
<div
|
||||||
|
className="rounded-lg p-6 text-center"
|
||||||
|
style={{
|
||||||
|
background: "var(--surface)",
|
||||||
|
border: "1px solid var(--border)",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<p style={{ color: "var(--danger)" }}>{error}</p>
|
||||||
|
<button
|
||||||
|
onClick={handleRetry}
|
||||||
|
className="mt-4 rounded-md px-4 py-2 text-sm font-medium transition-colors"
|
||||||
|
style={{
|
||||||
|
background: "var(--accent)",
|
||||||
|
color: "var(--surface)",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
Try again
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</main>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<main className="container mx-auto px-4 py-8">
|
<main className="container mx-auto px-4 py-8">
|
||||||
<div className="mb-8">
|
<div className="mb-8">
|
||||||
<h1 className="text-3xl font-bold text-gray-900">Calendar</h1>
|
<h1 className="text-3xl font-bold" style={{ color: "var(--text)" }}>
|
||||||
<p className="text-gray-600 mt-2">View your schedule at a glance</p>
|
Calendar
|
||||||
|
</h1>
|
||||||
|
<p style={{ color: "var(--text-muted)" }} className="mt-2">
|
||||||
|
View your schedule at a glance
|
||||||
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{error !== null ? (
|
{events.length === 0 ? (
|
||||||
<div className="rounded-lg border border-amber-200 bg-amber-50 p-6 text-center">
|
<div
|
||||||
<p className="text-amber-800">{error}</p>
|
className="rounded-lg p-8 text-center"
|
||||||
<button
|
style={{
|
||||||
onClick={() => void loadEvents()}
|
background: "var(--surface)",
|
||||||
className="mt-4 rounded-md bg-amber-600 px-4 py-2 text-sm font-medium text-white hover:bg-amber-700 transition-colors"
|
border: "1px solid var(--border)",
|
||||||
|
}}
|
||||||
>
|
>
|
||||||
Try again
|
<p className="text-lg" style={{ color: "var(--text-muted)" }}>
|
||||||
</button>
|
No events scheduled
|
||||||
|
</p>
|
||||||
|
<p className="text-sm mt-2" style={{ color: "var(--text-muted)" }}>
|
||||||
|
Your calendar is clear
|
||||||
|
</p>
|
||||||
</div>
|
</div>
|
||||||
) : (
|
) : (
|
||||||
<Calendar events={events} isLoading={isLoading} />
|
<Calendar events={events} isLoading={false} />
|
||||||
)}
|
)}
|
||||||
</main>
|
</main>
|
||||||
);
|
);
|
||||||
|
|||||||
1436
apps/web/src/app/(authenticated)/files/page.tsx
Normal file
1436
apps/web/src/app/(authenticated)/files/page.tsx
Normal file
File diff suppressed because it is too large
Load Diff
765
apps/web/src/app/(authenticated)/kanban/page.tsx
Normal file
765
apps/web/src/app/(authenticated)/kanban/page.tsx
Normal file
@@ -0,0 +1,765 @@
|
|||||||
|
"use client";
|
||||||
|
|
||||||
|
import { useState, useEffect, useCallback, useMemo } from "react";
|
||||||
|
import type { ReactElement } from "react";
|
||||||
|
import { useSearchParams, useRouter } from "next/navigation";
|
||||||
|
import { DragDropContext, Droppable, Draggable } from "@hello-pangea/dnd";
|
||||||
|
import type {
|
||||||
|
DropResult,
|
||||||
|
DroppableProvided,
|
||||||
|
DraggableProvided,
|
||||||
|
DraggableStateSnapshot,
|
||||||
|
} from "@hello-pangea/dnd";
|
||||||
|
|
||||||
|
import { MosaicSpinner } from "@/components/ui/MosaicSpinner";
|
||||||
|
import { fetchTasks, updateTask, type TaskFilters } from "@/lib/api/tasks";
|
||||||
|
import { fetchProjects, type Project } from "@/lib/api/projects";
|
||||||
|
import { useWorkspaceId } from "@/lib/hooks";
|
||||||
|
import type { Task } from "@mosaic/shared";
|
||||||
|
import { TaskStatus, TaskPriority } from "@mosaic/shared";
|
||||||
|
|
||||||
|
/* ---------------------------------------------------------------------------
|
||||||
|
Column configuration
|
||||||
|
--------------------------------------------------------------------------- */
|
||||||
|
|
||||||
|
interface ColumnConfig {
|
||||||
|
status: TaskStatus;
|
||||||
|
label: string;
|
||||||
|
accent: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const COLUMNS: ColumnConfig[] = [
|
||||||
|
{ status: TaskStatus.NOT_STARTED, label: "To Do", accent: "var(--ms-blue-400)" },
|
||||||
|
{ status: TaskStatus.IN_PROGRESS, label: "In Progress", accent: "var(--ms-amber-400)" },
|
||||||
|
{ status: TaskStatus.PAUSED, label: "Paused", accent: "var(--ms-purple-400)" },
|
||||||
|
{ status: TaskStatus.COMPLETED, label: "Done", accent: "var(--ms-teal-400)" },
|
||||||
|
{ status: TaskStatus.ARCHIVED, label: "Archived", accent: "var(--muted)" },
|
||||||
|
];
|
||||||
|
|
||||||
|
const PRIORITY_OPTIONS: { value: string; label: string }[] = [
|
||||||
|
{ value: "", label: "All Priorities" },
|
||||||
|
{ value: TaskPriority.HIGH, label: "High" },
|
||||||
|
{ value: TaskPriority.MEDIUM, label: "Medium" },
|
||||||
|
{ value: TaskPriority.LOW, label: "Low" },
|
||||||
|
];
|
||||||
|
|
||||||
|
/* ---------------------------------------------------------------------------
|
||||||
|
Filter select shared styles
|
||||||
|
--------------------------------------------------------------------------- */
|
||||||
|
|
||||||
|
const selectStyle: React.CSSProperties = {
|
||||||
|
padding: "6px 10px",
|
||||||
|
borderRadius: "var(--r)",
|
||||||
|
border: "1px solid var(--border)",
|
||||||
|
background: "var(--surface)",
|
||||||
|
color: "var(--text)",
|
||||||
|
fontSize: "0.83rem",
|
||||||
|
outline: "none",
|
||||||
|
minWidth: 130,
|
||||||
|
};
|
||||||
|
|
||||||
|
const inputStyle: React.CSSProperties = {
|
||||||
|
...selectStyle,
|
||||||
|
minWidth: 180,
|
||||||
|
};
|
||||||
|
|
||||||
|
/* ---------------------------------------------------------------------------
|
||||||
|
Priority badge helper
|
||||||
|
--------------------------------------------------------------------------- */
|
||||||
|
|
||||||
|
interface PriorityStyle {
|
||||||
|
label: string;
|
||||||
|
bg: string;
|
||||||
|
color: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getPriorityStyle(priority: TaskPriority): PriorityStyle {
|
||||||
|
switch (priority) {
|
||||||
|
case TaskPriority.HIGH:
|
||||||
|
return { label: "High", bg: "rgba(229,72,77,0.15)", color: "var(--danger)" };
|
||||||
|
case TaskPriority.MEDIUM:
|
||||||
|
return { label: "Medium", bg: "rgba(245,158,11,0.15)", color: "var(--warn)" };
|
||||||
|
case TaskPriority.LOW:
|
||||||
|
return { label: "Low", bg: "rgba(143,157,183,0.15)", color: "var(--muted)" };
|
||||||
|
default:
|
||||||
|
return { label: String(priority), bg: "rgba(143,157,183,0.15)", color: "var(--muted)" };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ---------------------------------------------------------------------------
|
||||||
|
Task Card
|
||||||
|
--------------------------------------------------------------------------- */
|
||||||
|
|
||||||
|
interface TaskCardProps {
|
||||||
|
task: Task;
|
||||||
|
provided: DraggableProvided;
|
||||||
|
snapshot: DraggableStateSnapshot;
|
||||||
|
columnAccent: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
function TaskCard({ task, provided, snapshot, columnAccent }: TaskCardProps): ReactElement {
|
||||||
|
const [hovered, setHovered] = useState(false);
|
||||||
|
const priorityStyle = getPriorityStyle(task.priority);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
ref={provided.innerRef}
|
||||||
|
{...provided.draggableProps}
|
||||||
|
{...provided.dragHandleProps}
|
||||||
|
onMouseEnter={() => {
|
||||||
|
setHovered(true);
|
||||||
|
}}
|
||||||
|
onMouseLeave={() => {
|
||||||
|
setHovered(false);
|
||||||
|
}}
|
||||||
|
style={{
|
||||||
|
background: "var(--surface)",
|
||||||
|
border: `1px solid ${hovered || snapshot.isDragging ? columnAccent : "var(--border)"}`,
|
||||||
|
borderRadius: "var(--r)",
|
||||||
|
padding: 12,
|
||||||
|
marginBottom: 8,
|
||||||
|
cursor: "grab",
|
||||||
|
transition: "border-color 0.15s, box-shadow 0.15s",
|
||||||
|
boxShadow: snapshot.isDragging ? "var(--shadow-lg)" : "none",
|
||||||
|
...provided.draggableProps.style,
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{/* Title */}
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
fontWeight: 600,
|
||||||
|
color: "var(--text)",
|
||||||
|
fontSize: "0.875rem",
|
||||||
|
marginBottom: 6,
|
||||||
|
overflow: "hidden",
|
||||||
|
textOverflow: "ellipsis",
|
||||||
|
whiteSpace: "nowrap",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{task.title}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Priority badge */}
|
||||||
|
<span
|
||||||
|
style={{
|
||||||
|
display: "inline-block",
|
||||||
|
padding: "1px 8px",
|
||||||
|
borderRadius: "var(--r-sm)",
|
||||||
|
background: priorityStyle.bg,
|
||||||
|
color: priorityStyle.color,
|
||||||
|
fontSize: "0.7rem",
|
||||||
|
fontWeight: 500,
|
||||||
|
marginBottom: 6,
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{priorityStyle.label}
|
||||||
|
</span>
|
||||||
|
|
||||||
|
{/* Description */}
|
||||||
|
{task.description && (
|
||||||
|
<p
|
||||||
|
style={{
|
||||||
|
color: "var(--muted)",
|
||||||
|
fontSize: "0.8rem",
|
||||||
|
margin: 0,
|
||||||
|
overflow: "hidden",
|
||||||
|
textOverflow: "ellipsis",
|
||||||
|
display: "-webkit-box",
|
||||||
|
WebkitLineClamp: 2,
|
||||||
|
WebkitBoxOrient: "vertical",
|
||||||
|
lineHeight: 1.4,
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{task.description}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ---------------------------------------------------------------------------
|
||||||
|
Kanban Column
|
||||||
|
--------------------------------------------------------------------------- */
|
||||||
|
|
||||||
|
interface KanbanColumnProps {
|
||||||
|
config: ColumnConfig;
|
||||||
|
tasks: Task[];
|
||||||
|
}
|
||||||
|
|
||||||
|
function KanbanColumn({ config, tasks }: KanbanColumnProps): ReactElement {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
minWidth: 280,
|
||||||
|
maxWidth: 340,
|
||||||
|
flex: "1 0 280px",
|
||||||
|
display: "flex",
|
||||||
|
flexDirection: "column",
|
||||||
|
background: "var(--bg-mid)",
|
||||||
|
borderRadius: "var(--r-lg)",
|
||||||
|
overflow: "hidden",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{/* Column header */}
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
borderTop: `3px solid ${config.accent}`,
|
||||||
|
padding: "12px 16px",
|
||||||
|
display: "flex",
|
||||||
|
alignItems: "center",
|
||||||
|
justifyContent: "space-between",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<span
|
||||||
|
style={{
|
||||||
|
fontWeight: 600,
|
||||||
|
fontSize: "0.85rem",
|
||||||
|
color: "var(--text)",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{config.label}
|
||||||
|
</span>
|
||||||
|
<span
|
||||||
|
style={{
|
||||||
|
display: "inline-flex",
|
||||||
|
alignItems: "center",
|
||||||
|
justifyContent: "center",
|
||||||
|
minWidth: 22,
|
||||||
|
height: 22,
|
||||||
|
padding: "0 6px",
|
||||||
|
borderRadius: "var(--r)",
|
||||||
|
background: `color-mix(in srgb, ${config.accent} 15%, transparent)`,
|
||||||
|
color: config.accent,
|
||||||
|
fontSize: "0.75rem",
|
||||||
|
fontWeight: 600,
|
||||||
|
fontFamily: "var(--mono)",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{tasks.length}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Droppable area */}
|
||||||
|
<Droppable droppableId={config.status}>
|
||||||
|
{(provided: DroppableProvided) => (
|
||||||
|
<div
|
||||||
|
ref={provided.innerRef}
|
||||||
|
{...provided.droppableProps}
|
||||||
|
style={{
|
||||||
|
padding: "8px 12px 12px",
|
||||||
|
flex: 1,
|
||||||
|
minHeight: 80,
|
||||||
|
overflowY: "auto",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{tasks.map((task, index) => (
|
||||||
|
<Draggable key={task.id} draggableId={task.id} index={index}>
|
||||||
|
{(dragProvided: DraggableProvided, dragSnapshot: DraggableStateSnapshot) => (
|
||||||
|
<TaskCard
|
||||||
|
task={task}
|
||||||
|
provided={dragProvided}
|
||||||
|
snapshot={dragSnapshot}
|
||||||
|
columnAccent={config.accent}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
</Draggable>
|
||||||
|
))}
|
||||||
|
{provided.placeholder}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</Droppable>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ---------------------------------------------------------------------------
|
||||||
|
Filter Bar
|
||||||
|
--------------------------------------------------------------------------- */
|
||||||
|
|
||||||
|
interface FilterBarProps {
|
||||||
|
projects: Project[];
|
||||||
|
projectId: string;
|
||||||
|
priority: string;
|
||||||
|
search: string;
|
||||||
|
myTasks: boolean;
|
||||||
|
onProjectChange: (value: string) => void;
|
||||||
|
onPriorityChange: (value: string) => void;
|
||||||
|
onSearchChange: (value: string) => void;
|
||||||
|
onMyTasksToggle: () => void;
|
||||||
|
onClear: () => void;
|
||||||
|
hasActiveFilters: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
function FilterBar({
|
||||||
|
projects,
|
||||||
|
projectId,
|
||||||
|
priority,
|
||||||
|
search,
|
||||||
|
myTasks,
|
||||||
|
onProjectChange,
|
||||||
|
onPriorityChange,
|
||||||
|
onSearchChange,
|
||||||
|
onMyTasksToggle,
|
||||||
|
onClear,
|
||||||
|
hasActiveFilters,
|
||||||
|
}: FilterBarProps): ReactElement {
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
display: "flex",
|
||||||
|
alignItems: "center",
|
||||||
|
flexWrap: "wrap",
|
||||||
|
gap: 8,
|
||||||
|
padding: "10px 14px",
|
||||||
|
background: "var(--surface)",
|
||||||
|
border: "1px solid var(--border)",
|
||||||
|
borderRadius: "var(--r-lg)",
|
||||||
|
marginBottom: 16,
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{/* Search */}
|
||||||
|
<input
|
||||||
|
type="text"
|
||||||
|
placeholder="Search tasks..."
|
||||||
|
value={search}
|
||||||
|
onChange={(e): void => {
|
||||||
|
onSearchChange(e.target.value);
|
||||||
|
}}
|
||||||
|
style={inputStyle}
|
||||||
|
/>
|
||||||
|
|
||||||
|
{/* Project filter */}
|
||||||
|
<select
|
||||||
|
value={projectId}
|
||||||
|
onChange={(e): void => {
|
||||||
|
onProjectChange(e.target.value);
|
||||||
|
}}
|
||||||
|
style={selectStyle}
|
||||||
|
>
|
||||||
|
<option value="">All Projects</option>
|
||||||
|
{projects.map((p) => (
|
||||||
|
<option key={p.id} value={p.id}>
|
||||||
|
{p.name}
|
||||||
|
</option>
|
||||||
|
))}
|
||||||
|
</select>
|
||||||
|
|
||||||
|
{/* Priority filter */}
|
||||||
|
<select
|
||||||
|
value={priority}
|
||||||
|
onChange={(e): void => {
|
||||||
|
onPriorityChange(e.target.value);
|
||||||
|
}}
|
||||||
|
style={selectStyle}
|
||||||
|
>
|
||||||
|
{PRIORITY_OPTIONS.map((opt) => (
|
||||||
|
<option key={opt.value} value={opt.value}>
|
||||||
|
{opt.label}
|
||||||
|
</option>
|
||||||
|
))}
|
||||||
|
</select>
|
||||||
|
|
||||||
|
{/* My Tasks toggle */}
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={onMyTasksToggle}
|
||||||
|
style={{
|
||||||
|
padding: "6px 12px",
|
||||||
|
borderRadius: "var(--r)",
|
||||||
|
border: myTasks ? "1px solid var(--primary)" : "1px solid var(--border)",
|
||||||
|
background: myTasks ? "var(--primary)" : "transparent",
|
||||||
|
color: myTasks ? "#fff" : "var(--text-2)",
|
||||||
|
fontSize: "0.83rem",
|
||||||
|
fontWeight: 500,
|
||||||
|
cursor: "pointer",
|
||||||
|
transition: "all 0.12s ease",
|
||||||
|
whiteSpace: "nowrap",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
My Tasks
|
||||||
|
</button>
|
||||||
|
|
||||||
|
{/* Clear filters */}
|
||||||
|
{hasActiveFilters && (
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={onClear}
|
||||||
|
style={{
|
||||||
|
padding: "6px 12px",
|
||||||
|
borderRadius: "var(--r)",
|
||||||
|
border: "1px solid var(--border)",
|
||||||
|
background: "transparent",
|
||||||
|
color: "var(--muted)",
|
||||||
|
fontSize: "0.83rem",
|
||||||
|
fontWeight: 500,
|
||||||
|
cursor: "pointer",
|
||||||
|
whiteSpace: "nowrap",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
Clear
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ---------------------------------------------------------------------------
|
||||||
|
Kanban Board Page
|
||||||
|
--------------------------------------------------------------------------- */
|
||||||
|
|
||||||
|
export default function KanbanPage(): ReactElement {
|
||||||
|
const workspaceId = useWorkspaceId();
|
||||||
|
const router = useRouter();
|
||||||
|
const searchParams = useSearchParams();
|
||||||
|
|
||||||
|
const [tasks, setTasks] = useState<Task[]>([]);
|
||||||
|
const [projects, setProjects] = useState<Project[]>([]);
|
||||||
|
const [isLoading, setIsLoading] = useState(true);
|
||||||
|
const [error, setError] = useState<string | null>(null);
|
||||||
|
|
||||||
|
// Read filters from URL params
|
||||||
|
const filterProject = searchParams.get("project") ?? "";
|
||||||
|
const filterPriority = searchParams.get("priority") ?? "";
|
||||||
|
const filterSearch = searchParams.get("q") ?? "";
|
||||||
|
const filterMyTasks = searchParams.get("my") === "1";
|
||||||
|
|
||||||
|
const hasActiveFilters =
|
||||||
|
filterProject !== "" || filterPriority !== "" || filterSearch !== "" || filterMyTasks;
|
||||||
|
|
||||||
|
/** Update a single URL param (preserving others) */
|
||||||
|
const setParam = useCallback(
|
||||||
|
(key: string, value: string) => {
|
||||||
|
const params = new URLSearchParams(searchParams.toString());
|
||||||
|
if (value) {
|
||||||
|
params.set(key, value);
|
||||||
|
} else {
|
||||||
|
params.delete(key);
|
||||||
|
}
|
||||||
|
router.replace(`/kanban?${params.toString()}`, { scroll: false });
|
||||||
|
},
|
||||||
|
[searchParams, router]
|
||||||
|
);
|
||||||
|
|
||||||
|
const handleProjectChange = useCallback(
|
||||||
|
(value: string) => {
|
||||||
|
setParam("project", value);
|
||||||
|
},
|
||||||
|
[setParam]
|
||||||
|
);
|
||||||
|
|
||||||
|
const handlePriorityChange = useCallback(
|
||||||
|
(value: string) => {
|
||||||
|
setParam("priority", value);
|
||||||
|
},
|
||||||
|
[setParam]
|
||||||
|
);
|
||||||
|
|
||||||
|
const handleSearchChange = useCallback(
|
||||||
|
(value: string) => {
|
||||||
|
setParam("q", value);
|
||||||
|
},
|
||||||
|
[setParam]
|
||||||
|
);
|
||||||
|
|
||||||
|
const handleMyTasksToggle = useCallback(() => {
|
||||||
|
setParam("my", filterMyTasks ? "" : "1");
|
||||||
|
}, [setParam, filterMyTasks]);
|
||||||
|
|
||||||
|
const handleClearFilters = useCallback(() => {
|
||||||
|
router.replace("/kanban", { scroll: false });
|
||||||
|
}, [router]);
|
||||||
|
|
||||||
|
/* --- data fetching --- */
|
||||||
|
|
||||||
|
const loadTasks = useCallback(async (wsId: string | null): Promise<void> => {
|
||||||
|
try {
|
||||||
|
setIsLoading(true);
|
||||||
|
setError(null);
|
||||||
|
const filters = wsId !== null ? { workspaceId: wsId } : {};
|
||||||
|
const data = await fetchTasks(filters);
|
||||||
|
setTasks(data);
|
||||||
|
} catch (err: unknown) {
|
||||||
|
console.error("[Kanban] Failed to fetch tasks:", err);
|
||||||
|
setError(
|
||||||
|
err instanceof Error
|
||||||
|
? err.message
|
||||||
|
: "Something went wrong loading tasks. You could try again when ready."
|
||||||
|
);
|
||||||
|
} finally {
|
||||||
|
setIsLoading(false);
|
||||||
|
}
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!workspaceId) {
|
||||||
|
setIsLoading(false);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const ac = new AbortController();
|
||||||
|
|
||||||
|
async function load(): Promise<void> {
|
||||||
|
try {
|
||||||
|
setIsLoading(true);
|
||||||
|
setError(null);
|
||||||
|
const filters: TaskFilters = {};
|
||||||
|
if (workspaceId) filters.workspaceId = workspaceId;
|
||||||
|
const [taskData, projectData] = await Promise.all([
|
||||||
|
fetchTasks(filters),
|
||||||
|
fetchProjects(workspaceId ?? undefined),
|
||||||
|
]);
|
||||||
|
if (ac.signal.aborted) return;
|
||||||
|
setTasks(taskData);
|
||||||
|
setProjects(projectData);
|
||||||
|
} catch (err: unknown) {
|
||||||
|
console.error("[Kanban] Failed to fetch tasks:", err);
|
||||||
|
if (ac.signal.aborted) return;
|
||||||
|
setError(
|
||||||
|
err instanceof Error
|
||||||
|
? err.message
|
||||||
|
: "Something went wrong loading tasks. You could try again when ready."
|
||||||
|
);
|
||||||
|
} finally {
|
||||||
|
if (!ac.signal.aborted) {
|
||||||
|
setIsLoading(false);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void load();
|
||||||
|
|
||||||
|
return (): void => {
|
||||||
|
ac.abort();
|
||||||
|
};
|
||||||
|
}, [workspaceId]);
|
||||||
|
|
||||||
|
/* --- apply client-side filters --- */
|
||||||
|
|
||||||
|
const filteredTasks = useMemo(() => {
|
||||||
|
let result = tasks;
|
||||||
|
|
||||||
|
if (filterProject) {
|
||||||
|
result = result.filter((t) => t.projectId === filterProject);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (filterPriority) {
|
||||||
|
result = result.filter((t) => t.priority === (filterPriority as TaskPriority));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (filterSearch) {
|
||||||
|
const q = filterSearch.toLowerCase();
|
||||||
|
result = result.filter(
|
||||||
|
(t) => t.title.toLowerCase().includes(q) || t.description?.toLowerCase().includes(q)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (filterMyTasks) {
|
||||||
|
// "My Tasks" filters to tasks assigned to the current user.
|
||||||
|
// Since we don't have the current userId readily available,
|
||||||
|
// filter by assigneeId being non-null (assigned tasks).
|
||||||
|
// A proper implementation would compare against the logged-in user's ID.
|
||||||
|
result = result.filter((t) => t.assigneeId !== null);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}, [tasks, filterProject, filterPriority, filterSearch, filterMyTasks]);
|
||||||
|
|
||||||
|
/* --- group tasks by status --- */
|
||||||
|
|
||||||
|
function groupByStatus(allTasks: Task[]): Record<TaskStatus, Task[]> {
|
||||||
|
const grouped: Record<TaskStatus, Task[]> = {
|
||||||
|
[TaskStatus.NOT_STARTED]: [],
|
||||||
|
[TaskStatus.IN_PROGRESS]: [],
|
||||||
|
[TaskStatus.PAUSED]: [],
|
||||||
|
[TaskStatus.COMPLETED]: [],
|
||||||
|
[TaskStatus.ARCHIVED]: [],
|
||||||
|
};
|
||||||
|
|
||||||
|
for (const task of allTasks) {
|
||||||
|
grouped[task.status].push(task);
|
||||||
|
}
|
||||||
|
|
||||||
|
return grouped;
|
||||||
|
}
|
||||||
|
|
||||||
|
const grouped = groupByStatus(filteredTasks);
|
||||||
|
|
||||||
|
/* --- drag-and-drop handler --- */
|
||||||
|
|
||||||
|
const handleDragEnd = useCallback(
|
||||||
|
(result: DropResult) => {
|
||||||
|
const { source, destination, draggableId } = result;
|
||||||
|
|
||||||
|
// Dropped outside a droppable area
|
||||||
|
if (!destination) return;
|
||||||
|
|
||||||
|
// Dropped in same position
|
||||||
|
if (source.droppableId === destination.droppableId && source.index === destination.index) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const newStatus = destination.droppableId as TaskStatus;
|
||||||
|
const taskId = draggableId;
|
||||||
|
|
||||||
|
// Optimistic update: move card in local state
|
||||||
|
setTasks((prev) => prev.map((t) => (t.id === taskId ? { ...t, status: newStatus } : t)));
|
||||||
|
|
||||||
|
// Persist to API
|
||||||
|
const wsId = workspaceId ?? undefined;
|
||||||
|
updateTask(taskId, { status: newStatus }, wsId).catch((err: unknown) => {
|
||||||
|
console.error("[Kanban] Failed to update task status:", err);
|
||||||
|
// Revert on failure by re-fetching
|
||||||
|
void loadTasks(workspaceId);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
[workspaceId, loadTasks]
|
||||||
|
);
|
||||||
|
|
||||||
|
/* --- retry handler --- */
|
||||||
|
|
||||||
|
function handleRetry(): void {
|
||||||
|
void loadTasks(workspaceId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* --- render --- */
|
||||||
|
|
||||||
|
return (
|
||||||
|
<main style={{ padding: "32px 24px", minHeight: "100%" }}>
|
||||||
|
{/* Page header */}
|
||||||
|
<div style={{ marginBottom: 16 }}>
|
||||||
|
<h1
|
||||||
|
style={{
|
||||||
|
fontSize: "1.875rem",
|
||||||
|
fontWeight: 700,
|
||||||
|
color: "var(--text)",
|
||||||
|
margin: 0,
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
Kanban Board
|
||||||
|
</h1>
|
||||||
|
<p
|
||||||
|
style={{
|
||||||
|
fontSize: "0.9rem",
|
||||||
|
color: "var(--muted)",
|
||||||
|
marginTop: 4,
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
Visualize and manage task progress across stages
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Filter bar */}
|
||||||
|
<FilterBar
|
||||||
|
projects={projects}
|
||||||
|
projectId={filterProject}
|
||||||
|
priority={filterPriority}
|
||||||
|
search={filterSearch}
|
||||||
|
myTasks={filterMyTasks}
|
||||||
|
onProjectChange={handleProjectChange}
|
||||||
|
onPriorityChange={handlePriorityChange}
|
||||||
|
onSearchChange={handleSearchChange}
|
||||||
|
onMyTasksToggle={handleMyTasksToggle}
|
||||||
|
onClear={handleClearFilters}
|
||||||
|
hasActiveFilters={hasActiveFilters}
|
||||||
|
/>
|
||||||
|
|
||||||
|
{/* Loading state */}
|
||||||
|
{isLoading ? (
|
||||||
|
<div className="flex justify-center py-16">
|
||||||
|
<MosaicSpinner label="Loading tasks..." />
|
||||||
|
</div>
|
||||||
|
) : error !== null ? (
|
||||||
|
/* Error state */
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
background: "var(--surface)",
|
||||||
|
border: "1px solid var(--border)",
|
||||||
|
borderRadius: "var(--r-lg)",
|
||||||
|
padding: 32,
|
||||||
|
textAlign: "center",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<p style={{ color: "var(--danger)", margin: "0 0 16px" }}>{error}</p>
|
||||||
|
<button
|
||||||
|
onClick={handleRetry}
|
||||||
|
style={{
|
||||||
|
padding: "8px 16px",
|
||||||
|
background: "var(--danger)",
|
||||||
|
border: "none",
|
||||||
|
borderRadius: "var(--r)",
|
||||||
|
color: "#fff",
|
||||||
|
fontSize: "0.85rem",
|
||||||
|
fontWeight: 500,
|
||||||
|
cursor: "pointer",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
Try again
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
) : filteredTasks.length === 0 && tasks.length > 0 ? (
|
||||||
|
/* No results (filtered) */
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
background: "var(--surface)",
|
||||||
|
border: "1px solid var(--border)",
|
||||||
|
borderRadius: "var(--r-lg)",
|
||||||
|
padding: 48,
|
||||||
|
textAlign: "center",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<p style={{ color: "var(--muted)", margin: 0, fontSize: "0.9rem" }}>
|
||||||
|
No tasks match your filters.
|
||||||
|
</p>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={handleClearFilters}
|
||||||
|
style={{
|
||||||
|
marginTop: 12,
|
||||||
|
padding: "6px 14px",
|
||||||
|
borderRadius: "var(--r)",
|
||||||
|
border: "1px solid var(--border)",
|
||||||
|
background: "transparent",
|
||||||
|
color: "var(--text-2)",
|
||||||
|
fontSize: "0.83rem",
|
||||||
|
cursor: "pointer",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
Clear filters
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
) : tasks.length === 0 ? (
|
||||||
|
/* Empty state */
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
background: "var(--surface)",
|
||||||
|
border: "1px solid var(--border)",
|
||||||
|
borderRadius: "var(--r-lg)",
|
||||||
|
padding: 48,
|
||||||
|
textAlign: "center",
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<p style={{ color: "var(--muted)", margin: 0, fontSize: "0.9rem" }}>
|
||||||
|
No tasks yet. Create some tasks to see them here.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
) : (
|
||||||
|
/* Board */
|
||||||
|
<DragDropContext onDragEnd={handleDragEnd}>
|
||||||
|
<div
|
||||||
|
style={{
|
||||||
|
display: "flex",
|
||||||
|
gap: 16,
|
||||||
|
overflowX: "auto",
|
||||||
|
paddingBottom: 16,
|
||||||
|
minHeight: 400,
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{COLUMNS.map((col) => (
|
||||||
|
<KanbanColumn key={col.status} config={col} tasks={grouped[col.status]} />
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</DragDropContext>
|
||||||
|
)}
|
||||||
|
</main>
|
||||||
|
);
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user