Compare commits
173 Commits
17ee28b6f6
...
v0.0.15
| Author | SHA1 | Date | |
|---|---|---|---|
| d218902cb0 | |||
| b43e860c40 | |||
| 716f230f72 | |||
| a5ed260fbd | |||
| 9b5c15ca56 | |||
| 74c8c376b7 | |||
| 9901fba61e | |||
| 17144b1c42 | |||
| a6f75cd587 | |||
| 06e54328d5 | |||
| 7480deff10 | |||
| 1b66417be5 | |||
| 23d610ba5b | |||
| 25ae14aba1 | |||
| 1425893318 | |||
| bc4c1f9c70 | |||
| d66451cf48 | |||
| c23ebca648 | |||
|
|
eae55bc4a3 | ||
| b5ac2630c1 | |||
| 8424a28faa | |||
| d2cec04cba | |||
| 9ac971e857 | |||
| 0c2a6b14cf | |||
| af299abdaf | |||
| fa9f173f8e | |||
| 7935d86015 | |||
| f43631671f | |||
| 8328f9509b | |||
| f72e8c2da9 | |||
| 1a668627a3 | |||
| bd3625ae1b | |||
| aeac188d40 | |||
| f219dd71a0 | |||
| 2c3c1f67ac | |||
| dedc1af080 | |||
| 3b16b2c743 | |||
|
|
6fd8e85266 | ||
|
|
d3474cdd74 | ||
| 157b702331 | |||
|
|
63c6a129bd | ||
| 4a4aee7b7c | |||
|
|
9d9a01f5f7 | ||
|
|
5bce7dbb05 | ||
|
|
ab902250f8 | ||
|
|
d34f097a5c | ||
|
|
f4ad7eba37 | ||
|
|
4d089cd020 | ||
|
|
3258cd4f4d | ||
| 35dd623ab5 | |||
|
|
758b2a839b | ||
| af113707d9 | |||
|
|
57d0f5d2a3 | ||
|
|
ad428598a9 | ||
|
|
cab8d690ab | ||
| 0a780a5062 | |||
| a1515676db | |||
|
|
254f85369b | ||
|
|
ddf6851bfd | ||
| 027fee1afa | |||
| abe57621cd | |||
| 7c7ad59002 | |||
| ca430d6fdf | |||
| 18e5f6312b | |||
| d2ed1f2817 | |||
| fb609d40e3 | |||
| 0c93be417a | |||
| b719fa0444 | |||
|
|
8961f5b18c | ||
| d58bf47cd7 | |||
|
|
c917a639c4 | ||
|
|
9d3a673e6c | ||
|
|
b96e2d7dc6 | ||
|
|
76756ad695 | ||
|
|
05ee6303c2 | ||
|
|
5328390f4c | ||
|
|
4d9b75994f | ||
|
|
d7de20e586 | ||
|
|
399d5a31c8 | ||
|
|
b675db1324 | ||
|
|
e0d6d585b3 | ||
|
|
0a2eaaa5e4 | ||
|
|
df495c67b5 | ||
|
|
3e2c1b69ea | ||
|
|
27c4c8edf3 | ||
|
|
e600cfd2d0 | ||
|
|
08e32d42a3 | ||
|
|
752e839054 | ||
|
|
8a572e8525 | ||
|
|
4f31690281 | ||
|
|
097f5f4ab6 | ||
|
|
ac492aab80 | ||
|
|
110e181272 | ||
|
|
9696e45265 | ||
|
|
7ead8b1076 | ||
|
|
3fbba135b9 | ||
|
|
c233d97ba0 | ||
|
|
f1ee0df933 | ||
|
|
07084208a7 | ||
|
|
f500300b1f | ||
|
|
24ee7c7f87 | ||
|
|
d9a3eeb9aa | ||
|
|
077bb042b7 | ||
|
|
1d7d5a9d01 | ||
|
|
2020c15545 | ||
|
|
3ab87362a9 | ||
|
|
81b5204258 | ||
|
|
9623a3be97 | ||
|
|
f37c83e280 | ||
|
|
7ebbcbf958 | ||
|
|
b316e98b64 | ||
|
|
447141f05d | ||
|
|
3b2356f5a0 | ||
|
|
d2605196ac | ||
|
|
2d59c4b2e4 | ||
|
|
a9090aca7f | ||
|
|
f6eadff5bf | ||
|
|
9ae21c4c15 | ||
|
|
976d14d94b | ||
|
|
b2eec3cf83 | ||
|
|
bd7470f5d7 | ||
| 491675b613 | |||
| 4b3eecf05a | |||
| 3376d8162e | |||
| e2ffaa71b1 | |||
| 444fa1116a | |||
| 31ce9e920c | |||
| ba54de88fd | |||
| ca21416efc | |||
| 1bad7a8cca | |||
| 6015ace1de | |||
| 92de2f282f | |||
| 1fde25760a | |||
| cf28efa880 | |||
| 11d284554d | |||
| 3cc2030446 | |||
| eca2c46e9d | |||
| c5a87df6e1 | |||
| af9c5799af | |||
| dcbc8d1053 | |||
| d2c7602430 | |||
| 24065aa199 | |||
| bc86947d01 | |||
| 74d6c1092e | |||
| 03d0c032e4 | |||
| 8d19ac1f4b | |||
| 28c9e6fe65 | |||
| b3d6d73348 | |||
| 527262af38 | |||
| a1f0d1dd71 | |||
| 9cc70dbe31 | |||
| 6c465566f6 | |||
| 68808c0933 | |||
| 7b4fda6011 | |||
| 0819dfa470 | |||
| 93cd31435b | |||
| d37c78f503 | |||
| aa106a948a | |||
| 79b1d81d27 | |||
| ad24720616 | |||
| b5edb4f37e | |||
| 4a9ecab4dd | |||
| 3ae9e53bcc | |||
| 771ed484e4 | |||
| 7d22c2490a | |||
| c40373fa3b | |||
| 52553c8266 | |||
| f238867eae | |||
| 5b5d3811d6 | |||
| 4cc43bece6 | |||
| 4a5cb6441e | |||
| 6e4236b359 | |||
| fb53272fa9 |
150
.env.example
150
.env.example
@@ -15,11 +15,19 @@ WEB_PORT=3000
|
|||||||
# ======================
|
# ======================
|
||||||
NEXT_PUBLIC_APP_URL=http://localhost:3000
|
NEXT_PUBLIC_APP_URL=http://localhost:3000
|
||||||
NEXT_PUBLIC_API_URL=http://localhost:3001
|
NEXT_PUBLIC_API_URL=http://localhost:3001
|
||||||
|
# Frontend auth mode:
|
||||||
|
# - real: Normal auth/session flow
|
||||||
|
# - mock: Local-only seeded user for FE development (blocked outside NODE_ENV=development)
|
||||||
|
# Use `mock` locally to continue FE work when auth flow is unstable.
|
||||||
|
# If omitted, web runtime defaults:
|
||||||
|
# - development -> mock
|
||||||
|
# - production -> real
|
||||||
|
NEXT_PUBLIC_AUTH_MODE=real
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# PostgreSQL Database
|
# PostgreSQL Database
|
||||||
# ======================
|
# ======================
|
||||||
# Bundled PostgreSQL (when database profile enabled)
|
# Bundled PostgreSQL
|
||||||
# SECURITY: Change POSTGRES_PASSWORD to a strong random password in production
|
# SECURITY: Change POSTGRES_PASSWORD to a strong random password in production
|
||||||
DATABASE_URL=postgresql://mosaic:REPLACE_WITH_SECURE_PASSWORD@postgres:5432/mosaic
|
DATABASE_URL=postgresql://mosaic:REPLACE_WITH_SECURE_PASSWORD@postgres:5432/mosaic
|
||||||
POSTGRES_USER=mosaic
|
POSTGRES_USER=mosaic
|
||||||
@@ -28,7 +36,7 @@ POSTGRES_DB=mosaic
|
|||||||
POSTGRES_PORT=5432
|
POSTGRES_PORT=5432
|
||||||
|
|
||||||
# External PostgreSQL (managed service)
|
# External PostgreSQL (managed service)
|
||||||
# Disable 'database' profile and point DATABASE_URL to your external instance
|
# To use an external instance, update DATABASE_URL above
|
||||||
# Example: DATABASE_URL=postgresql://user:pass@rds.amazonaws.com:5432/mosaic
|
# Example: DATABASE_URL=postgresql://user:pass@rds.amazonaws.com:5432/mosaic
|
||||||
|
|
||||||
# PostgreSQL Performance Tuning (Optional)
|
# PostgreSQL Performance Tuning (Optional)
|
||||||
@@ -39,7 +47,7 @@ POSTGRES_MAX_CONNECTIONS=100
|
|||||||
# ======================
|
# ======================
|
||||||
# Valkey Cache (Redis-compatible)
|
# Valkey Cache (Redis-compatible)
|
||||||
# ======================
|
# ======================
|
||||||
# Bundled Valkey (when cache profile enabled)
|
# Bundled Valkey
|
||||||
VALKEY_URL=redis://valkey:6379
|
VALKEY_URL=redis://valkey:6379
|
||||||
VALKEY_HOST=valkey
|
VALKEY_HOST=valkey
|
||||||
VALKEY_PORT=6379
|
VALKEY_PORT=6379
|
||||||
@@ -47,7 +55,7 @@ VALKEY_PORT=6379
|
|||||||
VALKEY_MAXMEMORY=256mb
|
VALKEY_MAXMEMORY=256mb
|
||||||
|
|
||||||
# External Redis/Valkey (managed service)
|
# External Redis/Valkey (managed service)
|
||||||
# Disable 'cache' profile and point VALKEY_URL to your external instance
|
# To use an external instance, update VALKEY_URL above
|
||||||
# Example: VALKEY_URL=redis://elasticache.amazonaws.com:6379
|
# Example: VALKEY_URL=redis://elasticache.amazonaws.com:6379
|
||||||
# Example with auth: VALKEY_URL=redis://:password@redis.example.com:6379
|
# Example with auth: VALKEY_URL=redis://:password@redis.example.com:6379
|
||||||
|
|
||||||
@@ -61,7 +69,7 @@ KNOWLEDGE_CACHE_TTL=300
|
|||||||
# Authentication (Authentik OIDC)
|
# Authentication (Authentik OIDC)
|
||||||
# ======================
|
# ======================
|
||||||
# Set to 'true' to enable OIDC authentication with Authentik
|
# Set to 'true' to enable OIDC authentication with Authentik
|
||||||
# When enabled, OIDC_ISSUER, OIDC_CLIENT_ID, and OIDC_CLIENT_SECRET are required
|
# When enabled, OIDC_ISSUER, OIDC_CLIENT_ID, OIDC_CLIENT_SECRET, and OIDC_REDIRECT_URI are required
|
||||||
OIDC_ENABLED=false
|
OIDC_ENABLED=false
|
||||||
|
|
||||||
# Authentik Server URLs (required when OIDC_ENABLED=true)
|
# Authentik Server URLs (required when OIDC_ENABLED=true)
|
||||||
@@ -70,9 +78,9 @@ OIDC_ISSUER=https://auth.example.com/application/o/mosaic-stack/
|
|||||||
OIDC_CLIENT_ID=your-client-id-here
|
OIDC_CLIENT_ID=your-client-id-here
|
||||||
OIDC_CLIENT_SECRET=your-client-secret-here
|
OIDC_CLIENT_SECRET=your-client-secret-here
|
||||||
# Redirect URI must match what's configured in Authentik
|
# Redirect URI must match what's configured in Authentik
|
||||||
# Development: http://localhost:3001/auth/callback/authentik
|
# Development: http://localhost:3001/auth/oauth2/callback/authentik
|
||||||
# Production: https://api.mosaicstack.dev/auth/callback/authentik
|
# Production: https://api.mosaicstack.dev/auth/oauth2/callback/authentik
|
||||||
OIDC_REDIRECT_URI=http://localhost:3001/auth/callback/authentik
|
OIDC_REDIRECT_URI=http://localhost:3001/auth/oauth2/callback/authentik
|
||||||
|
|
||||||
# Authentik PostgreSQL Database
|
# Authentik PostgreSQL Database
|
||||||
AUTHENTIK_POSTGRES_USER=authentik
|
AUTHENTIK_POSTGRES_USER=authentik
|
||||||
@@ -116,6 +124,17 @@ JWT_EXPIRATION=24h
|
|||||||
# This is used by BetterAuth for session management and CSRF protection
|
# This is used by BetterAuth for session management and CSRF protection
|
||||||
# Example: openssl rand -base64 32
|
# Example: openssl rand -base64 32
|
||||||
BETTER_AUTH_SECRET=REPLACE_WITH_RANDOM_SECRET_MINIMUM_32_CHARS
|
BETTER_AUTH_SECRET=REPLACE_WITH_RANDOM_SECRET_MINIMUM_32_CHARS
|
||||||
|
# Optional explicit BetterAuth origin for callback/error URL generation.
|
||||||
|
# When empty, backend falls back to NEXT_PUBLIC_API_URL.
|
||||||
|
BETTER_AUTH_URL=
|
||||||
|
|
||||||
|
# Trusted Origins (comma-separated list of additional trusted origins for CORS and auth)
|
||||||
|
# These are added to NEXT_PUBLIC_APP_URL and NEXT_PUBLIC_API_URL automatically
|
||||||
|
TRUSTED_ORIGINS=
|
||||||
|
|
||||||
|
# Cookie Domain (for cross-subdomain session sharing)
|
||||||
|
# Leave empty for single-domain setups. Set to ".example.com" for cross-subdomain.
|
||||||
|
COOKIE_DOMAIN=
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Encryption (Credential Security)
|
# Encryption (Credential Security)
|
||||||
@@ -196,11 +215,9 @@ NODE_ENV=development
|
|||||||
# Used by docker-compose.yml (pulls images) and docker-swarm.yml
|
# Used by docker-compose.yml (pulls images) and docker-swarm.yml
|
||||||
# For local builds, use docker-compose.build.yml instead
|
# For local builds, use docker-compose.build.yml instead
|
||||||
# Options:
|
# Options:
|
||||||
# - dev: Pull development images from registry (default, built from develop branch)
|
# - latest: Pull latest images from registry (default, built from main branch)
|
||||||
# - latest: Pull latest stable images from registry (built from main branch)
|
|
||||||
# - <commit-sha>: Use specific commit SHA tag (e.g., 658ec077)
|
|
||||||
# - <version>: Use specific version tag (e.g., v1.0.0)
|
# - <version>: Use specific version tag (e.g., v1.0.0)
|
||||||
IMAGE_TAG=dev
|
IMAGE_TAG=latest
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Docker Compose Profiles
|
# Docker Compose Profiles
|
||||||
@@ -236,12 +253,16 @@ MOSAIC_API_DOMAIN=api.mosaic.local
|
|||||||
MOSAIC_WEB_DOMAIN=mosaic.local
|
MOSAIC_WEB_DOMAIN=mosaic.local
|
||||||
MOSAIC_AUTH_DOMAIN=auth.mosaic.local
|
MOSAIC_AUTH_DOMAIN=auth.mosaic.local
|
||||||
|
|
||||||
# External Traefik network name (for upstream mode)
|
# External Traefik network name (for upstream mode and swarm)
|
||||||
# Must match the network name of your existing Traefik instance
|
# Must match the network name of your existing Traefik instance
|
||||||
TRAEFIK_NETWORK=traefik-public
|
TRAEFIK_NETWORK=traefik-public
|
||||||
|
TRAEFIK_DOCKER_NETWORK=traefik-public
|
||||||
|
|
||||||
# TLS/SSL Configuration
|
# TLS/SSL Configuration
|
||||||
TRAEFIK_TLS_ENABLED=true
|
TRAEFIK_TLS_ENABLED=true
|
||||||
|
TRAEFIK_ENTRYPOINT=websecure
|
||||||
|
# Cert resolver name (leave empty if TLS is handled externally or using self-signed certs)
|
||||||
|
TRAEFIK_CERTRESOLVER=
|
||||||
# For Let's Encrypt (production):
|
# For Let's Encrypt (production):
|
||||||
TRAEFIK_ACME_EMAIL=admin@example.com
|
TRAEFIK_ACME_EMAIL=admin@example.com
|
||||||
# For self-signed certificates (development), leave TRAEFIK_ACME_EMAIL empty
|
# For self-signed certificates (development), leave TRAEFIK_ACME_EMAIL empty
|
||||||
@@ -277,6 +298,15 @@ GITEA_WEBHOOK_SECRET=REPLACE_WITH_RANDOM_WEBHOOK_SECRET
|
|||||||
# The coordinator service uses this key to authenticate with the API
|
# The coordinator service uses this key to authenticate with the API
|
||||||
COORDINATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
COORDINATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
||||||
|
|
||||||
|
# Anthropic API Key (used by coordinator for issue parsing)
|
||||||
|
# Get your API key from: https://console.anthropic.com/
|
||||||
|
ANTHROPIC_API_KEY=REPLACE_WITH_ANTHROPIC_API_KEY
|
||||||
|
|
||||||
|
# Coordinator tuning
|
||||||
|
COORDINATOR_POLL_INTERVAL=5.0
|
||||||
|
COORDINATOR_MAX_CONCURRENT_AGENTS=10
|
||||||
|
COORDINATOR_ENABLED=true
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Rate Limiting
|
# Rate Limiting
|
||||||
# ======================
|
# ======================
|
||||||
@@ -316,6 +346,40 @@ RATE_LIMIT_STORAGE=redis
|
|||||||
# multi-tenant isolation. Each Discord bot instance should be configured for
|
# multi-tenant isolation. Each Discord bot instance should be configured for
|
||||||
# a single workspace.
|
# a single workspace.
|
||||||
|
|
||||||
|
# ======================
|
||||||
|
# Matrix Bridge (Optional)
|
||||||
|
# ======================
|
||||||
|
# Matrix bot integration for chat-based control via Matrix protocol
|
||||||
|
# Requires a Matrix account with an access token for the bot user
|
||||||
|
# Set these AFTER deploying Synapse and creating the bot account.
|
||||||
|
#
|
||||||
|
# SECURITY: MATRIX_WORKSPACE_ID must be a valid workspace UUID from your database.
|
||||||
|
# All Matrix commands will execute within this workspace context for proper
|
||||||
|
# multi-tenant isolation. Each Matrix bot instance should be configured for
|
||||||
|
# a single workspace.
|
||||||
|
MATRIX_HOMESERVER_URL=http://synapse:8008
|
||||||
|
MATRIX_ACCESS_TOKEN=
|
||||||
|
MATRIX_BOT_USER_ID=@mosaic-bot:matrix.example.com
|
||||||
|
MATRIX_SERVER_NAME=matrix.example.com
|
||||||
|
# MATRIX_CONTROL_ROOM_ID=!roomid:matrix.example.com
|
||||||
|
# MATRIX_WORKSPACE_ID=your-workspace-uuid
|
||||||
|
|
||||||
|
# ======================
|
||||||
|
# Matrix / Synapse Deployment
|
||||||
|
# ======================
|
||||||
|
# Domains for Traefik routing to Matrix services
|
||||||
|
MATRIX_DOMAIN=matrix.example.com
|
||||||
|
ELEMENT_DOMAIN=chat.example.com
|
||||||
|
|
||||||
|
# Synapse database (created automatically by synapse-db-init in the swarm compose)
|
||||||
|
SYNAPSE_POSTGRES_DB=synapse
|
||||||
|
SYNAPSE_POSTGRES_USER=synapse
|
||||||
|
SYNAPSE_POSTGRES_PASSWORD=REPLACE_WITH_SECURE_SYNAPSE_DB_PASSWORD
|
||||||
|
|
||||||
|
# Image tags for Matrix services
|
||||||
|
SYNAPSE_IMAGE_TAG=latest
|
||||||
|
ELEMENT_IMAGE_TAG=latest
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Orchestrator Configuration
|
# Orchestrator Configuration
|
||||||
# ======================
|
# ======================
|
||||||
@@ -326,6 +390,17 @@ RATE_LIMIT_STORAGE=redis
|
|||||||
# Health endpoints (/health/*) remain unauthenticated
|
# Health endpoints (/health/*) remain unauthenticated
|
||||||
ORCHESTRATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
ORCHESTRATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
||||||
|
|
||||||
|
# Runtime safety defaults (recommended for low-memory hosts)
|
||||||
|
MAX_CONCURRENT_AGENTS=2
|
||||||
|
SESSION_CLEANUP_DELAY_MS=30000
|
||||||
|
ORCHESTRATOR_QUEUE_NAME=orchestrator-tasks
|
||||||
|
ORCHESTRATOR_QUEUE_CONCURRENCY=1
|
||||||
|
ORCHESTRATOR_QUEUE_MAX_RETRIES=3
|
||||||
|
ORCHESTRATOR_QUEUE_BASE_DELAY_MS=1000
|
||||||
|
ORCHESTRATOR_QUEUE_MAX_DELAY_MS=60000
|
||||||
|
SANDBOX_DEFAULT_MEMORY_MB=256
|
||||||
|
SANDBOX_DEFAULT_CPU_LIMIT=1.0
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# AI Provider Configuration
|
# AI Provider Configuration
|
||||||
# ======================
|
# ======================
|
||||||
@@ -339,17 +414,58 @@ AI_PROVIDER=ollama
|
|||||||
# For remote Ollama: http://your-ollama-server:11434
|
# For remote Ollama: http://your-ollama-server:11434
|
||||||
OLLAMA_MODEL=llama3.1:latest
|
OLLAMA_MODEL=llama3.1:latest
|
||||||
|
|
||||||
# Claude API Configuration (when AI_PROVIDER=claude)
|
# Claude API Key
|
||||||
# OPTIONAL: Only required if AI_PROVIDER=claude
|
# Required only when AI_PROVIDER=claude.
|
||||||
# Get your API key from: https://console.anthropic.com/
|
# Get your API key from: https://console.anthropic.com/
|
||||||
# Note: Claude Max subscription users should use AI_PROVIDER=ollama instead
|
CLAUDE_API_KEY=REPLACE_WITH_CLAUDE_API_KEY
|
||||||
# CLAUDE_API_KEY=sk-ant-...
|
|
||||||
|
|
||||||
# OpenAI API Configuration (when AI_PROVIDER=openai)
|
# OpenAI API Configuration (when AI_PROVIDER=openai)
|
||||||
# OPTIONAL: Only required if AI_PROVIDER=openai
|
# OPTIONAL: Only required if AI_PROVIDER=openai
|
||||||
# Get your API key from: https://platform.openai.com/api-keys
|
# Get your API key from: https://platform.openai.com/api-keys
|
||||||
# OPENAI_API_KEY=sk-...
|
# OPENAI_API_KEY=sk-...
|
||||||
|
|
||||||
|
# ======================
|
||||||
|
# Speech Services (STT / TTS)
|
||||||
|
# ======================
|
||||||
|
# Speech-to-Text (STT) - Whisper via Speaches
|
||||||
|
# Set STT_ENABLED=true to enable speech-to-text transcription
|
||||||
|
# STT_BASE_URL is required when STT_ENABLED=true
|
||||||
|
STT_ENABLED=true
|
||||||
|
STT_BASE_URL=http://speaches:8000/v1
|
||||||
|
STT_MODEL=Systran/faster-whisper-large-v3-turbo
|
||||||
|
STT_LANGUAGE=en
|
||||||
|
|
||||||
|
# Text-to-Speech (TTS) - Default Engine (Kokoro)
|
||||||
|
# Set TTS_ENABLED=true to enable text-to-speech synthesis
|
||||||
|
# TTS_DEFAULT_URL is required when TTS_ENABLED=true
|
||||||
|
TTS_ENABLED=true
|
||||||
|
TTS_DEFAULT_URL=http://kokoro-tts:8880/v1
|
||||||
|
TTS_DEFAULT_VOICE=af_heart
|
||||||
|
TTS_DEFAULT_FORMAT=mp3
|
||||||
|
|
||||||
|
# Text-to-Speech (TTS) - Premium Engine (Chatterbox) - Optional
|
||||||
|
# Higher quality voice cloning engine, disabled by default
|
||||||
|
# TTS_PREMIUM_URL is required when TTS_PREMIUM_ENABLED=true
|
||||||
|
TTS_PREMIUM_ENABLED=false
|
||||||
|
TTS_PREMIUM_URL=http://chatterbox-tts:8881/v1
|
||||||
|
|
||||||
|
# Text-to-Speech (TTS) - Fallback Engine (Piper/OpenedAI) - Optional
|
||||||
|
# Lightweight fallback engine, disabled by default
|
||||||
|
# TTS_FALLBACK_URL is required when TTS_FALLBACK_ENABLED=true
|
||||||
|
TTS_FALLBACK_ENABLED=false
|
||||||
|
TTS_FALLBACK_URL=http://openedai-speech:8000/v1
|
||||||
|
|
||||||
|
# Whisper model for Speaches STT engine
|
||||||
|
SPEACHES_WHISPER_MODEL=Systran/faster-whisper-large-v3-turbo
|
||||||
|
|
||||||
|
# Speech Service Limits
|
||||||
|
# Maximum upload file size in bytes (default: 25MB)
|
||||||
|
SPEECH_MAX_UPLOAD_SIZE=25000000
|
||||||
|
# Maximum audio duration in seconds (default: 600 = 10 minutes)
|
||||||
|
SPEECH_MAX_DURATION_SECONDS=600
|
||||||
|
# Maximum text length for TTS in characters (default: 4096)
|
||||||
|
SPEECH_MAX_TEXT_LENGTH=4096
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Mosaic Telemetry (Task Completion Tracking & Predictions)
|
# Mosaic Telemetry (Task Completion Tracking & Predictions)
|
||||||
# ======================
|
# ======================
|
||||||
|
|||||||
@@ -1,66 +0,0 @@
|
|||||||
# ==============================================
|
|
||||||
# Mosaic Stack Production Environment
|
|
||||||
# ==============================================
|
|
||||||
# Copy to .env and configure for production deployment
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# PostgreSQL Database
|
|
||||||
# ======================
|
|
||||||
# CRITICAL: Use a strong, unique password
|
|
||||||
POSTGRES_USER=mosaic
|
|
||||||
POSTGRES_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
|
|
||||||
POSTGRES_DB=mosaic
|
|
||||||
POSTGRES_SHARED_BUFFERS=256MB
|
|
||||||
POSTGRES_EFFECTIVE_CACHE_SIZE=1GB
|
|
||||||
POSTGRES_MAX_CONNECTIONS=100
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Valkey Cache
|
|
||||||
# ======================
|
|
||||||
VALKEY_MAXMEMORY=256mb
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# API Configuration
|
|
||||||
# ======================
|
|
||||||
API_PORT=3001
|
|
||||||
API_HOST=0.0.0.0
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Web Configuration
|
|
||||||
# ======================
|
|
||||||
WEB_PORT=3000
|
|
||||||
NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Authentication (Authentik OIDC)
|
|
||||||
# ======================
|
|
||||||
OIDC_ISSUER=https://auth.diversecanvas.com/application/o/mosaic-stack/
|
|
||||||
OIDC_CLIENT_ID=your-client-id
|
|
||||||
OIDC_CLIENT_SECRET=your-client-secret
|
|
||||||
OIDC_REDIRECT_URI=https://api.mosaicstack.dev/auth/callback/authentik
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# JWT Configuration
|
|
||||||
# ======================
|
|
||||||
# CRITICAL: Generate a random secret (openssl rand -base64 32)
|
|
||||||
JWT_SECRET=REPLACE_WITH_RANDOM_SECRET
|
|
||||||
JWT_EXPIRATION=24h
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Traefik Integration
|
|
||||||
# ======================
|
|
||||||
# Set to true if using external Traefik
|
|
||||||
TRAEFIK_ENABLE=true
|
|
||||||
TRAEFIK_ENTRYPOINT=websecure
|
|
||||||
TRAEFIK_TLS_ENABLED=true
|
|
||||||
TRAEFIK_DOCKER_NETWORK=traefik-public
|
|
||||||
TRAEFIK_CERTRESOLVER=letsencrypt
|
|
||||||
|
|
||||||
# Domain configuration
|
|
||||||
MOSAIC_API_DOMAIN=api.mosaicstack.dev
|
|
||||||
MOSAIC_WEB_DOMAIN=app.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Optional: Ollama
|
|
||||||
# ======================
|
|
||||||
# OLLAMA_ENDPOINT=http://ollama.diversecanvas.com:11434
|
|
||||||
@@ -1,161 +0,0 @@
|
|||||||
# ==============================================
|
|
||||||
# Mosaic Stack - Docker Swarm Configuration
|
|
||||||
# ==============================================
|
|
||||||
# Copy this file to .env for Docker Swarm deployment
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Application Ports (Internal)
|
|
||||||
# ======================
|
|
||||||
API_PORT=3001
|
|
||||||
API_HOST=0.0.0.0
|
|
||||||
WEB_PORT=3000
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Domain Configuration (Traefik)
|
|
||||||
# ======================
|
|
||||||
# These domains must be configured in your DNS or /etc/hosts
|
|
||||||
MOSAIC_API_DOMAIN=api.mosaicstack.dev
|
|
||||||
MOSAIC_WEB_DOMAIN=mosaic.mosaicstack.dev
|
|
||||||
MOSAIC_AUTH_DOMAIN=auth.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Web Configuration
|
|
||||||
# ======================
|
|
||||||
# Use the Traefik domain for the API URL
|
|
||||||
NEXT_PUBLIC_APP_URL=http://mosaic.mosaicstack.dev
|
|
||||||
NEXT_PUBLIC_API_URL=http://api.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# PostgreSQL Database
|
|
||||||
# ======================
|
|
||||||
DATABASE_URL=postgresql://mosaic:REPLACE_WITH_SECURE_PASSWORD@postgres:5432/mosaic
|
|
||||||
POSTGRES_USER=mosaic
|
|
||||||
POSTGRES_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
|
|
||||||
POSTGRES_DB=mosaic
|
|
||||||
POSTGRES_PORT=5432
|
|
||||||
|
|
||||||
# PostgreSQL Performance Tuning
|
|
||||||
POSTGRES_SHARED_BUFFERS=256MB
|
|
||||||
POSTGRES_EFFECTIVE_CACHE_SIZE=1GB
|
|
||||||
POSTGRES_MAX_CONNECTIONS=100
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Valkey Cache
|
|
||||||
# ======================
|
|
||||||
VALKEY_URL=redis://valkey:6379
|
|
||||||
VALKEY_HOST=valkey
|
|
||||||
VALKEY_PORT=6379
|
|
||||||
VALKEY_MAXMEMORY=256mb
|
|
||||||
|
|
||||||
# Knowledge Module Cache Configuration
|
|
||||||
KNOWLEDGE_CACHE_ENABLED=true
|
|
||||||
KNOWLEDGE_CACHE_TTL=300
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Authentication (Authentik OIDC)
|
|
||||||
# ======================
|
|
||||||
# NOTE: Authentik services are COMMENTED OUT in docker-compose.swarm.yml by default
|
|
||||||
# Uncomment those services if you want to run Authentik internally
|
|
||||||
# Otherwise, use external Authentik by configuring OIDC_* variables below
|
|
||||||
|
|
||||||
# External Authentik Configuration (default)
|
|
||||||
OIDC_ENABLED=true
|
|
||||||
OIDC_ISSUER=https://auth.example.com/application/o/mosaic-stack/
|
|
||||||
OIDC_CLIENT_ID=your-client-id-here
|
|
||||||
OIDC_CLIENT_SECRET=your-client-secret-here
|
|
||||||
OIDC_REDIRECT_URI=https://api.mosaicstack.dev/auth/callback/authentik
|
|
||||||
|
|
||||||
# Internal Authentik Configuration (only needed if uncommenting Authentik services)
|
|
||||||
# Authentik PostgreSQL Database
|
|
||||||
AUTHENTIK_POSTGRES_USER=authentik
|
|
||||||
AUTHENTIK_POSTGRES_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
|
|
||||||
AUTHENTIK_POSTGRES_DB=authentik
|
|
||||||
|
|
||||||
# Authentik Server Configuration
|
|
||||||
AUTHENTIK_SECRET_KEY=REPLACE_WITH_RANDOM_SECRET_MINIMUM_50_CHARS
|
|
||||||
AUTHENTIK_ERROR_REPORTING=false
|
|
||||||
AUTHENTIK_BOOTSTRAP_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
|
|
||||||
AUTHENTIK_BOOTSTRAP_EMAIL=admin@mosaicstack.dev
|
|
||||||
AUTHENTIK_COOKIE_DOMAIN=.mosaicstack.dev
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# JWT Configuration
|
|
||||||
# ======================
|
|
||||||
JWT_SECRET=REPLACE_WITH_RANDOM_SECRET_MINIMUM_32_CHARS
|
|
||||||
JWT_EXPIRATION=24h
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Encryption (Credential Security)
|
|
||||||
# ======================
|
|
||||||
# Generate with: openssl rand -hex 32
|
|
||||||
ENCRYPTION_KEY=REPLACE_WITH_64_CHAR_HEX_STRING_GENERATE_WITH_OPENSSL_RAND_HEX_32
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# OpenBao Secrets Management
|
|
||||||
# ======================
|
|
||||||
OPENBAO_ADDR=http://openbao:8200
|
|
||||||
OPENBAO_PORT=8200
|
|
||||||
# For development only - remove in production
|
|
||||||
OPENBAO_DEV_ROOT_TOKEN_ID=root
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Ollama (Optional AI Service)
|
|
||||||
# ======================
|
|
||||||
OLLAMA_ENDPOINT=http://ollama:11434
|
|
||||||
OLLAMA_PORT=11434
|
|
||||||
OLLAMA_EMBEDDING_MODEL=mxbai-embed-large
|
|
||||||
|
|
||||||
# Semantic Search Configuration
|
|
||||||
SEMANTIC_SEARCH_SIMILARITY_THRESHOLD=0.5
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# OpenAI API (Optional)
|
|
||||||
# ======================
|
|
||||||
# OPENAI_API_KEY=sk-...
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Application Environment
|
|
||||||
# ======================
|
|
||||||
NODE_ENV=production
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Gitea Integration (Coordinator)
|
|
||||||
# ======================
|
|
||||||
GITEA_URL=https://git.mosaicstack.dev
|
|
||||||
GITEA_BOT_USERNAME=mosaic
|
|
||||||
GITEA_BOT_TOKEN=REPLACE_WITH_COORDINATOR_BOT_API_TOKEN
|
|
||||||
GITEA_BOT_PASSWORD=REPLACE_WITH_COORDINATOR_BOT_PASSWORD
|
|
||||||
GITEA_REPO_OWNER=mosaic
|
|
||||||
GITEA_REPO_NAME=stack
|
|
||||||
GITEA_WEBHOOK_SECRET=REPLACE_WITH_RANDOM_WEBHOOK_SECRET
|
|
||||||
COORDINATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Coordinator Service
|
|
||||||
# ======================
|
|
||||||
ANTHROPIC_API_KEY=REPLACE_WITH_ANTHROPIC_API_KEY
|
|
||||||
COORDINATOR_POLL_INTERVAL=5.0
|
|
||||||
COORDINATOR_MAX_CONCURRENT_AGENTS=10
|
|
||||||
COORDINATOR_ENABLED=true
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Rate Limiting
|
|
||||||
# ======================
|
|
||||||
RATE_LIMIT_TTL=60
|
|
||||||
RATE_LIMIT_GLOBAL_LIMIT=100
|
|
||||||
RATE_LIMIT_WEBHOOK_LIMIT=60
|
|
||||||
RATE_LIMIT_COORDINATOR_LIMIT=100
|
|
||||||
RATE_LIMIT_HEALTH_LIMIT=300
|
|
||||||
RATE_LIMIT_STORAGE=redis
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Orchestrator Configuration
|
|
||||||
# ======================
|
|
||||||
ORCHESTRATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
|
|
||||||
CLAUDE_API_KEY=REPLACE_WITH_CLAUDE_API_KEY
|
|
||||||
|
|
||||||
# ======================
|
|
||||||
# Logging & Debugging
|
|
||||||
# ======================
|
|
||||||
LOG_LEVEL=info
|
|
||||||
DEBUG=false
|
|
||||||
10
.gitignore
vendored
10
.gitignore
vendored
@@ -59,3 +59,13 @@ yarn-error.log*
|
|||||||
|
|
||||||
# Orchestrator reports (generated by QA automation, cleaned up after processing)
|
# Orchestrator reports (generated by QA automation, cleaned up after processing)
|
||||||
docs/reports/qa-automation/
|
docs/reports/qa-automation/
|
||||||
|
|
||||||
|
# Repo-local orchestrator runtime artifacts
|
||||||
|
.mosaic/orchestrator/orchestrator.pid
|
||||||
|
.mosaic/orchestrator/state.json
|
||||||
|
.mosaic/orchestrator/tasks.json
|
||||||
|
.mosaic/orchestrator/matrix_state.json
|
||||||
|
.mosaic/orchestrator/logs/*.log
|
||||||
|
.mosaic/orchestrator/results/*
|
||||||
|
!.mosaic/orchestrator/logs/.gitkeep
|
||||||
|
!.mosaic/orchestrator/results/.gitkeep
|
||||||
|
|||||||
15
.mosaic/README.md
Normal file
15
.mosaic/README.md
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
# Repo Mosaic Linkage
|
||||||
|
|
||||||
|
This repository is attached to the machine-wide Mosaic framework.
|
||||||
|
|
||||||
|
## Load Order for Agents
|
||||||
|
|
||||||
|
1. `~/.config/mosaic/STANDARDS.md`
|
||||||
|
2. `AGENTS.md` (this repository)
|
||||||
|
3. `.mosaic/repo-hooks.sh` (repo-specific automation hooks)
|
||||||
|
|
||||||
|
## Purpose
|
||||||
|
|
||||||
|
- Keep universal standards in `~/.config/mosaic`
|
||||||
|
- Keep repo-specific behavior in this repo
|
||||||
|
- Avoid copying large runtime configs into each project
|
||||||
18
.mosaic/orchestrator/config.json
Normal file
18
.mosaic/orchestrator/config.json
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"enabled": true,
|
||||||
|
"transport": "matrix",
|
||||||
|
"matrix": {
|
||||||
|
"control_room_id": "",
|
||||||
|
"workspace_id": "",
|
||||||
|
"homeserver_url": "",
|
||||||
|
"access_token": "",
|
||||||
|
"bot_user_id": ""
|
||||||
|
},
|
||||||
|
"worker": {
|
||||||
|
"runtime": "codex",
|
||||||
|
"command_template": "bash scripts/agent/orchestrator-worker.sh {task_file}",
|
||||||
|
"timeout_seconds": 7200,
|
||||||
|
"max_attempts": 1
|
||||||
|
},
|
||||||
|
"quality_gates": ["pnpm lint", "pnpm typecheck", "pnpm test"]
|
||||||
|
}
|
||||||
1
.mosaic/orchestrator/logs/.gitkeep
Normal file
1
.mosaic/orchestrator/logs/.gitkeep
Normal file
@@ -0,0 +1 @@
|
|||||||
|
|
||||||
1
.mosaic/orchestrator/results/.gitkeep
Normal file
1
.mosaic/orchestrator/results/.gitkeep
Normal file
@@ -0,0 +1 @@
|
|||||||
|
|
||||||
10
.mosaic/quality-rails.yml
Normal file
10
.mosaic/quality-rails.yml
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
enabled: false
|
||||||
|
template: ""
|
||||||
|
|
||||||
|
# Set enabled: true and choose one template:
|
||||||
|
# - typescript-node
|
||||||
|
# - typescript-nextjs
|
||||||
|
# - monorepo
|
||||||
|
#
|
||||||
|
# Apply manually:
|
||||||
|
# ~/.config/mosaic/bin/mosaic-quality-apply --template <template> --target <repo>
|
||||||
29
.mosaic/repo-hooks.sh
Executable file
29
.mosaic/repo-hooks.sh
Executable file
@@ -0,0 +1,29 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# Repo-specific hooks used by scripts/agent/*.sh for Mosaic Stack.
|
||||||
|
|
||||||
|
mosaic_hook_session_start() {
|
||||||
|
echo "[mosaic-stack] Branch: $(git rev-parse --abbrev-ref HEAD)"
|
||||||
|
echo "[mosaic-stack] Remotes:"
|
||||||
|
git remote -v | sed 's/^/[mosaic-stack] /'
|
||||||
|
if command -v node >/dev/null 2>&1; then
|
||||||
|
echo "[mosaic-stack] Node: $(node -v)"
|
||||||
|
fi
|
||||||
|
if command -v pnpm >/dev/null 2>&1; then
|
||||||
|
echo "[mosaic-stack] pnpm: $(pnpm -v)"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
mosaic_hook_critical() {
|
||||||
|
echo "[mosaic-stack] Recent commits:"
|
||||||
|
git log --oneline --decorate -n 5 | sed 's/^/[mosaic-stack] /'
|
||||||
|
echo "[mosaic-stack] Open TODO/FIXME markers (top 20):"
|
||||||
|
rg -n "(TODO|FIXME|HACK|SECURITY)" apps packages plugins docs --glob '!**/node_modules/**' -S \
|
||||||
|
| head -n 20 \
|
||||||
|
| sed 's/^/[mosaic-stack] /' \
|
||||||
|
|| true
|
||||||
|
}
|
||||||
|
|
||||||
|
mosaic_hook_session_end() {
|
||||||
|
echo "[mosaic-stack] Working tree summary:"
|
||||||
|
git status --short | sed 's/^/[mosaic-stack] /' || true
|
||||||
|
}
|
||||||
13
.trivyignore
13
.trivyignore
@@ -6,7 +6,7 @@
|
|||||||
# - npm bundled CVEs (5): npm removed from production Node.js images
|
# - npm bundled CVEs (5): npm removed from production Node.js images
|
||||||
# - Node.js 20 → 24 LTS migration (#367): base images updated
|
# - Node.js 20 → 24 LTS migration (#367): base images updated
|
||||||
#
|
#
|
||||||
# REMAINING: OpenBao (5 CVEs) + Next.js bundled tar (3 CVEs)
|
# REMAINING: OpenBao (5 CVEs) + Next.js bundled tar/minimatch (5 CVEs)
|
||||||
# Re-evaluate when upgrading openbao image beyond 2.5.0 or Next.js beyond 16.1.6.
|
# Re-evaluate when upgrading openbao image beyond 2.5.0 or Next.js beyond 16.1.6.
|
||||||
|
|
||||||
# === OpenBao false positives ===
|
# === OpenBao false positives ===
|
||||||
@@ -17,15 +17,18 @@ CVE-2024-9180 # HIGH: privilege escalation (fixed in 2.0.3)
|
|||||||
CVE-2025-59043 # HIGH: DoS via malicious JSON (fixed in 2.4.1)
|
CVE-2025-59043 # HIGH: DoS via malicious JSON (fixed in 2.4.1)
|
||||||
CVE-2025-64761 # HIGH: identity group root escalation (fixed in 2.4.4)
|
CVE-2025-64761 # HIGH: identity group root escalation (fixed in 2.4.4)
|
||||||
|
|
||||||
# === Next.js bundled tar CVEs (upstream — waiting on Next.js release) ===
|
# === Next.js bundled tar/minimatch CVEs (upstream — waiting on Next.js release) ===
|
||||||
# Next.js 16.1.6 bundles tar@7.5.2 in next/dist/compiled/tar/ (pre-compiled).
|
# Next.js 16.1.6 bundles tar@7.5.2 and minimatch@9.0.5 in next/dist/compiled/ (pre-compiled).
|
||||||
# This is NOT a pnpm dependency — it's embedded in the Next.js package itself.
|
# These are NOT pnpm dependencies — they're embedded in the Next.js package itself.
|
||||||
|
# pnpm overrides cannot reach these; only a Next.js upgrade can fix them.
|
||||||
# Affects web image only (orchestrator and API are clean).
|
# Affects web image only (orchestrator and API are clean).
|
||||||
# npm was also removed from all production images, eliminating the npm-bundled copy.
|
# npm was also removed from all production images, eliminating the npm-bundled copy.
|
||||||
# To resolve: upgrade Next.js when a release bundles tar >= 7.5.7.
|
# To resolve: upgrade Next.js when a release bundles tar >= 7.5.8 and minimatch >= 10.2.1.
|
||||||
CVE-2026-23745 # HIGH: tar arbitrary file overwrite via unsanitized linkpaths (fixed in 7.5.3)
|
CVE-2026-23745 # HIGH: tar arbitrary file overwrite via unsanitized linkpaths (fixed in 7.5.3)
|
||||||
CVE-2026-23950 # HIGH: tar arbitrary file overwrite via Unicode path collision (fixed in 7.5.4)
|
CVE-2026-23950 # HIGH: tar arbitrary file overwrite via Unicode path collision (fixed in 7.5.4)
|
||||||
CVE-2026-24842 # HIGH: tar arbitrary file creation via hardlink path traversal (needs tar >= 7.5.7)
|
CVE-2026-24842 # HIGH: tar arbitrary file creation via hardlink path traversal (needs tar >= 7.5.7)
|
||||||
|
CVE-2026-26960 # HIGH: tar arbitrary file read/write via malicious archive hardlink (needs tar >= 7.5.8)
|
||||||
|
CVE-2026-26996 # HIGH: minimatch DoS via specially crafted glob patterns (needs minimatch >= 10.2.1)
|
||||||
|
|
||||||
# === OpenBao Go stdlib (waiting on upstream rebuild) ===
|
# === OpenBao Go stdlib (waiting on upstream rebuild) ===
|
||||||
# OpenBao 2.5.0 compiled with Go 1.25.6, fix needs Go >= 1.25.7.
|
# OpenBao 2.5.0 compiled with Go 1.25.6, fix needs Go >= 1.25.7.
|
||||||
|
|||||||
@@ -85,12 +85,11 @@ install -> [ruff-check, mypy, security-bandit, security-pip-audit, test]
|
|||||||
|
|
||||||
## Image Tagging
|
## Image Tagging
|
||||||
|
|
||||||
| Condition | Tag | Purpose |
|
| Condition | Tag | Purpose |
|
||||||
| ---------------- | -------------------------- | -------------------------- |
|
| ------------- | -------------------------- | -------------------------- |
|
||||||
| Always | `${CI_COMMIT_SHA:0:8}` | Immutable commit reference |
|
| Always | `${CI_COMMIT_SHA:0:8}` | Immutable commit reference |
|
||||||
| `main` branch | `latest` | Current production release |
|
| `main` branch | `latest` | Current latest build |
|
||||||
| `develop` branch | `dev` | Current development build |
|
| Git tag | tag value (e.g., `v1.0.0`) | Semantic version release |
|
||||||
| Git tag | tag value (e.g., `v1.0.0`) | Semantic version release |
|
|
||||||
|
|
||||||
## Required Secrets
|
## Required Secrets
|
||||||
|
|
||||||
@@ -138,5 +137,5 @@ Fails on blockers or critical/high severity security findings.
|
|||||||
|
|
||||||
### Pipeline runs Docker builds on pull requests
|
### Pipeline runs Docker builds on pull requests
|
||||||
|
|
||||||
- Docker build steps have `when: branch: [main, develop]` guards
|
- Docker build steps have `when: branch: [main]` guards
|
||||||
- PRs only run quality gates, not Docker builds
|
- PRs only run quality gates, not Docker builds
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ when:
|
|||||||
- "turbo.json"
|
- "turbo.json"
|
||||||
- "package.json"
|
- "package.json"
|
||||||
- ".woodpecker/api.yml"
|
- ".woodpecker/api.yml"
|
||||||
|
- ".trivyignore"
|
||||||
|
|
||||||
variables:
|
variables:
|
||||||
- &node_image "node:24-alpine"
|
- &node_image "node:24-alpine"
|
||||||
@@ -112,7 +113,7 @@ steps:
|
|||||||
ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
|
ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
|
||||||
commands:
|
commands:
|
||||||
- *use_deps
|
- *use_deps
|
||||||
- pnpm --filter "@mosaic/api" exec vitest run --exclude 'src/auth/auth-rls.integration.spec.ts' --exclude 'src/credentials/user-credential.model.spec.ts' --exclude 'src/job-events/job-events.performance.spec.ts' --exclude 'src/knowledge/services/fulltext-search.spec.ts'
|
- pnpm --filter "@mosaic/api" exec vitest run --exclude 'src/auth/auth-rls.integration.spec.ts' --exclude 'src/credentials/user-credential.model.spec.ts' --exclude 'src/job-events/job-events.performance.spec.ts' --exclude 'src/knowledge/services/fulltext-search.spec.ts' --exclude 'src/mosaic-telemetry/mosaic-telemetry.module.spec.ts'
|
||||||
depends_on:
|
depends_on:
|
||||||
- prisma-migrate
|
- prisma-migrate
|
||||||
|
|
||||||
@@ -151,12 +152,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-api:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context . --dockerfile apps/api/Dockerfile $DESTINATIONS
|
/kaniko/executor --context . --dockerfile apps/api/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- build
|
- build
|
||||||
@@ -179,7 +178,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -187,7 +186,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-api:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-api:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-api
|
- docker-build-api
|
||||||
@@ -229,7 +228,7 @@ steps:
|
|||||||
}
|
}
|
||||||
link_package "stack-api"
|
link_package "stack-api"
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- security-trivy-api
|
- security-trivy-api
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ when:
|
|||||||
event: pull_request
|
event: pull_request
|
||||||
|
|
||||||
variables:
|
variables:
|
||||||
- &node_image "node:22-slim"
|
- &node_image "node:24-slim"
|
||||||
- &install_codex "npm i -g @openai/codex"
|
- &install_codex "npm i -g @openai/codex"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
|
|||||||
@@ -92,12 +92,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-coordinator:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context apps/coordinator --dockerfile apps/coordinator/Dockerfile $DESTINATIONS
|
/kaniko/executor --context apps/coordinator --dockerfile apps/coordinator/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- ruff-check
|
- ruff-check
|
||||||
@@ -124,7 +122,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -132,7 +130,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-coordinator:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-coordinator:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-coordinator
|
- docker-build-coordinator
|
||||||
@@ -174,7 +172,7 @@ steps:
|
|||||||
}
|
}
|
||||||
link_package "stack-coordinator"
|
link_package "stack-coordinator"
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- security-trivy-coordinator
|
- security-trivy-coordinator
|
||||||
|
|||||||
@@ -36,12 +36,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-postgres:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context docker/postgres --dockerfile docker/postgres/Dockerfile $DESTINATIONS
|
/kaniko/executor --context docker/postgres --dockerfile docker/postgres/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
|
|
||||||
docker-build-openbao:
|
docker-build-openbao:
|
||||||
@@ -61,12 +59,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-openbao:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context docker/openbao --dockerfile docker/openbao/Dockerfile $DESTINATIONS
|
/kaniko/executor --context docker/openbao --dockerfile docker/openbao/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
|
|
||||||
# === Container Security Scans ===
|
# === Container Security Scans ===
|
||||||
@@ -87,7 +83,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -95,7 +91,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-postgres:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-postgres:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-postgres
|
- docker-build-postgres
|
||||||
@@ -116,7 +112,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -124,7 +120,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-openbao:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-openbao:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-openbao
|
- docker-build-openbao
|
||||||
@@ -167,7 +163,7 @@ steps:
|
|||||||
link_package "stack-postgres"
|
link_package "stack-postgres"
|
||||||
link_package "stack-openbao"
|
link_package "stack-openbao"
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- security-trivy-postgres
|
- security-trivy-postgres
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ when:
|
|||||||
- "turbo.json"
|
- "turbo.json"
|
||||||
- "package.json"
|
- "package.json"
|
||||||
- ".woodpecker/orchestrator.yml"
|
- ".woodpecker/orchestrator.yml"
|
||||||
|
- ".trivyignore"
|
||||||
|
|
||||||
variables:
|
variables:
|
||||||
- &node_image "node:24-alpine"
|
- &node_image "node:24-alpine"
|
||||||
@@ -108,12 +109,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-orchestrator:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context . --dockerfile apps/orchestrator/Dockerfile $DESTINATIONS
|
/kaniko/executor --context . --dockerfile apps/orchestrator/Dockerfile --snapshot-mode=redo $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- build
|
- build
|
||||||
@@ -136,7 +135,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -144,7 +143,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-orchestrator:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-orchestrator:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-orchestrator
|
- docker-build-orchestrator
|
||||||
@@ -186,7 +185,7 @@ steps:
|
|||||||
}
|
}
|
||||||
link_package "stack-orchestrator"
|
link_package "stack-orchestrator"
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- security-trivy-orchestrator
|
- security-trivy-orchestrator
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ when:
|
|||||||
- "turbo.json"
|
- "turbo.json"
|
||||||
- "package.json"
|
- "package.json"
|
||||||
- ".woodpecker/web.yml"
|
- ".woodpecker/web.yml"
|
||||||
|
- ".trivyignore"
|
||||||
|
|
||||||
variables:
|
variables:
|
||||||
- &node_image "node:24-alpine"
|
- &node_image "node:24-alpine"
|
||||||
@@ -119,12 +120,10 @@ steps:
|
|||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:$CI_COMMIT_TAG"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:$CI_COMMIT_TAG"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:latest"
|
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:latest"
|
||||||
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
|
|
||||||
DESTINATIONS="--destination git.mosaicstack.dev/mosaic/stack-web:dev"
|
|
||||||
fi
|
fi
|
||||||
/kaniko/executor --context . --dockerfile apps/web/Dockerfile --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
|
/kaniko/executor --context . --dockerfile apps/web/Dockerfile --snapshot-mode=redo --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- build
|
- build
|
||||||
@@ -147,7 +146,7 @@ steps:
|
|||||||
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
elif [ "$$CI_COMMIT_BRANCH" = "main" ]; then
|
||||||
SCAN_TAG="latest"
|
SCAN_TAG="latest"
|
||||||
else
|
else
|
||||||
SCAN_TAG="dev"
|
SCAN_TAG="latest"
|
||||||
fi
|
fi
|
||||||
mkdir -p ~/.docker
|
mkdir -p ~/.docker
|
||||||
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$$GITEA_USER\",\"password\":\"$$GITEA_TOKEN\"}}}" > ~/.docker/config.json
|
||||||
@@ -155,7 +154,7 @@ steps:
|
|||||||
--ignorefile .trivyignore \
|
--ignorefile .trivyignore \
|
||||||
git.mosaicstack.dev/mosaic/stack-web:$$SCAN_TAG
|
git.mosaicstack.dev/mosaic/stack-web:$$SCAN_TAG
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- docker-build-web
|
- docker-build-web
|
||||||
@@ -197,7 +196,7 @@ steps:
|
|||||||
}
|
}
|
||||||
link_package "stack-web"
|
link_package "stack-web"
|
||||||
when:
|
when:
|
||||||
- branch: [main, develop]
|
- branch: [main]
|
||||||
event: [push, manual, tag]
|
event: [push, manual, tag]
|
||||||
depends_on:
|
depends_on:
|
||||||
- security-trivy-web
|
- security-trivy-web
|
||||||
|
|||||||
74
AGENTS.md
74
AGENTS.md
@@ -1,37 +1,67 @@
|
|||||||
# Mosaic Stack — Agent Guidelines
|
# Mosaic Stack — Agent Guidelines
|
||||||
|
|
||||||
> **Any AI model, coding assistant, or framework working in this codebase MUST read and follow `CLAUDE.md` in the project root.**
|
## Load Order
|
||||||
|
|
||||||
`CLAUDE.md` is the authoritative source for:
|
1. `SOUL.md` (repo identity + behavior invariants)
|
||||||
|
2. `~/.config/mosaic/STANDARDS.md` (machine-wide standards rails)
|
||||||
|
3. `AGENTS.md` (repo-specific overlay)
|
||||||
|
4. `.mosaic/repo-hooks.sh` (repo lifecycle hooks)
|
||||||
|
|
||||||
- Technology stack and versions
|
## Runtime Contract
|
||||||
- TypeScript strict mode requirements
|
|
||||||
- ESLint Quality Rails (error-level enforcement)
|
|
||||||
- Prettier formatting rules
|
|
||||||
- Testing requirements (85% coverage, TDD)
|
|
||||||
- API conventions and database patterns
|
|
||||||
- Commit format and branch strategy
|
|
||||||
- PDA-friendly design principles
|
|
||||||
|
|
||||||
## Quick Rules (Read CLAUDE.md for Details)
|
- This file is authoritative for repo-local operations.
|
||||||
|
- `CLAUDE.md` is a compatibility pointer to `AGENTS.md`.
|
||||||
|
- Follow universal rails from `~/.config/mosaic/guides/` and `~/.config/mosaic/rails/`.
|
||||||
|
|
||||||
- **No `any` types** — use `unknown`, generics, or proper types
|
## Session Lifecycle
|
||||||
- **Explicit return types** on all functions
|
|
||||||
- **Type-only imports** — `import type { Foo }` for types
|
|
||||||
- **Double quotes**, semicolons, 2-space indent, 100 char width
|
|
||||||
- **`??` not `||`** for defaults, **`?.`** not `&&` chains
|
|
||||||
- **All promises** must be awaited or returned
|
|
||||||
- **85% test coverage** minimum, tests before implementation
|
|
||||||
|
|
||||||
## Updating Conventions
|
```bash
|
||||||
|
bash scripts/agent/session-start.sh
|
||||||
|
bash scripts/agent/critical.sh
|
||||||
|
bash scripts/agent/session-end.sh
|
||||||
|
```
|
||||||
|
|
||||||
If you discover new patterns, gotchas, or conventions while working in this codebase, **update `CLAUDE.md`** — not this file. This file exists solely to redirect agents that look for `AGENTS.md` to the canonical source.
|
Optional:
|
||||||
|
|
||||||
## Per-App Context
|
```bash
|
||||||
|
bash scripts/agent/log-limitation.sh "Short Name"
|
||||||
|
bash scripts/agent/orchestrator-daemon.sh status
|
||||||
|
bash scripts/agent/orchestrator-events.sh recent --limit 50
|
||||||
|
```
|
||||||
|
|
||||||
Each app directory has its own `AGENTS.md` for app-specific patterns:
|
## Repo Context
|
||||||
|
|
||||||
|
- Platform: multi-tenant personal assistant stack
|
||||||
|
- Monorepo: `pnpm` workspaces + Turborepo
|
||||||
|
- Core apps: `apps/api` (NestJS), `apps/web` (Next.js), orchestrator/coordinator services
|
||||||
|
- Infrastructure: Docker Compose + PostgreSQL + Valkey + Authentik
|
||||||
|
|
||||||
|
## Quick Command Set
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pnpm install
|
||||||
|
pnpm dev
|
||||||
|
pnpm test
|
||||||
|
pnpm lint
|
||||||
|
pnpm build
|
||||||
|
```
|
||||||
|
|
||||||
|
## Standards and Quality
|
||||||
|
|
||||||
|
- Enforce strict typing and no unsafe shortcuts.
|
||||||
|
- Keep lint/typecheck/tests green before completion.
|
||||||
|
- Prefer small, focused commits and clear change descriptions.
|
||||||
|
|
||||||
|
## App-Specific Overlays
|
||||||
|
|
||||||
- `apps/api/AGENTS.md`
|
- `apps/api/AGENTS.md`
|
||||||
- `apps/web/AGENTS.md`
|
- `apps/web/AGENTS.md`
|
||||||
- `apps/coordinator/AGENTS.md`
|
- `apps/coordinator/AGENTS.md`
|
||||||
- `apps/orchestrator/AGENTS.md`
|
- `apps/orchestrator/AGENTS.md`
|
||||||
|
|
||||||
|
## Additional Guidance
|
||||||
|
|
||||||
|
- Orchestrator guidance: `docs/claude/orchestrator.md`
|
||||||
|
- Security remediation context: `docs/reports/codebase-review-2026-02-05/01-security-review.md`
|
||||||
|
- Code quality context: `docs/reports/codebase-review-2026-02-05/02-code-quality-review.md`
|
||||||
|
- QA context: `docs/reports/codebase-review-2026-02-05/03-qa-test-coverage.md`
|
||||||
|
|||||||
479
CLAUDE.md
479
CLAUDE.md
@@ -1,477 +1,10 @@
|
|||||||
**Multi-tenant personal assistant platform with PostgreSQL backend, Authentik SSO, and MoltBot
|
# CLAUDE Compatibility Pointer
|
||||||
integration.**
|
|
||||||
|
|
||||||
## Conditional Documentation Loading
|
This file exists so Claude Code sessions load Mosaic standards.
|
||||||
|
|
||||||
| When working on... | Load this guide |
|
## MANDATORY — Read Before Any Response
|
||||||
| ---------------------------------------- | ------------------------------------------------------------------- |
|
|
||||||
| Orchestrating autonomous task completion | `docs/claude/orchestrator.md` |
|
|
||||||
| Security remediation (review findings) | `docs/reports/codebase-review-2026-02-05/01-security-review.md` |
|
|
||||||
| Code quality fixes | `docs/reports/codebase-review-2026-02-05/02-code-quality-review.md` |
|
|
||||||
| Test coverage gaps | `docs/reports/codebase-review-2026-02-05/03-qa-test-coverage.md` |
|
|
||||||
|
|
||||||
## Platform Templates
|
BEFORE responding to any user message, READ `~/.config/mosaic/AGENTS.md`.
|
||||||
|
|
||||||
Bootstrap templates are at `docs/templates/`. See `docs/templates/README.md` for usage.
|
That file is the universal agent configuration. Do NOT respond until you have loaded it.
|
||||||
|
Then read the project-local `AGENTS.md` in this repository for project-specific guidance.
|
||||||
## Project Overview
|
|
||||||
|
|
||||||
Mosaic Stack is a standalone platform that provides:
|
|
||||||
|
|
||||||
- Multi-user workspaces with team sharing
|
|
||||||
- Task, event, and project management
|
|
||||||
- Gantt charts and Kanban boards
|
|
||||||
- MoltBot integration via plugins (stock MoltBot + mosaic-plugin-\*)
|
|
||||||
- PDA-friendly design throughout
|
|
||||||
|
|
||||||
**Repository:** git.mosaicstack.dev/mosaic/stack
|
|
||||||
**Versioning:** Start at 0.0.1, MVP = 0.1.0
|
|
||||||
|
|
||||||
## Technology Stack
|
|
||||||
|
|
||||||
| Layer | Technology |
|
|
||||||
| ---------- | -------------------------------------------- |
|
|
||||||
| Frontend | Next.js 16 + React + TailwindCSS + Shadcn/ui |
|
|
||||||
| Backend | NestJS + Prisma ORM |
|
|
||||||
| Database | PostgreSQL 17 + pgvector |
|
|
||||||
| Cache | Valkey (Redis-compatible) |
|
|
||||||
| Auth | Authentik (OIDC) |
|
|
||||||
| AI | Ollama (configurable: local or remote) |
|
|
||||||
| Messaging | MoltBot (stock + Mosaic plugins) |
|
|
||||||
| Real-time | WebSockets (Socket.io) |
|
|
||||||
| Monorepo | pnpm workspaces + TurboRepo |
|
|
||||||
| Testing | Vitest + Playwright |
|
|
||||||
| Deployment | Docker + docker-compose |
|
|
||||||
|
|
||||||
## Repository Structure
|
|
||||||
|
|
||||||
mosaic-stack/
|
|
||||||
├── apps/
|
|
||||||
│ ├── api/ # mosaic-api (NestJS)
|
|
||||||
│ │ ├── src/
|
|
||||||
│ │ │ ├── auth/ # Authentik OIDC
|
|
||||||
│ │ │ ├── tasks/ # Task management
|
|
||||||
│ │ │ ├── events/ # Calendar/events
|
|
||||||
│ │ │ ├── projects/ # Project management
|
|
||||||
│ │ │ ├── brain/ # MoltBot integration
|
|
||||||
│ │ │ └── activity/ # Activity logging
|
|
||||||
│ │ ├── prisma/
|
|
||||||
│ │ │ └── schema.prisma
|
|
||||||
│ │ └── Dockerfile
|
|
||||||
│ └── web/ # mosaic-web (Next.js 16)
|
|
||||||
│ ├── app/
|
|
||||||
│ ├── components/
|
|
||||||
│ └── Dockerfile
|
|
||||||
├── packages/
|
|
||||||
│ ├── shared/ # Shared types, utilities
|
|
||||||
│ ├── ui/ # Shared UI components
|
|
||||||
│ └── config/ # Shared configuration
|
|
||||||
├── plugins/
|
|
||||||
│ ├── mosaic-plugin-brain/ # MoltBot skill: API queries
|
|
||||||
│ ├── mosaic-plugin-calendar/ # MoltBot skill: Calendar
|
|
||||||
│ ├── mosaic-plugin-tasks/ # MoltBot skill: Tasks
|
|
||||||
│ └── mosaic-plugin-gantt/ # MoltBot skill: Gantt
|
|
||||||
├── docker/
|
|
||||||
│ ├── docker-compose.yml # Turnkey deployment
|
|
||||||
│ └── init-scripts/ # PostgreSQL init
|
|
||||||
├── docs/
|
|
||||||
│ ├── SETUP.md
|
|
||||||
│ ├── CONFIGURATION.md
|
|
||||||
│ └── DESIGN-PRINCIPLES.md
|
|
||||||
├── .env.example
|
|
||||||
├── turbo.json
|
|
||||||
├── pnpm-workspace.yaml
|
|
||||||
└── README.md
|
|
||||||
|
|
||||||
## Development Workflow
|
|
||||||
|
|
||||||
### Branch Strategy
|
|
||||||
|
|
||||||
- `main` — stable releases only
|
|
||||||
- `develop` — active development (default working branch)
|
|
||||||
- `feature/*` — feature branches from develop
|
|
||||||
- `fix/*` — bug fix branches
|
|
||||||
|
|
||||||
### Starting Work
|
|
||||||
|
|
||||||
````bash
|
|
||||||
git checkout develop
|
|
||||||
git pull --rebase
|
|
||||||
pnpm install
|
|
||||||
|
|
||||||
Running Locally
|
|
||||||
|
|
||||||
# Start all services (Docker)
|
|
||||||
docker compose up -d
|
|
||||||
|
|
||||||
# Or run individually for development
|
|
||||||
pnpm dev # All apps
|
|
||||||
pnpm dev:api # API only
|
|
||||||
pnpm dev:web # Web only
|
|
||||||
|
|
||||||
Testing
|
|
||||||
|
|
||||||
pnpm test # Run all tests
|
|
||||||
pnpm test:api # API tests only
|
|
||||||
pnpm test:web # Web tests only
|
|
||||||
pnpm test:e2e # Playwright E2E
|
|
||||||
|
|
||||||
Building
|
|
||||||
|
|
||||||
pnpm build # Build all
|
|
||||||
pnpm build:api # Build API
|
|
||||||
pnpm build:web # Build Web
|
|
||||||
|
|
||||||
Design Principles (NON-NEGOTIABLE)
|
|
||||||
|
|
||||||
PDA-Friendly Language
|
|
||||||
|
|
||||||
NEVER use demanding language. This is critical.
|
|
||||||
┌─────────────┬──────────────────────┐
|
|
||||||
│ ❌ NEVER │ ✅ ALWAYS │
|
|
||||||
├─────────────┼──────────────────────┤
|
|
||||||
│ OVERDUE │ Target passed │
|
|
||||||
├─────────────┼──────────────────────┤
|
|
||||||
│ URGENT │ Approaching target │
|
|
||||||
├─────────────┼──────────────────────┤
|
|
||||||
│ MUST DO │ Scheduled for │
|
|
||||||
├─────────────┼──────────────────────┤
|
|
||||||
│ CRITICAL │ High priority │
|
|
||||||
├─────────────┼──────────────────────┤
|
|
||||||
│ YOU NEED TO │ Consider / Option to │
|
|
||||||
├─────────────┼──────────────────────┤
|
|
||||||
│ REQUIRED │ Recommended │
|
|
||||||
└─────────────┴──────────────────────┘
|
|
||||||
Visual Indicators
|
|
||||||
|
|
||||||
Use status indicators consistently:
|
|
||||||
- 🟢 On track / Active
|
|
||||||
- 🔵 Upcoming / Scheduled
|
|
||||||
- ⏸️ Paused / On hold
|
|
||||||
- 💤 Dormant / Inactive
|
|
||||||
- ⚪ Not started
|
|
||||||
|
|
||||||
Display Principles
|
|
||||||
|
|
||||||
1. 10-second scannability — Key info visible immediately
|
|
||||||
2. Visual chunking — Clear sections with headers
|
|
||||||
3. Single-line items — Compact, scannable lists
|
|
||||||
4. Date grouping — Today, Tomorrow, This Week headers
|
|
||||||
5. Progressive disclosure — Details on click, not upfront
|
|
||||||
6. Calm colors — No aggressive reds for status
|
|
||||||
|
|
||||||
Reference
|
|
||||||
|
|
||||||
See docs/DESIGN-PRINCIPLES.md for complete guidelines.
|
|
||||||
For original patterns, see: jarvis-brain/docs/DESIGN-PRINCIPLES.md
|
|
||||||
|
|
||||||
API Conventions
|
|
||||||
|
|
||||||
Endpoints
|
|
||||||
|
|
||||||
GET /api/{resource} # List (with pagination, filters)
|
|
||||||
GET /api/{resource}/:id # Get single
|
|
||||||
POST /api/{resource} # Create
|
|
||||||
PATCH /api/{resource}/:id # Update
|
|
||||||
DELETE /api/{resource}/:id # Delete
|
|
||||||
|
|
||||||
Response Format
|
|
||||||
|
|
||||||
// Success
|
|
||||||
{
|
|
||||||
data: T | T[],
|
|
||||||
meta?: { total, page, limit }
|
|
||||||
}
|
|
||||||
|
|
||||||
// Error
|
|
||||||
{
|
|
||||||
error: {
|
|
||||||
code: string,
|
|
||||||
message: string,
|
|
||||||
details?: any
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Brain Query API
|
|
||||||
|
|
||||||
POST /api/brain/query
|
|
||||||
{
|
|
||||||
query: "what's on my calendar",
|
|
||||||
context?: { view: "dashboard", workspace_id: "..." }
|
|
||||||
}
|
|
||||||
|
|
||||||
Database Conventions
|
|
||||||
|
|
||||||
Multi-Tenant (RLS)
|
|
||||||
|
|
||||||
All workspace-scoped tables use Row-Level Security:
|
|
||||||
- Always include workspace_id in queries
|
|
||||||
- RLS policies enforce isolation
|
|
||||||
- Set session context for current user
|
|
||||||
|
|
||||||
Prisma Commands
|
|
||||||
|
|
||||||
pnpm prisma:generate # Generate client
|
|
||||||
pnpm prisma:migrate # Run migrations
|
|
||||||
pnpm prisma:studio # Open Prisma Studio
|
|
||||||
pnpm prisma:seed # Seed development data
|
|
||||||
|
|
||||||
MoltBot Plugin Development
|
|
||||||
|
|
||||||
Plugins live in plugins/mosaic-plugin-*/ and follow MoltBot skill format:
|
|
||||||
|
|
||||||
# plugins/mosaic-plugin-brain/SKILL.md
|
|
||||||
---
|
|
||||||
name: mosaic-plugin-brain
|
|
||||||
description: Query Mosaic Stack for tasks, events, projects
|
|
||||||
version: 0.0.1
|
|
||||||
triggers:
|
|
||||||
- "what's on my calendar"
|
|
||||||
- "show my tasks"
|
|
||||||
- "morning briefing"
|
|
||||||
tools:
|
|
||||||
- mosaic_api
|
|
||||||
---
|
|
||||||
|
|
||||||
# Plugin instructions here...
|
|
||||||
|
|
||||||
Key principle: MoltBot remains stock. All customization via plugins only.
|
|
||||||
|
|
||||||
Environment Variables
|
|
||||||
|
|
||||||
See .env.example for all variables. Key ones:
|
|
||||||
|
|
||||||
# Database
|
|
||||||
DATABASE_URL=postgresql://mosaic:password@localhost:5432/mosaic
|
|
||||||
|
|
||||||
# Auth
|
|
||||||
AUTHENTIK_URL=https://auth.example.com
|
|
||||||
AUTHENTIK_CLIENT_ID=mosaic-stack
|
|
||||||
AUTHENTIK_CLIENT_SECRET=...
|
|
||||||
|
|
||||||
# Ollama
|
|
||||||
OLLAMA_MODE=local|remote
|
|
||||||
OLLAMA_ENDPOINT=http://localhost:11434
|
|
||||||
|
|
||||||
# MoltBot
|
|
||||||
MOSAIC_API_TOKEN=...
|
|
||||||
|
|
||||||
Issue Tracking
|
|
||||||
|
|
||||||
Issues are tracked at: https://git.mosaicstack.dev/mosaic/stack/issues
|
|
||||||
|
|
||||||
Labels
|
|
||||||
|
|
||||||
- Priority: p0 (critical), p1 (high), p2 (medium), p3 (low)
|
|
||||||
- Type: api, web, database, auth, plugin, ai, devops, docs, migration, security, testing,
|
|
||||||
performance, setup
|
|
||||||
|
|
||||||
Milestones
|
|
||||||
|
|
||||||
- M1-Foundation (0.0.x)
|
|
||||||
- M2-MultiTenant (0.0.x)
|
|
||||||
- M3-Features (0.0.x)
|
|
||||||
- M4-MoltBot (0.0.x)
|
|
||||||
- M5-Migration (0.1.0 MVP)
|
|
||||||
|
|
||||||
Commit Format
|
|
||||||
|
|
||||||
<type>(#issue): Brief description
|
|
||||||
|
|
||||||
Detailed explanation if needed.
|
|
||||||
|
|
||||||
Fixes #123
|
|
||||||
Types: feat, fix, docs, test, refactor, chore
|
|
||||||
|
|
||||||
Test-Driven Development (TDD) - REQUIRED
|
|
||||||
|
|
||||||
**All code must follow TDD principles. This is non-negotiable.**
|
|
||||||
|
|
||||||
TDD Workflow (Red-Green-Refactor)
|
|
||||||
|
|
||||||
1. **RED** — Write a failing test first
|
|
||||||
- Write the test for new functionality BEFORE writing any implementation code
|
|
||||||
- Run the test to verify it fails (proves the test works)
|
|
||||||
- Commit message: `test(#issue): add test for [feature]`
|
|
||||||
|
|
||||||
2. **GREEN** — Write minimal code to make the test pass
|
|
||||||
- Implement only enough code to pass the test
|
|
||||||
- Run tests to verify they pass
|
|
||||||
- Commit message: `feat(#issue): implement [feature]`
|
|
||||||
|
|
||||||
3. **REFACTOR** — Clean up the code while keeping tests green
|
|
||||||
- Improve code quality, remove duplication, enhance readability
|
|
||||||
- Ensure all tests still pass after refactoring
|
|
||||||
- Commit message: `refactor(#issue): improve [component]`
|
|
||||||
|
|
||||||
Testing Requirements
|
|
||||||
|
|
||||||
- **Minimum 85% code coverage** for all new code
|
|
||||||
- **Write tests BEFORE implementation** — no exceptions
|
|
||||||
- Test files must be co-located with source files:
|
|
||||||
- `feature.service.ts` → `feature.service.spec.ts`
|
|
||||||
- `component.tsx` → `component.test.tsx`
|
|
||||||
- All tests must pass before creating a PR
|
|
||||||
- Use descriptive test names: `it("should return user when valid token provided")`
|
|
||||||
- Group related tests with `describe()` blocks
|
|
||||||
- Mock external dependencies (database, APIs, file system)
|
|
||||||
|
|
||||||
Test Types
|
|
||||||
|
|
||||||
- **Unit Tests** — Test individual functions/methods in isolation
|
|
||||||
- **Integration Tests** — Test module interactions (e.g., service + database)
|
|
||||||
- **E2E Tests** — Test complete user workflows with Playwright
|
|
||||||
|
|
||||||
Running Tests
|
|
||||||
|
|
||||||
```bash
|
|
||||||
pnpm test # Run all tests
|
|
||||||
pnpm test:watch # Watch mode for active development
|
|
||||||
pnpm test:coverage # Generate coverage report
|
|
||||||
pnpm test:api # API tests only
|
|
||||||
pnpm test:web # Web tests only
|
|
||||||
pnpm test:e2e # Playwright E2E tests
|
|
||||||
````
|
|
||||||
|
|
||||||
Coverage Verification
|
|
||||||
|
|
||||||
After implementing a feature, verify coverage meets requirements:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
pnpm test:coverage
|
|
||||||
# Check the coverage report in coverage/index.html
|
|
||||||
# Ensure your files show ≥85% coverage
|
|
||||||
```
|
|
||||||
|
|
||||||
TDD Anti-Patterns to Avoid
|
|
||||||
|
|
||||||
❌ Writing implementation code before tests
|
|
||||||
❌ Writing tests after implementation is complete
|
|
||||||
❌ Skipping tests for "simple" code
|
|
||||||
❌ Testing implementation details instead of behavior
|
|
||||||
❌ Writing tests that don't fail when they should
|
|
||||||
❌ Committing code with failing tests
|
|
||||||
|
|
||||||
Quality Rails - Mechanical Code Quality Enforcement
|
|
||||||
|
|
||||||
**Status:** ACTIVE (2026-01-30) - Strict enforcement enabled ✅
|
|
||||||
|
|
||||||
Quality Rails provides mechanical enforcement of code quality standards through pre-commit hooks
|
|
||||||
and CI/CD pipelines. See `docs/quality-rails-status.md` for full details.
|
|
||||||
|
|
||||||
What's Enforced (NOW ACTIVE):
|
|
||||||
|
|
||||||
- ✅ **Type Safety** - Blocks explicit `any` types (@typescript-eslint/no-explicit-any: error)
|
|
||||||
- ✅ **Return Types** - Requires explicit return types on exported functions
|
|
||||||
- ✅ **Security** - Detects SQL injection, XSS, unsafe regex (eslint-plugin-security)
|
|
||||||
- ✅ **Promise Safety** - Blocks floating promises and misused promises
|
|
||||||
- ✅ **Code Formatting** - Auto-formats with Prettier on commit
|
|
||||||
- ✅ **Build Verification** - Type-checks before allowing commit
|
|
||||||
- ✅ **Secret Scanning** - Blocks hardcoded passwords/API keys (git-secrets)
|
|
||||||
|
|
||||||
Current Status:
|
|
||||||
|
|
||||||
- ✅ **Pre-commit hooks**: ACTIVE - Blocks commits with violations
|
|
||||||
- ✅ **Strict enforcement**: ENABLED - Package-level enforcement
|
|
||||||
- 🟡 **CI/CD pipeline**: Ready (.woodpecker.yml created, not yet configured)
|
|
||||||
|
|
||||||
How It Works:
|
|
||||||
|
|
||||||
**Package-Level Enforcement** - If you touch ANY file in a package with violations,
|
|
||||||
you must fix ALL violations in that package before committing. This forces incremental
|
|
||||||
cleanup while preventing new violations.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
- Edit `apps/api/src/tasks/tasks.service.ts`
|
|
||||||
- Pre-commit hook runs lint on ENTIRE `@mosaic/api` package
|
|
||||||
- If `@mosaic/api` has violations → Commit BLOCKED
|
|
||||||
- Fix all violations in `@mosaic/api` → Commit allowed
|
|
||||||
|
|
||||||
Next Steps:
|
|
||||||
|
|
||||||
1. Fix violations package-by-package as you work in them
|
|
||||||
2. Priority: Fix explicit `any` types and type safety issues first
|
|
||||||
3. Configure Woodpecker CI to run quality gates on all PRs
|
|
||||||
|
|
||||||
Why This Matters:
|
|
||||||
|
|
||||||
Based on validation of 50 real production issues, Quality Rails mechanically prevents ~70%
|
|
||||||
of quality issues including:
|
|
||||||
|
|
||||||
- Hardcoded passwords
|
|
||||||
- Type safety violations
|
|
||||||
- SQL injection vulnerabilities
|
|
||||||
- Build failures
|
|
||||||
- Test coverage gaps
|
|
||||||
|
|
||||||
**Mechanical enforcement works. Process compliance doesn't.**
|
|
||||||
|
|
||||||
See `docs/quality-rails-status.md` for detailed roadmap and violation breakdown.
|
|
||||||
|
|
||||||
Example TDD Session
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# 1. RED - Write failing test
|
|
||||||
# Edit: feature.service.spec.ts
|
|
||||||
# Add test for getUserById()
|
|
||||||
pnpm test:watch # Watch it fail
|
|
||||||
git add feature.service.spec.ts
|
|
||||||
git commit -m "test(#42): add test for getUserById"
|
|
||||||
|
|
||||||
# 2. GREEN - Implement minimal code
|
|
||||||
# Edit: feature.service.ts
|
|
||||||
# Add getUserById() method
|
|
||||||
pnpm test:watch # Watch it pass
|
|
||||||
git add feature.service.ts
|
|
||||||
git commit -m "feat(#42): implement getUserById"
|
|
||||||
|
|
||||||
# 3. REFACTOR - Improve code quality
|
|
||||||
# Edit: feature.service.ts
|
|
||||||
# Extract helper, improve naming
|
|
||||||
pnpm test:watch # Ensure still passing
|
|
||||||
git add feature.service.ts
|
|
||||||
git commit -m "refactor(#42): extract user mapping logic"
|
|
||||||
```
|
|
||||||
|
|
||||||
Docker Deployment
|
|
||||||
|
|
||||||
Turnkey (includes everything)
|
|
||||||
|
|
||||||
docker compose up -d
|
|
||||||
|
|
||||||
Customized (external services)
|
|
||||||
|
|
||||||
Create docker-compose.override.yml to:
|
|
||||||
|
|
||||||
- Point to external PostgreSQL/Valkey/Ollama
|
|
||||||
- Disable bundled services
|
|
||||||
|
|
||||||
See docs/DOCKER.md for details.
|
|
||||||
|
|
||||||
Key Documentation
|
|
||||||
┌───────────────────────────┬───────────────────────┐
|
|
||||||
│ Document │ Purpose │
|
|
||||||
├───────────────────────────┼───────────────────────┤
|
|
||||||
│ docs/SETUP.md │ Installation guide │
|
|
||||||
├───────────────────────────┼───────────────────────┤
|
|
||||||
│ docs/CONFIGURATION.md │ All config options │
|
|
||||||
├───────────────────────────┼───────────────────────┤
|
|
||||||
│ docs/DESIGN-PRINCIPLES.md │ PDA-friendly patterns │
|
|
||||||
├───────────────────────────┼───────────────────────┤
|
|
||||||
│ docs/DOCKER.md │ Docker deployment │
|
|
||||||
├───────────────────────────┼───────────────────────┤
|
|
||||||
│ docs/API.md │ API documentation │
|
|
||||||
└───────────────────────────┴───────────────────────┘
|
|
||||||
Related Repositories
|
|
||||||
┌──────────────┬──────────────────────────────────────────────┐
|
|
||||||
│ Repo │ Purpose │
|
|
||||||
├──────────────┼──────────────────────────────────────────────┤
|
|
||||||
│ jarvis-brain │ Original JSON-based brain (migration source) │
|
|
||||||
├──────────────┼──────────────────────────────────────────────┤
|
|
||||||
│ MoltBot │ Stock messaging gateway │
|
|
||||||
└──────────────┴──────────────────────────────────────────────┘
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
Mosaic Stack v0.0.x — Building the future of personal assistants.
|
|
||||||
|
|||||||
36
Makefile
36
Makefile
@@ -1,4 +1,4 @@
|
|||||||
.PHONY: help install dev build test docker-up docker-down docker-logs docker-ps docker-build docker-restart docker-test clean
|
.PHONY: help install dev build test docker-up docker-down docker-logs docker-ps docker-build docker-restart docker-test speech-up speech-down speech-logs clean matrix-up matrix-down matrix-logs matrix-setup-bot
|
||||||
|
|
||||||
# Default target
|
# Default target
|
||||||
help:
|
help:
|
||||||
@@ -24,6 +24,17 @@ help:
|
|||||||
@echo " make docker-test Run Docker smoke test"
|
@echo " make docker-test Run Docker smoke test"
|
||||||
@echo " make docker-test-traefik Run Traefik integration tests"
|
@echo " make docker-test-traefik Run Traefik integration tests"
|
||||||
@echo ""
|
@echo ""
|
||||||
|
@echo "Speech Services:"
|
||||||
|
@echo " make speech-up Start speech services (STT + TTS)"
|
||||||
|
@echo " make speech-down Stop speech services"
|
||||||
|
@echo " make speech-logs View speech service logs"
|
||||||
|
@echo ""
|
||||||
|
@echo "Matrix Dev Environment:"
|
||||||
|
@echo " make matrix-up Start Matrix services (Synapse + Element)"
|
||||||
|
@echo " make matrix-down Stop Matrix services"
|
||||||
|
@echo " make matrix-logs View Matrix service logs"
|
||||||
|
@echo " make matrix-setup-bot Create bot account and get access token"
|
||||||
|
@echo ""
|
||||||
@echo "Database:"
|
@echo "Database:"
|
||||||
@echo " make db-migrate Run database migrations"
|
@echo " make db-migrate Run database migrations"
|
||||||
@echo " make db-seed Seed development data"
|
@echo " make db-seed Seed development data"
|
||||||
@@ -85,6 +96,29 @@ docker-test:
|
|||||||
docker-test-traefik:
|
docker-test-traefik:
|
||||||
./tests/integration/docker/traefik.test.sh all
|
./tests/integration/docker/traefik.test.sh all
|
||||||
|
|
||||||
|
# Speech services
|
||||||
|
speech-up:
|
||||||
|
docker compose -f docker-compose.yml -f docker-compose.speech.yml up -d speaches kokoro-tts
|
||||||
|
|
||||||
|
speech-down:
|
||||||
|
docker compose -f docker-compose.yml -f docker-compose.speech.yml down --remove-orphans
|
||||||
|
|
||||||
|
speech-logs:
|
||||||
|
docker compose -f docker-compose.yml -f docker-compose.speech.yml logs -f speaches kokoro-tts
|
||||||
|
|
||||||
|
# Matrix Dev Environment
|
||||||
|
matrix-up:
|
||||||
|
docker compose -f docker/docker-compose.yml -f docker/docker-compose.matrix.yml up -d
|
||||||
|
|
||||||
|
matrix-down:
|
||||||
|
docker compose -f docker/docker-compose.yml -f docker/docker-compose.matrix.yml down
|
||||||
|
|
||||||
|
matrix-logs:
|
||||||
|
docker compose -f docker/docker-compose.yml -f docker/docker-compose.matrix.yml logs -f synapse element-web
|
||||||
|
|
||||||
|
matrix-setup-bot:
|
||||||
|
docker/matrix/scripts/setup-bot.sh
|
||||||
|
|
||||||
# Database operations
|
# Database operations
|
||||||
db-migrate:
|
db-migrate:
|
||||||
cd apps/api && pnpm prisma:migrate
|
cd apps/api && pnpm prisma:migrate
|
||||||
|
|||||||
63
README.md
63
README.md
@@ -19,19 +19,20 @@ Mosaic Stack is a modern, PDA-friendly platform designed to help users manage th
|
|||||||
|
|
||||||
## Technology Stack
|
## Technology Stack
|
||||||
|
|
||||||
| Layer | Technology |
|
| Layer | Technology |
|
||||||
| -------------- | -------------------------------------------- |
|
| -------------- | ---------------------------------------------- |
|
||||||
| **Frontend** | Next.js 16 + React + TailwindCSS + Shadcn/ui |
|
| **Frontend** | Next.js 16 + React + TailwindCSS + Shadcn/ui |
|
||||||
| **Backend** | NestJS + Prisma ORM |
|
| **Backend** | NestJS + Prisma ORM |
|
||||||
| **Database** | PostgreSQL 17 + pgvector |
|
| **Database** | PostgreSQL 17 + pgvector |
|
||||||
| **Cache** | Valkey (Redis-compatible) |
|
| **Cache** | Valkey (Redis-compatible) |
|
||||||
| **Auth** | Authentik (OIDC) via BetterAuth |
|
| **Auth** | Authentik (OIDC) via BetterAuth |
|
||||||
| **AI** | Ollama (local or remote) |
|
| **AI** | Ollama (local or remote) |
|
||||||
| **Messaging** | MoltBot (stock + plugins) |
|
| **Messaging** | MoltBot (stock + plugins) |
|
||||||
| **Real-time** | WebSockets (Socket.io) |
|
| **Real-time** | WebSockets (Socket.io) |
|
||||||
| **Monorepo** | pnpm workspaces + TurboRepo |
|
| **Speech** | Speaches (STT) + Kokoro/Chatterbox/Piper (TTS) |
|
||||||
| **Testing** | Vitest + Playwright |
|
| **Monorepo** | pnpm workspaces + TurboRepo |
|
||||||
| **Deployment** | Docker + docker-compose |
|
| **Testing** | Vitest + Playwright |
|
||||||
|
| **Deployment** | Docker + docker-compose |
|
||||||
|
|
||||||
## Quick Start
|
## Quick Start
|
||||||
|
|
||||||
@@ -89,7 +90,7 @@ docker compose down
|
|||||||
If you prefer manual installation, you'll need:
|
If you prefer manual installation, you'll need:
|
||||||
|
|
||||||
- **Docker mode:** Docker 24+ and Docker Compose
|
- **Docker mode:** Docker 24+ and Docker Compose
|
||||||
- **Native mode:** Node.js 22+, pnpm 10+, PostgreSQL 17+
|
- **Native mode:** Node.js 24+, pnpm 10+, PostgreSQL 17+
|
||||||
|
|
||||||
The installer handles these automatically.
|
The installer handles these automatically.
|
||||||
|
|
||||||
@@ -231,7 +232,7 @@ docker compose -f docker-compose.openbao.yml up -d
|
|||||||
sleep 30 # Wait for auto-initialization
|
sleep 30 # Wait for auto-initialization
|
||||||
|
|
||||||
# 5. Deploy swarm stack
|
# 5. Deploy swarm stack
|
||||||
IMAGE_TAG=dev ./scripts/deploy-swarm.sh mosaic
|
IMAGE_TAG=latest ./scripts/deploy-swarm.sh mosaic
|
||||||
|
|
||||||
# 6. Check deployment status
|
# 6. Check deployment status
|
||||||
docker stack services mosaic
|
docker stack services mosaic
|
||||||
@@ -356,6 +357,29 @@ Mosaic Stack includes a sophisticated agent orchestration system for autonomous
|
|||||||
|
|
||||||
See [Agent Orchestration Design](docs/design/agent-orchestration.md) for architecture details.
|
See [Agent Orchestration Design](docs/design/agent-orchestration.md) for architecture details.
|
||||||
|
|
||||||
|
## Speech Services
|
||||||
|
|
||||||
|
Mosaic Stack includes integrated speech-to-text (STT) and text-to-speech (TTS) capabilities through a modular provider architecture. Each component is optional and independently configurable.
|
||||||
|
|
||||||
|
- **Speech-to-Text** - Transcribe audio files and real-time audio streams using Whisper (via Speaches)
|
||||||
|
- **Text-to-Speech** - Synthesize speech with 54+ voices across 8 languages (via Kokoro, CPU-based)
|
||||||
|
- **Premium Voice Cloning** - Clone voices from audio samples with emotion control (via Chatterbox, GPU)
|
||||||
|
- **Fallback TTS** - Ultra-lightweight CPU fallback for low-resource environments (via Piper/OpenedAI Speech)
|
||||||
|
- **WebSocket Streaming** - Real-time streaming transcription via Socket.IO `/speech` namespace
|
||||||
|
- **Automatic Fallback** - TTS tier system with graceful degradation (premium -> default -> fallback)
|
||||||
|
|
||||||
|
**Quick Start:**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Start speech services alongside core stack
|
||||||
|
make speech-up
|
||||||
|
|
||||||
|
# Or with Docker Compose directly
|
||||||
|
docker compose -f docker-compose.yml -f docker-compose.speech.yml up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
See [Speech Services Documentation](docs/SPEECH.md) for architecture details, API reference, provider configuration, and deployment options.
|
||||||
|
|
||||||
## Current Implementation Status
|
## Current Implementation Status
|
||||||
|
|
||||||
### ✅ Completed (v0.0.1-0.0.6)
|
### ✅ Completed (v0.0.1-0.0.6)
|
||||||
@@ -502,10 +526,9 @@ KNOWLEDGE_CACHE_TTL=300 # 5 minutes
|
|||||||
|
|
||||||
### Branch Strategy
|
### Branch Strategy
|
||||||
|
|
||||||
- `main` — Stable releases only
|
- `main` — Trunk branch (all development merges here)
|
||||||
- `develop` — Active development (default working branch)
|
- `feature/*` — Feature branches from main
|
||||||
- `feature/*` — Feature branches from develop
|
- `fix/*` — Bug fix branches from main
|
||||||
- `fix/*` — Bug fix branches
|
|
||||||
|
|
||||||
### Running Locally
|
### Running Locally
|
||||||
|
|
||||||
@@ -715,7 +738,7 @@ See [Type Sharing Strategy](docs/2-development/3-type-sharing/1-strategy.md) for
|
|||||||
4. Run tests: `pnpm test`
|
4. Run tests: `pnpm test`
|
||||||
5. Build: `pnpm build`
|
5. Build: `pnpm build`
|
||||||
6. Commit with conventional format: `feat(#issue): Description`
|
6. Commit with conventional format: `feat(#issue): Description`
|
||||||
7. Push and create a pull request to `develop`
|
7. Push and create a pull request to `main`
|
||||||
|
|
||||||
### Commit Format
|
### Commit Format
|
||||||
|
|
||||||
|
|||||||
20
SOUL.md
Normal file
20
SOUL.md
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# Mosaic Stack Soul
|
||||||
|
|
||||||
|
You are Jarvis for the Mosaic Stack repository, running on the current agent runtime.
|
||||||
|
|
||||||
|
## Behavioral Invariants
|
||||||
|
|
||||||
|
- Identity first: answer identity prompts as Jarvis for this repository.
|
||||||
|
- Implementation detail second: runtime (Codex/Claude/OpenCode/etc.) is secondary metadata.
|
||||||
|
- Be proactive: surface risks, blockers, and next actions without waiting.
|
||||||
|
- Be calm and clear: keep responses concise, chunked, and PDA-friendly.
|
||||||
|
- Respect canonical sources:
|
||||||
|
- Repo operations and conventions: `AGENTS.md`
|
||||||
|
- Machine-wide rails: `~/.config/mosaic/STANDARDS.md`
|
||||||
|
- Repo lifecycle hooks: `.mosaic/repo-hooks.sh`
|
||||||
|
|
||||||
|
## Guardrails
|
||||||
|
|
||||||
|
- Do not claim completion without verification evidence.
|
||||||
|
- Do not bypass lint/type/test quality gates.
|
||||||
|
- Prefer explicit assumptions and concrete file/command references.
|
||||||
@@ -4,15 +4,22 @@
|
|||||||
|
|
||||||
## Patterns
|
## Patterns
|
||||||
|
|
||||||
<!-- Add module-specific patterns as you discover them -->
|
- **Config validation pattern**: Config files use exported validation functions + typed getter functions (not class-validator). See `auth.config.ts`, `federation.config.ts`, `speech/speech.config.ts`. Pattern: export `isXEnabled()`, `validateXConfig()`, and `getXConfig()` functions.
|
||||||
|
- **Config registerAs**: `speech.config.ts` also exports a `registerAs("speech", ...)` factory for NestJS ConfigModule namespaced injection. Use `ConfigModule.forFeature(speechConfig)` in module imports and access via `this.config.get<string>('speech.stt.baseUrl')`.
|
||||||
|
- **Conditional config validation**: When a service has an enabled flag (e.g., `STT_ENABLED`), URL/connection vars are only required when enabled. Validation throws with a helpful message suggesting how to disable.
|
||||||
|
- **Boolean env parsing**: Use `value === "true" || value === "1"` pattern. No default-true -- all services default to disabled when env var is unset.
|
||||||
|
|
||||||
## Gotchas
|
## Gotchas
|
||||||
|
|
||||||
<!-- Add things that trip up agents in this module -->
|
- **Prisma client must be generated** before `tsc --noEmit` will pass. Run `pnpm prisma:generate` first. Pre-existing type errors from Prisma are expected in worktrees without generated client.
|
||||||
|
- **Pre-commit hooks**: lint-staged runs on staged files. If other packages' files are staged, their lint must pass too. Only stage files you intend to commit.
|
||||||
|
- **vitest runs all test files**: Even when targeting a specific test file, vitest loads all spec files. Many will fail if Prisma client isn't generated -- this is expected. Check only your target file's pass/fail status.
|
||||||
|
|
||||||
## Key Files
|
## Key Files
|
||||||
|
|
||||||
| File | Purpose |
|
| File | Purpose |
|
||||||
| ---- | ------- |
|
| ------------------------------------- | ---------------------------------------------------------------------- |
|
||||||
|
| `src/speech/speech.config.ts` | Speech services env var validation and typed config (STT, TTS, limits) |
|
||||||
<!-- Add important files in this directory -->
|
| `src/speech/speech.config.spec.ts` | Unit tests for speech config validation (51 tests) |
|
||||||
|
| `src/auth/auth.config.ts` | Auth/OIDC config validation (reference pattern) |
|
||||||
|
| `src/federation/federation.config.ts` | Federation config validation (reference pattern) |
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
# syntax=docker/dockerfile:1
|
|
||||||
# Enable BuildKit features for cache mounts
|
|
||||||
|
|
||||||
# Base image for all stages
|
# Base image for all stages
|
||||||
FROM node:24-alpine AS base
|
# Uses Debian slim (glibc) instead of Alpine (musl) because native Node.js addons
|
||||||
|
# (matrix-sdk-crypto-nodejs, Prisma engines) require glibc-compatible binaries.
|
||||||
|
FROM node:24-slim AS base
|
||||||
|
|
||||||
# Install pnpm globally
|
# Install pnpm globally
|
||||||
RUN corepack enable && corepack prepare pnpm@10.27.0 --activate
|
RUN corepack enable && corepack prepare pnpm@10.27.0 --activate
|
||||||
@@ -25,9 +24,8 @@ COPY packages/ui/package.json ./packages/ui/
|
|||||||
COPY packages/config/package.json ./packages/config/
|
COPY packages/config/package.json ./packages/config/
|
||||||
COPY apps/api/package.json ./apps/api/
|
COPY apps/api/package.json ./apps/api/
|
||||||
|
|
||||||
# Install dependencies with pnpm store cache
|
# Install dependencies (no cache mount — Kaniko builds are ephemeral in CI)
|
||||||
RUN --mount=type=cache,id=pnpm-store,target=/root/.local/share/pnpm/store \
|
RUN pnpm install --frozen-lockfile
|
||||||
pnpm install --frozen-lockfile
|
|
||||||
|
|
||||||
# ======================
|
# ======================
|
||||||
# Builder stage
|
# Builder stage
|
||||||
@@ -53,16 +51,16 @@ RUN pnpm turbo build --filter=@mosaic/api --force
|
|||||||
# ======================
|
# ======================
|
||||||
# Production stage
|
# Production stage
|
||||||
# ======================
|
# ======================
|
||||||
FROM node:24-alpine AS production
|
FROM node:24-slim AS production
|
||||||
|
|
||||||
# Remove npm (unused in production — we use pnpm) to reduce attack surface
|
# Install dumb-init for proper signal handling (static binary from GitHub,
|
||||||
RUN rm -rf /usr/local/lib/node_modules/npm /usr/local/bin/npm /usr/local/bin/npx
|
# avoids apt-get which fails under Kaniko with bookworm GPG signature errors)
|
||||||
|
ADD https://github.com/Yelp/dumb-init/releases/download/v1.2.5/dumb-init_1.2.5_x86_64 /usr/local/bin/dumb-init
|
||||||
|
|
||||||
# Install dumb-init for proper signal handling
|
# Single RUN to minimize Kaniko filesystem snapshots (each RUN = full snapshot)
|
||||||
RUN apk add --no-cache dumb-init
|
RUN rm -rf /usr/local/lib/node_modules/npm /usr/local/bin/npm /usr/local/bin/npx \
|
||||||
|
&& chmod 755 /usr/local/bin/dumb-init \
|
||||||
# Create non-root user
|
&& groupadd -g 1001 nodejs && useradd -m -u 1001 -g nodejs nestjs
|
||||||
RUN addgroup -g 1001 -S nodejs && adduser -S nestjs -u 1001
|
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
|
|||||||
@@ -27,7 +27,7 @@
|
|||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@anthropic-ai/sdk": "^0.72.1",
|
"@anthropic-ai/sdk": "^0.72.1",
|
||||||
"@mosaic/shared": "workspace:*",
|
"@mosaic/shared": "workspace:*",
|
||||||
"@mosaicstack/telemetry-client": "^0.1.0",
|
"@mosaicstack/telemetry-client": "^0.1.1",
|
||||||
"@nestjs/axios": "^4.0.1",
|
"@nestjs/axios": "^4.0.1",
|
||||||
"@nestjs/bullmq": "^11.0.4",
|
"@nestjs/bullmq": "^11.0.4",
|
||||||
"@nestjs/common": "^11.1.12",
|
"@nestjs/common": "^11.1.12",
|
||||||
@@ -65,6 +65,7 @@
|
|||||||
"marked": "^17.0.1",
|
"marked": "^17.0.1",
|
||||||
"marked-gfm-heading-id": "^4.1.3",
|
"marked-gfm-heading-id": "^4.1.3",
|
||||||
"marked-highlight": "^2.2.3",
|
"marked-highlight": "^2.2.3",
|
||||||
|
"matrix-bot-sdk": "^0.8.0",
|
||||||
"ollama": "^0.6.3",
|
"ollama": "^0.6.3",
|
||||||
"openai": "^6.17.0",
|
"openai": "^6.17.0",
|
||||||
"reflect-metadata": "^0.2.2",
|
"reflect-metadata": "^0.2.2",
|
||||||
|
|||||||
@@ -1,3 +1,38 @@
|
|||||||
|
-- RecreateEnum: FormalityLevel was dropped in 20260129235248_add_link_storage_fields
|
||||||
|
CREATE TYPE "FormalityLevel" AS ENUM ('VERY_CASUAL', 'CASUAL', 'NEUTRAL', 'FORMAL', 'VERY_FORMAL');
|
||||||
|
|
||||||
|
-- RecreateTable: personalities was dropped in 20260129235248_add_link_storage_fields
|
||||||
|
-- Recreated with current schema (display_name, system_prompt, temperature, etc.)
|
||||||
|
CREATE TABLE "personalities" (
|
||||||
|
"id" UUID NOT NULL,
|
||||||
|
"workspace_id" UUID NOT NULL,
|
||||||
|
"name" TEXT NOT NULL,
|
||||||
|
"display_name" TEXT NOT NULL,
|
||||||
|
"description" TEXT,
|
||||||
|
"system_prompt" TEXT NOT NULL,
|
||||||
|
"temperature" DOUBLE PRECISION,
|
||||||
|
"max_tokens" INTEGER,
|
||||||
|
"llm_provider_instance_id" UUID,
|
||||||
|
"is_default" BOOLEAN NOT NULL DEFAULT false,
|
||||||
|
"is_enabled" BOOLEAN NOT NULL DEFAULT true,
|
||||||
|
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "personalities_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
-- CreateIndex: personalities
|
||||||
|
CREATE UNIQUE INDEX "personalities_id_workspace_id_key" ON "personalities"("id", "workspace_id");
|
||||||
|
CREATE UNIQUE INDEX "personalities_workspace_id_name_key" ON "personalities"("workspace_id", "name");
|
||||||
|
CREATE INDEX "personalities_workspace_id_idx" ON "personalities"("workspace_id");
|
||||||
|
CREATE INDEX "personalities_workspace_id_is_default_idx" ON "personalities"("workspace_id", "is_default");
|
||||||
|
CREATE INDEX "personalities_workspace_id_is_enabled_idx" ON "personalities"("workspace_id", "is_enabled");
|
||||||
|
CREATE INDEX "personalities_llm_provider_instance_id_idx" ON "personalities"("llm_provider_instance_id");
|
||||||
|
|
||||||
|
-- AddForeignKey: personalities
|
||||||
|
ALTER TABLE "personalities" ADD CONSTRAINT "personalities_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||||
|
ALTER TABLE "personalities" ADD CONSTRAINT "personalities_llm_provider_instance_id_fkey" FOREIGN KEY ("llm_provider_instance_id") REFERENCES "llm_provider_instances"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||||
|
|
||||||
-- CreateTable
|
-- CreateTable
|
||||||
CREATE TABLE "cron_schedules" (
|
CREATE TABLE "cron_schedules" (
|
||||||
"id" UUID NOT NULL,
|
"id" UUID NOT NULL,
|
||||||
|
|||||||
@@ -0,0 +1,2 @@
|
|||||||
|
-- AlterTable
|
||||||
|
ALTER TABLE "workspaces" ADD COLUMN "matrix_room_id" TEXT;
|
||||||
@@ -0,0 +1,49 @@
|
|||||||
|
-- Fix schema drift: tables, indexes, and constraints defined in schema.prisma
|
||||||
|
-- but never created (or dropped and never recreated) by prior migrations.
|
||||||
|
|
||||||
|
-- ============================================
|
||||||
|
-- CreateTable: instances (Federation module)
|
||||||
|
-- Never created in any prior migration
|
||||||
|
-- ============================================
|
||||||
|
CREATE TABLE "instances" (
|
||||||
|
"id" UUID NOT NULL,
|
||||||
|
"instance_id" TEXT NOT NULL,
|
||||||
|
"name" TEXT NOT NULL,
|
||||||
|
"url" TEXT NOT NULL,
|
||||||
|
"public_key" TEXT NOT NULL,
|
||||||
|
"private_key" TEXT NOT NULL,
|
||||||
|
"capabilities" JSONB NOT NULL DEFAULT '{}',
|
||||||
|
"metadata" JSONB NOT NULL DEFAULT '{}',
|
||||||
|
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
"updated_at" TIMESTAMPTZ NOT NULL,
|
||||||
|
|
||||||
|
CONSTRAINT "instances_pkey" PRIMARY KEY ("id")
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE UNIQUE INDEX "instances_instance_id_key" ON "instances"("instance_id");
|
||||||
|
|
||||||
|
-- ============================================
|
||||||
|
-- Recreate dropped unique index on knowledge_links
|
||||||
|
-- Created in 20260129220645_add_knowledge_module, dropped in
|
||||||
|
-- 20260129235248_add_link_storage_fields, never recreated.
|
||||||
|
-- ============================================
|
||||||
|
CREATE UNIQUE INDEX "knowledge_links_source_id_target_id_key" ON "knowledge_links"("source_id", "target_id");
|
||||||
|
|
||||||
|
-- ============================================
|
||||||
|
-- Missing @@unique([id, workspaceId]) composite indexes
|
||||||
|
-- Defined in schema.prisma but never created in migrations.
|
||||||
|
-- (agent_tasks and runner_jobs already have these.)
|
||||||
|
-- ============================================
|
||||||
|
CREATE UNIQUE INDEX "tasks_id_workspace_id_key" ON "tasks"("id", "workspace_id");
|
||||||
|
CREATE UNIQUE INDEX "events_id_workspace_id_key" ON "events"("id", "workspace_id");
|
||||||
|
CREATE UNIQUE INDEX "projects_id_workspace_id_key" ON "projects"("id", "workspace_id");
|
||||||
|
CREATE UNIQUE INDEX "activity_logs_id_workspace_id_key" ON "activity_logs"("id", "workspace_id");
|
||||||
|
CREATE UNIQUE INDEX "domains_id_workspace_id_key" ON "domains"("id", "workspace_id");
|
||||||
|
CREATE UNIQUE INDEX "ideas_id_workspace_id_key" ON "ideas"("id", "workspace_id");
|
||||||
|
CREATE UNIQUE INDEX "user_layouts_id_workspace_id_key" ON "user_layouts"("id", "workspace_id");
|
||||||
|
|
||||||
|
-- ============================================
|
||||||
|
-- Missing index on agent_tasks.agent_type
|
||||||
|
-- Defined as @@index([agentType]) in schema.prisma
|
||||||
|
-- ============================================
|
||||||
|
CREATE INDEX "agent_tasks_agent_type_idx" ON "agent_tasks"("agent_type");
|
||||||
@@ -261,12 +261,13 @@ model UserPreference {
|
|||||||
}
|
}
|
||||||
|
|
||||||
model Workspace {
|
model Workspace {
|
||||||
id String @id @default(uuid()) @db.Uuid
|
id String @id @default(uuid()) @db.Uuid
|
||||||
name String
|
name String
|
||||||
ownerId String @map("owner_id") @db.Uuid
|
ownerId String @map("owner_id") @db.Uuid
|
||||||
settings Json @default("{}")
|
settings Json @default("{}")
|
||||||
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
matrixRoomId String? @map("matrix_room_id")
|
||||||
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
|
||||||
|
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
|
||||||
|
|
||||||
// Relations
|
// Relations
|
||||||
owner User @relation("WorkspaceOwner", fields: [ownerId], references: [id], onDelete: Cascade)
|
owner User @relation("WorkspaceOwner", fields: [ownerId], references: [id], onDelete: Cascade)
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import { Controller, Get } from "@nestjs/common";
|
import { Controller, Get } from "@nestjs/common";
|
||||||
|
import { SkipThrottle } from "@nestjs/throttler";
|
||||||
import { AppService } from "./app.service";
|
import { AppService } from "./app.service";
|
||||||
import { PrismaService } from "./prisma/prisma.service";
|
import { PrismaService } from "./prisma/prisma.service";
|
||||||
import type { ApiResponse, HealthStatus } from "@mosaic/shared";
|
import type { ApiResponse, HealthStatus } from "@mosaic/shared";
|
||||||
@@ -17,6 +18,7 @@ export class AppController {
|
|||||||
}
|
}
|
||||||
|
|
||||||
@Get("health")
|
@Get("health")
|
||||||
|
@SkipThrottle()
|
||||||
async getHealth(): Promise<ApiResponse<HealthStatus>> {
|
async getHealth(): Promise<ApiResponse<HealthStatus>> {
|
||||||
const dbHealthy = await this.prisma.isHealthy();
|
const dbHealthy = await this.prisma.isHealthy();
|
||||||
const dbInfo = await this.prisma.getConnectionInfo();
|
const dbInfo = await this.prisma.getConnectionInfo();
|
||||||
|
|||||||
@@ -38,6 +38,7 @@ import { CoordinatorIntegrationModule } from "./coordinator-integration/coordina
|
|||||||
import { FederationModule } from "./federation/federation.module";
|
import { FederationModule } from "./federation/federation.module";
|
||||||
import { CredentialsModule } from "./credentials/credentials.module";
|
import { CredentialsModule } from "./credentials/credentials.module";
|
||||||
import { MosaicTelemetryModule } from "./mosaic-telemetry";
|
import { MosaicTelemetryModule } from "./mosaic-telemetry";
|
||||||
|
import { SpeechModule } from "./speech/speech.module";
|
||||||
import { RlsContextInterceptor } from "./common/interceptors/rls-context.interceptor";
|
import { RlsContextInterceptor } from "./common/interceptors/rls-context.interceptor";
|
||||||
|
|
||||||
@Module({
|
@Module({
|
||||||
@@ -99,6 +100,7 @@ import { RlsContextInterceptor } from "./common/interceptors/rls-context.interce
|
|||||||
FederationModule,
|
FederationModule,
|
||||||
CredentialsModule,
|
CredentialsModule,
|
||||||
MosaicTelemetryModule,
|
MosaicTelemetryModule,
|
||||||
|
SpeechModule,
|
||||||
],
|
],
|
||||||
controllers: [AppController, CsrfController],
|
controllers: [AppController, CsrfController],
|
||||||
providers: [
|
providers: [
|
||||||
|
|||||||
@@ -12,7 +12,10 @@ import { PrismaClient, Prisma } from "@prisma/client";
|
|||||||
import { randomUUID as uuid } from "crypto";
|
import { randomUUID as uuid } from "crypto";
|
||||||
import { runWithRlsClient, getRlsClient } from "../prisma/rls-context.provider";
|
import { runWithRlsClient, getRlsClient } from "../prisma/rls-context.provider";
|
||||||
|
|
||||||
describe.skipIf(!process.env.DATABASE_URL)(
|
const shouldRunDbIntegrationTests =
|
||||||
|
process.env.RUN_DB_TESTS === "true" && Boolean(process.env.DATABASE_URL);
|
||||||
|
|
||||||
|
describe.skipIf(!shouldRunDbIntegrationTests)(
|
||||||
"Auth Tables RLS Policies (requires DATABASE_URL)",
|
"Auth Tables RLS Policies (requires DATABASE_URL)",
|
||||||
() => {
|
() => {
|
||||||
let prisma: PrismaClient;
|
let prisma: PrismaClient;
|
||||||
@@ -28,7 +31,7 @@ describe.skipIf(!process.env.DATABASE_URL)(
|
|||||||
|
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
// Skip setup if DATABASE_URL is not available
|
// Skip setup if DATABASE_URL is not available
|
||||||
if (!process.env.DATABASE_URL) {
|
if (!shouldRunDbIntegrationTests) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -49,7 +52,7 @@ describe.skipIf(!process.env.DATABASE_URL)(
|
|||||||
|
|
||||||
afterAll(async () => {
|
afterAll(async () => {
|
||||||
// Skip cleanup if DATABASE_URL is not available or prisma not initialized
|
// Skip cleanup if DATABASE_URL is not available or prisma not initialized
|
||||||
if (!process.env.DATABASE_URL || !prisma) {
|
if (!shouldRunDbIntegrationTests || !prisma) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,30 @@
|
|||||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||||
import { isOidcEnabled, validateOidcConfig } from "./auth.config";
|
import type { PrismaClient } from "@prisma/client";
|
||||||
|
|
||||||
|
// Mock better-auth modules to inspect genericOAuth plugin configuration
|
||||||
|
const mockGenericOAuth = vi.fn().mockReturnValue({ id: "generic-oauth" });
|
||||||
|
const mockBetterAuth = vi.fn().mockReturnValue({ handler: vi.fn() });
|
||||||
|
const mockPrismaAdapter = vi.fn().mockReturnValue({});
|
||||||
|
|
||||||
|
vi.mock("better-auth/plugins", () => ({
|
||||||
|
genericOAuth: (...args: unknown[]) => mockGenericOAuth(...args),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth", () => ({
|
||||||
|
betterAuth: (...args: unknown[]) => mockBetterAuth(...args),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth/adapters/prisma", () => ({
|
||||||
|
prismaAdapter: (...args: unknown[]) => mockPrismaAdapter(...args),
|
||||||
|
}));
|
||||||
|
|
||||||
|
import {
|
||||||
|
isOidcEnabled,
|
||||||
|
validateOidcConfig,
|
||||||
|
createAuth,
|
||||||
|
getTrustedOrigins,
|
||||||
|
getBetterAuthBaseUrl,
|
||||||
|
} from "./auth.config";
|
||||||
|
|
||||||
describe("auth.config", () => {
|
describe("auth.config", () => {
|
||||||
// Store original env vars to restore after each test
|
// Store original env vars to restore after each test
|
||||||
@@ -11,6 +36,13 @@ describe("auth.config", () => {
|
|||||||
delete process.env.OIDC_ISSUER;
|
delete process.env.OIDC_ISSUER;
|
||||||
delete process.env.OIDC_CLIENT_ID;
|
delete process.env.OIDC_CLIENT_ID;
|
||||||
delete process.env.OIDC_CLIENT_SECRET;
|
delete process.env.OIDC_CLIENT_SECRET;
|
||||||
|
delete process.env.OIDC_REDIRECT_URI;
|
||||||
|
delete process.env.NODE_ENV;
|
||||||
|
delete process.env.BETTER_AUTH_URL;
|
||||||
|
delete process.env.NEXT_PUBLIC_APP_URL;
|
||||||
|
delete process.env.NEXT_PUBLIC_API_URL;
|
||||||
|
delete process.env.TRUSTED_ORIGINS;
|
||||||
|
delete process.env.COOKIE_DOMAIN;
|
||||||
});
|
});
|
||||||
|
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
@@ -70,6 +102,7 @@ describe("auth.config", () => {
|
|||||||
it("should throw when OIDC_ISSUER is missing", () => {
|
it("should throw when OIDC_ISSUER is missing", () => {
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER");
|
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER");
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC authentication is enabled");
|
expect(() => validateOidcConfig()).toThrow("OIDC authentication is enabled");
|
||||||
@@ -78,6 +111,7 @@ describe("auth.config", () => {
|
|||||||
it("should throw when OIDC_CLIENT_ID is missing", () => {
|
it("should throw when OIDC_CLIENT_ID is missing", () => {
|
||||||
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_CLIENT_ID");
|
expect(() => validateOidcConfig()).toThrow("OIDC_CLIENT_ID");
|
||||||
});
|
});
|
||||||
@@ -85,13 +119,22 @@ describe("auth.config", () => {
|
|||||||
it("should throw when OIDC_CLIENT_SECRET is missing", () => {
|
it("should throw when OIDC_CLIENT_SECRET is missing", () => {
|
||||||
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_CLIENT_SECRET");
|
expect(() => validateOidcConfig()).toThrow("OIDC_CLIENT_SECRET");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should throw when OIDC_REDIRECT_URI is missing", () => {
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||||
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).toThrow("OIDC_REDIRECT_URI");
|
||||||
|
});
|
||||||
|
|
||||||
it("should throw when all required vars are missing", () => {
|
it("should throw when all required vars are missing", () => {
|
||||||
expect(() => validateOidcConfig()).toThrow(
|
expect(() => validateOidcConfig()).toThrow(
|
||||||
"OIDC_ISSUER, OIDC_CLIENT_ID, OIDC_CLIENT_SECRET"
|
"OIDC_ISSUER, OIDC_CLIENT_ID, OIDC_CLIENT_SECRET, OIDC_REDIRECT_URI"
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -99,9 +142,10 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ISSUER = "";
|
process.env.OIDC_ISSUER = "";
|
||||||
process.env.OIDC_CLIENT_ID = "";
|
process.env.OIDC_CLIENT_ID = "";
|
||||||
process.env.OIDC_CLIENT_SECRET = "";
|
process.env.OIDC_CLIENT_SECRET = "";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow(
|
expect(() => validateOidcConfig()).toThrow(
|
||||||
"OIDC_ISSUER, OIDC_CLIENT_ID, OIDC_CLIENT_SECRET"
|
"OIDC_ISSUER, OIDC_CLIENT_ID, OIDC_CLIENT_SECRET, OIDC_REDIRECT_URI"
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -109,6 +153,7 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ISSUER = " ";
|
process.env.OIDC_ISSUER = " ";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER");
|
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER");
|
||||||
});
|
});
|
||||||
@@ -117,6 +162,7 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic";
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER must end with a trailing slash");
|
expect(() => validateOidcConfig()).toThrow("OIDC_ISSUER must end with a trailing slash");
|
||||||
expect(() => validateOidcConfig()).toThrow("https://auth.example.com/application/o/mosaic");
|
expect(() => validateOidcConfig()).toThrow("https://auth.example.com/application/o/mosaic");
|
||||||
@@ -126,6 +172,7 @@ describe("auth.config", () => {
|
|||||||
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||||
process.env.OIDC_CLIENT_ID = "test-client-id";
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
expect(() => validateOidcConfig()).not.toThrow();
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
});
|
});
|
||||||
@@ -133,6 +180,537 @@ describe("auth.config", () => {
|
|||||||
it("should suggest disabling OIDC in error message", () => {
|
it("should suggest disabling OIDC in error message", () => {
|
||||||
expect(() => validateOidcConfig()).toThrow("OIDC_ENABLED=false");
|
expect(() => validateOidcConfig()).toThrow("OIDC_ENABLED=false");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe("OIDC_REDIRECT_URI validation", () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||||
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw when OIDC_REDIRECT_URI is not a valid URL", () => {
|
||||||
|
process.env.OIDC_REDIRECT_URI = "not-a-url";
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).toThrow("OIDC_REDIRECT_URI must be a valid URL");
|
||||||
|
expect(() => validateOidcConfig()).toThrow("not-a-url");
|
||||||
|
expect(() => validateOidcConfig()).toThrow("Parse error:");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw when OIDC_REDIRECT_URI path does not start with /auth/oauth2/callback", () => {
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/oauth/callback";
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).toThrow(
|
||||||
|
'OIDC_REDIRECT_URI path must start with "/auth/oauth2/callback"'
|
||||||
|
);
|
||||||
|
expect(() => validateOidcConfig()).toThrow("/oauth/callback");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should accept a valid OIDC_REDIRECT_URI with /auth/oauth2/callback path", () => {
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should accept OIDC_REDIRECT_URI with exactly /auth/oauth2/callback path", () => {
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback";
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should warn but not throw when using localhost in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "http://localhost:3000/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
|
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
|
expect(warnSpy).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining("OIDC_REDIRECT_URI uses localhost")
|
||||||
|
);
|
||||||
|
|
||||||
|
warnSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should warn but not throw when using 127.0.0.1 in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "http://127.0.0.1:3000/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
|
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
|
expect(warnSpy).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining("OIDC_REDIRECT_URI uses localhost")
|
||||||
|
);
|
||||||
|
|
||||||
|
warnSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not warn about localhost when not in production", () => {
|
||||||
|
process.env.NODE_ENV = "development";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "http://localhost:3000/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
|
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||||
|
|
||||||
|
expect(() => validateOidcConfig()).not.toThrow();
|
||||||
|
expect(warnSpy).not.toHaveBeenCalled();
|
||||||
|
|
||||||
|
warnSpy.mockRestore();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("createAuth - genericOAuth PKCE configuration", () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
mockGenericOAuth.mockClear();
|
||||||
|
mockBetterAuth.mockClear();
|
||||||
|
mockPrismaAdapter.mockClear();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should enable PKCE in the genericOAuth provider config when OIDC is enabled", () => {
|
||||||
|
process.env.OIDC_ENABLED = "true";
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||||
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockGenericOAuth).toHaveBeenCalledOnce();
|
||||||
|
const callArgs = mockGenericOAuth.mock.calls[0][0] as {
|
||||||
|
config: Array<{ pkce?: boolean; redirectURI?: string }>;
|
||||||
|
};
|
||||||
|
expect(callArgs.config[0].pkce).toBe(true);
|
||||||
|
expect(callArgs.config[0].redirectURI).toBe(
|
||||||
|
"https://app.example.com/auth/oauth2/callback/authentik"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not call genericOAuth when OIDC is disabled", () => {
|
||||||
|
process.env.OIDC_ENABLED = "false";
|
||||||
|
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockGenericOAuth).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw if OIDC_CLIENT_ID is missing when OIDC is enabled", () => {
|
||||||
|
process.env.OIDC_ENABLED = "true";
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||||
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
// OIDC_CLIENT_ID deliberately not set
|
||||||
|
|
||||||
|
// validateOidcConfig will throw first, so we need to bypass it
|
||||||
|
// by setting the var then deleting it after validation
|
||||||
|
// Instead, test via the validation path which is fine — but let's
|
||||||
|
// verify the plugin-level guard by using a direct approach:
|
||||||
|
// Set env to pass validateOidcConfig, then delete OIDC_CLIENT_ID
|
||||||
|
// The validateOidcConfig will catch this first, which is correct behavior
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
expect(() => createAuth(mockPrisma)).toThrow("OIDC_CLIENT_ID");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw if OIDC_CLIENT_SECRET is missing when OIDC is enabled", () => {
|
||||||
|
process.env.OIDC_ENABLED = "true";
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/application/o/mosaic-stack/";
|
||||||
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
// OIDC_CLIENT_SECRET deliberately not set
|
||||||
|
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
expect(() => createAuth(mockPrisma)).toThrow("OIDC_CLIENT_SECRET");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw if OIDC_ISSUER is missing when OIDC is enabled", () => {
|
||||||
|
process.env.OIDC_ENABLED = "true";
|
||||||
|
process.env.OIDC_CLIENT_ID = "test-client-id";
|
||||||
|
process.env.OIDC_CLIENT_SECRET = "test-client-secret";
|
||||||
|
process.env.OIDC_REDIRECT_URI = "https://app.example.com/auth/oauth2/callback/authentik";
|
||||||
|
// OIDC_ISSUER deliberately not set
|
||||||
|
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
expect(() => createAuth(mockPrisma)).toThrow("OIDC_ISSUER");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("getTrustedOrigins", () => {
|
||||||
|
it("should return localhost URLs when NODE_ENV is not production", () => {
|
||||||
|
process.env.NODE_ENV = "development";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("http://localhost:3000");
|
||||||
|
expect(origins).toContain("http://localhost:3001");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return localhost URLs when NODE_ENV is not set", () => {
|
||||||
|
// NODE_ENV is deleted in beforeEach, so it's undefined here
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("http://localhost:3000");
|
||||||
|
expect(origins).toContain("http://localhost:3001");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should exclude localhost URLs in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).not.toContain("http://localhost:3000");
|
||||||
|
expect(origins).not.toContain("http://localhost:3001");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should parse TRUSTED_ORIGINS comma-separated values", () => {
|
||||||
|
process.env.TRUSTED_ORIGINS = "https://app.mosaicstack.dev,https://api.mosaicstack.dev";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://app.mosaicstack.dev");
|
||||||
|
expect(origins).toContain("https://api.mosaicstack.dev");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should trim whitespace from TRUSTED_ORIGINS entries", () => {
|
||||||
|
process.env.TRUSTED_ORIGINS = " https://app.mosaicstack.dev , https://api.mosaicstack.dev ";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://app.mosaicstack.dev");
|
||||||
|
expect(origins).toContain("https://api.mosaicstack.dev");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should filter out empty strings from TRUSTED_ORIGINS", () => {
|
||||||
|
process.env.TRUSTED_ORIGINS = "https://app.mosaicstack.dev,,, ,";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://app.mosaicstack.dev");
|
||||||
|
// No empty strings in the result
|
||||||
|
origins.forEach((o) => expect(o).not.toBe(""));
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should include NEXT_PUBLIC_APP_URL", () => {
|
||||||
|
process.env.NEXT_PUBLIC_APP_URL = "https://my-app.example.com";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://my-app.example.com");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should include NEXT_PUBLIC_API_URL", () => {
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://my-api.example.com";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://my-api.example.com");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should deduplicate origins", () => {
|
||||||
|
process.env.NEXT_PUBLIC_APP_URL = "http://localhost:3000";
|
||||||
|
process.env.TRUSTED_ORIGINS = "http://localhost:3000,http://localhost:3001";
|
||||||
|
// NODE_ENV not set, so localhost fallbacks are also added
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
const countLocalhost3000 = origins.filter((o) => o === "http://localhost:3000").length;
|
||||||
|
const countLocalhost3001 = origins.filter((o) => o === "http://localhost:3001").length;
|
||||||
|
expect(countLocalhost3000).toBe(1);
|
||||||
|
expect(countLocalhost3001).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle all env vars missing gracefully", () => {
|
||||||
|
// All env vars deleted in beforeEach; NODE_ENV is also deleted (not production)
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
// Should still return localhost fallbacks since not in production
|
||||||
|
expect(origins).toContain("http://localhost:3000");
|
||||||
|
expect(origins).toContain("http://localhost:3001");
|
||||||
|
expect(origins).toHaveLength(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return empty array when all env vars missing in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toHaveLength(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should combine all sources correctly", () => {
|
||||||
|
process.env.NEXT_PUBLIC_APP_URL = "https://app.mosaicstack.dev";
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://api.mosaicstack.dev";
|
||||||
|
process.env.TRUSTED_ORIGINS = "https://extra.example.com";
|
||||||
|
process.env.NODE_ENV = "development";
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://app.mosaicstack.dev");
|
||||||
|
expect(origins).toContain("https://api.mosaicstack.dev");
|
||||||
|
expect(origins).toContain("https://extra.example.com");
|
||||||
|
expect(origins).toContain("http://localhost:3000");
|
||||||
|
expect(origins).toContain("http://localhost:3001");
|
||||||
|
expect(origins).toHaveLength(5);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reject invalid URLs in TRUSTED_ORIGINS with a warning including error details", () => {
|
||||||
|
process.env.TRUSTED_ORIGINS = "not-a-url,https://valid.example.com";
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
|
||||||
|
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://valid.example.com");
|
||||||
|
expect(origins).not.toContain("not-a-url");
|
||||||
|
expect(warnSpy).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining('Ignoring invalid URL in TRUSTED_ORIGINS: "not-a-url"')
|
||||||
|
);
|
||||||
|
// Verify that error detail is included in the warning
|
||||||
|
const warnCall = warnSpy.mock.calls.find(
|
||||||
|
(call) => typeof call[0] === "string" && call[0].includes("not-a-url")
|
||||||
|
);
|
||||||
|
expect(warnCall).toBeDefined();
|
||||||
|
expect(warnCall![0]).toMatch(/\(.*\)$/);
|
||||||
|
|
||||||
|
warnSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reject non-HTTP origins in TRUSTED_ORIGINS with a warning", () => {
|
||||||
|
process.env.TRUSTED_ORIGINS = "ftp://files.example.com,https://valid.example.com";
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
|
||||||
|
const warnSpy = vi.spyOn(console, "warn").mockImplementation(() => {});
|
||||||
|
|
||||||
|
const origins = getTrustedOrigins();
|
||||||
|
|
||||||
|
expect(origins).toContain("https://valid.example.com");
|
||||||
|
expect(origins).not.toContain("ftp://files.example.com");
|
||||||
|
expect(warnSpy).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining("Ignoring non-HTTP origin in TRUSTED_ORIGINS")
|
||||||
|
);
|
||||||
|
|
||||||
|
warnSpy.mockRestore();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("createAuth - session and cookie configuration", () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
mockGenericOAuth.mockClear();
|
||||||
|
mockBetterAuth.mockClear();
|
||||||
|
mockPrismaAdapter.mockClear();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should configure session expiresIn to 7 days (604800 seconds)", () => {
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
session: { expiresIn: number; updateAge: number };
|
||||||
|
};
|
||||||
|
expect(config.session.expiresIn).toBe(604800);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should configure session updateAge to 2 hours (7200 seconds)", () => {
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
session: { expiresIn: number; updateAge: number };
|
||||||
|
};
|
||||||
|
expect(config.session.updateAge).toBe(7200);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should configure BetterAuth database ID generation as UUID", () => {
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
database: {
|
||||||
|
generateId: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.database.generateId).toBe("uuid");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set httpOnly cookie attribute to true", () => {
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
defaultCookieAttributes: {
|
||||||
|
httpOnly: boolean;
|
||||||
|
secure: boolean;
|
||||||
|
sameSite: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.defaultCookieAttributes.httpOnly).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set sameSite cookie attribute to lax", () => {
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
defaultCookieAttributes: {
|
||||||
|
httpOnly: boolean;
|
||||||
|
secure: boolean;
|
||||||
|
sameSite: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.defaultCookieAttributes.sameSite).toBe("lax");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set secure cookie attribute to true in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://api.example.com";
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
defaultCookieAttributes: {
|
||||||
|
httpOnly: boolean;
|
||||||
|
secure: boolean;
|
||||||
|
sameSite: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.defaultCookieAttributes.secure).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set secure cookie attribute to false in non-production", () => {
|
||||||
|
process.env.NODE_ENV = "development";
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
defaultCookieAttributes: {
|
||||||
|
httpOnly: boolean;
|
||||||
|
secure: boolean;
|
||||||
|
sameSite: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.defaultCookieAttributes.secure).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set cookie domain when COOKIE_DOMAIN env var is present", () => {
|
||||||
|
process.env.COOKIE_DOMAIN = ".mosaicstack.dev";
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
defaultCookieAttributes: {
|
||||||
|
httpOnly: boolean;
|
||||||
|
secure: boolean;
|
||||||
|
sameSite: string;
|
||||||
|
domain?: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.defaultCookieAttributes.domain).toBe(".mosaicstack.dev");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not set cookie domain when COOKIE_DOMAIN env var is absent", () => {
|
||||||
|
delete process.env.COOKIE_DOMAIN;
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as {
|
||||||
|
advanced: {
|
||||||
|
defaultCookieAttributes: {
|
||||||
|
httpOnly: boolean;
|
||||||
|
secure: boolean;
|
||||||
|
sameSite: string;
|
||||||
|
domain?: string;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
expect(config.advanced.defaultCookieAttributes.domain).toBeUndefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("getBetterAuthBaseUrl", () => {
|
||||||
|
it("should prefer BETTER_AUTH_URL when set", () => {
|
||||||
|
process.env.BETTER_AUTH_URL = "https://auth-base.example.com";
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://api.example.com";
|
||||||
|
|
||||||
|
expect(getBetterAuthBaseUrl()).toBe("https://auth-base.example.com");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should fall back to NEXT_PUBLIC_API_URL when BETTER_AUTH_URL is not set", () => {
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://api.example.com";
|
||||||
|
|
||||||
|
expect(getBetterAuthBaseUrl()).toBe("https://api.example.com");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw when base URL is invalid", () => {
|
||||||
|
process.env.BETTER_AUTH_URL = "not-a-url";
|
||||||
|
|
||||||
|
expect(() => getBetterAuthBaseUrl()).toThrow("BetterAuth base URL must be a valid URL");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw when base URL is missing in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
|
||||||
|
expect(() => getBetterAuthBaseUrl()).toThrow("Missing BetterAuth base URL in production");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw when base URL is not https in production", () => {
|
||||||
|
process.env.NODE_ENV = "production";
|
||||||
|
process.env.BETTER_AUTH_URL = "http://api.example.com";
|
||||||
|
|
||||||
|
expect(() => getBetterAuthBaseUrl()).toThrow(
|
||||||
|
"BetterAuth base URL must use https in production"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("createAuth - baseURL wiring", () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
mockBetterAuth.mockClear();
|
||||||
|
mockPrismaAdapter.mockClear();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should pass BETTER_AUTH_URL into BetterAuth config", () => {
|
||||||
|
process.env.BETTER_AUTH_URL = "https://api.mosaicstack.dev";
|
||||||
|
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as { baseURL?: string };
|
||||||
|
expect(config.baseURL).toBe("https://api.mosaicstack.dev");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should pass NEXT_PUBLIC_API_URL into BetterAuth config when BETTER_AUTH_URL is absent", () => {
|
||||||
|
process.env.NEXT_PUBLIC_API_URL = "https://api.fallback.dev";
|
||||||
|
|
||||||
|
const mockPrisma = {} as PrismaClient;
|
||||||
|
createAuth(mockPrisma);
|
||||||
|
|
||||||
|
expect(mockBetterAuth).toHaveBeenCalledOnce();
|
||||||
|
const config = mockBetterAuth.mock.calls[0][0] as { baseURL?: string };
|
||||||
|
expect(config.baseURL).toBe("https://api.fallback.dev");
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -6,7 +6,47 @@ import type { PrismaClient } from "@prisma/client";
|
|||||||
/**
|
/**
|
||||||
* Required OIDC environment variables when OIDC is enabled
|
* Required OIDC environment variables when OIDC is enabled
|
||||||
*/
|
*/
|
||||||
const REQUIRED_OIDC_ENV_VARS = ["OIDC_ISSUER", "OIDC_CLIENT_ID", "OIDC_CLIENT_SECRET"] as const;
|
const REQUIRED_OIDC_ENV_VARS = [
|
||||||
|
"OIDC_ISSUER",
|
||||||
|
"OIDC_CLIENT_ID",
|
||||||
|
"OIDC_CLIENT_SECRET",
|
||||||
|
"OIDC_REDIRECT_URI",
|
||||||
|
] as const;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve BetterAuth base URL from explicit auth URL or API URL.
|
||||||
|
* BetterAuth uses this to generate absolute callback/error URLs.
|
||||||
|
*/
|
||||||
|
export function getBetterAuthBaseUrl(): string | undefined {
|
||||||
|
const configured = process.env.BETTER_AUTH_URL ?? process.env.NEXT_PUBLIC_API_URL;
|
||||||
|
|
||||||
|
if (!configured || configured.trim() === "") {
|
||||||
|
if (process.env.NODE_ENV === "production") {
|
||||||
|
throw new Error(
|
||||||
|
"Missing BetterAuth base URL in production. Set BETTER_AUTH_URL (preferred) or NEXT_PUBLIC_API_URL."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
let parsed: URL;
|
||||||
|
try {
|
||||||
|
parsed = new URL(configured);
|
||||||
|
} catch (urlError: unknown) {
|
||||||
|
const detail = urlError instanceof Error ? urlError.message : String(urlError);
|
||||||
|
throw new Error(
|
||||||
|
`BetterAuth base URL must be a valid URL. Current value: "${configured}". Parse error: ${detail}.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.NODE_ENV === "production" && parsed.protocol !== "https:") {
|
||||||
|
throw new Error(
|
||||||
|
`BetterAuth base URL must use https in production. Current value: "${configured}".`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return parsed.origin;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Check if OIDC authentication is enabled via environment variable
|
* Check if OIDC authentication is enabled via environment variable
|
||||||
@@ -52,6 +92,54 @@ export function validateOidcConfig(): void {
|
|||||||
`The discovery URL is constructed by appending ".well-known/openid-configuration" to the issuer.`
|
`The discovery URL is constructed by appending ".well-known/openid-configuration" to the issuer.`
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Additional validation: OIDC_REDIRECT_URI must be a valid URL with /auth/oauth2/callback path
|
||||||
|
validateRedirectUri();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validates the OIDC_REDIRECT_URI environment variable.
|
||||||
|
* - Must be a parseable URL
|
||||||
|
* - Path must start with /auth/oauth2/callback
|
||||||
|
* - Warns (but does not throw) if using localhost in production
|
||||||
|
*
|
||||||
|
* @throws Error if URL is invalid or path does not start with /auth/oauth2/callback
|
||||||
|
*/
|
||||||
|
function validateRedirectUri(): void {
|
||||||
|
const redirectUri = process.env.OIDC_REDIRECT_URI;
|
||||||
|
if (!redirectUri || redirectUri.trim() === "") {
|
||||||
|
// Already caught by REQUIRED_OIDC_ENV_VARS check above
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let parsed: URL;
|
||||||
|
try {
|
||||||
|
parsed = new URL(redirectUri);
|
||||||
|
} catch (urlError: unknown) {
|
||||||
|
const detail = urlError instanceof Error ? urlError.message : String(urlError);
|
||||||
|
throw new Error(
|
||||||
|
`OIDC_REDIRECT_URI must be a valid URL. Current value: "${redirectUri}". ` +
|
||||||
|
`Parse error: ${detail}. ` +
|
||||||
|
`Example: "https://api.example.com/auth/oauth2/callback/authentik".`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!parsed.pathname.startsWith("/auth/oauth2/callback")) {
|
||||||
|
throw new Error(
|
||||||
|
`OIDC_REDIRECT_URI path must start with "/auth/oauth2/callback". Current path: "${parsed.pathname}". ` +
|
||||||
|
`Example: "https://api.example.com/auth/oauth2/callback/authentik".`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
process.env.NODE_ENV === "production" &&
|
||||||
|
(parsed.hostname === "localhost" || parsed.hostname === "127.0.0.1")
|
||||||
|
) {
|
||||||
|
console.warn(
|
||||||
|
`[AUTH WARNING] OIDC_REDIRECT_URI uses localhost ("${redirectUri}") in production. ` +
|
||||||
|
`This is likely a misconfiguration. Use a public domain for production deployments.`
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -63,14 +151,34 @@ function getOidcPlugins(): ReturnType<typeof genericOAuth>[] {
|
|||||||
return [];
|
return [];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const clientId = process.env.OIDC_CLIENT_ID;
|
||||||
|
const clientSecret = process.env.OIDC_CLIENT_SECRET;
|
||||||
|
const issuer = process.env.OIDC_ISSUER;
|
||||||
|
const redirectUri = process.env.OIDC_REDIRECT_URI;
|
||||||
|
|
||||||
|
if (!clientId) {
|
||||||
|
throw new Error("OIDC_CLIENT_ID is required when OIDC is enabled but was not set.");
|
||||||
|
}
|
||||||
|
if (!clientSecret) {
|
||||||
|
throw new Error("OIDC_CLIENT_SECRET is required when OIDC is enabled but was not set.");
|
||||||
|
}
|
||||||
|
if (!issuer) {
|
||||||
|
throw new Error("OIDC_ISSUER is required when OIDC is enabled but was not set.");
|
||||||
|
}
|
||||||
|
if (!redirectUri) {
|
||||||
|
throw new Error("OIDC_REDIRECT_URI is required when OIDC is enabled but was not set.");
|
||||||
|
}
|
||||||
|
|
||||||
return [
|
return [
|
||||||
genericOAuth({
|
genericOAuth({
|
||||||
config: [
|
config: [
|
||||||
{
|
{
|
||||||
providerId: "authentik",
|
providerId: "authentik",
|
||||||
clientId: process.env.OIDC_CLIENT_ID ?? "",
|
clientId,
|
||||||
clientSecret: process.env.OIDC_CLIENT_SECRET ?? "",
|
clientSecret,
|
||||||
discoveryUrl: `${process.env.OIDC_ISSUER ?? ""}.well-known/openid-configuration`,
|
discoveryUrl: `${issuer}.well-known/openid-configuration`,
|
||||||
|
redirectURI: redirectUri,
|
||||||
|
pkce: true,
|
||||||
scopes: ["openid", "profile", "email"],
|
scopes: ["openid", "profile", "email"],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
@@ -78,28 +186,91 @@ function getOidcPlugins(): ReturnType<typeof genericOAuth>[] {
|
|||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build the list of trusted origins from environment variables.
|
||||||
|
*
|
||||||
|
* Sources (in order):
|
||||||
|
* - NEXT_PUBLIC_APP_URL — primary frontend URL
|
||||||
|
* - NEXT_PUBLIC_API_URL — API's own origin
|
||||||
|
* - TRUSTED_ORIGINS — comma-separated additional origins
|
||||||
|
* - localhost fallbacks — only when NODE_ENV !== "production"
|
||||||
|
*
|
||||||
|
* The returned list is deduplicated and empty strings are filtered out.
|
||||||
|
*/
|
||||||
|
export function getTrustedOrigins(): string[] {
|
||||||
|
const origins: string[] = [];
|
||||||
|
|
||||||
|
// Environment-driven origins
|
||||||
|
if (process.env.NEXT_PUBLIC_APP_URL) {
|
||||||
|
origins.push(process.env.NEXT_PUBLIC_APP_URL);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.NEXT_PUBLIC_API_URL) {
|
||||||
|
origins.push(process.env.NEXT_PUBLIC_API_URL);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Comma-separated additional origins (validated)
|
||||||
|
if (process.env.TRUSTED_ORIGINS) {
|
||||||
|
const rawOrigins = process.env.TRUSTED_ORIGINS.split(",")
|
||||||
|
.map((o) => o.trim())
|
||||||
|
.filter((o) => o !== "");
|
||||||
|
for (const origin of rawOrigins) {
|
||||||
|
try {
|
||||||
|
const parsed = new URL(origin);
|
||||||
|
if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
|
||||||
|
console.warn(`[AUTH] Ignoring non-HTTP origin in TRUSTED_ORIGINS: "${origin}"`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
origins.push(origin);
|
||||||
|
} catch (urlError: unknown) {
|
||||||
|
const detail = urlError instanceof Error ? urlError.message : String(urlError);
|
||||||
|
console.warn(`[AUTH] Ignoring invalid URL in TRUSTED_ORIGINS: "${origin}" (${detail})`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Localhost fallbacks for development only
|
||||||
|
if (process.env.NODE_ENV !== "production") {
|
||||||
|
origins.push("http://localhost:3000", "http://localhost:3001");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deduplicate and filter empty strings
|
||||||
|
return [...new Set(origins)].filter((o) => o !== "");
|
||||||
|
}
|
||||||
|
|
||||||
export function createAuth(prisma: PrismaClient) {
|
export function createAuth(prisma: PrismaClient) {
|
||||||
// Validate OIDC configuration at startup - fail fast if misconfigured
|
// Validate OIDC configuration at startup - fail fast if misconfigured
|
||||||
validateOidcConfig();
|
validateOidcConfig();
|
||||||
|
|
||||||
|
const baseURL = getBetterAuthBaseUrl();
|
||||||
|
|
||||||
return betterAuth({
|
return betterAuth({
|
||||||
|
baseURL,
|
||||||
|
basePath: "/auth",
|
||||||
database: prismaAdapter(prisma, {
|
database: prismaAdapter(prisma, {
|
||||||
provider: "postgresql",
|
provider: "postgresql",
|
||||||
}),
|
}),
|
||||||
emailAndPassword: {
|
emailAndPassword: {
|
||||||
enabled: true, // Enable for now, can be disabled later
|
enabled: true,
|
||||||
},
|
},
|
||||||
plugins: [...getOidcPlugins()],
|
plugins: [...getOidcPlugins()],
|
||||||
session: {
|
session: {
|
||||||
expiresIn: 60 * 60 * 24, // 24 hours
|
expiresIn: 60 * 60 * 24 * 7, // 7 days absolute max
|
||||||
updateAge: 60 * 60 * 24, // 24 hours
|
updateAge: 60 * 60 * 2, // 2 hours — minimum session age before BetterAuth refreshes the expiry on next request
|
||||||
},
|
},
|
||||||
trustedOrigins: [
|
advanced: {
|
||||||
process.env.NEXT_PUBLIC_APP_URL ?? "http://localhost:3000",
|
database: {
|
||||||
"http://localhost:3001", // API origin (dev)
|
// BetterAuth's default ID generator emits opaque strings; our auth tables use UUID PKs.
|
||||||
"https://app.mosaicstack.dev", // Production web
|
generateId: "uuid",
|
||||||
"https://api.mosaicstack.dev", // Production API
|
},
|
||||||
],
|
defaultCookieAttributes: {
|
||||||
|
httpOnly: true,
|
||||||
|
secure: process.env.NODE_ENV === "production",
|
||||||
|
sameSite: "lax" as const,
|
||||||
|
...(process.env.COOKIE_DOMAIN ? { domain: process.env.COOKIE_DOMAIN } : {}),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
trustedOrigins: getTrustedOrigins(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,15 +1,41 @@
|
|||||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
|
||||||
|
// Mock better-auth modules before importing AuthService (pulled in by AuthController)
|
||||||
|
vi.mock("better-auth/node", () => ({
|
||||||
|
toNodeHandler: vi.fn().mockReturnValue(vi.fn()),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth", () => ({
|
||||||
|
betterAuth: vi.fn().mockReturnValue({
|
||||||
|
handler: vi.fn(),
|
||||||
|
api: { getSession: vi.fn() },
|
||||||
|
}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth/adapters/prisma", () => ({
|
||||||
|
prismaAdapter: vi.fn().mockReturnValue({}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth/plugins", () => ({
|
||||||
|
genericOAuth: vi.fn().mockReturnValue({ id: "generic-oauth" }),
|
||||||
|
}));
|
||||||
|
|
||||||
import { Test, TestingModule } from "@nestjs/testing";
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { HttpException, HttpStatus, UnauthorizedException } from "@nestjs/common";
|
||||||
import type { AuthUser, AuthSession } from "@mosaic/shared";
|
import type { AuthUser, AuthSession } from "@mosaic/shared";
|
||||||
|
import type { Request as ExpressRequest, Response as ExpressResponse } from "express";
|
||||||
import { AuthController } from "./auth.controller";
|
import { AuthController } from "./auth.controller";
|
||||||
import { AuthService } from "./auth.service";
|
import { AuthService } from "./auth.service";
|
||||||
|
|
||||||
describe("AuthController", () => {
|
describe("AuthController", () => {
|
||||||
let controller: AuthController;
|
let controller: AuthController;
|
||||||
let authService: AuthService;
|
|
||||||
|
const mockNodeHandler = vi.fn().mockResolvedValue(undefined);
|
||||||
|
|
||||||
const mockAuthService = {
|
const mockAuthService = {
|
||||||
getAuth: vi.fn(),
|
getAuth: vi.fn(),
|
||||||
|
getNodeHandler: vi.fn().mockReturnValue(mockNodeHandler),
|
||||||
|
getAuthConfig: vi.fn(),
|
||||||
};
|
};
|
||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
@@ -24,25 +50,239 @@ describe("AuthController", () => {
|
|||||||
}).compile();
|
}).compile();
|
||||||
|
|
||||||
controller = module.get<AuthController>(AuthController);
|
controller = module.get<AuthController>(AuthController);
|
||||||
authService = module.get<AuthService>(AuthService);
|
|
||||||
|
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
|
|
||||||
|
// Restore mock implementations after clearAllMocks
|
||||||
|
mockAuthService.getNodeHandler.mockReturnValue(mockNodeHandler);
|
||||||
|
mockNodeHandler.mockResolvedValue(undefined);
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("handleAuth", () => {
|
describe("handleAuth", () => {
|
||||||
it("should call BetterAuth handler", async () => {
|
it("should delegate to BetterAuth node handler with Express req/res", async () => {
|
||||||
const mockHandler = vi.fn().mockResolvedValue({ status: 200 });
|
|
||||||
mockAuthService.getAuth.mockReturnValue({ handler: mockHandler });
|
|
||||||
|
|
||||||
const mockRequest = {
|
const mockRequest = {
|
||||||
method: "GET",
|
method: "GET",
|
||||||
url: "/auth/session",
|
url: "/auth/session",
|
||||||
|
headers: {},
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
socket: { remoteAddress: "127.0.0.1" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
|
||||||
|
expect(mockAuthService.getNodeHandler).toHaveBeenCalled();
|
||||||
|
expect(mockNodeHandler).toHaveBeenCalledWith(mockRequest, mockResponse);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw HttpException with 500 when handler throws before headers sent", async () => {
|
||||||
|
const handlerError = new Error("BetterAuth internal failure");
|
||||||
|
mockNodeHandler.mockRejectedValueOnce(handlerError);
|
||||||
|
|
||||||
|
const mockRequest = {
|
||||||
|
method: "POST",
|
||||||
|
url: "/auth/sign-in",
|
||||||
|
headers: {},
|
||||||
|
ip: "192.168.1.10",
|
||||||
|
socket: { remoteAddress: "192.168.1.10" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
// Should not reach here
|
||||||
|
expect.unreachable("Expected HttpException to be thrown");
|
||||||
|
} catch (err) {
|
||||||
|
expect(err).toBeInstanceOf(HttpException);
|
||||||
|
expect((err as HttpException).getStatus()).toBe(HttpStatus.INTERNAL_SERVER_ERROR);
|
||||||
|
expect((err as HttpException).getResponse()).toBe(
|
||||||
|
"Unable to complete authentication. Please try again in a moment."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should preserve better-call status and body for handler APIError", async () => {
|
||||||
|
const apiError = {
|
||||||
|
statusCode: HttpStatus.BAD_REQUEST,
|
||||||
|
message: "Invalid OAuth configuration",
|
||||||
|
body: {
|
||||||
|
message: "Invalid OAuth configuration",
|
||||||
|
code: "INVALID_OAUTH_CONFIGURATION",
|
||||||
|
},
|
||||||
|
};
|
||||||
|
mockNodeHandler.mockRejectedValueOnce(apiError);
|
||||||
|
|
||||||
|
const mockRequest = {
|
||||||
|
method: "POST",
|
||||||
|
url: "/auth/sign-in/oauth2",
|
||||||
|
headers: {},
|
||||||
|
ip: "192.168.1.10",
|
||||||
|
socket: { remoteAddress: "192.168.1.10" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
expect.unreachable("Expected HttpException to be thrown");
|
||||||
|
} catch (err) {
|
||||||
|
expect(err).toBeInstanceOf(HttpException);
|
||||||
|
expect((err as HttpException).getStatus()).toBe(HttpStatus.BAD_REQUEST);
|
||||||
|
expect((err as HttpException).getResponse()).toMatchObject({
|
||||||
|
message: "Invalid OAuth configuration",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should log warning and not throw when handler throws after headers sent", async () => {
|
||||||
|
const handlerError = new Error("Stream interrupted");
|
||||||
|
mockNodeHandler.mockRejectedValueOnce(handlerError);
|
||||||
|
|
||||||
|
const mockRequest = {
|
||||||
|
method: "POST",
|
||||||
|
url: "/auth/sign-up",
|
||||||
|
headers: {},
|
||||||
|
ip: "10.0.0.5",
|
||||||
|
socket: { remoteAddress: "10.0.0.5" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: true,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
// Should not throw when headers already sent
|
||||||
|
await expect(controller.handleAuth(mockRequest, mockResponse)).resolves.toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle non-Error thrown values", async () => {
|
||||||
|
mockNodeHandler.mockRejectedValueOnce("string error");
|
||||||
|
|
||||||
|
const mockRequest = {
|
||||||
|
method: "GET",
|
||||||
|
url: "/auth/callback",
|
||||||
|
headers: {},
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
socket: { remoteAddress: "127.0.0.1" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
await expect(controller.handleAuth(mockRequest, mockResponse)).rejects.toThrow(HttpException);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("getConfig", () => {
|
||||||
|
it("should return auth config from service", async () => {
|
||||||
|
const mockConfig = {
|
||||||
|
providers: [
|
||||||
|
{ id: "email", name: "Email", type: "credentials" as const },
|
||||||
|
{ id: "authentik", name: "Authentik", type: "oauth" as const },
|
||||||
|
],
|
||||||
|
};
|
||||||
|
mockAuthService.getAuthConfig.mockResolvedValue(mockConfig);
|
||||||
|
|
||||||
|
const result = await controller.getConfig();
|
||||||
|
|
||||||
|
expect(result).toEqual(mockConfig);
|
||||||
|
expect(mockAuthService.getAuthConfig).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return correct response shape with only email provider", async () => {
|
||||||
|
const mockConfig = {
|
||||||
|
providers: [{ id: "email", name: "Email", type: "credentials" as const }],
|
||||||
|
};
|
||||||
|
mockAuthService.getAuthConfig.mockResolvedValue(mockConfig);
|
||||||
|
|
||||||
|
const result = await controller.getConfig();
|
||||||
|
|
||||||
|
expect(result).toEqual(mockConfig);
|
||||||
|
expect(result.providers).toHaveLength(1);
|
||||||
|
expect(result.providers[0]).toEqual({
|
||||||
|
id: "email",
|
||||||
|
name: "Email",
|
||||||
|
type: "credentials",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should never leak secrets in auth config response", async () => {
|
||||||
|
// Set ALL sensitive environment variables with known values
|
||||||
|
const sensitiveEnv: Record<string, string> = {
|
||||||
|
OIDC_CLIENT_SECRET: "test-client-secret",
|
||||||
|
OIDC_CLIENT_ID: "test-client-id",
|
||||||
|
OIDC_ISSUER: "https://auth.test.com/",
|
||||||
|
OIDC_REDIRECT_URI: "https://app.test.com/auth/oauth2/callback/authentik",
|
||||||
|
BETTER_AUTH_SECRET: "test-better-auth-secret",
|
||||||
|
JWT_SECRET: "test-jwt-secret",
|
||||||
|
CSRF_SECRET: "test-csrf-secret",
|
||||||
|
DATABASE_URL: "postgresql://user:password@localhost/db",
|
||||||
|
OIDC_ENABLED: "true",
|
||||||
};
|
};
|
||||||
|
|
||||||
await controller.handleAuth(mockRequest as unknown as Request);
|
const originalEnv: Record<string, string | undefined> = {};
|
||||||
|
for (const [key, value] of Object.entries(sensitiveEnv)) {
|
||||||
|
originalEnv[key] = process.env[key];
|
||||||
|
process.env[key] = value;
|
||||||
|
}
|
||||||
|
|
||||||
expect(mockAuthService.getAuth).toHaveBeenCalled();
|
try {
|
||||||
expect(mockHandler).toHaveBeenCalledWith(mockRequest);
|
// Mock the service to return a realistic config with both providers
|
||||||
|
const mockConfig = {
|
||||||
|
providers: [
|
||||||
|
{ id: "email", name: "Email", type: "credentials" as const },
|
||||||
|
{ id: "authentik", name: "Authentik", type: "oauth" as const },
|
||||||
|
],
|
||||||
|
};
|
||||||
|
mockAuthService.getAuthConfig.mockResolvedValue(mockConfig);
|
||||||
|
|
||||||
|
const result = await controller.getConfig();
|
||||||
|
const serialized = JSON.stringify(result);
|
||||||
|
|
||||||
|
// Assert no secret values leak into the serialized response
|
||||||
|
const forbiddenPatterns = [
|
||||||
|
"test-client-secret",
|
||||||
|
"test-client-id",
|
||||||
|
"test-better-auth-secret",
|
||||||
|
"test-jwt-secret",
|
||||||
|
"test-csrf-secret",
|
||||||
|
"auth.test.com",
|
||||||
|
"callback",
|
||||||
|
"password",
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const pattern of forbiddenPatterns) {
|
||||||
|
expect(serialized).not.toContain(pattern);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Assert response contains ONLY expected fields
|
||||||
|
expect(result).toHaveProperty("providers");
|
||||||
|
expect(Object.keys(result)).toEqual(["providers"]);
|
||||||
|
expect(Array.isArray(result.providers)).toBe(true);
|
||||||
|
|
||||||
|
for (const provider of result.providers) {
|
||||||
|
const keys = Object.keys(provider);
|
||||||
|
expect(keys).toEqual(expect.arrayContaining(["id", "name", "type"]));
|
||||||
|
expect(keys).toHaveLength(3);
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
// Restore original environment
|
||||||
|
for (const [key] of Object.entries(sensitiveEnv)) {
|
||||||
|
if (originalEnv[key] === undefined) {
|
||||||
|
delete process.env[key];
|
||||||
|
} else {
|
||||||
|
process.env[key] = originalEnv[key];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -80,19 +320,22 @@ describe("AuthController", () => {
|
|||||||
expect(result).toEqual(expected);
|
expect(result).toEqual(expected);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw error if user not found in request", () => {
|
it("should throw UnauthorizedException when req.user is undefined", () => {
|
||||||
const mockRequest = {
|
const mockRequest = {
|
||||||
session: {
|
session: {
|
||||||
id: "session-123",
|
id: "session-123",
|
||||||
token: "session-token",
|
token: "session-token",
|
||||||
expiresAt: new Date(),
|
expiresAt: new Date(Date.now() + 86400000),
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
expect(() => controller.getSession(mockRequest)).toThrow("User session not found");
|
expect(() => controller.getSession(mockRequest as never)).toThrow(UnauthorizedException);
|
||||||
|
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
||||||
|
"Missing authentication context"
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw error if session not found in request", () => {
|
it("should throw UnauthorizedException when req.session is undefined", () => {
|
||||||
const mockRequest = {
|
const mockRequest = {
|
||||||
user: {
|
user: {
|
||||||
id: "user-123",
|
id: "user-123",
|
||||||
@@ -101,7 +344,19 @@ describe("AuthController", () => {
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
expect(() => controller.getSession(mockRequest)).toThrow("User session not found");
|
expect(() => controller.getSession(mockRequest as never)).toThrow(UnauthorizedException);
|
||||||
|
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
||||||
|
"Missing authentication context"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw UnauthorizedException when both req.user and req.session are undefined", () => {
|
||||||
|
const mockRequest = {};
|
||||||
|
|
||||||
|
expect(() => controller.getSession(mockRequest as never)).toThrow(UnauthorizedException);
|
||||||
|
expect(() => controller.getSession(mockRequest as never)).toThrow(
|
||||||
|
"Missing authentication context"
|
||||||
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -153,4 +408,89 @@ describe("AuthController", () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe("getClientIp (via handleAuth)", () => {
|
||||||
|
it("should extract IP from X-Forwarded-For with single IP", async () => {
|
||||||
|
const mockRequest = {
|
||||||
|
method: "GET",
|
||||||
|
url: "/auth/callback",
|
||||||
|
headers: { "x-forwarded-for": "203.0.113.50" },
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
socket: { remoteAddress: "127.0.0.1" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
// Spy on the logger to verify the extracted IP
|
||||||
|
const debugSpy = vi.spyOn(controller["logger"], "debug");
|
||||||
|
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
|
||||||
|
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("203.0.113.50"));
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should extract first IP from X-Forwarded-For with comma-separated IPs", async () => {
|
||||||
|
const mockRequest = {
|
||||||
|
method: "GET",
|
||||||
|
url: "/auth/callback",
|
||||||
|
headers: { "x-forwarded-for": "203.0.113.50, 70.41.3.18" },
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
socket: { remoteAddress: "127.0.0.1" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
const debugSpy = vi.spyOn(controller["logger"], "debug");
|
||||||
|
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
|
||||||
|
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("203.0.113.50"));
|
||||||
|
// Ensure it does NOT contain the second IP in the extracted position
|
||||||
|
expect(debugSpy).toHaveBeenCalledWith(expect.not.stringContaining("70.41.3.18"));
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should extract first IP from X-Forwarded-For as array", async () => {
|
||||||
|
const mockRequest = {
|
||||||
|
method: "GET",
|
||||||
|
url: "/auth/callback",
|
||||||
|
headers: { "x-forwarded-for": ["203.0.113.50", "70.41.3.18"] },
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
socket: { remoteAddress: "127.0.0.1" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
const debugSpy = vi.spyOn(controller["logger"], "debug");
|
||||||
|
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
|
||||||
|
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("203.0.113.50"));
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should fallback to req.ip when no X-Forwarded-For header", async () => {
|
||||||
|
const mockRequest = {
|
||||||
|
method: "GET",
|
||||||
|
url: "/auth/callback",
|
||||||
|
headers: {},
|
||||||
|
ip: "192.168.1.100",
|
||||||
|
socket: { remoteAddress: "192.168.1.100" },
|
||||||
|
} as unknown as ExpressRequest;
|
||||||
|
|
||||||
|
const mockResponse = {
|
||||||
|
headersSent: false,
|
||||||
|
} as unknown as ExpressResponse;
|
||||||
|
|
||||||
|
const debugSpy = vi.spyOn(controller["logger"], "debug");
|
||||||
|
|
||||||
|
await controller.handleAuth(mockRequest, mockResponse);
|
||||||
|
|
||||||
|
expect(debugSpy).toHaveBeenCalledWith(expect.stringContaining("192.168.1.100"));
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,19 +1,25 @@
|
|||||||
import { Controller, All, Req, Get, UseGuards, Request, Logger } from "@nestjs/common";
|
import {
|
||||||
|
Controller,
|
||||||
|
All,
|
||||||
|
Req,
|
||||||
|
Res,
|
||||||
|
Get,
|
||||||
|
Header,
|
||||||
|
UseGuards,
|
||||||
|
Request,
|
||||||
|
Logger,
|
||||||
|
HttpException,
|
||||||
|
HttpStatus,
|
||||||
|
UnauthorizedException,
|
||||||
|
} from "@nestjs/common";
|
||||||
import { Throttle } from "@nestjs/throttler";
|
import { Throttle } from "@nestjs/throttler";
|
||||||
import type { AuthUser, AuthSession } from "@mosaic/shared";
|
import type { Request as ExpressRequest, Response as ExpressResponse } from "express";
|
||||||
|
import type { AuthUser, AuthSession, AuthConfigResponse } from "@mosaic/shared";
|
||||||
import { AuthService } from "./auth.service";
|
import { AuthService } from "./auth.service";
|
||||||
import { AuthGuard } from "./guards/auth.guard";
|
import { AuthGuard } from "./guards/auth.guard";
|
||||||
import { CurrentUser } from "./decorators/current-user.decorator";
|
import { CurrentUser } from "./decorators/current-user.decorator";
|
||||||
|
import { SkipCsrf } from "../common/decorators/skip-csrf.decorator";
|
||||||
interface RequestWithSession {
|
import type { AuthenticatedRequest } from "./types/better-auth-request.interface";
|
||||||
user?: AuthUser;
|
|
||||||
session?: {
|
|
||||||
id: string;
|
|
||||||
token: string;
|
|
||||||
expiresAt: Date;
|
|
||||||
[key: string]: unknown;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
@Controller("auth")
|
@Controller("auth")
|
||||||
export class AuthController {
|
export class AuthController {
|
||||||
@@ -27,10 +33,13 @@ export class AuthController {
|
|||||||
*/
|
*/
|
||||||
@Get("session")
|
@Get("session")
|
||||||
@UseGuards(AuthGuard)
|
@UseGuards(AuthGuard)
|
||||||
getSession(@Request() req: RequestWithSession): AuthSession {
|
getSession(@Request() req: AuthenticatedRequest): AuthSession {
|
||||||
|
// Defense-in-depth: AuthGuard should guarantee these, but if someone adds
|
||||||
|
// a route with AuthenticatedRequest and forgets @UseGuards(AuthGuard),
|
||||||
|
// TypeScript types won't help at runtime.
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
|
||||||
if (!req.user || !req.session) {
|
if (!req.user || !req.session) {
|
||||||
// This should never happen after AuthGuard, but TypeScript needs the check
|
throw new UnauthorizedException("Missing authentication context");
|
||||||
throw new Error("User session not found");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@@ -76,6 +85,17 @@ export class AuthController {
|
|||||||
return profile;
|
return profile;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get available authentication providers.
|
||||||
|
* Public endpoint (no auth guard) so the frontend can discover login options
|
||||||
|
* before the user is authenticated.
|
||||||
|
*/
|
||||||
|
@Get("config")
|
||||||
|
@Header("Cache-Control", "public, max-age=300")
|
||||||
|
async getConfig(): Promise<AuthConfigResponse> {
|
||||||
|
return this.authService.getAuthConfig();
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Handle all other auth routes (sign-in, sign-up, sign-out, etc.)
|
* Handle all other auth routes (sign-in, sign-up, sign-out, etc.)
|
||||||
* Delegates to BetterAuth
|
* Delegates to BetterAuth
|
||||||
@@ -87,38 +107,102 @@ export class AuthController {
|
|||||||
* Rate limiting and logging are applied to mitigate abuse (SEC-API-10).
|
* Rate limiting and logging are applied to mitigate abuse (SEC-API-10).
|
||||||
*/
|
*/
|
||||||
@All("*")
|
@All("*")
|
||||||
|
// BetterAuth handles CSRF internally (Fetch Metadata + SameSite=Lax cookies).
|
||||||
|
// @SkipCsrf avoids double-protection conflicts.
|
||||||
|
// See: https://www.better-auth.com/docs/reference/security
|
||||||
|
@SkipCsrf()
|
||||||
@Throttle({ strict: { limit: 10, ttl: 60000 } })
|
@Throttle({ strict: { limit: 10, ttl: 60000 } })
|
||||||
async handleAuth(@Req() req: Request): Promise<unknown> {
|
async handleAuth(@Req() req: ExpressRequest, @Res() res: ExpressResponse): Promise<void> {
|
||||||
// Extract client IP for logging
|
// Extract client IP for logging
|
||||||
const clientIp = this.getClientIp(req);
|
const clientIp = this.getClientIp(req);
|
||||||
const requestPath = (req as unknown as { url?: string }).url ?? "unknown";
|
|
||||||
const method = (req as unknown as { method?: string }).method ?? "UNKNOWN";
|
|
||||||
|
|
||||||
// Log auth catch-all hits for monitoring and debugging
|
// Log auth catch-all hits for monitoring and debugging
|
||||||
this.logger.debug(`Auth catch-all: ${method} ${requestPath} from ${clientIp}`);
|
this.logger.debug(`Auth catch-all: ${req.method} ${req.url} from ${clientIp}`);
|
||||||
|
|
||||||
const auth = this.authService.getAuth();
|
const handler = this.authService.getNodeHandler();
|
||||||
return auth.handler(req);
|
|
||||||
|
try {
|
||||||
|
await handler(req, res);
|
||||||
|
} catch (error: unknown) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
|
const stack = error instanceof Error ? error.stack : undefined;
|
||||||
|
|
||||||
|
this.logger.error(
|
||||||
|
`BetterAuth handler error: ${req.method} ${req.url} from ${clientIp} - ${message}`,
|
||||||
|
stack
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!res.headersSent) {
|
||||||
|
const mappedError = this.mapToHttpException(error);
|
||||||
|
if (mappedError) {
|
||||||
|
throw mappedError;
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new HttpException(
|
||||||
|
"Unable to complete authentication. Please try again in a moment.",
|
||||||
|
HttpStatus.INTERNAL_SERVER_ERROR
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.error(
|
||||||
|
`Headers already sent for failed auth request ${req.method} ${req.url} — client may have received partial response`
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Extract client IP from request, handling proxies
|
* Extract client IP from request, handling proxies
|
||||||
*/
|
*/
|
||||||
private getClientIp(req: Request): string {
|
private getClientIp(req: ExpressRequest): string {
|
||||||
const reqWithHeaders = req as unknown as {
|
|
||||||
headers?: Record<string, string | string[] | undefined>;
|
|
||||||
ip?: string;
|
|
||||||
socket?: { remoteAddress?: string };
|
|
||||||
};
|
|
||||||
|
|
||||||
// Check X-Forwarded-For header (for reverse proxy setups)
|
// Check X-Forwarded-For header (for reverse proxy setups)
|
||||||
const forwardedFor = reqWithHeaders.headers?.["x-forwarded-for"];
|
const forwardedFor = req.headers["x-forwarded-for"];
|
||||||
if (forwardedFor) {
|
if (forwardedFor) {
|
||||||
const ips = Array.isArray(forwardedFor) ? forwardedFor[0] : forwardedFor;
|
const ips = Array.isArray(forwardedFor) ? forwardedFor[0] : forwardedFor;
|
||||||
return ips?.split(",")[0]?.trim() ?? "unknown";
|
return ips?.split(",")[0]?.trim() ?? "unknown";
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fall back to direct IP
|
// Fall back to direct IP
|
||||||
return reqWithHeaders.ip ?? reqWithHeaders.socket?.remoteAddress ?? "unknown";
|
return req.ip ?? req.socket.remoteAddress ?? "unknown";
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Preserve known HTTP errors from BetterAuth/better-call instead of converting
|
||||||
|
* every failure into a generic 500.
|
||||||
|
*/
|
||||||
|
private mapToHttpException(error: unknown): HttpException | null {
|
||||||
|
if (error instanceof HttpException) {
|
||||||
|
return error;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!error || typeof error !== "object") {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const statusCode = "statusCode" in error ? error.statusCode : undefined;
|
||||||
|
if (!this.isHttpStatus(statusCode)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const responseBody = "body" in error && error.body !== undefined ? error.body : undefined;
|
||||||
|
if (
|
||||||
|
responseBody !== undefined &&
|
||||||
|
responseBody !== null &&
|
||||||
|
(typeof responseBody === "string" || typeof responseBody === "object")
|
||||||
|
) {
|
||||||
|
return new HttpException(responseBody, statusCode);
|
||||||
|
}
|
||||||
|
|
||||||
|
const message =
|
||||||
|
"message" in error && typeof error.message === "string" && error.message.length > 0
|
||||||
|
? error.message
|
||||||
|
: "Authentication request failed";
|
||||||
|
return new HttpException(message, statusCode);
|
||||||
|
}
|
||||||
|
|
||||||
|
private isHttpStatus(value: unknown): value is number {
|
||||||
|
if (typeof value !== "number" || !Number.isInteger(value)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return value >= 400 && value <= 599;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -23,10 +23,17 @@ describe("AuthController - Rate Limiting", () => {
|
|||||||
let app: INestApplication;
|
let app: INestApplication;
|
||||||
let loggerSpy: ReturnType<typeof vi.spyOn>;
|
let loggerSpy: ReturnType<typeof vi.spyOn>;
|
||||||
|
|
||||||
|
const mockNodeHandler = vi.fn(
|
||||||
|
(_req: unknown, res: { statusCode: number; end: (body: string) => void }) => {
|
||||||
|
res.statusCode = 200;
|
||||||
|
res.end(JSON.stringify({}));
|
||||||
|
return Promise.resolve();
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
const mockAuthService = {
|
const mockAuthService = {
|
||||||
getAuth: vi.fn().mockReturnValue({
|
getAuth: vi.fn(),
|
||||||
handler: vi.fn().mockResolvedValue({ status: 200, body: {} }),
|
getNodeHandler: vi.fn().mockReturnValue(mockNodeHandler),
|
||||||
}),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
@@ -76,7 +83,7 @@ describe("AuthController - Rate Limiting", () => {
|
|||||||
expect(response.status).not.toBe(HttpStatus.TOO_MANY_REQUESTS);
|
expect(response.status).not.toBe(HttpStatus.TOO_MANY_REQUESTS);
|
||||||
}
|
}
|
||||||
|
|
||||||
expect(mockAuthService.getAuth).toHaveBeenCalledTimes(3);
|
expect(mockAuthService.getNodeHandler).toHaveBeenCalledTimes(3);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should return 429 when rate limit is exceeded", async () => {
|
it("should return 429 when rate limit is exceeded", async () => {
|
||||||
|
|||||||
@@ -1,5 +1,26 @@
|
|||||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||||
import { Test, TestingModule } from "@nestjs/testing";
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
|
||||||
|
// Mock better-auth modules before importing AuthService
|
||||||
|
vi.mock("better-auth/node", () => ({
|
||||||
|
toNodeHandler: vi.fn().mockReturnValue(vi.fn()),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth", () => ({
|
||||||
|
betterAuth: vi.fn().mockReturnValue({
|
||||||
|
handler: vi.fn(),
|
||||||
|
api: { getSession: vi.fn() },
|
||||||
|
}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth/adapters/prisma", () => ({
|
||||||
|
prismaAdapter: vi.fn().mockReturnValue({}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth/plugins", () => ({
|
||||||
|
genericOAuth: vi.fn().mockReturnValue({ id: "generic-oauth" }),
|
||||||
|
}));
|
||||||
|
|
||||||
import { AuthService } from "./auth.service";
|
import { AuthService } from "./auth.service";
|
||||||
import { PrismaService } from "../prisma/prisma.service";
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
|
||||||
@@ -30,6 +51,12 @@ describe("AuthService", () => {
|
|||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks();
|
||||||
|
delete process.env.OIDC_ENABLED;
|
||||||
|
delete process.env.OIDC_ISSUER;
|
||||||
|
});
|
||||||
|
|
||||||
describe("getAuth", () => {
|
describe("getAuth", () => {
|
||||||
it("should return BetterAuth instance", () => {
|
it("should return BetterAuth instance", () => {
|
||||||
const auth = service.getAuth();
|
const auth = service.getAuth();
|
||||||
@@ -62,6 +89,23 @@ describe("AuthService", () => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should return null when user is not found", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
const result = await service.getUserById("nonexistent-id");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
expect(mockPrismaService.user.findUnique).toHaveBeenCalledWith({
|
||||||
|
where: { id: "nonexistent-id" },
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
email: true,
|
||||||
|
name: true,
|
||||||
|
authProviderId: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("getUserByEmail", () => {
|
describe("getUserByEmail", () => {
|
||||||
@@ -88,6 +132,269 @@ describe("AuthService", () => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should return null when user is not found", async () => {
|
||||||
|
mockPrismaService.user.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
const result = await service.getUserByEmail("unknown@example.com");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
expect(mockPrismaService.user.findUnique).toHaveBeenCalledWith({
|
||||||
|
where: { email: "unknown@example.com" },
|
||||||
|
select: {
|
||||||
|
id: true,
|
||||||
|
email: true,
|
||||||
|
name: true,
|
||||||
|
authProviderId: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("isOidcProviderReachable", () => {
|
||||||
|
const discoveryUrl = "https://auth.example.com/.well-known/openid-configuration";
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||||
|
// Reset the cache by accessing private fields via bracket notation
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0;
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthResult = false;
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).consecutiveHealthFailures = 0;
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return true when discovery URL returns 200", async () => {
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({
|
||||||
|
ok: true,
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const result = await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(result).toBe(true);
|
||||||
|
expect(mockFetch).toHaveBeenCalledWith(discoveryUrl, {
|
||||||
|
signal: expect.any(AbortSignal) as AbortSignal,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return false on network error", async () => {
|
||||||
|
const mockFetch = vi.fn().mockRejectedValue(new Error("ECONNREFUSED"));
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const result = await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(result).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return false on timeout", async () => {
|
||||||
|
const mockFetch = vi.fn().mockRejectedValue(new DOMException("The operation was aborted"));
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const result = await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(result).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return false when discovery URL returns non-200", async () => {
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({
|
||||||
|
ok: false,
|
||||||
|
status: 503,
|
||||||
|
});
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const result = await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(result).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should cache result for 30 seconds", async () => {
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({
|
||||||
|
ok: true,
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
// First call - fetches
|
||||||
|
const result1 = await service.isOidcProviderReachable();
|
||||||
|
expect(result1).toBe(true);
|
||||||
|
expect(mockFetch).toHaveBeenCalledTimes(1);
|
||||||
|
|
||||||
|
// Second call within 30s - uses cache
|
||||||
|
const result2 = await service.isOidcProviderReachable();
|
||||||
|
expect(result2).toBe(true);
|
||||||
|
expect(mockFetch).toHaveBeenCalledTimes(1); // Still 1, no new fetch
|
||||||
|
|
||||||
|
// Simulate cache expiry by moving lastHealthCheck back
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = Date.now() - 31_000;
|
||||||
|
|
||||||
|
// Third call after cache expiry - fetches again
|
||||||
|
const result3 = await service.isOidcProviderReachable();
|
||||||
|
expect(result3).toBe(true);
|
||||||
|
expect(mockFetch).toHaveBeenCalledTimes(2); // Now 2
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should cache false results too", async () => {
|
||||||
|
const mockFetch = vi
|
||||||
|
.fn()
|
||||||
|
.mockRejectedValueOnce(new Error("ECONNREFUSED"))
|
||||||
|
.mockResolvedValueOnce({ ok: true, status: 200 });
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
// First call - fails
|
||||||
|
const result1 = await service.isOidcProviderReachable();
|
||||||
|
expect(result1).toBe(false);
|
||||||
|
expect(mockFetch).toHaveBeenCalledTimes(1);
|
||||||
|
|
||||||
|
// Second call within 30s - returns cached false
|
||||||
|
const result2 = await service.isOidcProviderReachable();
|
||||||
|
expect(result2).toBe(false);
|
||||||
|
expect(mockFetch).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should escalate to error level after 3 consecutive failures", async () => {
|
||||||
|
const mockFetch = vi.fn().mockRejectedValue(new Error("ECONNREFUSED"));
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||||
|
const loggerError = vi.spyOn(service["logger"], "error");
|
||||||
|
|
||||||
|
// Failures 1 and 2 should log at warn level
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0; // Reset cache
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(loggerWarn).toHaveBeenCalledTimes(2);
|
||||||
|
expect(loggerError).not.toHaveBeenCalled();
|
||||||
|
|
||||||
|
// Failure 3 should escalate to error level
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0;
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(loggerError).toHaveBeenCalledTimes(1);
|
||||||
|
expect(loggerError).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining("OIDC provider unreachable")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should escalate to error level after 3 consecutive non-OK responses", async () => {
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({ ok: false, status: 503 });
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||||
|
const loggerError = vi.spyOn(service["logger"], "error");
|
||||||
|
|
||||||
|
// Failures 1 and 2 at warn level
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0;
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(loggerWarn).toHaveBeenCalledTimes(2);
|
||||||
|
expect(loggerError).not.toHaveBeenCalled();
|
||||||
|
|
||||||
|
// Failure 3 at error level
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0;
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(loggerError).toHaveBeenCalledTimes(1);
|
||||||
|
expect(loggerError).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining("OIDC provider returned non-OK status")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reset failure counter and log recovery on success after failures", async () => {
|
||||||
|
const mockFetch = vi
|
||||||
|
.fn()
|
||||||
|
.mockRejectedValueOnce(new Error("ECONNREFUSED"))
|
||||||
|
.mockRejectedValueOnce(new Error("ECONNREFUSED"))
|
||||||
|
.mockResolvedValueOnce({ ok: true, status: 200 });
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const loggerLog = vi.spyOn(service["logger"], "log");
|
||||||
|
|
||||||
|
// Two failures
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0;
|
||||||
|
await service.isOidcProviderReachable();
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0;
|
||||||
|
|
||||||
|
// Recovery
|
||||||
|
const result = await service.isOidcProviderReachable();
|
||||||
|
|
||||||
|
expect(result).toBe(true);
|
||||||
|
expect(loggerLog).toHaveBeenCalledWith(
|
||||||
|
expect.stringContaining("OIDC provider recovered after 2 consecutive failure(s)")
|
||||||
|
);
|
||||||
|
// Verify counter reset
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
expect((service as any).consecutiveHealthFailures).toBe(0);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("getAuthConfig", () => {
|
||||||
|
it("should return only email provider when OIDC is disabled", async () => {
|
||||||
|
delete process.env.OIDC_ENABLED;
|
||||||
|
|
||||||
|
const result = await service.getAuthConfig();
|
||||||
|
|
||||||
|
expect(result).toEqual({
|
||||||
|
providers: [{ id: "email", name: "Email", type: "credentials" }],
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return both email and authentik providers when OIDC is enabled and reachable", async () => {
|
||||||
|
process.env.OIDC_ENABLED = "true";
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||||
|
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({ ok: true, status: 200 });
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const result = await service.getAuthConfig();
|
||||||
|
|
||||||
|
expect(result).toEqual({
|
||||||
|
providers: [
|
||||||
|
{ id: "email", name: "Email", type: "credentials" },
|
||||||
|
{ id: "authentik", name: "Authentik", type: "oauth" },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return only email provider when OIDC_ENABLED is false", async () => {
|
||||||
|
process.env.OIDC_ENABLED = "false";
|
||||||
|
|
||||||
|
const result = await service.getAuthConfig();
|
||||||
|
|
||||||
|
expect(result).toEqual({
|
||||||
|
providers: [{ id: "email", name: "Email", type: "credentials" }],
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should omit authentik when OIDC is enabled but provider is unreachable", async () => {
|
||||||
|
process.env.OIDC_ENABLED = "true";
|
||||||
|
process.env.OIDC_ISSUER = "https://auth.example.com/";
|
||||||
|
|
||||||
|
// Reset cache
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
(service as any).lastHealthCheck = 0;
|
||||||
|
|
||||||
|
const mockFetch = vi.fn().mockRejectedValue(new Error("ECONNREFUSED"));
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const result = await service.getAuthConfig();
|
||||||
|
|
||||||
|
expect(result).toEqual({
|
||||||
|
providers: [{ id: "email", name: "Email", type: "credentials" }],
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("verifySession", () => {
|
describe("verifySession", () => {
|
||||||
@@ -103,7 +410,7 @@ describe("AuthService", () => {
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
it("should return session data for valid token", async () => {
|
it("should validate session token using secure BetterAuth cookie header", async () => {
|
||||||
const auth = service.getAuth();
|
const auth = service.getAuth();
|
||||||
const mockGetSession = vi.fn().mockResolvedValue(mockSessionData);
|
const mockGetSession = vi.fn().mockResolvedValue(mockSessionData);
|
||||||
auth.api = { getSession: mockGetSession } as any;
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
@@ -111,7 +418,58 @@ describe("AuthService", () => {
|
|||||||
const result = await service.verifySession("valid-token");
|
const result = await service.verifySession("valid-token");
|
||||||
|
|
||||||
expect(result).toEqual(mockSessionData);
|
expect(result).toEqual(mockSessionData);
|
||||||
|
expect(mockGetSession).toHaveBeenCalledTimes(1);
|
||||||
expect(mockGetSession).toHaveBeenCalledWith({
|
expect(mockGetSession).toHaveBeenCalledWith({
|
||||||
|
headers: {
|
||||||
|
cookie: "__Secure-better-auth.session_token=valid-token",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should preserve raw cookie token value without URL re-encoding", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockResolvedValue(mockSessionData);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("tok/with+=chars=");
|
||||||
|
|
||||||
|
expect(result).toEqual(mockSessionData);
|
||||||
|
expect(mockGetSession).toHaveBeenCalledWith({
|
||||||
|
headers: {
|
||||||
|
cookie: "__Secure-better-auth.session_token=tok/with+=chars=",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should fall back to Authorization header when cookie-based lookups miss", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi
|
||||||
|
.fn()
|
||||||
|
.mockResolvedValueOnce(null)
|
||||||
|
.mockResolvedValueOnce(null)
|
||||||
|
.mockResolvedValueOnce(null)
|
||||||
|
.mockResolvedValueOnce(mockSessionData);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("valid-token");
|
||||||
|
|
||||||
|
expect(result).toEqual(mockSessionData);
|
||||||
|
expect(mockGetSession).toHaveBeenNthCalledWith(1, {
|
||||||
|
headers: {
|
||||||
|
cookie: "__Secure-better-auth.session_token=valid-token",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
expect(mockGetSession).toHaveBeenNthCalledWith(2, {
|
||||||
|
headers: {
|
||||||
|
cookie: "better-auth.session_token=valid-token",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
expect(mockGetSession).toHaveBeenNthCalledWith(3, {
|
||||||
|
headers: {
|
||||||
|
cookie: "__Host-better-auth.session_token=valid-token",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
expect(mockGetSession).toHaveBeenNthCalledWith(4, {
|
||||||
headers: {
|
headers: {
|
||||||
authorization: "Bearer valid-token",
|
authorization: "Bearer valid-token",
|
||||||
},
|
},
|
||||||
@@ -128,14 +486,264 @@ describe("AuthService", () => {
|
|||||||
expect(result).toBeNull();
|
expect(result).toBeNull();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should return null and log error on verification failure", async () => {
|
it("should return null for 'invalid token' auth error", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Invalid token provided"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("bad-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for 'expired' auth error", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Token expired"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("expired-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for 'session not found' auth error", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Session not found"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("missing-session");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for 'unauthorized' auth error", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Unauthorized"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("unauth-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for 'invalid session' auth error", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Invalid session"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("invalid-session");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for 'session expired' auth error", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Session expired"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("expired-session");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for bare 'unauthorized' (exact match)", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("unauthorized"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("unauth-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for bare 'expired' (exact match)", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("expired"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("expired-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should re-throw 'certificate has expired' as infrastructure error (not auth)", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("certificate has expired"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
await expect(service.verifySession("any-token")).rejects.toThrow("certificate has expired");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should re-throw 'Unauthorized: Access denied for user' as infrastructure error (not auth)", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi
|
||||||
|
.fn()
|
||||||
|
.mockRejectedValue(new Error("Unauthorized: Access denied for user"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
await expect(service.verifySession("any-token")).rejects.toThrow(
|
||||||
|
"Unauthorized: Access denied for user"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null when a non-Error value is thrown", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue("string-error");
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("any-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null when getSession throws a non-Error value (string)", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue("some error");
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("any-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null when getSession throws a non-Error value (object)", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue({ code: "ERR_UNKNOWN" });
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const result = await service.verifySession("any-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should re-throw unexpected errors that are not known auth errors", async () => {
|
||||||
const auth = service.getAuth();
|
const auth = service.getAuth();
|
||||||
const mockGetSession = vi.fn().mockRejectedValue(new Error("Verification failed"));
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Verification failed"));
|
||||||
auth.api = { getSession: mockGetSession } as any;
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
const result = await service.verifySession("error-token");
|
await expect(service.verifySession("error-token")).rejects.toThrow("Verification failed");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should re-throw Prisma infrastructure errors", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const prismaError = new Error("connect ECONNREFUSED 127.0.0.1:5432");
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(prismaError);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
await expect(service.verifySession("any-token")).rejects.toThrow("ECONNREFUSED");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should re-throw timeout errors as infrastructure errors", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const timeoutError = new Error("Connection timeout after 5000ms");
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(timeoutError);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
await expect(service.verifySession("any-token")).rejects.toThrow("timeout");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should re-throw errors with Prisma-prefixed constructor name", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
class PrismaClientKnownRequestError extends Error {
|
||||||
|
constructor(message: string) {
|
||||||
|
super(message);
|
||||||
|
this.name = "PrismaClientKnownRequestError";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const prismaError = new PrismaClientKnownRequestError("Database connection lost");
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(prismaError);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
await expect(service.verifySession("any-token")).rejects.toThrow("Database connection lost");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should redact Bearer tokens from logged error messages", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const errorWithToken = new Error(
|
||||||
|
"Request failed: Bearer eyJhbGciOiJIUzI1NiJ9.secret-payload in header"
|
||||||
|
);
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(errorWithToken);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const loggerError = vi.spyOn(service["logger"], "error");
|
||||||
|
|
||||||
|
await expect(service.verifySession("any-token")).rejects.toThrow();
|
||||||
|
|
||||||
|
expect(loggerError).toHaveBeenCalledWith(
|
||||||
|
"Session verification failed due to unexpected error",
|
||||||
|
expect.stringContaining("Bearer [REDACTED]")
|
||||||
|
);
|
||||||
|
expect(loggerError).toHaveBeenCalledWith(
|
||||||
|
"Session verification failed due to unexpected error",
|
||||||
|
expect.not.stringContaining("eyJhbGciOiJIUzI1NiJ9")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should redact Bearer tokens from error stack traces", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const errorWithToken = new Error("Something went wrong");
|
||||||
|
errorWithToken.stack =
|
||||||
|
"Error: Something went wrong\n at fetch (Bearer abc123-secret-token)\n at verifySession";
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(errorWithToken);
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const loggerError = vi.spyOn(service["logger"], "error");
|
||||||
|
|
||||||
|
await expect(service.verifySession("any-token")).rejects.toThrow();
|
||||||
|
|
||||||
|
expect(loggerError).toHaveBeenCalledWith(
|
||||||
|
"Session verification failed due to unexpected error",
|
||||||
|
expect.stringContaining("Bearer [REDACTED]")
|
||||||
|
);
|
||||||
|
expect(loggerError).toHaveBeenCalledWith(
|
||||||
|
"Session verification failed due to unexpected error",
|
||||||
|
expect.not.stringContaining("abc123-secret-token")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should warn when a non-Error string value is thrown", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue("string-error");
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||||
|
|
||||||
|
const result = await service.verifySession("any-token");
|
||||||
|
|
||||||
expect(result).toBeNull();
|
expect(result).toBeNull();
|
||||||
|
expect(loggerWarn).toHaveBeenCalledWith(
|
||||||
|
"Session verification received non-Error thrown value",
|
||||||
|
"string-error"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should warn with JSON when a non-Error object is thrown", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue({ code: "ERR_UNKNOWN" });
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||||
|
|
||||||
|
const result = await service.verifySession("any-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
expect(loggerWarn).toHaveBeenCalledWith(
|
||||||
|
"Session verification received non-Error thrown value",
|
||||||
|
JSON.stringify({ code: "ERR_UNKNOWN" })
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not warn for expected auth errors (Error instances)", async () => {
|
||||||
|
const auth = service.getAuth();
|
||||||
|
const mockGetSession = vi.fn().mockRejectedValue(new Error("Invalid token provided"));
|
||||||
|
auth.api = { getSession: mockGetSession } as any;
|
||||||
|
|
||||||
|
const loggerWarn = vi.spyOn(service["logger"], "warn");
|
||||||
|
|
||||||
|
const result = await service.verifySession("bad-token");
|
||||||
|
|
||||||
|
expect(result).toBeNull();
|
||||||
|
expect(loggerWarn).not.toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,17 +1,49 @@
|
|||||||
import { Injectable, Logger } from "@nestjs/common";
|
import { Injectable, Logger } from "@nestjs/common";
|
||||||
import type { PrismaClient } from "@prisma/client";
|
import type { PrismaClient } from "@prisma/client";
|
||||||
|
import type { IncomingMessage, ServerResponse } from "http";
|
||||||
|
import { toNodeHandler } from "better-auth/node";
|
||||||
|
import type { AuthConfigResponse, AuthProviderConfig } from "@mosaic/shared";
|
||||||
import { PrismaService } from "../prisma/prisma.service";
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
import { createAuth, type Auth } from "./auth.config";
|
import { createAuth, isOidcEnabled, type Auth } from "./auth.config";
|
||||||
|
|
||||||
|
/** Duration in milliseconds to cache the OIDC health check result */
|
||||||
|
const OIDC_HEALTH_CACHE_TTL_MS = 30_000;
|
||||||
|
|
||||||
|
/** Timeout in milliseconds for the OIDC discovery URL fetch */
|
||||||
|
const OIDC_HEALTH_TIMEOUT_MS = 2_000;
|
||||||
|
|
||||||
|
/** Number of consecutive health-check failures before escalating to error level */
|
||||||
|
const HEALTH_ESCALATION_THRESHOLD = 3;
|
||||||
|
|
||||||
|
/** Verified session shape returned by BetterAuth's getSession */
|
||||||
|
interface VerifiedSession {
|
||||||
|
user: Record<string, unknown>;
|
||||||
|
session: Record<string, unknown>;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface SessionHeaderCandidate {
|
||||||
|
headers: Record<string, string>;
|
||||||
|
}
|
||||||
|
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class AuthService {
|
export class AuthService {
|
||||||
private readonly logger = new Logger(AuthService.name);
|
private readonly logger = new Logger(AuthService.name);
|
||||||
private readonly auth: Auth;
|
private readonly auth: Auth;
|
||||||
|
private readonly nodeHandler: (req: IncomingMessage, res: ServerResponse) => Promise<void>;
|
||||||
|
|
||||||
|
/** Timestamp of the last OIDC health check */
|
||||||
|
private lastHealthCheck = 0;
|
||||||
|
/** Cached result of the last OIDC health check */
|
||||||
|
private lastHealthResult = false;
|
||||||
|
/** Consecutive OIDC health check failure count for log-level escalation */
|
||||||
|
private consecutiveHealthFailures = 0;
|
||||||
|
|
||||||
constructor(private readonly prisma: PrismaService) {
|
constructor(private readonly prisma: PrismaService) {
|
||||||
// PrismaService extends PrismaClient and is compatible with BetterAuth's adapter
|
// PrismaService extends PrismaClient and is compatible with BetterAuth's adapter
|
||||||
// Cast is safe as PrismaService provides all required PrismaClient methods
|
// Cast is safe as PrismaService provides all required PrismaClient methods
|
||||||
|
// TODO(#411): BetterAuth returns opaque types — replace when upstream exports typed interfaces
|
||||||
this.auth = createAuth(this.prisma as unknown as PrismaClient);
|
this.auth = createAuth(this.prisma as unknown as PrismaClient);
|
||||||
|
this.nodeHandler = toNodeHandler(this.auth);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -21,6 +53,14 @@ export class AuthService {
|
|||||||
return this.auth;
|
return this.auth;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get Node.js-compatible request handler for BetterAuth.
|
||||||
|
* Wraps BetterAuth's Web API handler to work with Express/Node.js req/res.
|
||||||
|
*/
|
||||||
|
getNodeHandler(): (req: IncomingMessage, res: ServerResponse) => Promise<void> {
|
||||||
|
return this.nodeHandler;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get user by ID
|
* Get user by ID
|
||||||
*/
|
*/
|
||||||
@@ -63,32 +103,159 @@ export class AuthService {
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Verify session token
|
* Verify session token
|
||||||
* Returns session data if valid, null if invalid or expired
|
* Returns session data if valid, null if invalid or expired.
|
||||||
|
* Only known-safe auth errors return null; everything else propagates as 500.
|
||||||
*/
|
*/
|
||||||
async verifySession(
|
async verifySession(token: string): Promise<VerifiedSession | null> {
|
||||||
token: string
|
let sawNonError = false;
|
||||||
): Promise<{ user: Record<string, unknown>; session: Record<string, unknown> } | null> {
|
|
||||||
try {
|
for (const candidate of this.buildSessionHeaderCandidates(token)) {
|
||||||
const session = await this.auth.api.getSession({
|
try {
|
||||||
|
// TODO(#411): BetterAuth getSession returns opaque types — replace when upstream exports typed interfaces
|
||||||
|
const session = await this.auth.api.getSession(candidate);
|
||||||
|
|
||||||
|
if (!session) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
user: session.user as Record<string, unknown>,
|
||||||
|
session: session.session as Record<string, unknown>,
|
||||||
|
};
|
||||||
|
} catch (error: unknown) {
|
||||||
|
if (error instanceof Error) {
|
||||||
|
if (this.isExpectedAuthError(error.message)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Infrastructure or unexpected — propagate as 500
|
||||||
|
const safeMessage = (error.stack ?? error.message).replace(
|
||||||
|
/Bearer\s+\S+/gi,
|
||||||
|
"Bearer [REDACTED]"
|
||||||
|
);
|
||||||
|
this.logger.error("Session verification failed due to unexpected error", safeMessage);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Non-Error thrown values — log once for observability, treat as auth failure
|
||||||
|
if (!sawNonError) {
|
||||||
|
const errorDetail = typeof error === "string" ? error : JSON.stringify(error);
|
||||||
|
this.logger.warn("Session verification received non-Error thrown value", errorDetail);
|
||||||
|
sawNonError = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
private buildSessionHeaderCandidates(token: string): SessionHeaderCandidate[] {
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
cookie: `__Secure-better-auth.session_token=${token}`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
cookie: `better-auth.session_token=${token}`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
cookie: `__Host-better-auth.session_token=${token}`,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
headers: {
|
headers: {
|
||||||
authorization: `Bearer ${token}`,
|
authorization: `Bearer ${token}`,
|
||||||
},
|
},
|
||||||
|
},
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
private isExpectedAuthError(message: string): boolean {
|
||||||
|
const normalized = message.toLowerCase();
|
||||||
|
return (
|
||||||
|
normalized.includes("invalid token") ||
|
||||||
|
normalized.includes("token expired") ||
|
||||||
|
normalized.includes("session expired") ||
|
||||||
|
normalized.includes("session not found") ||
|
||||||
|
normalized.includes("invalid session") ||
|
||||||
|
normalized === "unauthorized" ||
|
||||||
|
normalized === "expired"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the OIDC provider (Authentik) is reachable by fetching the discovery URL.
|
||||||
|
* Results are cached for 30 seconds to prevent repeated network calls.
|
||||||
|
*
|
||||||
|
* @returns true if the provider responds with an HTTP 2xx status, false otherwise
|
||||||
|
*/
|
||||||
|
async isOidcProviderReachable(): Promise<boolean> {
|
||||||
|
const now = Date.now();
|
||||||
|
|
||||||
|
// Return cached result if still valid
|
||||||
|
if (now - this.lastHealthCheck < OIDC_HEALTH_CACHE_TTL_MS) {
|
||||||
|
this.logger.debug("OIDC health check: returning cached result");
|
||||||
|
return this.lastHealthResult;
|
||||||
|
}
|
||||||
|
|
||||||
|
const discoveryUrl = `${process.env.OIDC_ISSUER ?? ""}.well-known/openid-configuration`;
|
||||||
|
this.logger.debug(`OIDC health check: fetching ${discoveryUrl}`);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(discoveryUrl, {
|
||||||
|
signal: AbortSignal.timeout(OIDC_HEALTH_TIMEOUT_MS),
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!session) {
|
this.lastHealthCheck = Date.now();
|
||||||
return null;
|
this.lastHealthResult = response.ok;
|
||||||
|
|
||||||
|
if (response.ok) {
|
||||||
|
if (this.consecutiveHealthFailures > 0) {
|
||||||
|
this.logger.log(
|
||||||
|
`OIDC provider recovered after ${String(this.consecutiveHealthFailures)} consecutive failure(s)`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
this.consecutiveHealthFailures = 0;
|
||||||
|
} else {
|
||||||
|
this.consecutiveHealthFailures++;
|
||||||
|
const logLevel =
|
||||||
|
this.consecutiveHealthFailures >= HEALTH_ESCALATION_THRESHOLD ? "error" : "warn";
|
||||||
|
this.logger[logLevel](
|
||||||
|
`OIDC provider returned non-OK status: ${String(response.status)} from ${discoveryUrl}`
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return this.lastHealthResult;
|
||||||
user: session.user as Record<string, unknown>,
|
} catch (error: unknown) {
|
||||||
session: session.session as Record<string, unknown>,
|
this.lastHealthCheck = Date.now();
|
||||||
};
|
this.lastHealthResult = false;
|
||||||
} catch (error) {
|
this.consecutiveHealthFailures++;
|
||||||
this.logger.error(
|
|
||||||
"Session verification failed",
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
error instanceof Error ? error.message : "Unknown error"
|
const logLevel =
|
||||||
);
|
this.consecutiveHealthFailures >= HEALTH_ESCALATION_THRESHOLD ? "error" : "warn";
|
||||||
return null;
|
this.logger[logLevel](`OIDC provider unreachable at ${discoveryUrl}: ${message}`);
|
||||||
|
|
||||||
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get authentication configuration for the frontend.
|
||||||
|
* Returns available auth providers so the UI can render login options dynamically.
|
||||||
|
* When OIDC is enabled, performs a health check to verify the provider is reachable.
|
||||||
|
*/
|
||||||
|
async getAuthConfig(): Promise<AuthConfigResponse> {
|
||||||
|
const providers: AuthProviderConfig[] = [{ id: "email", name: "Email", type: "credentials" }];
|
||||||
|
|
||||||
|
if (isOidcEnabled() && (await this.isOidcProviderReachable())) {
|
||||||
|
providers.push({ id: "authentik", name: "Authentik", type: "oauth" });
|
||||||
|
}
|
||||||
|
|
||||||
|
return { providers };
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,14 +1,13 @@
|
|||||||
import type { ExecutionContext } from "@nestjs/common";
|
import type { ExecutionContext } from "@nestjs/common";
|
||||||
import { createParamDecorator, UnauthorizedException } from "@nestjs/common";
|
import { createParamDecorator, UnauthorizedException } from "@nestjs/common";
|
||||||
import type { AuthUser } from "@mosaic/shared";
|
import type { AuthUser } from "@mosaic/shared";
|
||||||
|
import type { MaybeAuthenticatedRequest } from "../types/better-auth-request.interface";
|
||||||
interface RequestWithUser {
|
|
||||||
user?: AuthUser;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const CurrentUser = createParamDecorator(
|
export const CurrentUser = createParamDecorator(
|
||||||
(_data: unknown, ctx: ExecutionContext): AuthUser => {
|
(_data: unknown, ctx: ExecutionContext): AuthUser => {
|
||||||
const request = ctx.switchToHttp().getRequest<RequestWithUser>();
|
// Use MaybeAuthenticatedRequest because the decorator doesn't know
|
||||||
|
// whether AuthGuard ran — the null check provides defense-in-depth.
|
||||||
|
const request = ctx.switchToHttp().getRequest<MaybeAuthenticatedRequest>();
|
||||||
if (!request.user) {
|
if (!request.user) {
|
||||||
throw new UnauthorizedException("No authenticated user found on request");
|
throw new UnauthorizedException("No authenticated user found on request");
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,30 +1,39 @@
|
|||||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
import { Test, TestingModule } from "@nestjs/testing";
|
|
||||||
import { ExecutionContext, UnauthorizedException } from "@nestjs/common";
|
import { ExecutionContext, UnauthorizedException } from "@nestjs/common";
|
||||||
|
|
||||||
|
// Mock better-auth modules before importing AuthGuard (which imports AuthService)
|
||||||
|
vi.mock("better-auth/node", () => ({
|
||||||
|
toNodeHandler: vi.fn().mockReturnValue(vi.fn()),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth", () => ({
|
||||||
|
betterAuth: vi.fn().mockReturnValue({
|
||||||
|
handler: vi.fn(),
|
||||||
|
api: { getSession: vi.fn() },
|
||||||
|
}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth/adapters/prisma", () => ({
|
||||||
|
prismaAdapter: vi.fn().mockReturnValue({}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
vi.mock("better-auth/plugins", () => ({
|
||||||
|
genericOAuth: vi.fn().mockReturnValue({ id: "generic-oauth" }),
|
||||||
|
}));
|
||||||
|
|
||||||
import { AuthGuard } from "./auth.guard";
|
import { AuthGuard } from "./auth.guard";
|
||||||
import { AuthService } from "../auth.service";
|
import type { AuthService } from "../auth.service";
|
||||||
|
|
||||||
describe("AuthGuard", () => {
|
describe("AuthGuard", () => {
|
||||||
let guard: AuthGuard;
|
let guard: AuthGuard;
|
||||||
let authService: AuthService;
|
|
||||||
|
|
||||||
const mockAuthService = {
|
const mockAuthService = {
|
||||||
verifySession: vi.fn(),
|
verifySession: vi.fn(),
|
||||||
};
|
};
|
||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(() => {
|
||||||
const module: TestingModule = await Test.createTestingModule({
|
// Directly construct the guard with the mock to avoid NestJS DI issues
|
||||||
providers: [
|
guard = new AuthGuard(mockAuthService as unknown as AuthService);
|
||||||
AuthGuard,
|
|
||||||
{
|
|
||||||
provide: AuthService,
|
|
||||||
useValue: mockAuthService,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}).compile();
|
|
||||||
|
|
||||||
guard = module.get<AuthGuard>(AuthGuard);
|
|
||||||
authService = module.get<AuthService>(AuthService);
|
|
||||||
|
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
});
|
});
|
||||||
@@ -147,17 +156,134 @@ describe("AuthGuard", () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should throw UnauthorizedException if session verification fails", async () => {
|
it("should propagate non-auth errors as-is (not wrap as 401)", async () => {
|
||||||
mockAuthService.verifySession.mockRejectedValue(new Error("Verification failed"));
|
const infraError = new Error("connect ECONNREFUSED 127.0.0.1:5432");
|
||||||
|
mockAuthService.verifySession.mockRejectedValue(infraError);
|
||||||
|
|
||||||
const context = createMockExecutionContext({
|
const context = createMockExecutionContext({
|
||||||
authorization: "Bearer error-token",
|
authorization: "Bearer error-token",
|
||||||
});
|
});
|
||||||
|
|
||||||
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
await expect(guard.canActivate(context)).rejects.toThrow(infraError);
|
||||||
await expect(guard.canActivate(context)).rejects.toThrow("Authentication failed");
|
await expect(guard.canActivate(context)).rejects.not.toBeInstanceOf(UnauthorizedException);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("should propagate database errors so GlobalExceptionFilter returns 500", async () => {
|
||||||
|
const dbError = new Error("PrismaClientKnownRequestError: Connection refused");
|
||||||
|
mockAuthService.verifySession.mockRejectedValue(dbError);
|
||||||
|
|
||||||
|
const context = createMockExecutionContext({
|
||||||
|
authorization: "Bearer valid-token",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(dbError);
|
||||||
|
await expect(guard.canActivate(context)).rejects.not.toBeInstanceOf(UnauthorizedException);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should propagate timeout errors so GlobalExceptionFilter returns 503", async () => {
|
||||||
|
const timeoutError = new Error("Connection timeout after 5000ms");
|
||||||
|
mockAuthService.verifySession.mockRejectedValue(timeoutError);
|
||||||
|
|
||||||
|
const context = createMockExecutionContext({
|
||||||
|
authorization: "Bearer valid-token",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(timeoutError);
|
||||||
|
await expect(guard.canActivate(context)).rejects.not.toBeInstanceOf(UnauthorizedException);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("user data validation", () => {
|
||||||
|
const mockSession = {
|
||||||
|
id: "session-123",
|
||||||
|
token: "session-token",
|
||||||
|
expiresAt: new Date(Date.now() + 86400000),
|
||||||
|
};
|
||||||
|
|
||||||
|
it("should throw UnauthorizedException when user is missing id", async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({
|
||||||
|
user: { email: "a@b.com", name: "Test" },
|
||||||
|
session: mockSession,
|
||||||
|
});
|
||||||
|
|
||||||
|
const context = createMockExecutionContext({
|
||||||
|
authorization: "Bearer valid-token",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(
|
||||||
|
"Invalid user data in session"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw UnauthorizedException when user is missing email", async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({
|
||||||
|
user: { id: "1", name: "Test" },
|
||||||
|
session: mockSession,
|
||||||
|
});
|
||||||
|
|
||||||
|
const context = createMockExecutionContext({
|
||||||
|
authorization: "Bearer valid-token",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(
|
||||||
|
"Invalid user data in session"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw UnauthorizedException when user is missing name", async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({
|
||||||
|
user: { id: "1", email: "a@b.com" },
|
||||||
|
session: mockSession,
|
||||||
|
});
|
||||||
|
|
||||||
|
const context = createMockExecutionContext({
|
||||||
|
authorization: "Bearer valid-token",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(
|
||||||
|
"Invalid user data in session"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw UnauthorizedException when user is a string", async () => {
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({
|
||||||
|
user: "not-an-object",
|
||||||
|
session: mockSession,
|
||||||
|
});
|
||||||
|
|
||||||
|
const context = createMockExecutionContext({
|
||||||
|
authorization: "Bearer valid-token",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(UnauthorizedException);
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(
|
||||||
|
"Invalid user data in session"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should reject when user is null (typeof null === 'object' causes TypeError on 'in' operator)", async () => {
|
||||||
|
// Note: typeof null === "object" in JS, so the guard's typeof check passes
|
||||||
|
// but "id" in null throws TypeError. The catch block propagates non-auth errors as-is.
|
||||||
|
mockAuthService.verifySession.mockResolvedValue({
|
||||||
|
user: null,
|
||||||
|
session: mockSession,
|
||||||
|
});
|
||||||
|
|
||||||
|
const context = createMockExecutionContext({
|
||||||
|
authorization: "Bearer valid-token",
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(guard.canActivate(context)).rejects.toThrow(TypeError);
|
||||||
|
await expect(guard.canActivate(context)).rejects.not.toBeInstanceOf(
|
||||||
|
UnauthorizedException
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("request attachment", () => {
|
||||||
it("should attach user and session to request on success", async () => {
|
it("should attach user and session to request on success", async () => {
|
||||||
mockAuthService.verifySession.mockResolvedValue(mockSessionData);
|
mockAuthService.verifySession.mockResolvedValue(mockSessionData);
|
||||||
|
|
||||||
|
|||||||
@@ -1,23 +1,22 @@
|
|||||||
import { Injectable, CanActivate, ExecutionContext, UnauthorizedException } from "@nestjs/common";
|
import {
|
||||||
|
Injectable,
|
||||||
|
CanActivate,
|
||||||
|
ExecutionContext,
|
||||||
|
UnauthorizedException,
|
||||||
|
Logger,
|
||||||
|
} from "@nestjs/common";
|
||||||
import { AuthService } from "../auth.service";
|
import { AuthService } from "../auth.service";
|
||||||
import type { AuthUser } from "@mosaic/shared";
|
import type { AuthUser } from "@mosaic/shared";
|
||||||
|
import type { MaybeAuthenticatedRequest } from "../types/better-auth-request.interface";
|
||||||
/**
|
|
||||||
* Request type with authentication context
|
|
||||||
*/
|
|
||||||
interface AuthRequest {
|
|
||||||
user?: AuthUser;
|
|
||||||
session?: Record<string, unknown>;
|
|
||||||
headers: Record<string, string | string[] | undefined>;
|
|
||||||
cookies?: Record<string, string>;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class AuthGuard implements CanActivate {
|
export class AuthGuard implements CanActivate {
|
||||||
|
private readonly logger = new Logger(AuthGuard.name);
|
||||||
|
|
||||||
constructor(private readonly authService: AuthService) {}
|
constructor(private readonly authService: AuthService) {}
|
||||||
|
|
||||||
async canActivate(context: ExecutionContext): Promise<boolean> {
|
async canActivate(context: ExecutionContext): Promise<boolean> {
|
||||||
const request = context.switchToHttp().getRequest<AuthRequest>();
|
const request = context.switchToHttp().getRequest<MaybeAuthenticatedRequest>();
|
||||||
|
|
||||||
// Try to get token from either cookie (preferred) or Authorization header
|
// Try to get token from either cookie (preferred) or Authorization header
|
||||||
const token = this.extractToken(request);
|
const token = this.extractToken(request);
|
||||||
@@ -44,18 +43,19 @@ export class AuthGuard implements CanActivate {
|
|||||||
|
|
||||||
return true;
|
return true;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
// Re-throw if it's already an UnauthorizedException
|
|
||||||
if (error instanceof UnauthorizedException) {
|
if (error instanceof UnauthorizedException) {
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
throw new UnauthorizedException("Authentication failed");
|
// Infrastructure errors (DB down, connection refused, timeouts) must propagate
|
||||||
|
// as 500/503 via GlobalExceptionFilter — never mask as 401
|
||||||
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Extract token from cookie (preferred) or Authorization header
|
* Extract token from cookie (preferred) or Authorization header
|
||||||
*/
|
*/
|
||||||
private extractToken(request: AuthRequest): string | undefined {
|
private extractToken(request: MaybeAuthenticatedRequest): string | undefined {
|
||||||
// Try cookie first (BetterAuth default)
|
// Try cookie first (BetterAuth default)
|
||||||
const cookieToken = this.extractTokenFromCookie(request);
|
const cookieToken = this.extractTokenFromCookie(request);
|
||||||
if (cookieToken) {
|
if (cookieToken) {
|
||||||
@@ -67,21 +67,39 @@ export class AuthGuard implements CanActivate {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Extract token from cookie (BetterAuth stores session token in better-auth.session_token cookie)
|
* Extract token from cookie.
|
||||||
|
* BetterAuth may prefix the cookie name with "__Secure-" when running on HTTPS.
|
||||||
*/
|
*/
|
||||||
private extractTokenFromCookie(request: AuthRequest): string | undefined {
|
private extractTokenFromCookie(request: MaybeAuthenticatedRequest): string | undefined {
|
||||||
if (!request.cookies) {
|
// Express types `cookies` as `any`; cast to a known shape for type safety.
|
||||||
|
const cookies = request.cookies as Record<string, string> | undefined;
|
||||||
|
if (!cookies) {
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
// BetterAuth uses 'better-auth.session_token' as the cookie name by default
|
// BetterAuth default cookie name is "better-auth.session_token"
|
||||||
return request.cookies["better-auth.session_token"];
|
// When Secure cookies are enabled, BetterAuth prefixes with "__Secure-".
|
||||||
|
const candidates = [
|
||||||
|
"__Secure-better-auth.session_token",
|
||||||
|
"better-auth.session_token",
|
||||||
|
"__Host-better-auth.session_token",
|
||||||
|
] as const;
|
||||||
|
|
||||||
|
for (const name of candidates) {
|
||||||
|
const token = cookies[name];
|
||||||
|
if (token) {
|
||||||
|
this.logger.debug(`Session cookie found: ${name}`);
|
||||||
|
return token;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Extract token from Authorization header (Bearer token)
|
* Extract token from Authorization header (Bearer token)
|
||||||
*/
|
*/
|
||||||
private extractTokenFromHeader(request: AuthRequest): string | undefined {
|
private extractTokenFromHeader(request: MaybeAuthenticatedRequest): string | undefined {
|
||||||
const authHeader = request.headers.authorization;
|
const authHeader = request.headers.authorization;
|
||||||
if (typeof authHeader !== "string") {
|
if (typeof authHeader !== "string") {
|
||||||
return undefined;
|
return undefined;
|
||||||
|
|||||||
@@ -1,11 +1,14 @@
|
|||||||
/**
|
/**
|
||||||
* BetterAuth Request Type
|
* Unified request types for authentication context.
|
||||||
*
|
*
|
||||||
* BetterAuth expects a Request object compatible with the Fetch API standard.
|
* Replaces the previously scattered interfaces:
|
||||||
* This extends the web standard Request interface with additional properties
|
* - RequestWithSession (auth.controller.ts)
|
||||||
* that may be present in the Express request object at runtime.
|
* - AuthRequest (auth.guard.ts)
|
||||||
|
* - BetterAuthRequest (this file, removed)
|
||||||
|
* - RequestWithUser (current-user.decorator.ts)
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
import type { Request } from "express";
|
||||||
import type { AuthUser } from "@mosaic/shared";
|
import type { AuthUser } from "@mosaic/shared";
|
||||||
|
|
||||||
// Re-export AuthUser for use in other modules
|
// Re-export AuthUser for use in other modules
|
||||||
@@ -22,19 +25,21 @@ export interface RequestSession {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Web standard Request interface extended with Express-specific properties
|
* Request that may or may not have auth data (before guard runs).
|
||||||
* This matches the Fetch API Request specification that BetterAuth expects.
|
* Used by AuthGuard and other middleware that processes requests
|
||||||
|
* before authentication is confirmed.
|
||||||
*/
|
*/
|
||||||
export interface BetterAuthRequest extends Request {
|
export interface MaybeAuthenticatedRequest extends Request {
|
||||||
// Express route parameters
|
|
||||||
params?: Record<string, string>;
|
|
||||||
|
|
||||||
// Express query string parameters
|
|
||||||
query?: Record<string, string | string[]>;
|
|
||||||
|
|
||||||
// Session data attached by AuthGuard after successful authentication
|
|
||||||
session?: RequestSession;
|
|
||||||
|
|
||||||
// Authenticated user attached by AuthGuard
|
|
||||||
user?: AuthUser;
|
user?: AuthUser;
|
||||||
|
session?: Record<string, unknown>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Request with authenticated user attached by AuthGuard.
|
||||||
|
* After AuthGuard runs, user and session are guaranteed present.
|
||||||
|
* Use this type in controllers/decorators that sit behind AuthGuard.
|
||||||
|
*/
|
||||||
|
export interface AuthenticatedRequest extends Request {
|
||||||
|
user: AuthUser;
|
||||||
|
session: RequestSession;
|
||||||
}
|
}
|
||||||
|
|||||||
15
apps/api/src/bridge/bridge.constants.ts
Normal file
15
apps/api/src/bridge/bridge.constants.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
/**
|
||||||
|
* Bridge Module Constants
|
||||||
|
*
|
||||||
|
* Injection tokens for the bridge module.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Injection token for the array of active IChatProvider instances.
|
||||||
|
*
|
||||||
|
* Use this token to inject all configured chat providers:
|
||||||
|
* ```
|
||||||
|
* @Inject(CHAT_PROVIDERS) private readonly chatProviders: IChatProvider[]
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export const CHAT_PROVIDERS = "CHAT_PROVIDERS";
|
||||||
@@ -1,10 +1,13 @@
|
|||||||
import { Test, TestingModule } from "@nestjs/testing";
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
import { BridgeModule } from "./bridge.module";
|
import { BridgeModule } from "./bridge.module";
|
||||||
import { DiscordService } from "./discord/discord.service";
|
import { DiscordService } from "./discord/discord.service";
|
||||||
|
import { MatrixService } from "./matrix/matrix.service";
|
||||||
import { StitcherService } from "../stitcher/stitcher.service";
|
import { StitcherService } from "../stitcher/stitcher.service";
|
||||||
import { PrismaService } from "../prisma/prisma.service";
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
import { BullMqService } from "../bullmq/bullmq.service";
|
import { BullMqService } from "../bullmq/bullmq.service";
|
||||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
import { CHAT_PROVIDERS } from "./bridge.constants";
|
||||||
|
import type { IChatProvider } from "./interfaces";
|
||||||
|
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||||
|
|
||||||
// Mock discord.js
|
// Mock discord.js
|
||||||
const mockReadyCallbacks: Array<() => void> = [];
|
const mockReadyCallbacks: Array<() => void> = [];
|
||||||
@@ -53,20 +56,93 @@ vi.mock("discord.js", () => {
|
|||||||
};
|
};
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("BridgeModule", () => {
|
// Mock matrix-bot-sdk
|
||||||
let module: TestingModule;
|
vi.mock("matrix-bot-sdk", () => {
|
||||||
|
return {
|
||||||
|
MatrixClient: class MockMatrixClient {
|
||||||
|
start = vi.fn().mockResolvedValue(undefined);
|
||||||
|
stop = vi.fn();
|
||||||
|
on = vi.fn();
|
||||||
|
sendMessage = vi.fn().mockResolvedValue("$mock-event-id");
|
||||||
|
},
|
||||||
|
SimpleFsStorageProvider: class MockStorage {
|
||||||
|
constructor(_path: string) {
|
||||||
|
// no-op
|
||||||
|
}
|
||||||
|
},
|
||||||
|
AutojoinRoomsMixin: {
|
||||||
|
setupOnClient: vi.fn(),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
beforeEach(async () => {
|
/**
|
||||||
// Set environment variables
|
* Saved environment variables to restore after each test
|
||||||
process.env.DISCORD_BOT_TOKEN = "test-token";
|
*/
|
||||||
process.env.DISCORD_GUILD_ID = "test-guild-id";
|
interface SavedEnvVars {
|
||||||
process.env.DISCORD_CONTROL_CHANNEL_ID = "test-channel-id";
|
DISCORD_BOT_TOKEN?: string;
|
||||||
|
DISCORD_GUILD_ID?: string;
|
||||||
|
DISCORD_CONTROL_CHANNEL_ID?: string;
|
||||||
|
MATRIX_ACCESS_TOKEN?: string;
|
||||||
|
MATRIX_HOMESERVER_URL?: string;
|
||||||
|
MATRIX_BOT_USER_ID?: string;
|
||||||
|
MATRIX_CONTROL_ROOM_ID?: string;
|
||||||
|
MATRIX_WORKSPACE_ID?: string;
|
||||||
|
ENCRYPTION_KEY?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("BridgeModule", () => {
|
||||||
|
let savedEnv: SavedEnvVars;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
// Save current env vars
|
||||||
|
savedEnv = {
|
||||||
|
DISCORD_BOT_TOKEN: process.env.DISCORD_BOT_TOKEN,
|
||||||
|
DISCORD_GUILD_ID: process.env.DISCORD_GUILD_ID,
|
||||||
|
DISCORD_CONTROL_CHANNEL_ID: process.env.DISCORD_CONTROL_CHANNEL_ID,
|
||||||
|
MATRIX_ACCESS_TOKEN: process.env.MATRIX_ACCESS_TOKEN,
|
||||||
|
MATRIX_HOMESERVER_URL: process.env.MATRIX_HOMESERVER_URL,
|
||||||
|
MATRIX_BOT_USER_ID: process.env.MATRIX_BOT_USER_ID,
|
||||||
|
MATRIX_CONTROL_ROOM_ID: process.env.MATRIX_CONTROL_ROOM_ID,
|
||||||
|
MATRIX_WORKSPACE_ID: process.env.MATRIX_WORKSPACE_ID,
|
||||||
|
ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Clear all bridge env vars
|
||||||
|
delete process.env.DISCORD_BOT_TOKEN;
|
||||||
|
delete process.env.DISCORD_GUILD_ID;
|
||||||
|
delete process.env.DISCORD_CONTROL_CHANNEL_ID;
|
||||||
|
delete process.env.MATRIX_ACCESS_TOKEN;
|
||||||
|
delete process.env.MATRIX_HOMESERVER_URL;
|
||||||
|
delete process.env.MATRIX_BOT_USER_ID;
|
||||||
|
delete process.env.MATRIX_CONTROL_ROOM_ID;
|
||||||
|
delete process.env.MATRIX_WORKSPACE_ID;
|
||||||
|
|
||||||
|
// Set encryption key (needed by StitcherService)
|
||||||
process.env.ENCRYPTION_KEY = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
|
process.env.ENCRYPTION_KEY = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
|
||||||
|
|
||||||
// Clear ready callbacks
|
// Clear ready callbacks
|
||||||
mockReadyCallbacks.length = 0;
|
mockReadyCallbacks.length = 0;
|
||||||
|
|
||||||
module = await Test.createTestingModule({
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
// Restore env vars
|
||||||
|
for (const [key, value] of Object.entries(savedEnv)) {
|
||||||
|
if (value === undefined) {
|
||||||
|
delete process.env[key];
|
||||||
|
} else {
|
||||||
|
process.env[key] = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper to compile a test module with BridgeModule
|
||||||
|
*/
|
||||||
|
async function compileModule(): Promise<TestingModule> {
|
||||||
|
return Test.createTestingModule({
|
||||||
imports: [BridgeModule],
|
imports: [BridgeModule],
|
||||||
})
|
})
|
||||||
.overrideProvider(PrismaService)
|
.overrideProvider(PrismaService)
|
||||||
@@ -74,24 +150,144 @@ describe("BridgeModule", () => {
|
|||||||
.overrideProvider(BullMqService)
|
.overrideProvider(BullMqService)
|
||||||
.useValue({})
|
.useValue({})
|
||||||
.compile();
|
.compile();
|
||||||
|
}
|
||||||
|
|
||||||
// Clear all mocks
|
/**
|
||||||
vi.clearAllMocks();
|
* Helper to set Discord env vars
|
||||||
|
*/
|
||||||
|
function setDiscordEnv(): void {
|
||||||
|
process.env.DISCORD_BOT_TOKEN = "test-discord-token";
|
||||||
|
process.env.DISCORD_GUILD_ID = "test-guild-id";
|
||||||
|
process.env.DISCORD_CONTROL_CHANNEL_ID = "test-channel-id";
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper to set Matrix env vars
|
||||||
|
*/
|
||||||
|
function setMatrixEnv(): void {
|
||||||
|
process.env.MATRIX_ACCESS_TOKEN = "test-matrix-token";
|
||||||
|
process.env.MATRIX_HOMESERVER_URL = "https://matrix.example.com";
|
||||||
|
process.env.MATRIX_BOT_USER_ID = "@bot:example.com";
|
||||||
|
process.env.MATRIX_CONTROL_ROOM_ID = "!room:example.com";
|
||||||
|
process.env.MATRIX_WORKSPACE_ID = "test-workspace-id";
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("with both Discord and Matrix configured", () => {
|
||||||
|
let module: TestingModule;
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
setDiscordEnv();
|
||||||
|
setMatrixEnv();
|
||||||
|
module = await compileModule();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should compile the module", () => {
|
||||||
|
expect(module).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should provide DiscordService", () => {
|
||||||
|
const discordService = module.get<DiscordService>(DiscordService);
|
||||||
|
expect(discordService).toBeDefined();
|
||||||
|
expect(discordService).toBeInstanceOf(DiscordService);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should provide MatrixService", () => {
|
||||||
|
const matrixService = module.get<MatrixService>(MatrixService);
|
||||||
|
expect(matrixService).toBeDefined();
|
||||||
|
expect(matrixService).toBeInstanceOf(MatrixService);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should provide CHAT_PROVIDERS with both providers", () => {
|
||||||
|
const chatProviders = module.get<IChatProvider[]>(CHAT_PROVIDERS);
|
||||||
|
expect(chatProviders).toBeDefined();
|
||||||
|
expect(chatProviders).toHaveLength(2);
|
||||||
|
expect(chatProviders[0]).toBeInstanceOf(DiscordService);
|
||||||
|
expect(chatProviders[1]).toBeInstanceOf(MatrixService);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should provide StitcherService via StitcherModule", () => {
|
||||||
|
const stitcherService = module.get<StitcherService>(StitcherService);
|
||||||
|
expect(stitcherService).toBeDefined();
|
||||||
|
expect(stitcherService).toBeInstanceOf(StitcherService);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should be defined", () => {
|
describe("with only Discord configured", () => {
|
||||||
expect(module).toBeDefined();
|
let module: TestingModule;
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
setDiscordEnv();
|
||||||
|
module = await compileModule();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should compile the module", () => {
|
||||||
|
expect(module).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should provide DiscordService", () => {
|
||||||
|
const discordService = module.get<DiscordService>(DiscordService);
|
||||||
|
expect(discordService).toBeDefined();
|
||||||
|
expect(discordService).toBeInstanceOf(DiscordService);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should provide CHAT_PROVIDERS with only Discord", () => {
|
||||||
|
const chatProviders = module.get<IChatProvider[]>(CHAT_PROVIDERS);
|
||||||
|
expect(chatProviders).toBeDefined();
|
||||||
|
expect(chatProviders).toHaveLength(1);
|
||||||
|
expect(chatProviders[0]).toBeInstanceOf(DiscordService);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should provide DiscordService", () => {
|
describe("with only Matrix configured", () => {
|
||||||
const discordService = module.get<DiscordService>(DiscordService);
|
let module: TestingModule;
|
||||||
expect(discordService).toBeDefined();
|
|
||||||
expect(discordService).toBeInstanceOf(DiscordService);
|
beforeEach(async () => {
|
||||||
|
setMatrixEnv();
|
||||||
|
module = await compileModule();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should compile the module", () => {
|
||||||
|
expect(module).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should provide MatrixService", () => {
|
||||||
|
const matrixService = module.get<MatrixService>(MatrixService);
|
||||||
|
expect(matrixService).toBeDefined();
|
||||||
|
expect(matrixService).toBeInstanceOf(MatrixService);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should provide CHAT_PROVIDERS with only Matrix", () => {
|
||||||
|
const chatProviders = module.get<IChatProvider[]>(CHAT_PROVIDERS);
|
||||||
|
expect(chatProviders).toBeDefined();
|
||||||
|
expect(chatProviders).toHaveLength(1);
|
||||||
|
expect(chatProviders[0]).toBeInstanceOf(MatrixService);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should provide StitcherService", () => {
|
describe("with neither bridge configured", () => {
|
||||||
const stitcherService = module.get<StitcherService>(StitcherService);
|
let module: TestingModule;
|
||||||
expect(stitcherService).toBeDefined();
|
|
||||||
expect(stitcherService).toBeInstanceOf(StitcherService);
|
beforeEach(async () => {
|
||||||
|
// No env vars set for either bridge
|
||||||
|
module = await compileModule();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should compile the module without errors", () => {
|
||||||
|
expect(module).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should provide CHAT_PROVIDERS as an empty array", () => {
|
||||||
|
const chatProviders = module.get<IChatProvider[]>(CHAT_PROVIDERS);
|
||||||
|
expect(chatProviders).toBeDefined();
|
||||||
|
expect(chatProviders).toHaveLength(0);
|
||||||
|
expect(Array.isArray(chatProviders)).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("CHAT_PROVIDERS token", () => {
|
||||||
|
it("should be a string constant", () => {
|
||||||
|
expect(CHAT_PROVIDERS).toBe("CHAT_PROVIDERS");
|
||||||
|
expect(typeof CHAT_PROVIDERS).toBe("string");
|
||||||
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,16 +1,81 @@
|
|||||||
import { Module } from "@nestjs/common";
|
import { Logger, Module } from "@nestjs/common";
|
||||||
import { DiscordService } from "./discord/discord.service";
|
import { DiscordService } from "./discord/discord.service";
|
||||||
|
import { MatrixService } from "./matrix/matrix.service";
|
||||||
|
import { MatrixRoomService } from "./matrix/matrix-room.service";
|
||||||
|
import { MatrixStreamingService } from "./matrix/matrix-streaming.service";
|
||||||
|
import { CommandParserService } from "./parser/command-parser.service";
|
||||||
import { StitcherModule } from "../stitcher/stitcher.module";
|
import { StitcherModule } from "../stitcher/stitcher.module";
|
||||||
|
import { CHAT_PROVIDERS } from "./bridge.constants";
|
||||||
|
import type { IChatProvider } from "./interfaces";
|
||||||
|
|
||||||
|
const logger = new Logger("BridgeModule");
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Bridge Module - Chat platform integrations
|
* Bridge Module - Chat platform integrations
|
||||||
*
|
*
|
||||||
* Provides integration with chat platforms (Discord, Slack, Matrix, etc.)
|
* Provides integration with chat platforms (Discord, Matrix, etc.)
|
||||||
* for controlling Mosaic Stack via chat commands.
|
* for controlling Mosaic Stack via chat commands.
|
||||||
|
*
|
||||||
|
* Both services are always registered as providers, but the CHAT_PROVIDERS
|
||||||
|
* injection token only includes bridges whose environment variables are set:
|
||||||
|
* - Discord: included when DISCORD_BOT_TOKEN is set
|
||||||
|
* - Matrix: included when MATRIX_ACCESS_TOKEN is set
|
||||||
|
*
|
||||||
|
* Both bridges can run simultaneously, and no error occurs if neither is configured.
|
||||||
|
* Consumers should inject CHAT_PROVIDERS for bridge-agnostic access to all active providers.
|
||||||
|
*
|
||||||
|
* CommandParserService provides shared, platform-agnostic command parsing.
|
||||||
|
* MatrixRoomService handles workspace-to-Matrix-room mapping.
|
||||||
*/
|
*/
|
||||||
@Module({
|
@Module({
|
||||||
imports: [StitcherModule],
|
imports: [StitcherModule],
|
||||||
providers: [DiscordService],
|
providers: [
|
||||||
exports: [DiscordService],
|
CommandParserService,
|
||||||
|
MatrixRoomService,
|
||||||
|
MatrixStreamingService,
|
||||||
|
DiscordService,
|
||||||
|
MatrixService,
|
||||||
|
{
|
||||||
|
provide: CHAT_PROVIDERS,
|
||||||
|
useFactory: (discord: DiscordService, matrix: MatrixService): IChatProvider[] => {
|
||||||
|
const providers: IChatProvider[] = [];
|
||||||
|
|
||||||
|
if (process.env.DISCORD_BOT_TOKEN) {
|
||||||
|
providers.push(discord);
|
||||||
|
logger.log("Discord bridge enabled (DISCORD_BOT_TOKEN detected)");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.MATRIX_ACCESS_TOKEN) {
|
||||||
|
const missingVars = [
|
||||||
|
"MATRIX_HOMESERVER_URL",
|
||||||
|
"MATRIX_BOT_USER_ID",
|
||||||
|
"MATRIX_WORKSPACE_ID",
|
||||||
|
].filter((v) => !process.env[v]);
|
||||||
|
if (missingVars.length > 0) {
|
||||||
|
logger.warn(
|
||||||
|
`Matrix bridge enabled but missing: ${missingVars.join(", ")}. connect() will fail.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
providers.push(matrix);
|
||||||
|
logger.log("Matrix bridge enabled (MATRIX_ACCESS_TOKEN detected)");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (providers.length === 0) {
|
||||||
|
logger.warn("No chat bridges configured. Set DISCORD_BOT_TOKEN or MATRIX_ACCESS_TOKEN.");
|
||||||
|
}
|
||||||
|
|
||||||
|
return providers;
|
||||||
|
},
|
||||||
|
inject: [DiscordService, MatrixService],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
exports: [
|
||||||
|
DiscordService,
|
||||||
|
MatrixService,
|
||||||
|
MatrixRoomService,
|
||||||
|
MatrixStreamingService,
|
||||||
|
CommandParserService,
|
||||||
|
CHAT_PROVIDERS,
|
||||||
|
],
|
||||||
})
|
})
|
||||||
export class BridgeModule {}
|
export class BridgeModule {}
|
||||||
|
|||||||
@@ -187,6 +187,7 @@ describe("DiscordService", () => {
|
|||||||
await service.connect();
|
await service.connect();
|
||||||
await service.sendThreadMessage({
|
await service.sendThreadMessage({
|
||||||
threadId: "thread-123",
|
threadId: "thread-123",
|
||||||
|
channelId: "test-channel-id",
|
||||||
content: "Step completed",
|
content: "Step completed",
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -305,6 +305,7 @@ export class DiscordService implements IChatProvider {
|
|||||||
// Send confirmation to thread
|
// Send confirmation to thread
|
||||||
await this.sendThreadMessage({
|
await this.sendThreadMessage({
|
||||||
threadId,
|
threadId,
|
||||||
|
channelId: message.channelId,
|
||||||
content: `Job created: ${result.jobId}\nStatus: ${result.status}\nQueue: ${result.queueName}`,
|
content: `Job created: ${result.jobId}\nStatus: ${result.status}\nQueue: ${result.queueName}`,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -28,6 +28,7 @@ export interface ThreadCreateOptions {
|
|||||||
|
|
||||||
export interface ThreadMessageOptions {
|
export interface ThreadMessageOptions {
|
||||||
threadId: string;
|
threadId: string;
|
||||||
|
channelId: string;
|
||||||
content: string;
|
content: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -76,4 +77,17 @@ export interface IChatProvider {
|
|||||||
* Parse a command from a message
|
* Parse a command from a message
|
||||||
*/
|
*/
|
||||||
parseCommand(message: ChatMessage): ChatCommand | null;
|
parseCommand(message: ChatMessage): ChatCommand | null;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Edit an existing message in a channel.
|
||||||
|
*
|
||||||
|
* Optional method for providers that support message editing
|
||||||
|
* (e.g., Matrix via m.replace, Discord via message.edit).
|
||||||
|
* Used for streaming AI responses with incremental updates.
|
||||||
|
*
|
||||||
|
* @param channelId - The channel/room ID
|
||||||
|
* @param messageId - The original message/event ID to edit
|
||||||
|
* @param content - The updated message content
|
||||||
|
*/
|
||||||
|
editMessage?(channelId: string, messageId: string, content: string): Promise<void>;
|
||||||
}
|
}
|
||||||
|
|||||||
4
apps/api/src/bridge/matrix/index.ts
Normal file
4
apps/api/src/bridge/matrix/index.ts
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
export { MatrixService } from "./matrix.service";
|
||||||
|
export { MatrixRoomService } from "./matrix-room.service";
|
||||||
|
export { MatrixStreamingService } from "./matrix-streaming.service";
|
||||||
|
export type { StreamResponseOptions } from "./matrix-streaming.service";
|
||||||
1065
apps/api/src/bridge/matrix/matrix-bridge.integration.spec.ts
Normal file
1065
apps/api/src/bridge/matrix/matrix-bridge.integration.spec.ts
Normal file
File diff suppressed because it is too large
Load Diff
212
apps/api/src/bridge/matrix/matrix-room.service.spec.ts
Normal file
212
apps/api/src/bridge/matrix/matrix-room.service.spec.ts
Normal file
@@ -0,0 +1,212 @@
|
|||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { MatrixRoomService } from "./matrix-room.service";
|
||||||
|
import { MatrixService } from "./matrix.service";
|
||||||
|
import { PrismaService } from "../../prisma/prisma.service";
|
||||||
|
import { vi, describe, it, expect, beforeEach } from "vitest";
|
||||||
|
|
||||||
|
// Mock matrix-bot-sdk to avoid native module import errors
|
||||||
|
vi.mock("matrix-bot-sdk", () => {
|
||||||
|
return {
|
||||||
|
MatrixClient: class MockMatrixClient {},
|
||||||
|
SimpleFsStorageProvider: class MockStorageProvider {
|
||||||
|
constructor(_filename: string) {
|
||||||
|
// No-op for testing
|
||||||
|
}
|
||||||
|
},
|
||||||
|
AutojoinRoomsMixin: {
|
||||||
|
setupOnClient: vi.fn(),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("MatrixRoomService", () => {
|
||||||
|
let service: MatrixRoomService;
|
||||||
|
|
||||||
|
const mockCreateRoom = vi.fn().mockResolvedValue("!new-room:example.com");
|
||||||
|
|
||||||
|
const mockMatrixClient = {
|
||||||
|
createRoom: mockCreateRoom,
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockMatrixService = {
|
||||||
|
isConnected: vi.fn().mockReturnValue(true),
|
||||||
|
getClient: vi.fn().mockReturnValue(mockMatrixClient),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockPrismaService = {
|
||||||
|
workspace: {
|
||||||
|
findUnique: vi.fn(),
|
||||||
|
findFirst: vi.fn(),
|
||||||
|
update: vi.fn(),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
process.env.MATRIX_SERVER_NAME = "example.com";
|
||||||
|
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
MatrixRoomService,
|
||||||
|
{
|
||||||
|
provide: PrismaService,
|
||||||
|
useValue: mockPrismaService,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
provide: MatrixService,
|
||||||
|
useValue: mockMatrixService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
service = module.get<MatrixRoomService>(MatrixRoomService);
|
||||||
|
|
||||||
|
vi.clearAllMocks();
|
||||||
|
// Restore defaults after clearing
|
||||||
|
mockMatrixService.isConnected.mockReturnValue(true);
|
||||||
|
mockCreateRoom.mockResolvedValue("!new-room:example.com");
|
||||||
|
mockPrismaService.workspace.update.mockResolvedValue({});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("provisionRoom", () => {
|
||||||
|
it("should create a Matrix room and store the mapping", async () => {
|
||||||
|
const roomId = await service.provisionRoom(
|
||||||
|
"workspace-uuid-1",
|
||||||
|
"My Workspace",
|
||||||
|
"my-workspace"
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(roomId).toBe("!new-room:example.com");
|
||||||
|
|
||||||
|
expect(mockCreateRoom).toHaveBeenCalledWith({
|
||||||
|
name: "Mosaic: My Workspace",
|
||||||
|
room_alias_name: "mosaic-my-workspace",
|
||||||
|
topic: "Mosaic workspace: My Workspace",
|
||||||
|
preset: "private_chat",
|
||||||
|
visibility: "private",
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(mockPrismaService.workspace.update).toHaveBeenCalledWith({
|
||||||
|
where: { id: "workspace-uuid-1" },
|
||||||
|
data: { matrixRoomId: "!new-room:example.com" },
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null when Matrix is not configured (no MatrixService)", async () => {
|
||||||
|
// Create a service without MatrixService
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
MatrixRoomService,
|
||||||
|
{
|
||||||
|
provide: PrismaService,
|
||||||
|
useValue: mockPrismaService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
const serviceWithoutMatrix = module.get<MatrixRoomService>(MatrixRoomService);
|
||||||
|
|
||||||
|
const roomId = await serviceWithoutMatrix.provisionRoom(
|
||||||
|
"workspace-uuid-1",
|
||||||
|
"My Workspace",
|
||||||
|
"my-workspace"
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(roomId).toBeNull();
|
||||||
|
expect(mockCreateRoom).not.toHaveBeenCalled();
|
||||||
|
expect(mockPrismaService.workspace.update).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null when Matrix is not connected", async () => {
|
||||||
|
mockMatrixService.isConnected.mockReturnValue(false);
|
||||||
|
|
||||||
|
const roomId = await service.provisionRoom(
|
||||||
|
"workspace-uuid-1",
|
||||||
|
"My Workspace",
|
||||||
|
"my-workspace"
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(roomId).toBeNull();
|
||||||
|
expect(mockCreateRoom).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("getRoomForWorkspace", () => {
|
||||||
|
it("should return the room ID for a mapped workspace", async () => {
|
||||||
|
mockPrismaService.workspace.findUnique.mockResolvedValue({
|
||||||
|
matrixRoomId: "!mapped-room:example.com",
|
||||||
|
});
|
||||||
|
|
||||||
|
const roomId = await service.getRoomForWorkspace("workspace-uuid-1");
|
||||||
|
|
||||||
|
expect(roomId).toBe("!mapped-room:example.com");
|
||||||
|
expect(mockPrismaService.workspace.findUnique).toHaveBeenCalledWith({
|
||||||
|
where: { id: "workspace-uuid-1" },
|
||||||
|
select: { matrixRoomId: true },
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for an unmapped workspace", async () => {
|
||||||
|
mockPrismaService.workspace.findUnique.mockResolvedValue({
|
||||||
|
matrixRoomId: null,
|
||||||
|
});
|
||||||
|
|
||||||
|
const roomId = await service.getRoomForWorkspace("workspace-uuid-2");
|
||||||
|
|
||||||
|
expect(roomId).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for a non-existent workspace", async () => {
|
||||||
|
mockPrismaService.workspace.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
const roomId = await service.getRoomForWorkspace("non-existent-uuid");
|
||||||
|
|
||||||
|
expect(roomId).toBeNull();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("getWorkspaceForRoom", () => {
|
||||||
|
it("should return the workspace ID for a mapped room", async () => {
|
||||||
|
mockPrismaService.workspace.findFirst.mockResolvedValue({
|
||||||
|
id: "workspace-uuid-1",
|
||||||
|
});
|
||||||
|
|
||||||
|
const workspaceId = await service.getWorkspaceForRoom("!mapped-room:example.com");
|
||||||
|
|
||||||
|
expect(workspaceId).toBe("workspace-uuid-1");
|
||||||
|
expect(mockPrismaService.workspace.findFirst).toHaveBeenCalledWith({
|
||||||
|
where: { matrixRoomId: "!mapped-room:example.com" },
|
||||||
|
select: { id: true },
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for an unmapped room", async () => {
|
||||||
|
mockPrismaService.workspace.findFirst.mockResolvedValue(null);
|
||||||
|
|
||||||
|
const workspaceId = await service.getWorkspaceForRoom("!unknown-room:example.com");
|
||||||
|
|
||||||
|
expect(workspaceId).toBeNull();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("linkWorkspaceToRoom", () => {
|
||||||
|
it("should store the room mapping in the workspace", async () => {
|
||||||
|
await service.linkWorkspaceToRoom("workspace-uuid-1", "!existing-room:example.com");
|
||||||
|
|
||||||
|
expect(mockPrismaService.workspace.update).toHaveBeenCalledWith({
|
||||||
|
where: { id: "workspace-uuid-1" },
|
||||||
|
data: { matrixRoomId: "!existing-room:example.com" },
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("unlinkWorkspace", () => {
|
||||||
|
it("should remove the room mapping from the workspace", async () => {
|
||||||
|
await service.unlinkWorkspace("workspace-uuid-1");
|
||||||
|
|
||||||
|
expect(mockPrismaService.workspace.update).toHaveBeenCalledWith({
|
||||||
|
where: { id: "workspace-uuid-1" },
|
||||||
|
data: { matrixRoomId: null },
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
154
apps/api/src/bridge/matrix/matrix-room.service.ts
Normal file
154
apps/api/src/bridge/matrix/matrix-room.service.ts
Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
import { Injectable, Logger, Optional, Inject } from "@nestjs/common";
|
||||||
|
import { PrismaService } from "../../prisma/prisma.service";
|
||||||
|
import { MatrixService } from "./matrix.service";
|
||||||
|
import type { MatrixClient, RoomCreateOptions } from "matrix-bot-sdk";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MatrixRoomService - Workspace-to-Matrix-Room mapping and provisioning
|
||||||
|
*
|
||||||
|
* Responsibilities:
|
||||||
|
* - Provision Matrix rooms for Mosaic workspaces
|
||||||
|
* - Map workspaces to Matrix room IDs
|
||||||
|
* - Link/unlink existing rooms to workspaces
|
||||||
|
*
|
||||||
|
* Room provisioning creates a private Matrix room with:
|
||||||
|
* - Name: "Mosaic: {workspace_name}"
|
||||||
|
* - Alias: #mosaic-{workspace_slug}:{server_name}
|
||||||
|
* - Room ID stored in workspace.matrixRoomId
|
||||||
|
*/
|
||||||
|
@Injectable()
|
||||||
|
export class MatrixRoomService {
|
||||||
|
private readonly logger = new Logger(MatrixRoomService.name);
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
private readonly prisma: PrismaService,
|
||||||
|
@Optional() @Inject(MatrixService) private readonly matrixService: MatrixService | null
|
||||||
|
) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Provision a Matrix room for a workspace and store the mapping.
|
||||||
|
*
|
||||||
|
* @param workspaceId - The workspace UUID
|
||||||
|
* @param workspaceName - Human-readable workspace name
|
||||||
|
* @param workspaceSlug - URL-safe workspace identifier for the room alias
|
||||||
|
* @returns The Matrix room ID, or null if Matrix is not configured
|
||||||
|
*/
|
||||||
|
async provisionRoom(
|
||||||
|
workspaceId: string,
|
||||||
|
workspaceName: string,
|
||||||
|
workspaceSlug: string
|
||||||
|
): Promise<string | null> {
|
||||||
|
if (!this.matrixService?.isConnected()) {
|
||||||
|
this.logger.warn("Matrix is not configured or not connected; skipping room provisioning");
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const client = this.getMatrixClient();
|
||||||
|
if (!client) {
|
||||||
|
this.logger.warn("Matrix client is not available; skipping room provisioning");
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const roomOptions: RoomCreateOptions = {
|
||||||
|
name: `Mosaic: ${workspaceName}`,
|
||||||
|
room_alias_name: `mosaic-${workspaceSlug}`,
|
||||||
|
topic: `Mosaic workspace: ${workspaceName}`,
|
||||||
|
preset: "private_chat",
|
||||||
|
visibility: "private",
|
||||||
|
};
|
||||||
|
|
||||||
|
this.logger.log(
|
||||||
|
`Provisioning Matrix room for workspace "${workspaceName}" (${workspaceId})...`
|
||||||
|
);
|
||||||
|
|
||||||
|
const roomId = await client.createRoom(roomOptions);
|
||||||
|
|
||||||
|
// Store the room mapping
|
||||||
|
try {
|
||||||
|
await this.prisma.workspace.update({
|
||||||
|
where: { id: workspaceId },
|
||||||
|
data: { matrixRoomId: roomId },
|
||||||
|
});
|
||||||
|
} catch (dbError: unknown) {
|
||||||
|
this.logger.error(
|
||||||
|
`Failed to store room mapping for workspace ${workspaceId}, room ${roomId} may be orphaned: ${dbError instanceof Error ? dbError.message : "unknown"}`
|
||||||
|
);
|
||||||
|
throw dbError;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.log(`Matrix room ${roomId} provisioned and linked to workspace ${workspaceId}`);
|
||||||
|
|
||||||
|
return roomId;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Look up the Matrix room ID mapped to a workspace.
|
||||||
|
*
|
||||||
|
* @param workspaceId - The workspace UUID
|
||||||
|
* @returns The Matrix room ID, or null if no room is mapped
|
||||||
|
*/
|
||||||
|
async getRoomForWorkspace(workspaceId: string): Promise<string | null> {
|
||||||
|
const workspace = await this.prisma.workspace.findUnique({
|
||||||
|
where: { id: workspaceId },
|
||||||
|
select: { matrixRoomId: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!workspace) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return workspace.matrixRoomId ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Reverse lookup: find the workspace that owns a given Matrix room.
|
||||||
|
*
|
||||||
|
* @param roomId - The Matrix room ID (e.g. "!abc:example.com")
|
||||||
|
* @returns The workspace ID, or null if the room is not mapped to any workspace
|
||||||
|
*/
|
||||||
|
async getWorkspaceForRoom(roomId: string): Promise<string | null> {
|
||||||
|
const workspace = await this.prisma.workspace.findFirst({
|
||||||
|
where: { matrixRoomId: roomId },
|
||||||
|
select: { id: true },
|
||||||
|
});
|
||||||
|
|
||||||
|
return workspace?.id ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Manually link an existing Matrix room to a workspace.
|
||||||
|
*
|
||||||
|
* @param workspaceId - The workspace UUID
|
||||||
|
* @param roomId - The Matrix room ID to link
|
||||||
|
*/
|
||||||
|
async linkWorkspaceToRoom(workspaceId: string, roomId: string): Promise<void> {
|
||||||
|
await this.prisma.workspace.update({
|
||||||
|
where: { id: workspaceId },
|
||||||
|
data: { matrixRoomId: roomId },
|
||||||
|
});
|
||||||
|
|
||||||
|
this.logger.log(`Linked workspace ${workspaceId} to Matrix room ${roomId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove the Matrix room mapping from a workspace.
|
||||||
|
*
|
||||||
|
* @param workspaceId - The workspace UUID
|
||||||
|
*/
|
||||||
|
async unlinkWorkspace(workspaceId: string): Promise<void> {
|
||||||
|
await this.prisma.workspace.update({
|
||||||
|
where: { id: workspaceId },
|
||||||
|
data: { matrixRoomId: null },
|
||||||
|
});
|
||||||
|
|
||||||
|
this.logger.log(`Unlinked Matrix room from workspace ${workspaceId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Access the underlying MatrixClient from the MatrixService
|
||||||
|
* via the public getClient() accessor.
|
||||||
|
*/
|
||||||
|
private getMatrixClient(): MatrixClient | null {
|
||||||
|
if (!this.matrixService) return null;
|
||||||
|
return this.matrixService.getClient();
|
||||||
|
}
|
||||||
|
}
|
||||||
408
apps/api/src/bridge/matrix/matrix-streaming.service.spec.ts
Normal file
408
apps/api/src/bridge/matrix/matrix-streaming.service.spec.ts
Normal file
@@ -0,0 +1,408 @@
|
|||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { MatrixStreamingService } from "./matrix-streaming.service";
|
||||||
|
import { MatrixService } from "./matrix.service";
|
||||||
|
import { vi, describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||||
|
import type { StreamResponseOptions } from "./matrix-streaming.service";
|
||||||
|
|
||||||
|
// Mock matrix-bot-sdk to prevent native module loading
// NOTE: vi.mock is hoisted by vitest; the factory must be self-contained.
vi.mock("matrix-bot-sdk", () => {
  return {
    // Stand-in class; tests never open a real homeserver connection.
    MatrixClient: class MockMatrixClient {},
    SimpleFsStorageProvider: class MockStorageProvider {
      constructor(_filename: string) {
        // No-op for testing
      }
    },
    AutojoinRoomsMixin: {
      // Spy so wiring can be asserted without side effects.
      setupOnClient: vi.fn(),
    },
  };
});
|
||||||
|
|
||||||
|
// Mock MatrixClient
// Resolved event IDs mirror what the real SDK returns from send calls.
const mockClient = {
  sendMessage: vi.fn().mockResolvedValue("$initial-event-id"),
  sendEvent: vi.fn().mockResolvedValue("$edit-event-id"),
  setTyping: vi.fn().mockResolvedValue(undefined),
};

// Mock MatrixService
// Defaults to a connected service exposing the mock client above.
const mockMatrixService = {
  isConnected: vi.fn().mockReturnValue(true),
  getClient: vi.fn().mockReturnValue(mockClient),
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper: create an async iterable from an array of strings with optional delays
|
||||||
|
*/
|
||||||
|
async function* createTokenStream(
|
||||||
|
tokens: string[],
|
||||||
|
delayMs = 0
|
||||||
|
): AsyncGenerator<string, void, undefined> {
|
||||||
|
for (const token of tokens) {
|
||||||
|
if (delayMs > 0) {
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, delayMs));
|
||||||
|
}
|
||||||
|
yield token;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper: create a token stream that throws an error mid-stream
|
||||||
|
*/
|
||||||
|
async function* createErrorStream(
|
||||||
|
tokens: string[],
|
||||||
|
errorAfter: number
|
||||||
|
): AsyncGenerator<string, void, undefined> {
|
||||||
|
let count = 0;
|
||||||
|
for (const token of tokens) {
|
||||||
|
if (count >= errorAfter) {
|
||||||
|
throw new Error("LLM provider connection lost");
|
||||||
|
}
|
||||||
|
yield token;
|
||||||
|
count++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("MatrixStreamingService", () => {
|
||||||
|
let service: MatrixStreamingService;

beforeEach(async () => {
  // shouldAdvanceTime lets real async work progress while fake timers are active.
  vi.useFakeTimers({ shouldAdvanceTime: true });

  const module: TestingModule = await Test.createTestingModule({
    providers: [
      MatrixStreamingService,
      {
        provide: MatrixService,
        useValue: mockMatrixService,
      },
    ],
  }).compile();

  service = module.get<MatrixStreamingService>(MatrixStreamingService);

  // Clear all mocks
  vi.clearAllMocks();

  // Re-apply default mock returns after clearing
  mockMatrixService.isConnected.mockReturnValue(true);
  mockMatrixService.getClient.mockReturnValue(mockClient);
  mockClient.sendMessage.mockResolvedValue("$initial-event-id");
  mockClient.sendEvent.mockResolvedValue("$edit-event-id");
  mockClient.setTyping.mockResolvedValue(undefined);
});

afterEach(() => {
  vi.useRealTimers();
});
|
||||||
|
|
||||||
|
describe("editMessage", () => {
|
||||||
|
it("should send a m.replace event to edit an existing message", async () => {
|
||||||
|
await service.editMessage("!room:example.com", "$original-event-id", "Updated content");
|
||||||
|
|
||||||
|
expect(mockClient.sendEvent).toHaveBeenCalledWith("!room:example.com", "m.room.message", {
|
||||||
|
"m.new_content": {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "Updated content",
|
||||||
|
},
|
||||||
|
"m.relates_to": {
|
||||||
|
rel_type: "m.replace",
|
||||||
|
event_id: "$original-event-id",
|
||||||
|
},
|
||||||
|
// Fallback for clients that don't support edits
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "* Updated content",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw error when client is not connected", async () => {
|
||||||
|
mockMatrixService.isConnected.mockReturnValue(false);
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
service.editMessage("!room:example.com", "$event-id", "content")
|
||||||
|
).rejects.toThrow("Matrix client is not connected");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw error when client is null", async () => {
|
||||||
|
mockMatrixService.getClient.mockReturnValue(null);
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
service.editMessage("!room:example.com", "$event-id", "content")
|
||||||
|
).rejects.toThrow("Matrix client is not connected");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("setTypingIndicator", () => {
|
||||||
|
it("should call client.setTyping with true and timeout", async () => {
|
||||||
|
await service.setTypingIndicator("!room:example.com", true);
|
||||||
|
|
||||||
|
expect(mockClient.setTyping).toHaveBeenCalledWith("!room:example.com", true, 30000);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should call client.setTyping with false to clear indicator", async () => {
|
||||||
|
await service.setTypingIndicator("!room:example.com", false);
|
||||||
|
|
||||||
|
expect(mockClient.setTyping).toHaveBeenCalledWith("!room:example.com", false, undefined);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw error when client is not connected", async () => {
|
||||||
|
mockMatrixService.isConnected.mockReturnValue(false);
|
||||||
|
|
||||||
|
await expect(service.setTypingIndicator("!room:example.com", true)).rejects.toThrow(
|
||||||
|
"Matrix client is not connected"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("sendStreamingMessage", () => {
|
||||||
|
it("should send an initial message and return the event ID", async () => {
|
||||||
|
const eventId = await service.sendStreamingMessage("!room:example.com", "Thinking...");
|
||||||
|
|
||||||
|
expect(eventId).toBe("$initial-event-id");
|
||||||
|
expect(mockClient.sendMessage).toHaveBeenCalledWith("!room:example.com", {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "Thinking...",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should send a thread message when threadId is provided", async () => {
|
||||||
|
const eventId = await service.sendStreamingMessage(
|
||||||
|
"!room:example.com",
|
||||||
|
"Thinking...",
|
||||||
|
"$thread-root-id"
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(eventId).toBe("$initial-event-id");
|
||||||
|
expect(mockClient.sendMessage).toHaveBeenCalledWith("!room:example.com", {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "Thinking...",
|
||||||
|
"m.relates_to": {
|
||||||
|
rel_type: "m.thread",
|
||||||
|
event_id: "$thread-root-id",
|
||||||
|
is_falling_back: true,
|
||||||
|
"m.in_reply_to": {
|
||||||
|
event_id: "$thread-root-id",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw error when client is not connected", async () => {
|
||||||
|
mockMatrixService.isConnected.mockReturnValue(false);
|
||||||
|
|
||||||
|
await expect(service.sendStreamingMessage("!room:example.com", "Test")).rejects.toThrow(
|
||||||
|
"Matrix client is not connected"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Timing-sensitive suite: each test switches back to real timers because
// streamResponse's rate limiting is driven by Date.now() and setTimeout.
describe("streamResponse", () => {
  it("should send initial 'Thinking...' message and start typing indicator", async () => {
    vi.useRealTimers();

    const tokens = ["Hello", " world"];
    const stream = createTokenStream(tokens);

    await service.streamResponse("!room:example.com", stream);

    // Should have sent initial message
    expect(mockClient.sendMessage).toHaveBeenCalledWith(
      "!room:example.com",
      expect.objectContaining({
        msgtype: "m.text",
        body: "Thinking...",
      })
    );

    // Should have started typing indicator
    expect(mockClient.setTyping).toHaveBeenCalledWith("!room:example.com", true, 30000);
  });

  it("should use custom initial message when provided", async () => {
    vi.useRealTimers();

    const tokens = ["Hi"];
    const stream = createTokenStream(tokens);

    const options: StreamResponseOptions = { initialMessage: "Processing..." };
    await service.streamResponse("!room:example.com", stream, options);

    expect(mockClient.sendMessage).toHaveBeenCalledWith(
      "!room:example.com",
      expect.objectContaining({
        body: "Processing...",
      })
    );
  });

  it("should edit message with accumulated tokens on completion", async () => {
    vi.useRealTimers();

    const tokens = ["Hello", " ", "world", "!"];
    const stream = createTokenStream(tokens);

    await service.streamResponse("!room:example.com", stream);

    // The final edit should contain the full accumulated text
    const sendEventCalls = mockClient.sendEvent.mock.calls;
    const lastEditCall = sendEventCalls[sendEventCalls.length - 1];

    expect(lastEditCall).toBeDefined();
    // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
    expect(lastEditCall[2]["m.new_content"].body).toBe("Hello world!");
  });

  it("should clear typing indicator on completion", async () => {
    vi.useRealTimers();

    const tokens = ["Done"];
    const stream = createTokenStream(tokens);

    await service.streamResponse("!room:example.com", stream);

    // Last setTyping call should be false
    const typingCalls = mockClient.setTyping.mock.calls;
    const lastTypingCall = typingCalls[typingCalls.length - 1];

    expect(lastTypingCall).toEqual(["!room:example.com", false, undefined]);
  });

  it("should rate-limit edits to at most one every 500ms", async () => {
    vi.useRealTimers();

    // Send tokens with small delays - all within one 500ms window
    const tokens = ["a", "b", "c", "d", "e"];
    const stream = createTokenStream(tokens, 50); // 50ms between tokens = 250ms total

    await service.streamResponse("!room:example.com", stream);

    // With 250ms total streaming time (5 tokens * 50ms), all tokens arrive
    // within one 500ms window. We expect at most 1 intermediate edit + 1 final edit,
    // or just the final edit. The key point is that there should NOT be 5 separate edits.
    const editCalls = mockClient.sendEvent.mock.calls.filter(
      (call) => call[1] === "m.room.message"
    );

    // Should have fewer edits than tokens (rate limiting in effect)
    expect(editCalls.length).toBeLessThanOrEqual(2);
    // Should have at least the final edit
    expect(editCalls.length).toBeGreaterThanOrEqual(1);
  });

  it("should handle errors gracefully and edit message with error notice", async () => {
    vi.useRealTimers();

    const stream = createErrorStream(["Hello", " ", "world"], 2);

    await service.streamResponse("!room:example.com", stream);

    // Should edit message with error content
    const sendEventCalls = mockClient.sendEvent.mock.calls;
    const lastEditCall = sendEventCalls[sendEventCalls.length - 1];

    expect(lastEditCall).toBeDefined();
    // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
    const finalBody = lastEditCall[2]["m.new_content"].body as string;
    expect(finalBody).toContain("error");

    // Should clear typing on error
    const typingCalls = mockClient.setTyping.mock.calls;
    const lastTypingCall = typingCalls[typingCalls.length - 1];
    expect(lastTypingCall).toEqual(["!room:example.com", false, undefined]);
  });

  it("should include token usage in final message when provided", async () => {
    vi.useRealTimers();

    const tokens = ["Hello"];
    const stream = createTokenStream(tokens);

    const options: StreamResponseOptions = {
      showTokenUsage: true,
      tokenUsage: { prompt: 10, completion: 5, total: 15 },
    };

    await service.streamResponse("!room:example.com", stream, options);

    const sendEventCalls = mockClient.sendEvent.mock.calls;
    const lastEditCall = sendEventCalls[sendEventCalls.length - 1];

    expect(lastEditCall).toBeDefined();
    // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
    const finalBody = lastEditCall[2]["m.new_content"].body as string;
    expect(finalBody).toContain("15");
  });

  it("should throw error when client is not connected", async () => {
    mockMatrixService.isConnected.mockReturnValue(false);

    const stream = createTokenStream(["test"]);

    await expect(service.streamResponse("!room:example.com", stream)).rejects.toThrow(
      "Matrix client is not connected"
    );
  });

  it("should handle empty token stream", async () => {
    vi.useRealTimers();

    const stream = createTokenStream([]);

    await service.streamResponse("!room:example.com", stream);

    // Should still send initial message
    expect(mockClient.sendMessage).toHaveBeenCalled();

    // Should edit with empty/no-content message
    const sendEventCalls = mockClient.sendEvent.mock.calls;
    expect(sendEventCalls.length).toBeGreaterThanOrEqual(1);

    // Should clear typing
    const typingCalls = mockClient.setTyping.mock.calls;
    const lastTypingCall = typingCalls[typingCalls.length - 1];
    expect(lastTypingCall).toEqual(["!room:example.com", false, undefined]);
  });

  it("should support thread context in streamResponse", async () => {
    vi.useRealTimers();

    const tokens = ["Reply"];
    const stream = createTokenStream(tokens);

    const options: StreamResponseOptions = { threadId: "$thread-root" };
    await service.streamResponse("!room:example.com", stream, options);

    // Initial message should include thread relation
    expect(mockClient.sendMessage).toHaveBeenCalledWith(
      "!room:example.com",
      expect.objectContaining({
        "m.relates_to": expect.objectContaining({
          rel_type: "m.thread",
          event_id: "$thread-root",
        }),
      })
    );
  });

  it("should perform multiple edits for long-running streams", async () => {
    vi.useRealTimers();

    // Create tokens with 200ms delays - total ~2000ms, should get multiple edit windows
    const tokens = Array.from({ length: 10 }, (_, i) => `token${String(i)} `);
    const stream = createTokenStream(tokens, 200);

    await service.streamResponse("!room:example.com", stream);

    // With 10 tokens at 200ms each = 2000ms total, at 500ms intervals
    // we expect roughly 3-4 intermediate edits + 1 final = 4-5 total
    const editCalls = mockClient.sendEvent.mock.calls.filter(
      (call) => call[1] === "m.room.message"
    );

    // Should have multiple edits (at least 2) but far fewer than 10
    expect(editCalls.length).toBeGreaterThanOrEqual(2);
    expect(editCalls.length).toBeLessThanOrEqual(8);
  });
});
|
||||||
|
});
|
||||||
248
apps/api/src/bridge/matrix/matrix-streaming.service.ts
Normal file
248
apps/api/src/bridge/matrix/matrix-streaming.service.ts
Normal file
@@ -0,0 +1,248 @@
|
|||||||
|
import { Injectable, Logger } from "@nestjs/common";
|
||||||
|
import type { MatrixClient } from "matrix-bot-sdk";
|
||||||
|
import { MatrixService } from "./matrix.service";
|
||||||
|
|
||||||
|
/**
 * Options for the streamResponse method
 */
export interface StreamResponseOptions {
  /** Custom initial message (defaults to "Thinking...") */
  initialMessage?: string;
  /** Thread root event ID for threaded responses */
  threadId?: string;
  /** Whether to show token usage in the final message */
  showTokenUsage?: boolean;
  /** Token usage stats to display in the final message */
  tokenUsage?: { prompt: number; completion: number; total: number };
}

/**
 * Matrix message content for m.room.message events.
 * The optional fields cover the two relation shapes this service emits:
 * m.replace (edits, via "m.new_content") and m.thread (threaded replies).
 */
interface MatrixMessageContent {
  msgtype: string;
  body: string;
  // Replacement payload for edit events (rel_type "m.replace").
  "m.new_content"?: {
    msgtype: string;
    body: string;
  };
  "m.relates_to"?: {
    rel_type: string;
    event_id: string;
    is_falling_back?: boolean;
    "m.in_reply_to"?: {
      event_id: string;
    };
  };
}

/** Minimum interval between message edits (milliseconds) */
const EDIT_INTERVAL_MS = 500;

/** Typing indicator timeout (milliseconds) */
const TYPING_TIMEOUT_MS = 30000;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Matrix Streaming Service
|
||||||
|
*
|
||||||
|
* Provides streaming AI response capabilities for Matrix rooms using
|
||||||
|
* incremental message edits. Tokens from an LLM are buffered and the
|
||||||
|
* response message is edited at rate-limited intervals, providing a
|
||||||
|
* smooth streaming experience without excessive API calls.
|
||||||
|
*
|
||||||
|
* Key features:
|
||||||
|
* - Rate-limited edits (max every 500ms)
|
||||||
|
* - Typing indicator management during generation
|
||||||
|
* - Graceful error handling with user-visible error notices
|
||||||
|
* - Thread support for contextual responses
|
||||||
|
* - LLM-agnostic design via AsyncIterable<string> token stream
|
||||||
|
*/
|
||||||
|
@Injectable()
|
||||||
|
export class MatrixStreamingService {
|
||||||
|
private readonly logger = new Logger(MatrixStreamingService.name);
|
||||||
|
|
||||||
|
constructor(private readonly matrixService: MatrixService) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Edit an existing Matrix message using the m.replace relation.
|
||||||
|
*
|
||||||
|
* Sends a new event that replaces the content of an existing message.
|
||||||
|
* Includes fallback content for clients that don't support edits.
|
||||||
|
*
|
||||||
|
* @param roomId - The Matrix room ID
|
||||||
|
* @param eventId - The original event ID to replace
|
||||||
|
* @param newContent - The updated message text
|
||||||
|
*/
|
||||||
|
async editMessage(roomId: string, eventId: string, newContent: string): Promise<void> {
|
||||||
|
const client = this.getClientOrThrow();
|
||||||
|
|
||||||
|
const editContent: MatrixMessageContent = {
|
||||||
|
"m.new_content": {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: newContent,
|
||||||
|
},
|
||||||
|
"m.relates_to": {
|
||||||
|
rel_type: "m.replace",
|
||||||
|
event_id: eventId,
|
||||||
|
},
|
||||||
|
// Fallback for clients that don't support edits
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: `* ${newContent}`,
|
||||||
|
};
|
||||||
|
|
||||||
|
await client.sendEvent(roomId, "m.room.message", editContent);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the typing indicator for the bot in a room.
|
||||||
|
*
|
||||||
|
* @param roomId - The Matrix room ID
|
||||||
|
* @param typing - Whether the bot is typing
|
||||||
|
*/
|
||||||
|
async setTypingIndicator(roomId: string, typing: boolean): Promise<void> {
|
||||||
|
const client = this.getClientOrThrow();
|
||||||
|
|
||||||
|
await client.setTyping(roomId, typing, typing ? TYPING_TIMEOUT_MS : undefined);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Send an initial message for streaming, optionally in a thread.
|
||||||
|
*
|
||||||
|
* Returns the event ID of the sent message, which can be used for
|
||||||
|
* subsequent edits via editMessage.
|
||||||
|
*
|
||||||
|
* @param roomId - The Matrix room ID
|
||||||
|
* @param content - The initial message content
|
||||||
|
* @param threadId - Optional thread root event ID
|
||||||
|
* @returns The event ID of the sent message
|
||||||
|
*/
|
||||||
|
async sendStreamingMessage(roomId: string, content: string, threadId?: string): Promise<string> {
|
||||||
|
const client = this.getClientOrThrow();
|
||||||
|
|
||||||
|
const messageContent: MatrixMessageContent = {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: content,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (threadId) {
|
||||||
|
messageContent["m.relates_to"] = {
|
||||||
|
rel_type: "m.thread",
|
||||||
|
event_id: threadId,
|
||||||
|
is_falling_back: true,
|
||||||
|
"m.in_reply_to": {
|
||||||
|
event_id: threadId,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const eventId: string = await client.sendMessage(roomId, messageContent);
|
||||||
|
return eventId;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stream an AI response to a Matrix room using incremental message edits.
|
||||||
|
*
|
||||||
|
* This is the main streaming method. It:
|
||||||
|
* 1. Sends an initial "Thinking..." message
|
||||||
|
* 2. Starts the typing indicator
|
||||||
|
* 3. Buffers incoming tokens from the async iterable
|
||||||
|
* 4. Edits the message every 500ms with accumulated text
|
||||||
|
* 5. On completion: sends a final clean edit, clears typing
|
||||||
|
* 6. On error: edits message with error notice, clears typing
|
||||||
|
*
|
||||||
|
* @param roomId - The Matrix room ID
|
||||||
|
* @param tokenStream - AsyncIterable that yields string tokens
|
||||||
|
* @param options - Optional configuration for the stream
|
||||||
|
*/
|
||||||
|
async streamResponse(
|
||||||
|
roomId: string,
|
||||||
|
tokenStream: AsyncIterable<string>,
|
||||||
|
options?: StreamResponseOptions
|
||||||
|
): Promise<void> {
|
||||||
|
// Validate connection before starting
|
||||||
|
this.getClientOrThrow();
|
||||||
|
|
||||||
|
const initialMessage = options?.initialMessage ?? "Thinking...";
|
||||||
|
const threadId = options?.threadId;
|
||||||
|
|
||||||
|
// Step 1: Send initial message
|
||||||
|
const eventId = await this.sendStreamingMessage(roomId, initialMessage, threadId);
|
||||||
|
|
||||||
|
// Step 2: Start typing indicator
|
||||||
|
await this.setTypingIndicator(roomId, true);
|
||||||
|
|
||||||
|
// Step 3: Buffer and stream tokens
|
||||||
|
let accumulatedText = "";
|
||||||
|
let lastEditTime = 0;
|
||||||
|
let hasError = false;
|
||||||
|
|
||||||
|
try {
|
||||||
|
for await (const token of tokenStream) {
|
||||||
|
accumulatedText += token;
|
||||||
|
|
||||||
|
const now = Date.now();
|
||||||
|
const elapsed = now - lastEditTime;
|
||||||
|
|
||||||
|
if (elapsed >= EDIT_INTERVAL_MS && accumulatedText.length > 0) {
|
||||||
|
await this.editMessage(roomId, eventId, accumulatedText);
|
||||||
|
lastEditTime = now;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error: unknown) {
|
||||||
|
hasError = true;
|
||||||
|
const errorMessage = error instanceof Error ? error.message : "Unknown error occurred";
|
||||||
|
|
||||||
|
this.logger.error(`Stream error in room ${roomId}: ${errorMessage}`);
|
||||||
|
|
||||||
|
// Edit message to show error
|
||||||
|
try {
|
||||||
|
const errorContent = accumulatedText
|
||||||
|
? `${accumulatedText}\n\n[Streaming error: ${errorMessage}]`
|
||||||
|
: `[Streaming error: ${errorMessage}]`;
|
||||||
|
|
||||||
|
await this.editMessage(roomId, eventId, errorContent);
|
||||||
|
} catch (editError: unknown) {
|
||||||
|
this.logger.warn(
|
||||||
|
`Failed to edit error message in ${roomId}: ${editError instanceof Error ? editError.message : "unknown"}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
// Step 4: Clear typing indicator
|
||||||
|
try {
|
||||||
|
await this.setTypingIndicator(roomId, false);
|
||||||
|
} catch (typingError: unknown) {
|
||||||
|
this.logger.warn(
|
||||||
|
`Failed to clear typing indicator in ${roomId}: ${typingError instanceof Error ? typingError.message : "unknown"}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 5: Final edit with clean output (if no error)
|
||||||
|
if (!hasError) {
|
||||||
|
let finalContent = accumulatedText || "(No response generated)";
|
||||||
|
|
||||||
|
if (options?.showTokenUsage && options.tokenUsage) {
|
||||||
|
const { prompt, completion, total } = options.tokenUsage;
|
||||||
|
finalContent += `\n\n---\nTokens: ${String(total)} (prompt: ${String(prompt)}, completion: ${String(completion)})`;
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.editMessage(roomId, eventId, finalContent);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the Matrix client from the parent MatrixService, or throw if not connected.
|
||||||
|
*/
|
||||||
|
/**
 * Get the Matrix client from the parent MatrixService, or throw if not connected.
 *
 * @returns the live MatrixClient instance held by the parent service
 * @throws Error when the service reports it is disconnected, or when it is
 *         connected but has no client instance — both cases surface the same
 *         "Matrix client is not connected" message to callers
 */
private getClientOrThrow(): MatrixClient {
  // Collapse the two failure modes (disconnected / missing client) into a
  // single falsy value so one guard covers both.
  const client = this.matrixService.isConnected() ? this.matrixService.getClient() : null;

  if (!client) {
    throw new Error("Matrix client is not connected");
  }

  return client;
}
|
||||||
|
}
|
||||||
979
apps/api/src/bridge/matrix/matrix.service.spec.ts
Normal file
979
apps/api/src/bridge/matrix/matrix.service.spec.ts
Normal file
@@ -0,0 +1,979 @@
|
|||||||
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
|
import { MatrixService } from "./matrix.service";
|
||||||
|
import { MatrixRoomService } from "./matrix-room.service";
|
||||||
|
import { StitcherService } from "../../stitcher/stitcher.service";
|
||||||
|
import { CommandParserService } from "../parser/command-parser.service";
|
||||||
|
import { vi, describe, it, expect, beforeEach } from "vitest";
|
||||||
|
import type { ChatMessage } from "../interfaces";
|
||||||
|
|
||||||
|
// Mock matrix-bot-sdk
|
||||||
|
const mockMessageCallbacks: Array<(roomId: string, event: Record<string, unknown>) => void> = [];
|
||||||
|
const mockEventCallbacks: Array<(roomId: string, event: Record<string, unknown>) => void> = [];
|
||||||
|
|
||||||
|
const mockClient = {
|
||||||
|
start: vi.fn().mockResolvedValue(undefined),
|
||||||
|
stop: vi.fn(),
|
||||||
|
on: vi
|
||||||
|
.fn()
|
||||||
|
.mockImplementation(
|
||||||
|
(event: string, callback: (roomId: string, evt: Record<string, unknown>) => void) => {
|
||||||
|
if (event === "room.message") {
|
||||||
|
mockMessageCallbacks.push(callback);
|
||||||
|
}
|
||||||
|
if (event === "room.event") {
|
||||||
|
mockEventCallbacks.push(callback);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
),
|
||||||
|
sendMessage: vi.fn().mockResolvedValue("$event-id-123"),
|
||||||
|
sendEvent: vi.fn().mockResolvedValue("$event-id-456"),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Replace the real matrix-bot-sdk module with lightweight test doubles.
// NOTE(review): vitest module mocks are typically hoisted above imports, and
// this factory closes over mockClient declared earlier in the file — confirm
// the factory only runs after module evaluation so mockClient is initialized.
vi.mock("matrix-bot-sdk", () => {
  return {
    // Class whose instance members delegate to the shared mockClient spies,
    // so assertions can be made on mockClient regardless of which instance
    // the service constructs.
    MatrixClient: class MockMatrixClient {
      start = mockClient.start;
      stop = mockClient.stop;
      on = mockClient.on;
      sendMessage = mockClient.sendMessage;
      sendEvent = mockClient.sendEvent;
    },
    // Stand-in for the SDK storage provider; avoids touching the filesystem.
    SimpleFsStorageProvider: class MockStorageProvider {
      constructor(_filename: string) {
        // No-op for testing
      }
    },
    // Auto-join mixin stubbed out so connecting produces no room-join side effects.
    AutojoinRoomsMixin: {
      setupOnClient: vi.fn(),
    },
  };
});
|
||||||
|
|
||||||
|
describe("MatrixService", () => {
|
||||||
|
let service: MatrixService;
|
||||||
|
let stitcherService: StitcherService;
|
||||||
|
let commandParser: CommandParserService;
|
||||||
|
let matrixRoomService: MatrixRoomService;
|
||||||
|
|
||||||
|
const mockStitcherService = {
|
||||||
|
dispatchJob: vi.fn().mockResolvedValue({
|
||||||
|
jobId: "test-job-id",
|
||||||
|
queueName: "main",
|
||||||
|
status: "PENDING",
|
||||||
|
}),
|
||||||
|
trackJobEvent: vi.fn().mockResolvedValue(undefined),
|
||||||
|
};
|
||||||
|
|
||||||
|
const mockMatrixRoomService = {
|
||||||
|
getWorkspaceForRoom: vi.fn().mockResolvedValue(null),
|
||||||
|
getRoomForWorkspace: vi.fn().mockResolvedValue(null),
|
||||||
|
provisionRoom: vi.fn().mockResolvedValue(null),
|
||||||
|
linkWorkspaceToRoom: vi.fn().mockResolvedValue(undefined),
|
||||||
|
unlinkWorkspace: vi.fn().mockResolvedValue(undefined),
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
// Set environment variables for testing
|
||||||
|
process.env.MATRIX_HOMESERVER_URL = "https://matrix.example.com";
|
||||||
|
process.env.MATRIX_ACCESS_TOKEN = "test-access-token";
|
||||||
|
process.env.MATRIX_BOT_USER_ID = "@mosaic-bot:example.com";
|
||||||
|
process.env.MATRIX_CONTROL_ROOM_ID = "!test-room:example.com";
|
||||||
|
process.env.MATRIX_WORKSPACE_ID = "test-workspace-id";
|
||||||
|
|
||||||
|
// Clear callbacks
|
||||||
|
mockMessageCallbacks.length = 0;
|
||||||
|
mockEventCallbacks.length = 0;
|
||||||
|
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
MatrixService,
|
||||||
|
CommandParserService,
|
||||||
|
{
|
||||||
|
provide: StitcherService,
|
||||||
|
useValue: mockStitcherService,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
provide: MatrixRoomService,
|
||||||
|
useValue: mockMatrixRoomService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
service = module.get<MatrixService>(MatrixService);
|
||||||
|
stitcherService = module.get<StitcherService>(StitcherService);
|
||||||
|
commandParser = module.get<CommandParserService>(CommandParserService);
|
||||||
|
matrixRoomService = module.get(MatrixRoomService) as MatrixRoomService;
|
||||||
|
|
||||||
|
// Clear all mocks
|
||||||
|
vi.clearAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("Connection Management", () => {
|
||||||
|
it("should connect to Matrix", async () => {
|
||||||
|
await service.connect();
|
||||||
|
|
||||||
|
expect(mockClient.start).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should disconnect from Matrix", async () => {
|
||||||
|
await service.connect();
|
||||||
|
await service.disconnect();
|
||||||
|
|
||||||
|
expect(mockClient.stop).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should check connection status", async () => {
|
||||||
|
expect(service.isConnected()).toBe(false);
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
expect(service.isConnected()).toBe(true);
|
||||||
|
|
||||||
|
await service.disconnect();
|
||||||
|
expect(service.isConnected()).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("Message Handling", () => {
|
||||||
|
it("should send a message to a room", async () => {
|
||||||
|
await service.connect();
|
||||||
|
await service.sendMessage("!test-room:example.com", "Hello, Matrix!");
|
||||||
|
|
||||||
|
expect(mockClient.sendMessage).toHaveBeenCalledWith("!test-room:example.com", {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "Hello, Matrix!",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw error if client is not connected", async () => {
|
||||||
|
await expect(service.sendMessage("!room:example.com", "Test")).rejects.toThrow(
|
||||||
|
"Matrix client is not connected"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("Thread Management", () => {
|
||||||
|
it("should create a thread by sending an initial message", async () => {
|
||||||
|
await service.connect();
|
||||||
|
const threadId = await service.createThread({
|
||||||
|
channelId: "!test-room:example.com",
|
||||||
|
name: "Job #42",
|
||||||
|
message: "Starting job...",
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(threadId).toBe("$event-id-123");
|
||||||
|
expect(mockClient.sendMessage).toHaveBeenCalledWith("!test-room:example.com", {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "[Job #42] Starting job...",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should send a message to a thread with m.thread relation", async () => {
|
||||||
|
await service.connect();
|
||||||
|
await service.sendThreadMessage({
|
||||||
|
threadId: "$root-event-id",
|
||||||
|
channelId: "!test-room:example.com",
|
||||||
|
content: "Step completed",
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(mockClient.sendMessage).toHaveBeenCalledWith("!test-room:example.com", {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "Step completed",
|
||||||
|
"m.relates_to": {
|
||||||
|
rel_type: "m.thread",
|
||||||
|
event_id: "$root-event-id",
|
||||||
|
is_falling_back: true,
|
||||||
|
"m.in_reply_to": {
|
||||||
|
event_id: "$root-event-id",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should fall back to controlRoomId when channelId is empty", async () => {
|
||||||
|
await service.connect();
|
||||||
|
await service.sendThreadMessage({
|
||||||
|
threadId: "$root-event-id",
|
||||||
|
channelId: "",
|
||||||
|
content: "Fallback message",
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(mockClient.sendMessage).toHaveBeenCalledWith("!test-room:example.com", {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "Fallback message",
|
||||||
|
"m.relates_to": {
|
||||||
|
rel_type: "m.thread",
|
||||||
|
event_id: "$root-event-id",
|
||||||
|
is_falling_back: true,
|
||||||
|
"m.in_reply_to": {
|
||||||
|
event_id: "$root-event-id",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw error when creating thread without connection", async () => {
|
||||||
|
await expect(
|
||||||
|
service.createThread({
|
||||||
|
channelId: "!room:example.com",
|
||||||
|
name: "Test",
|
||||||
|
message: "Test",
|
||||||
|
})
|
||||||
|
).rejects.toThrow("Matrix client is not connected");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw error when sending thread message without connection", async () => {
|
||||||
|
await expect(
|
||||||
|
service.sendThreadMessage({
|
||||||
|
threadId: "$event-id",
|
||||||
|
channelId: "!room:example.com",
|
||||||
|
content: "Test",
|
||||||
|
})
|
||||||
|
).rejects.toThrow("Matrix client is not connected");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("Command Parsing with shared CommandParserService", () => {
|
||||||
|
it("should parse @mosaic fix #42 via shared parser", () => {
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-1",
|
||||||
|
channelId: "!room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "@mosaic fix #42",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const command = service.parseCommand(message);
|
||||||
|
|
||||||
|
expect(command).not.toBeNull();
|
||||||
|
expect(command?.command).toBe("fix");
|
||||||
|
expect(command?.args).toContain("#42");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should parse !mosaic fix #42 by normalizing to @mosaic for the shared parser", () => {
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-1",
|
||||||
|
channelId: "!room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "!mosaic fix #42",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const command = service.parseCommand(message);
|
||||||
|
|
||||||
|
expect(command).not.toBeNull();
|
||||||
|
expect(command?.command).toBe("fix");
|
||||||
|
expect(command?.args).toContain("#42");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should parse @mosaic status command via shared parser", () => {
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-2",
|
||||||
|
channelId: "!room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "@mosaic status job-123",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const command = service.parseCommand(message);
|
||||||
|
|
||||||
|
expect(command).not.toBeNull();
|
||||||
|
expect(command?.command).toBe("status");
|
||||||
|
expect(command?.args).toContain("job-123");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should parse @mosaic cancel command via shared parser", () => {
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-3",
|
||||||
|
channelId: "!room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "@mosaic cancel job-456",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const command = service.parseCommand(message);
|
||||||
|
|
||||||
|
expect(command).not.toBeNull();
|
||||||
|
expect(command?.command).toBe("cancel");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should parse @mosaic help command via shared parser", () => {
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-6",
|
||||||
|
channelId: "!room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "@mosaic help",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const command = service.parseCommand(message);
|
||||||
|
|
||||||
|
expect(command).not.toBeNull();
|
||||||
|
expect(command?.command).toBe("help");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for non-command messages", () => {
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-7",
|
||||||
|
channelId: "!room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "Just a regular message",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const command = service.parseCommand(message);
|
||||||
|
|
||||||
|
expect(command).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for messages without @mosaic or !mosaic mention", () => {
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-8",
|
||||||
|
channelId: "!room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "fix 42",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const command = service.parseCommand(message);
|
||||||
|
|
||||||
|
expect(command).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return null for @mosaic mention without a command", () => {
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-11",
|
||||||
|
channelId: "!room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "@mosaic",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const command = service.parseCommand(message);
|
||||||
|
|
||||||
|
expect(command).toBeNull();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("Event-driven message reception", () => {
|
||||||
|
it("should ignore messages from the bot itself", async () => {
|
||||||
|
await service.connect();
|
||||||
|
|
||||||
|
const parseCommandSpy = vi.spyOn(commandParser, "parseCommand");
|
||||||
|
|
||||||
|
// Simulate a message from the bot
|
||||||
|
expect(mockMessageCallbacks.length).toBeGreaterThan(0);
|
||||||
|
const callback = mockMessageCallbacks[0];
|
||||||
|
callback?.("!test-room:example.com", {
|
||||||
|
event_id: "$msg-1",
|
||||||
|
sender: "@mosaic-bot:example.com",
|
||||||
|
origin_server_ts: Date.now(),
|
||||||
|
content: {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "@mosaic fix #42",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Should not attempt to parse
|
||||||
|
expect(parseCommandSpy).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should ignore messages in unmapped rooms", async () => {
|
||||||
|
// MatrixRoomService returns null for unknown rooms
|
||||||
|
mockMatrixRoomService.getWorkspaceForRoom.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
|
||||||
|
const callback = mockMessageCallbacks[0];
|
||||||
|
callback?.("!unknown-room:example.com", {
|
||||||
|
event_id: "$msg-1",
|
||||||
|
sender: "@user:example.com",
|
||||||
|
origin_server_ts: Date.now(),
|
||||||
|
content: {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "@mosaic fix #42",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Wait for async processing
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||||
|
|
||||||
|
// Should not dispatch to stitcher
|
||||||
|
expect(stitcherService.dispatchJob).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should process commands in the control room (fallback workspace)", async () => {
|
||||||
|
// MatrixRoomService returns null, but room matches controlRoomId
|
||||||
|
mockMatrixRoomService.getWorkspaceForRoom.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
|
||||||
|
const callback = mockMessageCallbacks[0];
|
||||||
|
callback?.("!test-room:example.com", {
|
||||||
|
event_id: "$msg-1",
|
||||||
|
sender: "@user:example.com",
|
||||||
|
origin_server_ts: Date.now(),
|
||||||
|
content: {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "@mosaic help",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Wait for async processing
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||||
|
|
||||||
|
// Should send help message
|
||||||
|
expect(mockClient.sendMessage).toHaveBeenCalledWith(
|
||||||
|
"!test-room:example.com",
|
||||||
|
expect.objectContaining({
|
||||||
|
body: expect.stringContaining("Available commands:"),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should process commands in rooms mapped via MatrixRoomService", async () => {
|
||||||
|
// MatrixRoomService resolves the workspace
|
||||||
|
mockMatrixRoomService.getWorkspaceForRoom.mockResolvedValue("mapped-workspace-id");
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
|
||||||
|
const callback = mockMessageCallbacks[0];
|
||||||
|
callback?.("!mapped-room:example.com", {
|
||||||
|
event_id: "$msg-1",
|
||||||
|
sender: "@user:example.com",
|
||||||
|
origin_server_ts: Date.now(),
|
||||||
|
content: {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "@mosaic fix #42",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Wait for async processing
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||||
|
|
||||||
|
// Should dispatch with the mapped workspace ID
|
||||||
|
expect(stitcherService.dispatchJob).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
workspaceId: "mapped-workspace-id",
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle !mosaic prefix in incoming messages", async () => {
|
||||||
|
mockMatrixRoomService.getWorkspaceForRoom.mockResolvedValue("test-workspace-id");
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
|
||||||
|
const callback = mockMessageCallbacks[0];
|
||||||
|
callback?.("!test-room:example.com", {
|
||||||
|
event_id: "$msg-1",
|
||||||
|
sender: "@user:example.com",
|
||||||
|
origin_server_ts: Date.now(),
|
||||||
|
content: {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "!mosaic help",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Wait for async processing
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||||
|
|
||||||
|
// Should send help message (normalized !mosaic -> @mosaic for parser)
|
||||||
|
expect(mockClient.sendMessage).toHaveBeenCalledWith(
|
||||||
|
"!test-room:example.com",
|
||||||
|
expect.objectContaining({
|
||||||
|
body: expect.stringContaining("Available commands:"),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should send help text when user tries an unknown command", async () => {
|
||||||
|
mockMatrixRoomService.getWorkspaceForRoom.mockResolvedValue("test-workspace-id");
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
|
||||||
|
const callback = mockMessageCallbacks[0];
|
||||||
|
callback?.("!test-room:example.com", {
|
||||||
|
event_id: "$msg-1",
|
||||||
|
sender: "@user:example.com",
|
||||||
|
origin_server_ts: Date.now(),
|
||||||
|
content: {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "@mosaic invalidcommand",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Wait for async processing
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||||
|
|
||||||
|
// Should send error/help message (CommandParserService returns help text for unknown actions)
|
||||||
|
expect(mockClient.sendMessage).toHaveBeenCalledWith(
|
||||||
|
"!test-room:example.com",
|
||||||
|
expect.objectContaining({
|
||||||
|
body: expect.stringContaining("Available commands"),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should ignore non-text messages", async () => {
|
||||||
|
mockMatrixRoomService.getWorkspaceForRoom.mockResolvedValue("test-workspace-id");
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
|
||||||
|
const callback = mockMessageCallbacks[0];
|
||||||
|
callback?.("!test-room:example.com", {
|
||||||
|
event_id: "$msg-1",
|
||||||
|
sender: "@user:example.com",
|
||||||
|
origin_server_ts: Date.now(),
|
||||||
|
content: {
|
||||||
|
msgtype: "m.image",
|
||||||
|
body: "photo.jpg",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Wait for async processing
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||||
|
|
||||||
|
// Should not attempt any message sending
|
||||||
|
expect(mockClient.sendMessage).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("Command Execution", () => {
|
||||||
|
it("should forward fix command to stitcher and create a thread", async () => {
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-1",
|
||||||
|
channelId: "!test-room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "@mosaic fix 42",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
await service.handleCommand({
|
||||||
|
command: "fix",
|
||||||
|
args: ["42"],
|
||||||
|
message,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(stitcherService.dispatchJob).toHaveBeenCalledWith({
|
||||||
|
workspaceId: "test-workspace-id",
|
||||||
|
type: "code-task",
|
||||||
|
priority: 10,
|
||||||
|
metadata: {
|
||||||
|
issueNumber: 42,
|
||||||
|
command: "fix",
|
||||||
|
channelId: "!test-room:example.com",
|
||||||
|
threadId: "$event-id-123",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle fix with #-prefixed issue number", async () => {
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-1",
|
||||||
|
channelId: "!test-room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "@mosaic fix #42",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
await service.handleCommand({
|
||||||
|
command: "fix",
|
||||||
|
args: ["#42"],
|
||||||
|
message,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(stitcherService.dispatchJob).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
metadata: expect.objectContaining({
|
||||||
|
issueNumber: 42,
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should respond with help message", async () => {
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-1",
|
||||||
|
channelId: "!test-room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "@mosaic help",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
await service.handleCommand({
|
||||||
|
command: "help",
|
||||||
|
args: [],
|
||||||
|
message,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(mockClient.sendMessage).toHaveBeenCalledWith(
|
||||||
|
"!test-room:example.com",
|
||||||
|
expect.objectContaining({
|
||||||
|
body: expect.stringContaining("Available commands:"),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should include retry command in help output", async () => {
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-1",
|
||||||
|
channelId: "!test-room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "@mosaic help",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
await service.handleCommand({
|
||||||
|
command: "help",
|
||||||
|
args: [],
|
||||||
|
message,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(mockClient.sendMessage).toHaveBeenCalledWith(
|
||||||
|
"!test-room:example.com",
|
||||||
|
expect.objectContaining({
|
||||||
|
body: expect.stringContaining("retry"),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should send error for fix command without issue number", async () => {
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-1",
|
||||||
|
channelId: "!test-room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "@mosaic fix",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
await service.handleCommand({
|
||||||
|
command: "fix",
|
||||||
|
args: [],
|
||||||
|
message,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(mockClient.sendMessage).toHaveBeenCalledWith(
|
||||||
|
"!test-room:example.com",
|
||||||
|
expect.objectContaining({
|
||||||
|
body: expect.stringContaining("Usage:"),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should send error for fix command with non-numeric issue", async () => {
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-1",
|
||||||
|
channelId: "!test-room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "@mosaic fix abc",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
await service.handleCommand({
|
||||||
|
command: "fix",
|
||||||
|
args: ["abc"],
|
||||||
|
message,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(mockClient.sendMessage).toHaveBeenCalledWith(
|
||||||
|
"!test-room:example.com",
|
||||||
|
expect.objectContaining({
|
||||||
|
body: expect.stringContaining("Invalid issue number"),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should dispatch fix command with workspace from MatrixRoomService", async () => {
|
||||||
|
mockMatrixRoomService.getWorkspaceForRoom.mockResolvedValue("dynamic-workspace-id");
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
|
||||||
|
const callback = mockMessageCallbacks[0];
|
||||||
|
callback?.("!mapped-room:example.com", {
|
||||||
|
event_id: "$msg-1",
|
||||||
|
sender: "@user:example.com",
|
||||||
|
origin_server_ts: Date.now(),
|
||||||
|
content: {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "@mosaic fix #99",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Wait for async processing
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||||
|
|
||||||
|
expect(stitcherService.dispatchJob).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
workspaceId: "dynamic-workspace-id",
|
||||||
|
metadata: expect.objectContaining({
|
||||||
|
issueNumber: 99,
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("Configuration", () => {
|
||||||
|
it("should throw error if MATRIX_HOMESERVER_URL is not set", async () => {
|
||||||
|
delete process.env.MATRIX_HOMESERVER_URL;
|
||||||
|
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
MatrixService,
|
||||||
|
CommandParserService,
|
||||||
|
{
|
||||||
|
provide: StitcherService,
|
||||||
|
useValue: mockStitcherService,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
provide: MatrixRoomService,
|
||||||
|
useValue: mockMatrixRoomService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
const newService = module.get<MatrixService>(MatrixService);
|
||||||
|
|
||||||
|
await expect(newService.connect()).rejects.toThrow("MATRIX_HOMESERVER_URL is required");
|
||||||
|
|
||||||
|
// Restore for other tests
|
||||||
|
process.env.MATRIX_HOMESERVER_URL = "https://matrix.example.com";
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw error if MATRIX_ACCESS_TOKEN is not set", async () => {
|
||||||
|
delete process.env.MATRIX_ACCESS_TOKEN;
|
||||||
|
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
MatrixService,
|
||||||
|
CommandParserService,
|
||||||
|
{
|
||||||
|
provide: StitcherService,
|
||||||
|
useValue: mockStitcherService,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
provide: MatrixRoomService,
|
||||||
|
useValue: mockMatrixRoomService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
const newService = module.get<MatrixService>(MatrixService);
|
||||||
|
|
||||||
|
await expect(newService.connect()).rejects.toThrow("MATRIX_ACCESS_TOKEN is required");
|
||||||
|
|
||||||
|
// Restore for other tests
|
||||||
|
process.env.MATRIX_ACCESS_TOKEN = "test-access-token";
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw error if MATRIX_BOT_USER_ID is not set", async () => {
|
||||||
|
delete process.env.MATRIX_BOT_USER_ID;
|
||||||
|
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
MatrixService,
|
||||||
|
CommandParserService,
|
||||||
|
{
|
||||||
|
provide: StitcherService,
|
||||||
|
useValue: mockStitcherService,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
provide: MatrixRoomService,
|
||||||
|
useValue: mockMatrixRoomService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
const newService = module.get<MatrixService>(MatrixService);
|
||||||
|
|
||||||
|
await expect(newService.connect()).rejects.toThrow("MATRIX_BOT_USER_ID is required");
|
||||||
|
|
||||||
|
// Restore for other tests
|
||||||
|
process.env.MATRIX_BOT_USER_ID = "@mosaic-bot:example.com";
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw error if MATRIX_WORKSPACE_ID is not set", async () => {
|
||||||
|
delete process.env.MATRIX_WORKSPACE_ID;
|
||||||
|
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
MatrixService,
|
||||||
|
CommandParserService,
|
||||||
|
{
|
||||||
|
provide: StitcherService,
|
||||||
|
useValue: mockStitcherService,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
provide: MatrixRoomService,
|
||||||
|
useValue: mockMatrixRoomService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
const newService = module.get<MatrixService>(MatrixService);
|
||||||
|
|
||||||
|
await expect(newService.connect()).rejects.toThrow("MATRIX_WORKSPACE_ID is required");
|
||||||
|
|
||||||
|
// Restore for other tests
|
||||||
|
process.env.MATRIX_WORKSPACE_ID = "test-workspace-id";
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use configured workspace ID from environment", async () => {
|
||||||
|
const testWorkspaceId = "configured-workspace-456";
|
||||||
|
process.env.MATRIX_WORKSPACE_ID = testWorkspaceId;
|
||||||
|
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
MatrixService,
|
||||||
|
CommandParserService,
|
||||||
|
{
|
||||||
|
provide: StitcherService,
|
||||||
|
useValue: mockStitcherService,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
provide: MatrixRoomService,
|
||||||
|
useValue: mockMatrixRoomService,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
const newService = module.get<MatrixService>(MatrixService);
|
||||||
|
|
||||||
|
const message: ChatMessage = {
|
||||||
|
id: "msg-1",
|
||||||
|
channelId: "!test-room:example.com",
|
||||||
|
authorId: "@user:example.com",
|
||||||
|
authorName: "@user:example.com",
|
||||||
|
content: "@mosaic fix 42",
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
await newService.connect();
|
||||||
|
await newService.handleCommand({
|
||||||
|
command: "fix",
|
||||||
|
args: ["42"],
|
||||||
|
message,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(mockStitcherService.dispatchJob).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
workspaceId: testWorkspaceId,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
// Restore for other tests
|
||||||
|
process.env.MATRIX_WORKSPACE_ID = "test-workspace-id";
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("Error Logging Security", () => {
|
||||||
|
it("should sanitize sensitive data in error logs", async () => {
|
||||||
|
const loggerErrorSpy = vi.spyOn(
|
||||||
|
(service as Record<string, unknown>)["logger"] as { error: (...args: unknown[]) => void },
|
||||||
|
"error"
|
||||||
|
);
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
|
||||||
|
// Trigger room.event handler with null event to exercise error path
|
||||||
|
expect(mockEventCallbacks.length).toBeGreaterThan(0);
|
||||||
|
mockEventCallbacks[0]?.("!room:example.com", null as unknown as Record<string, unknown>);
|
||||||
|
|
||||||
|
// Verify error was logged
|
||||||
|
expect(loggerErrorSpy).toHaveBeenCalled();
|
||||||
|
|
||||||
|
// Get the logged error
|
||||||
|
const loggedArgs = loggerErrorSpy.mock.calls[0];
|
||||||
|
const loggedError = loggedArgs?.[1] as Record<string, unknown>;
|
||||||
|
|
||||||
|
// Verify non-sensitive error info is preserved
|
||||||
|
expect(loggedError).toBeDefined();
|
||||||
|
expect((loggedError as { message: string }).message).toBe("Received null event from Matrix");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not include access token in error output", () => {
|
||||||
|
// Verify the access token is stored privately and not exposed
|
||||||
|
const serviceAsRecord = service as unknown as Record<string, unknown>;
|
||||||
|
// The accessToken should exist but should not appear in any public-facing method output
|
||||||
|
expect(serviceAsRecord["accessToken"]).toBe("test-access-token");
|
||||||
|
|
||||||
|
// Verify isConnected does not leak token
|
||||||
|
const connected = service.isConnected();
|
||||||
|
expect(String(connected)).not.toContain("test-access-token");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("MatrixRoomService reverse lookup", () => {
|
||||||
|
it("should call getWorkspaceForRoom when processing messages", async () => {
|
||||||
|
mockMatrixRoomService.getWorkspaceForRoom.mockResolvedValue("resolved-workspace");
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
|
||||||
|
const callback = mockMessageCallbacks[0];
|
||||||
|
callback?.("!some-room:example.com", {
|
||||||
|
event_id: "$msg-1",
|
||||||
|
sender: "@user:example.com",
|
||||||
|
origin_server_ts: Date.now(),
|
||||||
|
content: {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "@mosaic help",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Wait for async processing
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||||
|
|
||||||
|
expect(matrixRoomService.getWorkspaceForRoom).toHaveBeenCalledWith("!some-room:example.com");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should fall back to control room workspace when MatrixRoomService returns null", async () => {
|
||||||
|
mockMatrixRoomService.getWorkspaceForRoom.mockResolvedValue(null);
|
||||||
|
|
||||||
|
await service.connect();
|
||||||
|
|
||||||
|
const callback = mockMessageCallbacks[0];
|
||||||
|
// Send to the control room (fallback path)
|
||||||
|
callback?.("!test-room:example.com", {
|
||||||
|
event_id: "$msg-1",
|
||||||
|
sender: "@user:example.com",
|
||||||
|
origin_server_ts: Date.now(),
|
||||||
|
content: {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: "@mosaic fix #10",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Wait for async processing
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||||
|
|
||||||
|
// Should dispatch with the env-configured workspace
|
||||||
|
expect(stitcherService.dispatchJob).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
workspaceId: "test-workspace-id",
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
649
apps/api/src/bridge/matrix/matrix.service.ts
Normal file
649
apps/api/src/bridge/matrix/matrix.service.ts
Normal file
@@ -0,0 +1,649 @@
|
|||||||
|
import { Injectable, Logger, Optional, Inject } from "@nestjs/common";
|
||||||
|
import { MatrixClient, SimpleFsStorageProvider, AutojoinRoomsMixin } from "matrix-bot-sdk";
|
||||||
|
import { StitcherService } from "../../stitcher/stitcher.service";
|
||||||
|
import { CommandParserService } from "../parser/command-parser.service";
|
||||||
|
import { CommandAction } from "../parser/command.interface";
|
||||||
|
import type { ParsedCommand } from "../parser/command.interface";
|
||||||
|
import { MatrixRoomService } from "./matrix-room.service";
|
||||||
|
import { sanitizeForLogging } from "../../common/utils";
|
||||||
|
import type {
|
||||||
|
IChatProvider,
|
||||||
|
ChatMessage,
|
||||||
|
ChatCommand,
|
||||||
|
ThreadCreateOptions,
|
||||||
|
ThreadMessageOptions,
|
||||||
|
} from "../interfaces";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Matrix room message event content
|
||||||
|
*/
|
||||||
|
interface MatrixMessageContent {
|
||||||
|
msgtype: string;
|
||||||
|
body: string;
|
||||||
|
"m.relates_to"?: MatrixRelatesTo;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Matrix relationship metadata for threads (MSC3440)
|
||||||
|
*/
|
||||||
|
interface MatrixRelatesTo {
|
||||||
|
rel_type: string;
|
||||||
|
event_id: string;
|
||||||
|
is_falling_back?: boolean;
|
||||||
|
"m.in_reply_to"?: {
|
||||||
|
event_id: string;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Matrix room event structure
|
||||||
|
*/
|
||||||
|
interface MatrixRoomEvent {
|
||||||
|
event_id: string;
|
||||||
|
sender: string;
|
||||||
|
origin_server_ts: number;
|
||||||
|
content: MatrixMessageContent;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Matrix Service - Matrix chat platform integration
|
||||||
|
*
|
||||||
|
* Responsibilities:
|
||||||
|
* - Connect to Matrix via access token
|
||||||
|
* - Listen for commands in mapped rooms (via MatrixRoomService)
|
||||||
|
* - Parse commands using shared CommandParserService
|
||||||
|
* - Forward commands to stitcher
|
||||||
|
* - Receive status updates from herald
|
||||||
|
* - Post updates to threads (MSC3440)
|
||||||
|
*/
|
||||||
|
@Injectable()
|
||||||
|
export class MatrixService implements IChatProvider {
|
||||||
|
private readonly logger = new Logger(MatrixService.name);
|
||||||
|
private client: MatrixClient | null = null;
|
||||||
|
private connected = false;
|
||||||
|
private readonly homeserverUrl: string;
|
||||||
|
private readonly accessToken: string;
|
||||||
|
private readonly botUserId: string;
|
||||||
|
private readonly controlRoomId: string;
|
||||||
|
private readonly workspaceId: string;
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
private readonly stitcherService: StitcherService,
|
||||||
|
@Optional()
|
||||||
|
@Inject(CommandParserService)
|
||||||
|
private readonly commandParser: CommandParserService | null,
|
||||||
|
@Optional()
|
||||||
|
@Inject(MatrixRoomService)
|
||||||
|
private readonly matrixRoomService: MatrixRoomService | null
|
||||||
|
) {
|
||||||
|
this.homeserverUrl = process.env.MATRIX_HOMESERVER_URL ?? "";
|
||||||
|
this.accessToken = process.env.MATRIX_ACCESS_TOKEN ?? "";
|
||||||
|
this.botUserId = process.env.MATRIX_BOT_USER_ID ?? "";
|
||||||
|
this.controlRoomId = process.env.MATRIX_CONTROL_ROOM_ID ?? "";
|
||||||
|
this.workspaceId = process.env.MATRIX_WORKSPACE_ID ?? "";
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Connect to Matrix homeserver
|
||||||
|
*/
|
||||||
|
async connect(): Promise<void> {
|
||||||
|
if (!this.homeserverUrl) {
|
||||||
|
throw new Error("MATRIX_HOMESERVER_URL is required");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.accessToken) {
|
||||||
|
throw new Error("MATRIX_ACCESS_TOKEN is required");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.workspaceId) {
|
||||||
|
throw new Error("MATRIX_WORKSPACE_ID is required");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.botUserId) {
|
||||||
|
throw new Error("MATRIX_BOT_USER_ID is required");
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.log("Connecting to Matrix...");
|
||||||
|
|
||||||
|
const storage = new SimpleFsStorageProvider("matrix-bot-storage.json");
|
||||||
|
this.client = new MatrixClient(this.homeserverUrl, this.accessToken, storage);
|
||||||
|
|
||||||
|
// Auto-join rooms when invited
|
||||||
|
AutojoinRoomsMixin.setupOnClient(this.client);
|
||||||
|
|
||||||
|
// Setup event handlers
|
||||||
|
this.setupEventHandlers();
|
||||||
|
|
||||||
|
// Start syncing
|
||||||
|
await this.client.start();
|
||||||
|
this.connected = true;
|
||||||
|
this.logger.log(`Matrix bot connected as ${this.botUserId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup event handlers for Matrix client
|
||||||
|
*/
|
||||||
|
private setupEventHandlers(): void {
|
||||||
|
if (!this.client) return;
|
||||||
|
|
||||||
|
this.client.on("room.message", (roomId: string, event: MatrixRoomEvent) => {
|
||||||
|
// Ignore messages from the bot itself
|
||||||
|
if (event.sender === this.botUserId) return;
|
||||||
|
|
||||||
|
// Only handle text messages
|
||||||
|
if (event.content.msgtype !== "m.text") return;
|
||||||
|
|
||||||
|
this.handleRoomMessage(roomId, event).catch((error: unknown) => {
|
||||||
|
this.logger.error(
|
||||||
|
`Error handling room message in ${roomId}:`,
|
||||||
|
error instanceof Error ? error.message : error
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
this.client.on("room.event", (_roomId: string, event: MatrixRoomEvent | null) => {
|
||||||
|
// Handle errors emitted as events
|
||||||
|
if (!event) {
|
||||||
|
const error = new Error("Received null event from Matrix");
|
||||||
|
const sanitizedError = sanitizeForLogging(error);
|
||||||
|
this.logger.error("Matrix client error:", sanitizedError);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle an incoming room message.
|
||||||
|
*
|
||||||
|
* Resolves the workspace for the room (via MatrixRoomService or fallback
|
||||||
|
* to the control room), then delegates to the shared CommandParserService
|
||||||
|
* for platform-agnostic command parsing and dispatches the result.
|
||||||
|
*/
|
||||||
|
private async handleRoomMessage(roomId: string, event: MatrixRoomEvent): Promise<void> {
|
||||||
|
// Resolve workspace: try MatrixRoomService first, fall back to control room
|
||||||
|
let resolvedWorkspaceId: string | null = null;
|
||||||
|
|
||||||
|
if (this.matrixRoomService) {
|
||||||
|
resolvedWorkspaceId = await this.matrixRoomService.getWorkspaceForRoom(roomId);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback: if the room is the configured control room, use the env workspace
|
||||||
|
if (!resolvedWorkspaceId && roomId === this.controlRoomId) {
|
||||||
|
resolvedWorkspaceId = this.workspaceId;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If room is not mapped to any workspace, ignore the message
|
||||||
|
if (!resolvedWorkspaceId) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const messageContent = event.content.body;
|
||||||
|
|
||||||
|
// Build ChatMessage for interface compatibility
|
||||||
|
const chatMessage: ChatMessage = {
|
||||||
|
id: event.event_id,
|
||||||
|
channelId: roomId,
|
||||||
|
authorId: event.sender,
|
||||||
|
authorName: event.sender,
|
||||||
|
content: messageContent,
|
||||||
|
timestamp: new Date(event.origin_server_ts),
|
||||||
|
...(event.content["m.relates_to"]?.rel_type === "m.thread" && {
|
||||||
|
threadId: event.content["m.relates_to"].event_id,
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Use shared CommandParserService if available
|
||||||
|
if (this.commandParser) {
|
||||||
|
// Normalize !mosaic to @mosaic for the shared parser
|
||||||
|
const normalizedContent = messageContent.replace(/^!mosaic/i, "@mosaic");
|
||||||
|
|
||||||
|
const result = this.commandParser.parseCommand(normalizedContent);
|
||||||
|
|
||||||
|
if (result.success) {
|
||||||
|
await this.handleParsedCommand(result.command, chatMessage, resolvedWorkspaceId);
|
||||||
|
} else if (normalizedContent.toLowerCase().startsWith("@mosaic")) {
|
||||||
|
// The user tried to use a command but it failed to parse -- send help
|
||||||
|
await this.sendMessage(roomId, result.error.help ?? result.error.message);
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback: use the built-in parseCommand if CommandParserService not injected
|
||||||
|
const command = this.parseCommand(chatMessage);
|
||||||
|
if (command) {
|
||||||
|
await this.handleCommand(command);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle a command parsed by the shared CommandParserService.
|
||||||
|
*
|
||||||
|
* Routes the ParsedCommand to the appropriate handler, passing
|
||||||
|
* along workspace context for job dispatch.
|
||||||
|
*/
|
||||||
|
private async handleParsedCommand(
|
||||||
|
parsed: ParsedCommand,
|
||||||
|
message: ChatMessage,
|
||||||
|
workspaceId: string
|
||||||
|
): Promise<void> {
|
||||||
|
this.logger.log(
|
||||||
|
`Handling command: ${parsed.action} from ${message.authorName} in workspace ${workspaceId}`
|
||||||
|
);
|
||||||
|
|
||||||
|
switch (parsed.action) {
|
||||||
|
case CommandAction.FIX:
|
||||||
|
await this.handleFixCommand(parsed.rawArgs, message, workspaceId);
|
||||||
|
break;
|
||||||
|
case CommandAction.STATUS:
|
||||||
|
await this.handleStatusCommand(parsed.rawArgs, message);
|
||||||
|
break;
|
||||||
|
case CommandAction.CANCEL:
|
||||||
|
await this.handleCancelCommand(parsed.rawArgs, message);
|
||||||
|
break;
|
||||||
|
case CommandAction.VERBOSE:
|
||||||
|
await this.handleVerboseCommand(parsed.rawArgs, message);
|
||||||
|
break;
|
||||||
|
case CommandAction.QUIET:
|
||||||
|
await this.handleQuietCommand(parsed.rawArgs, message);
|
||||||
|
break;
|
||||||
|
case CommandAction.HELP:
|
||||||
|
await this.handleHelpCommand(parsed.rawArgs, message);
|
||||||
|
break;
|
||||||
|
case CommandAction.RETRY:
|
||||||
|
await this.handleRetryCommand(parsed.rawArgs, message);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
await this.sendMessage(
|
||||||
|
message.channelId,
|
||||||
|
`Unknown command. Type \`@mosaic help\` or \`!mosaic help\` for available commands.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Disconnect from Matrix
|
||||||
|
*/
|
||||||
|
disconnect(): Promise<void> {
|
||||||
|
this.logger.log("Disconnecting from Matrix...");
|
||||||
|
this.connected = false;
|
||||||
|
if (this.client) {
|
||||||
|
this.client.stop();
|
||||||
|
}
|
||||||
|
return Promise.resolve();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the provider is connected
|
||||||
|
*/
|
||||||
|
isConnected(): boolean {
|
||||||
|
return this.connected;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the underlying MatrixClient instance.
|
||||||
|
*
|
||||||
|
* Used by MatrixStreamingService for low-level operations
|
||||||
|
* (message edits, typing indicators) that require direct client access.
|
||||||
|
*
|
||||||
|
* @returns The MatrixClient instance, or null if not connected
|
||||||
|
*/
|
||||||
|
getClient(): MatrixClient | null {
|
||||||
|
return this.client;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Send a message to a room
|
||||||
|
*/
|
||||||
|
async sendMessage(roomId: string, content: string): Promise<void> {
|
||||||
|
if (!this.client) {
|
||||||
|
throw new Error("Matrix client is not connected");
|
||||||
|
}
|
||||||
|
|
||||||
|
const messageContent: MatrixMessageContent = {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: content,
|
||||||
|
};
|
||||||
|
|
||||||
|
await this.client.sendMessage(roomId, messageContent);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a thread for job updates (MSC3440)
|
||||||
|
*
|
||||||
|
* Matrix threads are created by sending an initial message
|
||||||
|
* and then replying with m.thread relation. The initial
|
||||||
|
* message event ID becomes the thread root.
|
||||||
|
*/
|
||||||
|
async createThread(options: ThreadCreateOptions): Promise<string> {
|
||||||
|
if (!this.client) {
|
||||||
|
throw new Error("Matrix client is not connected");
|
||||||
|
}
|
||||||
|
|
||||||
|
const { channelId, name, message } = options;
|
||||||
|
|
||||||
|
// Send the initial message that becomes the thread root
|
||||||
|
const initialContent: MatrixMessageContent = {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: `[${name}] ${message}`,
|
||||||
|
};
|
||||||
|
|
||||||
|
const eventId = await this.client.sendMessage(channelId, initialContent);
|
||||||
|
|
||||||
|
return eventId;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Send a message to a thread (MSC3440)
|
||||||
|
*
|
||||||
|
* Uses m.thread relation to associate the message with the thread root event.
|
||||||
|
*/
|
||||||
|
async sendThreadMessage(options: ThreadMessageOptions): Promise<void> {
|
||||||
|
if (!this.client) {
|
||||||
|
throw new Error("Matrix client is not connected");
|
||||||
|
}
|
||||||
|
|
||||||
|
const { threadId, channelId, content } = options;
|
||||||
|
|
||||||
|
// Use the channelId from options (threads are room-scoped), fall back to control room
|
||||||
|
const roomId = channelId || this.controlRoomId;
|
||||||
|
|
||||||
|
const threadContent: MatrixMessageContent = {
|
||||||
|
msgtype: "m.text",
|
||||||
|
body: content,
|
||||||
|
"m.relates_to": {
|
||||||
|
rel_type: "m.thread",
|
||||||
|
event_id: threadId,
|
||||||
|
is_falling_back: true,
|
||||||
|
"m.in_reply_to": {
|
||||||
|
event_id: threadId,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
await this.client.sendMessage(roomId, threadContent);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a command from a message (IChatProvider interface).
|
||||||
|
*
|
||||||
|
* Delegates to the shared CommandParserService when available,
|
||||||
|
* falling back to built-in parsing for backwards compatibility.
|
||||||
|
*/
|
||||||
|
parseCommand(message: ChatMessage): ChatCommand | null {
|
||||||
|
const { content } = message;
|
||||||
|
|
||||||
|
// Try shared parser first
|
||||||
|
if (this.commandParser) {
|
||||||
|
const normalizedContent = content.replace(/^!mosaic/i, "@mosaic");
|
||||||
|
const result = this.commandParser.parseCommand(normalizedContent);
|
||||||
|
|
||||||
|
if (result.success) {
|
||||||
|
return {
|
||||||
|
command: result.command.action,
|
||||||
|
args: result.command.rawArgs,
|
||||||
|
message,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback: built-in parsing for when CommandParserService is not injected
|
||||||
|
const lowerContent = content.toLowerCase();
|
||||||
|
if (!lowerContent.includes("@mosaic") && !lowerContent.includes("!mosaic")) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const parts = content.trim().split(/\s+/);
|
||||||
|
const mosaicIndex = parts.findIndex(
|
||||||
|
(part) => part.toLowerCase().includes("@mosaic") || part.toLowerCase().includes("!mosaic")
|
||||||
|
);
|
||||||
|
|
||||||
|
if (mosaicIndex === -1 || mosaicIndex === parts.length - 1) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const commandPart = parts[mosaicIndex + 1];
|
||||||
|
if (!commandPart) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const command = commandPart.toLowerCase();
|
||||||
|
const args = parts.slice(mosaicIndex + 2);
|
||||||
|
|
||||||
|
const validCommands = ["fix", "status", "cancel", "verbose", "quiet", "help"];
|
||||||
|
|
||||||
|
if (!validCommands.includes(command)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
command,
|
||||||
|
args,
|
||||||
|
message,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle a parsed command (ChatCommand format, used by fallback path)
|
||||||
|
*/
|
||||||
|
async handleCommand(command: ChatCommand): Promise<void> {
|
||||||
|
const { command: cmd, args, message } = command;
|
||||||
|
|
||||||
|
this.logger.log(
|
||||||
|
`Handling command: ${cmd} with args: ${args.join(", ")} from ${message.authorName}`
|
||||||
|
);
|
||||||
|
|
||||||
|
switch (cmd) {
|
||||||
|
case "fix":
|
||||||
|
await this.handleFixCommand(args, message, this.workspaceId);
|
||||||
|
break;
|
||||||
|
case "status":
|
||||||
|
await this.handleStatusCommand(args, message);
|
||||||
|
break;
|
||||||
|
case "cancel":
|
||||||
|
await this.handleCancelCommand(args, message);
|
||||||
|
break;
|
||||||
|
case "verbose":
|
||||||
|
await this.handleVerboseCommand(args, message);
|
||||||
|
break;
|
||||||
|
case "quiet":
|
||||||
|
await this.handleQuietCommand(args, message);
|
||||||
|
break;
|
||||||
|
case "help":
|
||||||
|
await this.handleHelpCommand(args, message);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
await this.sendMessage(
|
||||||
|
message.channelId,
|
||||||
|
`Unknown command: ${cmd}. Type \`@mosaic help\` or \`!mosaic help\` for available commands.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle fix command - Start a job for an issue
|
||||||
|
*/
|
||||||
|
private async handleFixCommand(
|
||||||
|
args: string[],
|
||||||
|
message: ChatMessage,
|
||||||
|
workspaceId?: string
|
||||||
|
): Promise<void> {
|
||||||
|
if (args.length === 0 || !args[0]) {
|
||||||
|
await this.sendMessage(
|
||||||
|
message.channelId,
|
||||||
|
"Usage: `@mosaic fix <issue-number>` or `!mosaic fix <issue-number>`"
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse issue number: handle both "#42" and "42" formats
|
||||||
|
const issueArg = args[0].replace(/^#/, "");
|
||||||
|
const issueNumber = parseInt(issueArg, 10);
|
||||||
|
|
||||||
|
if (isNaN(issueNumber)) {
|
||||||
|
await this.sendMessage(
|
||||||
|
message.channelId,
|
||||||
|
"Invalid issue number. Please provide a numeric issue number."
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const targetWorkspaceId = workspaceId ?? this.workspaceId;
|
||||||
|
|
||||||
|
// Create thread for job updates
|
||||||
|
const threadId = await this.createThread({
|
||||||
|
channelId: message.channelId,
|
||||||
|
name: `Job #${String(issueNumber)}`,
|
||||||
|
message: `Starting job for issue #${String(issueNumber)}...`,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Dispatch job to stitcher
|
||||||
|
try {
|
||||||
|
const result = await this.stitcherService.dispatchJob({
|
||||||
|
workspaceId: targetWorkspaceId,
|
||||||
|
type: "code-task",
|
||||||
|
priority: 10,
|
||||||
|
metadata: {
|
||||||
|
issueNumber,
|
||||||
|
command: "fix",
|
||||||
|
channelId: message.channelId,
|
||||||
|
threadId: threadId,
|
||||||
|
authorId: message.authorId,
|
||||||
|
authorName: message.authorName,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Send confirmation to thread
|
||||||
|
await this.sendThreadMessage({
|
||||||
|
threadId,
|
||||||
|
channelId: message.channelId,
|
||||||
|
content: `Job created: ${result.jobId}\nStatus: ${result.status}\nQueue: ${result.queueName}`,
|
||||||
|
});
|
||||||
|
} catch (error: unknown) {
|
||||||
|
const errorMessage = error instanceof Error ? error.message : "Unknown error";
|
||||||
|
this.logger.error(
|
||||||
|
`Failed to dispatch job for issue #${String(issueNumber)}: ${errorMessage}`
|
||||||
|
);
|
||||||
|
await this.sendThreadMessage({
|
||||||
|
threadId,
|
||||||
|
channelId: message.channelId,
|
||||||
|
content: `Failed to start job: ${errorMessage}`,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle status command - Get job status
|
||||||
|
*/
|
||||||
|
private async handleStatusCommand(args: string[], message: ChatMessage): Promise<void> {
|
||||||
|
if (args.length === 0 || !args[0]) {
|
||||||
|
await this.sendMessage(
|
||||||
|
message.channelId,
|
||||||
|
"Usage: `@mosaic status <job-id>` or `!mosaic status <job-id>`"
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const jobId = args[0];
|
||||||
|
|
||||||
|
// TODO: Implement job status retrieval from stitcher
|
||||||
|
await this.sendMessage(
|
||||||
|
message.channelId,
|
||||||
|
`Status command not yet implemented for job: ${jobId}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle cancel command - Cancel a running job
|
||||||
|
*/
|
||||||
|
private async handleCancelCommand(args: string[], message: ChatMessage): Promise<void> {
|
||||||
|
if (args.length === 0 || !args[0]) {
|
||||||
|
await this.sendMessage(
|
||||||
|
message.channelId,
|
||||||
|
"Usage: `@mosaic cancel <job-id>` or `!mosaic cancel <job-id>`"
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const jobId = args[0];
|
||||||
|
|
||||||
|
// TODO: Implement job cancellation in stitcher
|
||||||
|
await this.sendMessage(
|
||||||
|
message.channelId,
|
||||||
|
`Cancel command not yet implemented for job: ${jobId}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle retry command - Retry a failed job
|
||||||
|
*/
|
||||||
|
private async handleRetryCommand(args: string[], message: ChatMessage): Promise<void> {
|
||||||
|
if (args.length === 0 || !args[0]) {
|
||||||
|
await this.sendMessage(
|
||||||
|
message.channelId,
|
||||||
|
"Usage: `@mosaic retry <job-id>` or `!mosaic retry <job-id>`"
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const jobId = args[0];
|
||||||
|
|
||||||
|
// TODO: Implement job retry in stitcher
|
||||||
|
await this.sendMessage(
|
||||||
|
message.channelId,
|
||||||
|
`Retry command not yet implemented for job: ${jobId}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle verbose command - Stream full logs to thread
|
||||||
|
*/
|
||||||
|
private async handleVerboseCommand(args: string[], message: ChatMessage): Promise<void> {
|
||||||
|
if (args.length === 0 || !args[0]) {
|
||||||
|
await this.sendMessage(
|
||||||
|
message.channelId,
|
||||||
|
"Usage: `@mosaic verbose <job-id>` or `!mosaic verbose <job-id>`"
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const jobId = args[0];
|
||||||
|
|
||||||
|
// TODO: Implement verbose logging
|
||||||
|
await this.sendMessage(message.channelId, `Verbose mode not yet implemented for job: ${jobId}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle quiet command - Reduce notifications
|
||||||
|
*/
|
||||||
|
private async handleQuietCommand(_args: string[], message: ChatMessage): Promise<void> {
|
||||||
|
// TODO: Implement quiet mode
|
||||||
|
await this.sendMessage(
|
||||||
|
message.channelId,
|
||||||
|
"Quiet mode not yet implemented. Currently showing milestone updates only."
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle help command - Show available commands
|
||||||
|
*/
|
||||||
|
private async handleHelpCommand(_args: string[], message: ChatMessage): Promise<void> {
|
||||||
|
const helpMessage = `
|
||||||
|
**Available commands:**
|
||||||
|
|
||||||
|
\`@mosaic fix <issue>\` or \`!mosaic fix <issue>\` - Start job for issue
|
||||||
|
\`@mosaic status <job>\` or \`!mosaic status <job>\` - Get job status
|
||||||
|
\`@mosaic cancel <job>\` or \`!mosaic cancel <job>\` - Cancel running job
|
||||||
|
\`@mosaic retry <job>\` or \`!mosaic retry <job>\` - Retry failed job
|
||||||
|
\`@mosaic verbose <job>\` or \`!mosaic verbose <job>\` - Stream full logs to thread
|
||||||
|
\`@mosaic quiet\` or \`!mosaic quiet\` - Reduce notifications
|
||||||
|
\`@mosaic help\` or \`!mosaic help\` - Show this help message
|
||||||
|
|
||||||
|
**Noise Management:**
|
||||||
|
- Main room: Low verbosity (milestones only)
|
||||||
|
- Job threads: Medium verbosity (step completions)
|
||||||
|
- DMs: Configurable per user
|
||||||
|
`.trim();
|
||||||
|
|
||||||
|
await this.sendMessage(message.channelId, helpMessage);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -113,34 +113,24 @@ describe("ApiKeyGuard", () => {
|
|||||||
const validApiKey = "test-api-key-12345";
|
const validApiKey = "test-api-key-12345";
|
||||||
vi.mocked(mockConfigService.get).mockReturnValue(validApiKey);
|
vi.mocked(mockConfigService.get).mockReturnValue(validApiKey);
|
||||||
|
|
||||||
const startTime = Date.now();
|
// Verify that same-length keys are compared properly (exercises timingSafeEqual path)
|
||||||
const context1 = createMockExecutionContext({
|
// and different-length keys are rejected before comparison
|
||||||
"x-api-key": "wrong-key-short",
|
const sameLength = createMockExecutionContext({
|
||||||
|
"x-api-key": "test-api-key-12344", // Same length, one char different
|
||||||
|
});
|
||||||
|
const differentLength = createMockExecutionContext({
|
||||||
|
"x-api-key": "short", // Different length
|
||||||
});
|
});
|
||||||
|
|
||||||
try {
|
// Both should throw, proving the comparison logic handles both cases
|
||||||
guard.canActivate(context1);
|
expect(() => guard.canActivate(sameLength)).toThrow("Invalid API key");
|
||||||
} catch {
|
expect(() => guard.canActivate(differentLength)).toThrow("Invalid API key");
|
||||||
// Expected to fail
|
|
||||||
}
|
|
||||||
const shortKeyTime = Date.now() - startTime;
|
|
||||||
|
|
||||||
const startTime2 = Date.now();
|
// Correct key should pass
|
||||||
const context2 = createMockExecutionContext({
|
const correct = createMockExecutionContext({
|
||||||
"x-api-key": "test-api-key-12344", // Very close to correct key
|
"x-api-key": validApiKey,
|
||||||
});
|
});
|
||||||
|
expect(guard.canActivate(correct)).toBe(true);
|
||||||
try {
|
|
||||||
guard.canActivate(context2);
|
|
||||||
} catch {
|
|
||||||
// Expected to fail
|
|
||||||
}
|
|
||||||
const longKeyTime = Date.now() - startTime2;
|
|
||||||
|
|
||||||
// Times should be similar (within 10ms) to prevent timing attacks
|
|
||||||
// Note: This is a simplified test; real timing attack prevention
|
|
||||||
// is handled by crypto.timingSafeEqual
|
|
||||||
expect(Math.abs(shortKeyTime - longKeyTime)).toBeLessThan(10);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -137,13 +137,13 @@ describe("RLS Context Integration", () => {
|
|||||||
queries: ["findMany"],
|
queries: ["findMany"],
|
||||||
});
|
});
|
||||||
|
|
||||||
// Verify SET LOCAL was called
|
// Verify transaction-local set_config calls were made
|
||||||
expect(mockTransactionClient.$executeRaw).toHaveBeenCalledWith(
|
expect(mockTransactionClient.$executeRaw).toHaveBeenCalledWith(
|
||||||
expect.arrayContaining(["SET LOCAL app.current_user_id = ", ""]),
|
expect.arrayContaining(["SELECT set_config('app.current_user_id', ", ", true)"]),
|
||||||
userId
|
userId
|
||||||
);
|
);
|
||||||
expect(mockTransactionClient.$executeRaw).toHaveBeenCalledWith(
|
expect(mockTransactionClient.$executeRaw).toHaveBeenCalledWith(
|
||||||
expect.arrayContaining(["SET LOCAL app.current_workspace_id = ", ""]),
|
expect.arrayContaining(["SELECT set_config('app.current_workspace_id', ", ", true)"]),
|
||||||
workspaceId
|
workspaceId
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -80,7 +80,7 @@ describe("RlsContextInterceptor", () => {
|
|||||||
|
|
||||||
expect(result).toEqual({ data: "test response" });
|
expect(result).toEqual({ data: "test response" });
|
||||||
expect(mockTransactionClient.$executeRaw).toHaveBeenCalledWith(
|
expect(mockTransactionClient.$executeRaw).toHaveBeenCalledWith(
|
||||||
expect.arrayContaining(["SET LOCAL app.current_user_id = ", ""]),
|
expect.arrayContaining(["SELECT set_config('app.current_user_id', ", ", true)"]),
|
||||||
userId
|
userId
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
@@ -111,13 +111,13 @@ describe("RlsContextInterceptor", () => {
|
|||||||
// Check that user context was set
|
// Check that user context was set
|
||||||
expect(mockTransactionClient.$executeRaw).toHaveBeenNthCalledWith(
|
expect(mockTransactionClient.$executeRaw).toHaveBeenNthCalledWith(
|
||||||
1,
|
1,
|
||||||
expect.arrayContaining(["SET LOCAL app.current_user_id = ", ""]),
|
expect.arrayContaining(["SELECT set_config('app.current_user_id', ", ", true)"]),
|
||||||
userId
|
userId
|
||||||
);
|
);
|
||||||
// Check that workspace context was set
|
// Check that workspace context was set
|
||||||
expect(mockTransactionClient.$executeRaw).toHaveBeenNthCalledWith(
|
expect(mockTransactionClient.$executeRaw).toHaveBeenNthCalledWith(
|
||||||
2,
|
2,
|
||||||
expect.arrayContaining(["SET LOCAL app.current_workspace_id = ", ""]),
|
expect.arrayContaining(["SELECT set_config('app.current_workspace_id', ", ", true)"]),
|
||||||
workspaceId
|
workspaceId
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -100,12 +100,12 @@ export class RlsContextInterceptor implements NestInterceptor {
|
|||||||
this.prisma
|
this.prisma
|
||||||
.$transaction(
|
.$transaction(
|
||||||
async (tx) => {
|
async (tx) => {
|
||||||
// Set user context (always present for authenticated requests)
|
// Use set_config(..., true) so values are transaction-local and parameterized safely.
|
||||||
await tx.$executeRaw`SET LOCAL app.current_user_id = ${userId}`;
|
// Direct SET LOCAL with bind parameters produces invalid SQL on PostgreSQL.
|
||||||
|
await tx.$executeRaw`SELECT set_config('app.current_user_id', ${userId}, true)`;
|
||||||
|
|
||||||
// Set workspace context (if present)
|
|
||||||
if (workspaceId) {
|
if (workspaceId) {
|
||||||
await tx.$executeRaw`SET LOCAL app.current_workspace_id = ${workspaceId}`;
|
await tx.$executeRaw`SELECT set_config('app.current_workspace_id', ${workspaceId}, true)`;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Propagate the transaction client via AsyncLocalStorage
|
// Propagate the transaction client via AsyncLocalStorage
|
||||||
|
|||||||
@@ -15,7 +15,12 @@
|
|||||||
import { describe, it, expect, beforeAll, afterAll } from "vitest";
|
import { describe, it, expect, beforeAll, afterAll } from "vitest";
|
||||||
import { PrismaClient, CredentialType, CredentialScope } from "@prisma/client";
|
import { PrismaClient, CredentialType, CredentialScope } from "@prisma/client";
|
||||||
|
|
||||||
describe("UserCredential Model", () => {
|
const shouldRunDbIntegrationTests =
|
||||||
|
process.env.RUN_DB_TESTS === "true" && Boolean(process.env.DATABASE_URL);
|
||||||
|
|
||||||
|
const describeFn = shouldRunDbIntegrationTests ? describe : describe.skip;
|
||||||
|
|
||||||
|
describeFn("UserCredential Model", () => {
|
||||||
let prisma: PrismaClient;
|
let prisma: PrismaClient;
|
||||||
let testUserId: string;
|
let testUserId: string;
|
||||||
let testWorkspaceId: string;
|
let testWorkspaceId: string;
|
||||||
@@ -23,8 +28,8 @@ describe("UserCredential Model", () => {
|
|||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
// Note: These tests require a running database
|
// Note: These tests require a running database
|
||||||
// They will be skipped in CI if DATABASE_URL is not set
|
// They will be skipped in CI if DATABASE_URL is not set
|
||||||
if (!process.env.DATABASE_URL) {
|
if (!shouldRunDbIntegrationTests) {
|
||||||
console.warn("DATABASE_URL not set, skipping UserCredential model tests");
|
console.warn("Skipping UserCredential model tests (set RUN_DB_TESTS=true and DATABASE_URL)");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ import { BridgeModule } from "../bridge/bridge.module";
|
|||||||
* - Subscribe to job events
|
* - Subscribe to job events
|
||||||
* - Format status messages with PDA-friendly language
|
* - Format status messages with PDA-friendly language
|
||||||
* - Route to appropriate channels based on workspace config
|
* - Route to appropriate channels based on workspace config
|
||||||
* - Support Discord (via bridge) and PR comments
|
* - Broadcast to ALL active chat providers via CHAT_PROVIDERS token
|
||||||
*/
|
*/
|
||||||
@Module({
|
@Module({
|
||||||
imports: [PrismaModule, BridgeModule],
|
imports: [PrismaModule, BridgeModule],
|
||||||
|
|||||||
@@ -2,7 +2,8 @@ import { Test, TestingModule } from "@nestjs/testing";
|
|||||||
import { vi, describe, it, expect, beforeEach } from "vitest";
|
import { vi, describe, it, expect, beforeEach } from "vitest";
|
||||||
import { HeraldService } from "./herald.service";
|
import { HeraldService } from "./herald.service";
|
||||||
import { PrismaService } from "../prisma/prisma.service";
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
import { DiscordService } from "../bridge/discord/discord.service";
|
import { CHAT_PROVIDERS } from "../bridge/bridge.constants";
|
||||||
|
import type { IChatProvider } from "../bridge/interfaces/chat-provider.interface";
|
||||||
import {
|
import {
|
||||||
JOB_CREATED,
|
JOB_CREATED,
|
||||||
JOB_STARTED,
|
JOB_STARTED,
|
||||||
@@ -14,10 +15,31 @@ import {
|
|||||||
GATE_FAILED,
|
GATE_FAILED,
|
||||||
} from "../job-events/event-types";
|
} from "../job-events/event-types";
|
||||||
|
|
||||||
|
function createMockProvider(
|
||||||
|
name: string,
|
||||||
|
connected = true
|
||||||
|
): IChatProvider & {
|
||||||
|
sendMessage: ReturnType<typeof vi.fn>;
|
||||||
|
sendThreadMessage: ReturnType<typeof vi.fn>;
|
||||||
|
createThread: ReturnType<typeof vi.fn>;
|
||||||
|
isConnected: ReturnType<typeof vi.fn>;
|
||||||
|
connect: ReturnType<typeof vi.fn>;
|
||||||
|
disconnect: ReturnType<typeof vi.fn>;
|
||||||
|
parseCommand: ReturnType<typeof vi.fn>;
|
||||||
|
} {
|
||||||
|
return {
|
||||||
|
connect: vi.fn().mockResolvedValue(undefined),
|
||||||
|
disconnect: vi.fn().mockResolvedValue(undefined),
|
||||||
|
isConnected: vi.fn().mockReturnValue(connected),
|
||||||
|
sendMessage: vi.fn().mockResolvedValue(undefined),
|
||||||
|
createThread: vi.fn().mockResolvedValue("thread-id"),
|
||||||
|
sendThreadMessage: vi.fn().mockResolvedValue(undefined),
|
||||||
|
parseCommand: vi.fn().mockReturnValue(null),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
describe("HeraldService", () => {
|
describe("HeraldService", () => {
|
||||||
let service: HeraldService;
|
let service: HeraldService;
|
||||||
let prisma: PrismaService;
|
|
||||||
let discord: DiscordService;
|
|
||||||
|
|
||||||
const mockPrisma = {
|
const mockPrisma = {
|
||||||
workspace: {
|
workspace: {
|
||||||
@@ -31,14 +53,15 @@ describe("HeraldService", () => {
|
|||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
const mockDiscord = {
|
let mockProviderA: ReturnType<typeof createMockProvider>;
|
||||||
isConnected: vi.fn(),
|
let mockProviderB: ReturnType<typeof createMockProvider>;
|
||||||
sendMessage: vi.fn(),
|
let chatProviders: IChatProvider[];
|
||||||
sendThreadMessage: vi.fn(),
|
|
||||||
createThread: vi.fn(),
|
|
||||||
};
|
|
||||||
|
|
||||||
beforeEach(async () => {
|
beforeEach(async () => {
|
||||||
|
mockProviderA = createMockProvider("providerA", true);
|
||||||
|
mockProviderB = createMockProvider("providerB", true);
|
||||||
|
chatProviders = [mockProviderA, mockProviderB];
|
||||||
|
|
||||||
const module: TestingModule = await Test.createTestingModule({
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
providers: [
|
providers: [
|
||||||
HeraldService,
|
HeraldService,
|
||||||
@@ -47,25 +70,47 @@ describe("HeraldService", () => {
|
|||||||
useValue: mockPrisma,
|
useValue: mockPrisma,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
provide: DiscordService,
|
provide: CHAT_PROVIDERS,
|
||||||
useValue: mockDiscord,
|
useValue: chatProviders,
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
}).compile();
|
}).compile();
|
||||||
|
|
||||||
service = module.get<HeraldService>(HeraldService);
|
service = module.get<HeraldService>(HeraldService);
|
||||||
prisma = module.get<PrismaService>(PrismaService);
|
|
||||||
discord = module.get<DiscordService>(DiscordService);
|
|
||||||
|
|
||||||
// Reset mocks
|
// Reset mocks
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
|
// Restore default connected state after clearAllMocks
|
||||||
|
mockProviderA.isConnected.mockReturnValue(true);
|
||||||
|
mockProviderB.isConnected.mockReturnValue(true);
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("broadcastJobEvent", () => {
|
describe("broadcastJobEvent", () => {
|
||||||
it("should broadcast job.created event to configured channel", async () => {
|
const baseSetup = (): {
|
||||||
// Arrange
|
jobId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
} => {
|
||||||
const workspaceId = "workspace-1";
|
const workspaceId = "workspace-1";
|
||||||
const jobId = "job-1";
|
const jobId = "job-1";
|
||||||
|
|
||||||
|
mockPrisma.runnerJob.findUnique.mockResolvedValue({
|
||||||
|
id: jobId,
|
||||||
|
workspaceId,
|
||||||
|
type: "code-task",
|
||||||
|
});
|
||||||
|
|
||||||
|
mockPrisma.jobEvent.findFirst.mockResolvedValue({
|
||||||
|
payload: {
|
||||||
|
metadata: { issueNumber: 42, threadId: "thread-123", channelId: "channel-abc" },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return { jobId, workspaceId };
|
||||||
|
};
|
||||||
|
|
||||||
|
it("should broadcast to all connected providers", async () => {
|
||||||
|
// Arrange
|
||||||
|
const { jobId } = baseSetup();
|
||||||
const event = {
|
const event = {
|
||||||
id: "event-1",
|
id: "event-1",
|
||||||
jobId,
|
jobId,
|
||||||
@@ -75,46 +120,25 @@ describe("HeraldService", () => {
|
|||||||
payload: { issueNumber: 42 },
|
payload: { issueNumber: 42 },
|
||||||
};
|
};
|
||||||
|
|
||||||
mockPrisma.workspace.findUnique.mockResolvedValue({
|
|
||||||
id: workspaceId,
|
|
||||||
settings: {
|
|
||||||
herald: {
|
|
||||||
channelMappings: {
|
|
||||||
"code-task": "channel-123",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
mockPrisma.runnerJob.findUnique.mockResolvedValue({
|
|
||||||
id: jobId,
|
|
||||||
workspaceId,
|
|
||||||
type: "code-task",
|
|
||||||
});
|
|
||||||
|
|
||||||
mockPrisma.jobEvent.findFirst.mockResolvedValue({
|
|
||||||
payload: {
|
|
||||||
metadata: { issueNumber: 42, threadId: "thread-123" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
mockDiscord.isConnected.mockReturnValue(true);
|
|
||||||
mockDiscord.sendThreadMessage.mockResolvedValue(undefined);
|
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
await service.broadcastJobEvent(jobId, event);
|
await service.broadcastJobEvent(jobId, event);
|
||||||
|
|
||||||
// Assert
|
// Assert
|
||||||
expect(mockDiscord.sendThreadMessage).toHaveBeenCalledWith({
|
expect(mockProviderA.sendThreadMessage).toHaveBeenCalledWith({
|
||||||
threadId: "thread-123",
|
threadId: "thread-123",
|
||||||
|
channelId: "channel-abc",
|
||||||
|
content: expect.stringContaining("Job created"),
|
||||||
|
});
|
||||||
|
expect(mockProviderB.sendThreadMessage).toHaveBeenCalledWith({
|
||||||
|
threadId: "thread-123",
|
||||||
|
channelId: "channel-abc",
|
||||||
content: expect.stringContaining("Job created"),
|
content: expect.stringContaining("Job created"),
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should broadcast job.started event", async () => {
|
it("should broadcast job.started event to all providers", async () => {
|
||||||
// Arrange
|
// Arrange
|
||||||
const workspaceId = "workspace-1";
|
const { jobId } = baseSetup();
|
||||||
const jobId = "job-1";
|
|
||||||
const event = {
|
const event = {
|
||||||
id: "event-1",
|
id: "event-1",
|
||||||
jobId,
|
jobId,
|
||||||
@@ -124,40 +148,25 @@ describe("HeraldService", () => {
|
|||||||
payload: {},
|
payload: {},
|
||||||
};
|
};
|
||||||
|
|
||||||
mockPrisma.workspace.findUnique.mockResolvedValue({
|
|
||||||
id: workspaceId,
|
|
||||||
settings: { herald: { channelMappings: {} } },
|
|
||||||
});
|
|
||||||
|
|
||||||
mockPrisma.runnerJob.findUnique.mockResolvedValue({
|
|
||||||
id: jobId,
|
|
||||||
workspaceId,
|
|
||||||
type: "code-task",
|
|
||||||
});
|
|
||||||
|
|
||||||
mockPrisma.jobEvent.findFirst.mockResolvedValue({
|
|
||||||
payload: {
|
|
||||||
metadata: { threadId: "thread-123" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
mockDiscord.isConnected.mockReturnValue(true);
|
|
||||||
mockDiscord.sendThreadMessage.mockResolvedValue(undefined);
|
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
await service.broadcastJobEvent(jobId, event);
|
await service.broadcastJobEvent(jobId, event);
|
||||||
|
|
||||||
// Assert
|
// Assert
|
||||||
expect(mockDiscord.sendThreadMessage).toHaveBeenCalledWith({
|
expect(mockProviderA.sendThreadMessage).toHaveBeenCalledWith({
|
||||||
threadId: "thread-123",
|
threadId: "thread-123",
|
||||||
|
channelId: "channel-abc",
|
||||||
|
content: expect.stringContaining("Job started"),
|
||||||
|
});
|
||||||
|
expect(mockProviderB.sendThreadMessage).toHaveBeenCalledWith({
|
||||||
|
threadId: "thread-123",
|
||||||
|
channelId: "channel-abc",
|
||||||
content: expect.stringContaining("Job started"),
|
content: expect.stringContaining("Job started"),
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should broadcast job.completed event with success message", async () => {
|
it("should broadcast job.completed event with success message", async () => {
|
||||||
// Arrange
|
// Arrange
|
||||||
const workspaceId = "workspace-1";
|
const { jobId } = baseSetup();
|
||||||
const jobId = "job-1";
|
|
||||||
const event = {
|
const event = {
|
||||||
id: "event-1",
|
id: "event-1",
|
||||||
jobId,
|
jobId,
|
||||||
@@ -167,40 +176,20 @@ describe("HeraldService", () => {
|
|||||||
payload: { duration: 120 },
|
payload: { duration: 120 },
|
||||||
};
|
};
|
||||||
|
|
||||||
mockPrisma.workspace.findUnique.mockResolvedValue({
|
|
||||||
id: workspaceId,
|
|
||||||
settings: { herald: { channelMappings: {} } },
|
|
||||||
});
|
|
||||||
|
|
||||||
mockPrisma.runnerJob.findUnique.mockResolvedValue({
|
|
||||||
id: jobId,
|
|
||||||
workspaceId,
|
|
||||||
type: "code-task",
|
|
||||||
});
|
|
||||||
|
|
||||||
mockPrisma.jobEvent.findFirst.mockResolvedValue({
|
|
||||||
payload: {
|
|
||||||
metadata: { threadId: "thread-123" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
mockDiscord.isConnected.mockReturnValue(true);
|
|
||||||
mockDiscord.sendThreadMessage.mockResolvedValue(undefined);
|
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
await service.broadcastJobEvent(jobId, event);
|
await service.broadcastJobEvent(jobId, event);
|
||||||
|
|
||||||
// Assert
|
// Assert
|
||||||
expect(mockDiscord.sendThreadMessage).toHaveBeenCalledWith({
|
expect(mockProviderA.sendThreadMessage).toHaveBeenCalledWith({
|
||||||
threadId: "thread-123",
|
threadId: "thread-123",
|
||||||
|
channelId: "channel-abc",
|
||||||
content: expect.stringContaining("completed"),
|
content: expect.stringContaining("completed"),
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should broadcast job.failed event with PDA-friendly language", async () => {
|
it("should broadcast job.failed event with PDA-friendly language", async () => {
|
||||||
// Arrange
|
// Arrange
|
||||||
const workspaceId = "workspace-1";
|
const { jobId } = baseSetup();
|
||||||
const jobId = "job-1";
|
|
||||||
const event = {
|
const event = {
|
||||||
id: "event-1",
|
id: "event-1",
|
||||||
jobId,
|
jobId,
|
||||||
@@ -210,43 +199,30 @@ describe("HeraldService", () => {
|
|||||||
payload: { error: "Build failed" },
|
payload: { error: "Build failed" },
|
||||||
};
|
};
|
||||||
|
|
||||||
mockPrisma.workspace.findUnique.mockResolvedValue({
|
|
||||||
id: workspaceId,
|
|
||||||
settings: { herald: { channelMappings: {} } },
|
|
||||||
});
|
|
||||||
|
|
||||||
mockPrisma.runnerJob.findUnique.mockResolvedValue({
|
|
||||||
id: jobId,
|
|
||||||
workspaceId,
|
|
||||||
type: "code-task",
|
|
||||||
});
|
|
||||||
|
|
||||||
mockPrisma.jobEvent.findFirst.mockResolvedValue({
|
|
||||||
payload: {
|
|
||||||
metadata: { threadId: "thread-123" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
mockDiscord.isConnected.mockReturnValue(true);
|
|
||||||
mockDiscord.sendThreadMessage.mockResolvedValue(undefined);
|
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
await service.broadcastJobEvent(jobId, event);
|
await service.broadcastJobEvent(jobId, event);
|
||||||
|
|
||||||
// Assert
|
// Assert
|
||||||
expect(mockDiscord.sendThreadMessage).toHaveBeenCalledWith({
|
expect(mockProviderA.sendThreadMessage).toHaveBeenCalledWith({
|
||||||
threadId: "thread-123",
|
threadId: "thread-123",
|
||||||
|
channelId: "channel-abc",
|
||||||
content: expect.stringContaining("encountered an issue"),
|
content: expect.stringContaining("encountered an issue"),
|
||||||
});
|
});
|
||||||
// Verify the actual message doesn't contain demanding language
|
// Verify the actual message doesn't contain demanding language
|
||||||
const actualCall = mockDiscord.sendThreadMessage.mock.calls[0][0];
|
const actualCall = mockProviderA.sendThreadMessage.mock.calls[0][0] as {
|
||||||
|
threadId: string;
|
||||||
|
channelId: string;
|
||||||
|
content: string;
|
||||||
|
};
|
||||||
expect(actualCall.content).not.toMatch(/FAILED|ERROR|CRITICAL|URGENT/);
|
expect(actualCall.content).not.toMatch(/FAILED|ERROR|CRITICAL|URGENT/);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should skip broadcasting if Discord is not connected", async () => {
|
it("should skip disconnected providers", async () => {
|
||||||
// Arrange
|
// Arrange
|
||||||
const workspaceId = "workspace-1";
|
const { jobId } = baseSetup();
|
||||||
const jobId = "job-1";
|
mockProviderA.isConnected.mockReturnValue(true);
|
||||||
|
mockProviderB.isConnected.mockReturnValue(false);
|
||||||
|
|
||||||
const event = {
|
const event = {
|
||||||
id: "event-1",
|
id: "event-1",
|
||||||
jobId,
|
jobId,
|
||||||
@@ -256,14 +232,36 @@ describe("HeraldService", () => {
|
|||||||
payload: {},
|
payload: {},
|
||||||
};
|
};
|
||||||
|
|
||||||
mockPrisma.workspace.findUnique.mockResolvedValue({
|
// Act
|
||||||
id: workspaceId,
|
await service.broadcastJobEvent(jobId, event);
|
||||||
settings: { herald: { channelMappings: {} } },
|
|
||||||
});
|
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(mockProviderA.sendThreadMessage).toHaveBeenCalledTimes(1);
|
||||||
|
expect(mockProviderB.sendThreadMessage).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle empty providers array without crashing", async () => {
|
||||||
|
// Arrange — rebuild module with empty providers
|
||||||
|
const module: TestingModule = await Test.createTestingModule({
|
||||||
|
providers: [
|
||||||
|
HeraldService,
|
||||||
|
{
|
||||||
|
provide: PrismaService,
|
||||||
|
useValue: mockPrisma,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
provide: CHAT_PROVIDERS,
|
||||||
|
useValue: [],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}).compile();
|
||||||
|
|
||||||
|
const emptyService = module.get<HeraldService>(HeraldService);
|
||||||
|
|
||||||
|
const jobId = "job-1";
|
||||||
mockPrisma.runnerJob.findUnique.mockResolvedValue({
|
mockPrisma.runnerJob.findUnique.mockResolvedValue({
|
||||||
id: jobId,
|
id: jobId,
|
||||||
workspaceId,
|
workspaceId: "workspace-1",
|
||||||
type: "code-task",
|
type: "code-task",
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -273,36 +271,68 @@ describe("HeraldService", () => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
mockDiscord.isConnected.mockReturnValue(false);
|
const event = {
|
||||||
|
id: "event-1",
|
||||||
|
jobId,
|
||||||
|
type: JOB_CREATED,
|
||||||
|
timestamp: new Date(),
|
||||||
|
actor: "system",
|
||||||
|
payload: {},
|
||||||
|
};
|
||||||
|
|
||||||
// Act
|
// Act & Assert — should not throw
|
||||||
|
await expect(emptyService.broadcastJobEvent(jobId, event)).resolves.not.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should continue broadcasting when one provider errors", async () => {
|
||||||
|
// Arrange
|
||||||
|
const { jobId } = baseSetup();
|
||||||
|
mockProviderA.sendThreadMessage.mockRejectedValue(new Error("Provider A rate limit"));
|
||||||
|
mockProviderB.sendThreadMessage.mockResolvedValue(undefined);
|
||||||
|
|
||||||
|
const event = {
|
||||||
|
id: "event-1",
|
||||||
|
jobId,
|
||||||
|
type: JOB_CREATED,
|
||||||
|
timestamp: new Date(),
|
||||||
|
actor: "system",
|
||||||
|
payload: {},
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act — should not throw despite provider A failing
|
||||||
await service.broadcastJobEvent(jobId, event);
|
await service.broadcastJobEvent(jobId, event);
|
||||||
|
|
||||||
// Assert
|
// Assert — provider B should still have been called
|
||||||
expect(mockDiscord.sendThreadMessage).not.toHaveBeenCalled();
|
expect(mockProviderA.sendThreadMessage).toHaveBeenCalledTimes(1);
|
||||||
|
expect(mockProviderB.sendThreadMessage).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not throw when all providers error", async () => {
|
||||||
|
// Arrange
|
||||||
|
const { jobId } = baseSetup();
|
||||||
|
mockProviderA.sendThreadMessage.mockRejectedValue(new Error("Provider A down"));
|
||||||
|
mockProviderB.sendThreadMessage.mockRejectedValue(new Error("Provider B down"));
|
||||||
|
|
||||||
|
const event = {
|
||||||
|
id: "event-1",
|
||||||
|
jobId,
|
||||||
|
type: JOB_CREATED,
|
||||||
|
timestamp: new Date(),
|
||||||
|
actor: "system",
|
||||||
|
payload: {},
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act & Assert — should not throw; provider errors are logged, not propagated
|
||||||
|
await expect(service.broadcastJobEvent(jobId, event)).resolves.not.toThrow();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should skip broadcasting if job has no threadId", async () => {
|
it("should skip broadcasting if job has no threadId", async () => {
|
||||||
// Arrange
|
// Arrange
|
||||||
const workspaceId = "workspace-1";
|
|
||||||
const jobId = "job-1";
|
const jobId = "job-1";
|
||||||
const event = {
|
|
||||||
id: "event-1",
|
|
||||||
jobId,
|
|
||||||
type: JOB_CREATED,
|
|
||||||
timestamp: new Date(),
|
|
||||||
actor: "system",
|
|
||||||
payload: {},
|
|
||||||
};
|
|
||||||
|
|
||||||
mockPrisma.workspace.findUnique.mockResolvedValue({
|
|
||||||
id: workspaceId,
|
|
||||||
settings: { herald: { channelMappings: {} } },
|
|
||||||
});
|
|
||||||
|
|
||||||
mockPrisma.runnerJob.findUnique.mockResolvedValue({
|
mockPrisma.runnerJob.findUnique.mockResolvedValue({
|
||||||
id: jobId,
|
id: jobId,
|
||||||
workspaceId,
|
workspaceId: "workspace-1",
|
||||||
type: "code-task",
|
type: "code-task",
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -312,16 +342,45 @@ describe("HeraldService", () => {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
mockDiscord.isConnected.mockReturnValue(true);
|
const event = {
|
||||||
|
id: "event-1",
|
||||||
|
jobId,
|
||||||
|
type: JOB_CREATED,
|
||||||
|
timestamp: new Date(),
|
||||||
|
actor: "system",
|
||||||
|
payload: {},
|
||||||
|
};
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
await service.broadcastJobEvent(jobId, event);
|
await service.broadcastJobEvent(jobId, event);
|
||||||
|
|
||||||
// Assert
|
// Assert
|
||||||
expect(mockDiscord.sendThreadMessage).not.toHaveBeenCalled();
|
expect(mockProviderA.sendThreadMessage).not.toHaveBeenCalled();
|
||||||
|
expect(mockProviderB.sendThreadMessage).not.toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
// ERROR HANDLING TESTS - Issue #185
|
it("should skip broadcasting if job not found", async () => {
|
||||||
|
// Arrange
|
||||||
|
const jobId = "nonexistent-job";
|
||||||
|
mockPrisma.runnerJob.findUnique.mockResolvedValue(null);
|
||||||
|
|
||||||
|
const event = {
|
||||||
|
id: "event-1",
|
||||||
|
jobId,
|
||||||
|
type: JOB_CREATED,
|
||||||
|
timestamp: new Date(),
|
||||||
|
actor: "system",
|
||||||
|
payload: {},
|
||||||
|
};
|
||||||
|
|
||||||
|
// Act
|
||||||
|
await service.broadcastJobEvent(jobId, event);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
expect(mockProviderA.sendThreadMessage).not.toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ERROR HANDLING TESTS - database errors should still propagate
|
||||||
|
|
||||||
it("should propagate database errors when job lookup fails", async () => {
|
it("should propagate database errors when job lookup fails", async () => {
|
||||||
// Arrange
|
// Arrange
|
||||||
@@ -344,43 +403,8 @@ describe("HeraldService", () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should propagate Discord send failures with context", async () => {
|
|
||||||
// Arrange
|
|
||||||
const workspaceId = "workspace-1";
|
|
||||||
const jobId = "job-1";
|
|
||||||
const event = {
|
|
||||||
id: "event-1",
|
|
||||||
jobId,
|
|
||||||
type: JOB_CREATED,
|
|
||||||
timestamp: new Date(),
|
|
||||||
actor: "system",
|
|
||||||
payload: {},
|
|
||||||
};
|
|
||||||
|
|
||||||
mockPrisma.runnerJob.findUnique.mockResolvedValue({
|
|
||||||
id: jobId,
|
|
||||||
workspaceId,
|
|
||||||
type: "code-task",
|
|
||||||
});
|
|
||||||
|
|
||||||
mockPrisma.jobEvent.findFirst.mockResolvedValue({
|
|
||||||
payload: {
|
|
||||||
metadata: { threadId: "thread-123" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
mockDiscord.isConnected.mockReturnValue(true);
|
|
||||||
|
|
||||||
const discordError = new Error("Rate limit exceeded");
|
|
||||||
mockDiscord.sendThreadMessage.mockRejectedValue(discordError);
|
|
||||||
|
|
||||||
// Act & Assert
|
|
||||||
await expect(service.broadcastJobEvent(jobId, event)).rejects.toThrow("Rate limit exceeded");
|
|
||||||
});
|
|
||||||
|
|
||||||
it("should propagate errors when fetching job events fails", async () => {
|
it("should propagate errors when fetching job events fails", async () => {
|
||||||
// Arrange
|
// Arrange
|
||||||
const workspaceId = "workspace-1";
|
|
||||||
const jobId = "job-1";
|
const jobId = "job-1";
|
||||||
const event = {
|
const event = {
|
||||||
id: "event-1",
|
id: "event-1",
|
||||||
@@ -393,61 +417,16 @@ describe("HeraldService", () => {
|
|||||||
|
|
||||||
mockPrisma.runnerJob.findUnique.mockResolvedValue({
|
mockPrisma.runnerJob.findUnique.mockResolvedValue({
|
||||||
id: jobId,
|
id: jobId,
|
||||||
workspaceId,
|
workspaceId: "workspace-1",
|
||||||
type: "code-task",
|
type: "code-task",
|
||||||
});
|
});
|
||||||
|
|
||||||
const dbError = new Error("Query timeout");
|
const dbError = new Error("Query timeout");
|
||||||
mockPrisma.jobEvent.findFirst.mockRejectedValue(dbError);
|
mockPrisma.jobEvent.findFirst.mockRejectedValue(dbError);
|
||||||
|
|
||||||
mockDiscord.isConnected.mockReturnValue(true);
|
|
||||||
|
|
||||||
// Act & Assert
|
// Act & Assert
|
||||||
await expect(service.broadcastJobEvent(jobId, event)).rejects.toThrow("Query timeout");
|
await expect(service.broadcastJobEvent(jobId, event)).rejects.toThrow("Query timeout");
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should include job context in error messages", async () => {
|
|
||||||
// Arrange
|
|
||||||
const workspaceId = "workspace-1";
|
|
||||||
const jobId = "test-job-123";
|
|
||||||
const event = {
|
|
||||||
id: "event-1",
|
|
||||||
jobId,
|
|
||||||
type: JOB_COMPLETED,
|
|
||||||
timestamp: new Date(),
|
|
||||||
actor: "system",
|
|
||||||
payload: {},
|
|
||||||
};
|
|
||||||
|
|
||||||
mockPrisma.runnerJob.findUnique.mockResolvedValue({
|
|
||||||
id: jobId,
|
|
||||||
workspaceId,
|
|
||||||
type: "code-task",
|
|
||||||
});
|
|
||||||
|
|
||||||
mockPrisma.jobEvent.findFirst.mockResolvedValue({
|
|
||||||
payload: {
|
|
||||||
metadata: { threadId: "thread-123" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
mockDiscord.isConnected.mockReturnValue(true);
|
|
||||||
|
|
||||||
const discordError = new Error("Network failure");
|
|
||||||
mockDiscord.sendThreadMessage.mockRejectedValue(discordError);
|
|
||||||
|
|
||||||
// Act & Assert
|
|
||||||
try {
|
|
||||||
await service.broadcastJobEvent(jobId, event);
|
|
||||||
// Should not reach here
|
|
||||||
expect(true).toBe(false);
|
|
||||||
} catch (error) {
|
|
||||||
// Verify error was thrown
|
|
||||||
expect(error).toBeDefined();
|
|
||||||
// Verify original error is preserved
|
|
||||||
expect((error as Error).message).toContain("Network failure");
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
describe("formatJobEventMessage", () => {
|
describe("formatJobEventMessage", () => {
|
||||||
@@ -473,7 +452,6 @@ describe("HeraldService", () => {
|
|||||||
const message = service.formatJobEventMessage(event, job, metadata);
|
const message = service.formatJobEventMessage(event, job, metadata);
|
||||||
|
|
||||||
// Assert
|
// Assert
|
||||||
expect(message).toContain("🟢");
|
|
||||||
expect(message).toContain("Job created");
|
expect(message).toContain("Job created");
|
||||||
expect(message).toContain("#42");
|
expect(message).toContain("#42");
|
||||||
expect(message.length).toBeLessThan(200); // Keep it scannable
|
expect(message.length).toBeLessThan(200); // Keep it scannable
|
||||||
@@ -526,7 +504,6 @@ describe("HeraldService", () => {
|
|||||||
const message = service.formatJobEventMessage(event, job, metadata);
|
const message = service.formatJobEventMessage(event, job, metadata);
|
||||||
|
|
||||||
// Assert
|
// Assert
|
||||||
expect(message).toMatch(/✅|🟢/);
|
|
||||||
expect(message).toContain("completed");
|
expect(message).toContain("completed");
|
||||||
expect(message).not.toMatch(/COMPLETED|SUCCESS/);
|
expect(message).not.toMatch(/COMPLETED|SUCCESS/);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import { Injectable, Logger } from "@nestjs/common";
|
import { Inject, Injectable, Logger } from "@nestjs/common";
|
||||||
import { PrismaService } from "../prisma/prisma.service";
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
import { DiscordService } from "../bridge/discord/discord.service";
|
import { CHAT_PROVIDERS } from "../bridge/bridge.constants";
|
||||||
|
import type { IChatProvider } from "../bridge/interfaces/chat-provider.interface";
|
||||||
import {
|
import {
|
||||||
JOB_CREATED,
|
JOB_CREATED,
|
||||||
JOB_STARTED,
|
JOB_STARTED,
|
||||||
@@ -21,7 +22,7 @@ import {
|
|||||||
* - Subscribe to job events
|
* - Subscribe to job events
|
||||||
* - Format status messages with PDA-friendly language
|
* - Format status messages with PDA-friendly language
|
||||||
* - Route to appropriate channels based on workspace config
|
* - Route to appropriate channels based on workspace config
|
||||||
* - Support Discord (via bridge) and PR comments
|
* - Broadcast to ALL active chat providers (Discord, Matrix, etc.)
|
||||||
*/
|
*/
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class HeraldService {
|
export class HeraldService {
|
||||||
@@ -29,11 +30,11 @@ export class HeraldService {
|
|||||||
|
|
||||||
constructor(
|
constructor(
|
||||||
private readonly prisma: PrismaService,
|
private readonly prisma: PrismaService,
|
||||||
private readonly discord: DiscordService
|
@Inject(CHAT_PROVIDERS) private readonly chatProviders: IChatProvider[]
|
||||||
) {}
|
) {}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Broadcast a job event to the appropriate channel
|
* Broadcast a job event to all connected chat providers
|
||||||
*/
|
*/
|
||||||
async broadcastJobEvent(
|
async broadcastJobEvent(
|
||||||
jobId: string,
|
jobId: string,
|
||||||
@@ -47,66 +48,68 @@ export class HeraldService {
|
|||||||
payload: unknown;
|
payload: unknown;
|
||||||
}
|
}
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
try {
|
// Get job details
|
||||||
// Get job details
|
const job = await this.prisma.runnerJob.findUnique({
|
||||||
const job = await this.prisma.runnerJob.findUnique({
|
where: { id: jobId },
|
||||||
where: { id: jobId },
|
select: {
|
||||||
select: {
|
id: true,
|
||||||
id: true,
|
workspaceId: true,
|
||||||
workspaceId: true,
|
type: true,
|
||||||
type: true,
|
},
|
||||||
},
|
});
|
||||||
});
|
|
||||||
|
|
||||||
if (!job) {
|
if (!job) {
|
||||||
this.logger.warn(`Job ${jobId} not found, skipping broadcast`);
|
this.logger.warn(`Job ${jobId} not found, skipping broadcast`);
|
||||||
return;
|
return;
|
||||||
}
|
|
||||||
|
|
||||||
// Check if Discord is connected
|
|
||||||
if (!this.discord.isConnected()) {
|
|
||||||
this.logger.debug("Discord not connected, skipping broadcast");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get threadId from first event payload (job.created event has metadata)
|
|
||||||
const firstEvent = await this.prisma.jobEvent.findFirst({
|
|
||||||
where: {
|
|
||||||
jobId,
|
|
||||||
type: JOB_CREATED,
|
|
||||||
},
|
|
||||||
select: {
|
|
||||||
payload: true,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const firstEventPayload = firstEvent?.payload as Record<string, unknown> | undefined;
|
|
||||||
const metadata = firstEventPayload?.metadata as Record<string, unknown> | undefined;
|
|
||||||
const threadId = metadata?.threadId as string | undefined;
|
|
||||||
|
|
||||||
if (!threadId) {
|
|
||||||
this.logger.debug(`Job ${jobId} has no threadId, skipping broadcast`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Format message
|
|
||||||
const message = this.formatJobEventMessage(event, job, metadata);
|
|
||||||
|
|
||||||
// Send to thread
|
|
||||||
await this.discord.sendThreadMessage({
|
|
||||||
threadId,
|
|
||||||
content: message,
|
|
||||||
});
|
|
||||||
|
|
||||||
this.logger.debug(`Broadcasted event ${event.type} for job ${jobId} to thread ${threadId}`);
|
|
||||||
} catch (error) {
|
|
||||||
// Log the error with full context for debugging
|
|
||||||
this.logger.error(`Failed to broadcast event ${event.type} for job ${jobId}:`, error);
|
|
||||||
|
|
||||||
// Re-throw the error so callers can handle it appropriately
|
|
||||||
// This enables proper error tracking, retry logic, and alerting
|
|
||||||
throw error;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Get threadId from first event payload (job.created event has metadata)
|
||||||
|
const firstEvent = await this.prisma.jobEvent.findFirst({
|
||||||
|
where: {
|
||||||
|
jobId,
|
||||||
|
type: JOB_CREATED,
|
||||||
|
},
|
||||||
|
select: {
|
||||||
|
payload: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const firstEventPayload = firstEvent?.payload as Record<string, unknown> | undefined;
|
||||||
|
const metadata = firstEventPayload?.metadata as Record<string, unknown> | undefined;
|
||||||
|
const threadId = metadata?.threadId as string | undefined;
|
||||||
|
const channelId = metadata?.channelId as string | undefined;
|
||||||
|
|
||||||
|
if (!threadId) {
|
||||||
|
this.logger.debug(`Job ${jobId} has no threadId, skipping broadcast`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Format message
|
||||||
|
const message = this.formatJobEventMessage(event, job, metadata);
|
||||||
|
|
||||||
|
// Broadcast to all connected providers
|
||||||
|
for (const provider of this.chatProviders) {
|
||||||
|
if (!provider.isConnected()) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await provider.sendThreadMessage({
|
||||||
|
threadId,
|
||||||
|
channelId: channelId ?? "",
|
||||||
|
content: message,
|
||||||
|
});
|
||||||
|
} catch (error: unknown) {
|
||||||
|
// Log and continue — one provider failure must not block others
|
||||||
|
const providerName = provider.constructor.name;
|
||||||
|
this.logger.error(
|
||||||
|
`Failed to broadcast event ${event.type} for job ${jobId} via ${providerName}:`,
|
||||||
|
error instanceof Error ? error.message : error
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.debug(`Broadcasted event ${event.type} for job ${jobId} to thread ${threadId}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -16,7 +16,9 @@ import { JOB_CREATED, JOB_STARTED, STEP_STARTED } from "./event-types";
|
|||||||
* NOTE: These tests require a real database connection with realistic data volume.
|
* NOTE: These tests require a real database connection with realistic data volume.
|
||||||
* Run with: pnpm test:api -- job-events.performance.spec.ts
|
* Run with: pnpm test:api -- job-events.performance.spec.ts
|
||||||
*/
|
*/
|
||||||
const describeFn = process.env.DATABASE_URL ? describe : describe.skip;
|
const shouldRunDbIntegrationTests =
|
||||||
|
process.env.RUN_DB_TESTS === "true" && Boolean(process.env.DATABASE_URL);
|
||||||
|
const describeFn = shouldRunDbIntegrationTests ? describe : describe.skip;
|
||||||
|
|
||||||
describeFn("JobEventsService Performance", () => {
|
describeFn("JobEventsService Performance", () => {
|
||||||
let service: JobEventsService;
|
let service: JobEventsService;
|
||||||
|
|||||||
@@ -27,7 +27,9 @@ async function isFulltextSearchConfigured(prisma: PrismaClient): Promise<boolean
|
|||||||
* Skip when DATABASE_URL is not set. Tests that require the trigger/index
|
* Skip when DATABASE_URL is not set. Tests that require the trigger/index
|
||||||
* will be skipped if the database migration hasn't been applied.
|
* will be skipped if the database migration hasn't been applied.
|
||||||
*/
|
*/
|
||||||
const describeFn = process.env.DATABASE_URL ? describe : describe.skip;
|
const shouldRunDbIntegrationTests =
|
||||||
|
process.env.RUN_DB_TESTS === "true" && Boolean(process.env.DATABASE_URL);
|
||||||
|
const describeFn = shouldRunDbIntegrationTests ? describe : describe.skip;
|
||||||
|
|
||||||
describeFn("Full-Text Search Setup (Integration)", () => {
|
describeFn("Full-Text Search Setup (Integration)", () => {
|
||||||
let prisma: PrismaClient;
|
let prisma: PrismaClient;
|
||||||
|
|||||||
@@ -1,12 +1,6 @@
|
|||||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
import { Test, TestingModule } from "@nestjs/testing";
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
import {
|
import { TaskType, Complexity, Harness, Provider, Outcome } from "@mosaicstack/telemetry-client";
|
||||||
TaskType,
|
|
||||||
Complexity,
|
|
||||||
Harness,
|
|
||||||
Provider,
|
|
||||||
Outcome,
|
|
||||||
} from "@mosaicstack/telemetry-client";
|
|
||||||
import type { TaskCompletionEvent, EventBuilderParams } from "@mosaicstack/telemetry-client";
|
import type { TaskCompletionEvent, EventBuilderParams } from "@mosaicstack/telemetry-client";
|
||||||
import { MosaicTelemetryService } from "../mosaic-telemetry/mosaic-telemetry.service";
|
import { MosaicTelemetryService } from "../mosaic-telemetry/mosaic-telemetry.service";
|
||||||
import {
|
import {
|
||||||
@@ -291,7 +285,7 @@ describe("LlmTelemetryTrackerService", () => {
|
|||||||
actual_input_tokens: 150,
|
actual_input_tokens: 150,
|
||||||
actual_output_tokens: 300,
|
actual_output_tokens: 300,
|
||||||
outcome: Outcome.SUCCESS,
|
outcome: Outcome.SUCCESS,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(mockTelemetryService.trackTaskCompletion).toHaveBeenCalledWith(mockEvent);
|
expect(mockTelemetryService.trackTaskCompletion).toHaveBeenCalledWith(mockEvent);
|
||||||
@@ -309,7 +303,7 @@ describe("LlmTelemetryTrackerService", () => {
|
|||||||
model: "gpt-4o",
|
model: "gpt-4o",
|
||||||
provider: Provider.OPENAI,
|
provider: Provider.OPENAI,
|
||||||
harness: Harness.API_DIRECT,
|
harness: Harness.API_DIRECT,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -325,7 +319,7 @@ describe("LlmTelemetryTrackerService", () => {
|
|||||||
model: "llama3.2",
|
model: "llama3.2",
|
||||||
provider: Provider.OLLAMA,
|
provider: Provider.OLLAMA,
|
||||||
harness: Harness.OLLAMA_LOCAL,
|
harness: Harness.OLLAMA_LOCAL,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -340,7 +334,7 @@ describe("LlmTelemetryTrackerService", () => {
|
|||||||
// Estimated values are 0 when no PredictionService is injected
|
// Estimated values are 0 when no PredictionService is injected
|
||||||
estimated_cost_usd_micros: 0,
|
estimated_cost_usd_micros: 0,
|
||||||
actual_cost_usd_micros: expectedActualCost,
|
actual_cost_usd_micros: expectedActualCost,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -355,7 +349,7 @@ describe("LlmTelemetryTrackerService", () => {
|
|||||||
expect.objectContaining({
|
expect.objectContaining({
|
||||||
estimated_cost_usd_micros: 0,
|
estimated_cost_usd_micros: 0,
|
||||||
actual_cost_usd_micros: 0,
|
actual_cost_usd_micros: 0,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -368,7 +362,7 @@ describe("LlmTelemetryTrackerService", () => {
|
|||||||
expect(mockTelemetryService.eventBuilder?.build).toHaveBeenCalledWith(
|
expect(mockTelemetryService.eventBuilder?.build).toHaveBeenCalledWith(
|
||||||
expect.objectContaining({
|
expect.objectContaining({
|
||||||
outcome: Outcome.FAILURE,
|
outcome: Outcome.FAILURE,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -381,7 +375,7 @@ describe("LlmTelemetryTrackerService", () => {
|
|||||||
expect(mockTelemetryService.eventBuilder?.build).toHaveBeenCalledWith(
|
expect(mockTelemetryService.eventBuilder?.build).toHaveBeenCalledWith(
|
||||||
expect.objectContaining({
|
expect.objectContaining({
|
||||||
task_type: TaskType.PLANNING,
|
task_type: TaskType.PLANNING,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -393,7 +387,7 @@ describe("LlmTelemetryTrackerService", () => {
|
|||||||
quality_gate_passed: true,
|
quality_gate_passed: true,
|
||||||
quality_gates_run: [],
|
quality_gates_run: [],
|
||||||
quality_gates_failed: [],
|
quality_gates_failed: [],
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -441,7 +435,7 @@ describe("LlmTelemetryTrackerService", () => {
|
|||||||
// Estimated values are 0 when no PredictionService is injected
|
// Estimated values are 0 when no PredictionService is injected
|
||||||
estimated_input_tokens: 0,
|
estimated_input_tokens: 0,
|
||||||
estimated_output_tokens: 0,
|
estimated_output_tokens: 0,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -457,7 +451,7 @@ describe("LlmTelemetryTrackerService", () => {
|
|||||||
expect.objectContaining({
|
expect.objectContaining({
|
||||||
task_type: TaskType.IMPLEMENTATION,
|
task_type: TaskType.IMPLEMENTATION,
|
||||||
actual_output_tokens: 0,
|
actual_output_tokens: 0,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -169,7 +169,7 @@ describe("LlmService", () => {
|
|||||||
outputTokens: 20,
|
outputTokens: 20,
|
||||||
callingContext: "chat",
|
callingContext: "chat",
|
||||||
success: true,
|
success: true,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -183,7 +183,7 @@ describe("LlmService", () => {
|
|||||||
model: "llama3.2",
|
model: "llama3.2",
|
||||||
operation: "chat",
|
operation: "chat",
|
||||||
success: false,
|
success: false,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -261,7 +261,7 @@ describe("LlmService", () => {
|
|||||||
outputTokens: 10,
|
outputTokens: 10,
|
||||||
callingContext: "brain",
|
callingContext: "brain",
|
||||||
success: true,
|
success: true,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -295,7 +295,7 @@ describe("LlmService", () => {
|
|||||||
inputTokens: 1,
|
inputTokens: 1,
|
||||||
// Output estimated from "Hello world" -> ceil(11/4) = 3
|
// Output estimated from "Hello world" -> ceil(11/4) = 3
|
||||||
outputTokens: 3,
|
outputTokens: 3,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -313,7 +313,7 @@ describe("LlmService", () => {
|
|||||||
expect.objectContaining({
|
expect.objectContaining({
|
||||||
operation: "chatStream",
|
operation: "chatStream",
|
||||||
success: false,
|
success: false,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -368,7 +368,7 @@ describe("LlmService", () => {
|
|||||||
outputTokens: 0,
|
outputTokens: 0,
|
||||||
callingContext: "embed",
|
callingContext: "embed",
|
||||||
success: true,
|
success: true,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -381,7 +381,7 @@ describe("LlmService", () => {
|
|||||||
expect.objectContaining({
|
expect.objectContaining({
|
||||||
operation: "embed",
|
operation: "embed",
|
||||||
success: false,
|
success: false,
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import { NestFactory } from "@nestjs/core";
|
|||||||
import { ValidationPipe } from "@nestjs/common";
|
import { ValidationPipe } from "@nestjs/common";
|
||||||
import cookieParser from "cookie-parser";
|
import cookieParser from "cookie-parser";
|
||||||
import { AppModule } from "./app.module";
|
import { AppModule } from "./app.module";
|
||||||
|
import { getTrustedOrigins } from "./auth/auth.config";
|
||||||
import { GlobalExceptionFilter } from "./filters/global-exception.filter";
|
import { GlobalExceptionFilter } from "./filters/global-exception.filter";
|
||||||
|
|
||||||
function getPort(): number {
|
function getPort(): number {
|
||||||
@@ -47,39 +48,11 @@ async function bootstrap() {
|
|||||||
app.useGlobalFilters(new GlobalExceptionFilter());
|
app.useGlobalFilters(new GlobalExceptionFilter());
|
||||||
|
|
||||||
// Configure CORS for cookie-based authentication
|
// Configure CORS for cookie-based authentication
|
||||||
// SECURITY: Cannot use wildcard (*) with credentials: true
|
// Origin list is shared with BetterAuth trustedOrigins via getTrustedOrigins()
|
||||||
const isDevelopment = process.env.NODE_ENV !== "production";
|
const trustedOrigins = getTrustedOrigins();
|
||||||
|
console.log(`[CORS] Trusted origins: ${JSON.stringify(trustedOrigins)}`);
|
||||||
const allowedOrigins = [
|
|
||||||
process.env.NEXT_PUBLIC_APP_URL ?? "http://localhost:3000",
|
|
||||||
"https://app.mosaicstack.dev", // Production web
|
|
||||||
"https://api.mosaicstack.dev", // Production API
|
|
||||||
];
|
|
||||||
|
|
||||||
// Development-only origins (not allowed in production)
|
|
||||||
if (isDevelopment) {
|
|
||||||
allowedOrigins.push("http://localhost:3001"); // API origin (dev)
|
|
||||||
}
|
|
||||||
|
|
||||||
app.enableCors({
|
app.enableCors({
|
||||||
origin: (
|
origin: trustedOrigins,
|
||||||
origin: string | undefined,
|
|
||||||
callback: (err: Error | null, allow?: boolean) => void
|
|
||||||
): void => {
|
|
||||||
// Allow requests with no Origin header (health checks, server-to-server,
|
|
||||||
// load balancer probes). These are not cross-origin requests per the CORS spec.
|
|
||||||
if (!origin) {
|
|
||||||
callback(null, true);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if origin is in allowed list
|
|
||||||
if (allowedOrigins.includes(origin)) {
|
|
||||||
callback(null, true);
|
|
||||||
} else {
|
|
||||||
callback(new Error(`Origin ${origin} not allowed by CORS`));
|
|
||||||
}
|
|
||||||
},
|
|
||||||
credentials: true, // Required for cookie-based authentication
|
credentials: true, // Required for cookie-based authentication
|
||||||
methods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
|
methods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
|
||||||
allowedHeaders: ["Content-Type", "Authorization", "Cookie", "X-CSRF-Token", "X-Workspace-Id"],
|
allowedHeaders: ["Content-Type", "Authorization", "Cookie", "X-CSRF-Token", "X-Workspace-Id"],
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { Test, TestingModule } from "@nestjs/testing";
|
|||||||
import { ConfigModule } from "@nestjs/config";
|
import { ConfigModule } from "@nestjs/config";
|
||||||
import { MosaicTelemetryModule } from "./mosaic-telemetry.module";
|
import { MosaicTelemetryModule } from "./mosaic-telemetry.module";
|
||||||
import { MosaicTelemetryService } from "./mosaic-telemetry.service";
|
import { MosaicTelemetryService } from "./mosaic-telemetry.service";
|
||||||
|
import { PrismaService } from "../prisma/prisma.service";
|
||||||
|
|
||||||
// Mock the telemetry client to avoid real HTTP calls
|
// Mock the telemetry client to avoid real HTTP calls
|
||||||
vi.mock("@mosaicstack/telemetry-client", async (importOriginal) => {
|
vi.mock("@mosaicstack/telemetry-client", async (importOriginal) => {
|
||||||
@@ -56,6 +57,30 @@ vi.mock("@mosaicstack/telemetry-client", async (importOriginal) => {
|
|||||||
|
|
||||||
describe("MosaicTelemetryModule", () => {
|
describe("MosaicTelemetryModule", () => {
|
||||||
let module: TestingModule;
|
let module: TestingModule;
|
||||||
|
const sharedTestEnv = {
|
||||||
|
ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
|
||||||
|
};
|
||||||
|
const mockPrismaService = {
|
||||||
|
onModuleInit: vi.fn(),
|
||||||
|
onModuleDestroy: vi.fn(),
|
||||||
|
$connect: vi.fn(),
|
||||||
|
$disconnect: vi.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
const buildTestModule = async (env: Record<string, string>): Promise<TestingModule> =>
|
||||||
|
Test.createTestingModule({
|
||||||
|
imports: [
|
||||||
|
ConfigModule.forRoot({
|
||||||
|
isGlobal: true,
|
||||||
|
envFilePath: [],
|
||||||
|
load: [() => ({ ...env, ...sharedTestEnv })],
|
||||||
|
}),
|
||||||
|
MosaicTelemetryModule,
|
||||||
|
],
|
||||||
|
})
|
||||||
|
.overrideProvider(PrismaService)
|
||||||
|
.useValue(mockPrismaService)
|
||||||
|
.compile();
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
vi.clearAllMocks();
|
vi.clearAllMocks();
|
||||||
@@ -63,40 +88,18 @@ describe("MosaicTelemetryModule", () => {
|
|||||||
|
|
||||||
describe("module initialization", () => {
|
describe("module initialization", () => {
|
||||||
it("should compile the module successfully", async () => {
|
it("should compile the module successfully", async () => {
|
||||||
module = await Test.createTestingModule({
|
module = await buildTestModule({
|
||||||
imports: [
|
MOSAIC_TELEMETRY_ENABLED: "false",
|
||||||
ConfigModule.forRoot({
|
});
|
||||||
isGlobal: true,
|
|
||||||
envFilePath: [],
|
|
||||||
load: [
|
|
||||||
() => ({
|
|
||||||
MOSAIC_TELEMETRY_ENABLED: "false",
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
}),
|
|
||||||
MosaicTelemetryModule,
|
|
||||||
],
|
|
||||||
}).compile();
|
|
||||||
|
|
||||||
expect(module).toBeDefined();
|
expect(module).toBeDefined();
|
||||||
await module.close();
|
await module.close();
|
||||||
});
|
});
|
||||||
|
|
||||||
it("should provide MosaicTelemetryService", async () => {
|
it("should provide MosaicTelemetryService", async () => {
|
||||||
module = await Test.createTestingModule({
|
module = await buildTestModule({
|
||||||
imports: [
|
MOSAIC_TELEMETRY_ENABLED: "false",
|
||||||
ConfigModule.forRoot({
|
});
|
||||||
isGlobal: true,
|
|
||||||
envFilePath: [],
|
|
||||||
load: [
|
|
||||||
() => ({
|
|
||||||
MOSAIC_TELEMETRY_ENABLED: "false",
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
}),
|
|
||||||
MosaicTelemetryModule,
|
|
||||||
],
|
|
||||||
}).compile();
|
|
||||||
|
|
||||||
const service = module.get<MosaicTelemetryService>(MosaicTelemetryService);
|
const service = module.get<MosaicTelemetryService>(MosaicTelemetryService);
|
||||||
expect(service).toBeDefined();
|
expect(service).toBeDefined();
|
||||||
@@ -106,20 +109,9 @@ describe("MosaicTelemetryModule", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it("should export MosaicTelemetryService for injection in other modules", async () => {
|
it("should export MosaicTelemetryService for injection in other modules", async () => {
|
||||||
module = await Test.createTestingModule({
|
module = await buildTestModule({
|
||||||
imports: [
|
MOSAIC_TELEMETRY_ENABLED: "false",
|
||||||
ConfigModule.forRoot({
|
});
|
||||||
isGlobal: true,
|
|
||||||
envFilePath: [],
|
|
||||||
load: [
|
|
||||||
() => ({
|
|
||||||
MOSAIC_TELEMETRY_ENABLED: "false",
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
}),
|
|
||||||
MosaicTelemetryModule,
|
|
||||||
],
|
|
||||||
}).compile();
|
|
||||||
|
|
||||||
const service = module.get(MosaicTelemetryService);
|
const service = module.get(MosaicTelemetryService);
|
||||||
expect(service).toBeDefined();
|
expect(service).toBeDefined();
|
||||||
@@ -130,24 +122,13 @@ describe("MosaicTelemetryModule", () => {
|
|||||||
|
|
||||||
describe("lifecycle integration", () => {
|
describe("lifecycle integration", () => {
|
||||||
it("should initialize service on module init when enabled", async () => {
|
it("should initialize service on module init when enabled", async () => {
|
||||||
module = await Test.createTestingModule({
|
module = await buildTestModule({
|
||||||
imports: [
|
MOSAIC_TELEMETRY_ENABLED: "true",
|
||||||
ConfigModule.forRoot({
|
MOSAIC_TELEMETRY_SERVER_URL: "https://tel.test.local",
|
||||||
isGlobal: true,
|
MOSAIC_TELEMETRY_API_KEY: "a".repeat(64),
|
||||||
envFilePath: [],
|
MOSAIC_TELEMETRY_INSTANCE_ID: "550e8400-e29b-41d4-a716-446655440000",
|
||||||
load: [
|
MOSAIC_TELEMETRY_DRY_RUN: "false",
|
||||||
() => ({
|
});
|
||||||
MOSAIC_TELEMETRY_ENABLED: "true",
|
|
||||||
MOSAIC_TELEMETRY_SERVER_URL: "https://tel.test.local",
|
|
||||||
MOSAIC_TELEMETRY_API_KEY: "a".repeat(64),
|
|
||||||
MOSAIC_TELEMETRY_INSTANCE_ID: "550e8400-e29b-41d4-a716-446655440000",
|
|
||||||
MOSAIC_TELEMETRY_DRY_RUN: "false",
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
}),
|
|
||||||
MosaicTelemetryModule,
|
|
||||||
],
|
|
||||||
}).compile();
|
|
||||||
|
|
||||||
await module.init();
|
await module.init();
|
||||||
|
|
||||||
@@ -158,20 +139,9 @@ describe("MosaicTelemetryModule", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it("should not start client when disabled via env", async () => {
|
it("should not start client when disabled via env", async () => {
|
||||||
module = await Test.createTestingModule({
|
module = await buildTestModule({
|
||||||
imports: [
|
MOSAIC_TELEMETRY_ENABLED: "false",
|
||||||
ConfigModule.forRoot({
|
});
|
||||||
isGlobal: true,
|
|
||||||
envFilePath: [],
|
|
||||||
load: [
|
|
||||||
() => ({
|
|
||||||
MOSAIC_TELEMETRY_ENABLED: "false",
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
}),
|
|
||||||
MosaicTelemetryModule,
|
|
||||||
],
|
|
||||||
}).compile();
|
|
||||||
|
|
||||||
await module.init();
|
await module.init();
|
||||||
|
|
||||||
@@ -182,24 +152,13 @@ describe("MosaicTelemetryModule", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it("should cleanly shut down on module destroy", async () => {
|
it("should cleanly shut down on module destroy", async () => {
|
||||||
module = await Test.createTestingModule({
|
module = await buildTestModule({
|
||||||
imports: [
|
MOSAIC_TELEMETRY_ENABLED: "true",
|
||||||
ConfigModule.forRoot({
|
MOSAIC_TELEMETRY_SERVER_URL: "https://tel.test.local",
|
||||||
isGlobal: true,
|
MOSAIC_TELEMETRY_API_KEY: "a".repeat(64),
|
||||||
envFilePath: [],
|
MOSAIC_TELEMETRY_INSTANCE_ID: "550e8400-e29b-41d4-a716-446655440000",
|
||||||
load: [
|
MOSAIC_TELEMETRY_DRY_RUN: "false",
|
||||||
() => ({
|
});
|
||||||
MOSAIC_TELEMETRY_ENABLED: "true",
|
|
||||||
MOSAIC_TELEMETRY_SERVER_URL: "https://tel.test.local",
|
|
||||||
MOSAIC_TELEMETRY_API_KEY: "a".repeat(64),
|
|
||||||
MOSAIC_TELEMETRY_INSTANCE_ID: "550e8400-e29b-41d4-a716-446655440000",
|
|
||||||
MOSAIC_TELEMETRY_DRY_RUN: "false",
|
|
||||||
}),
|
|
||||||
],
|
|
||||||
}),
|
|
||||||
MosaicTelemetryModule,
|
|
||||||
],
|
|
||||||
}).compile();
|
|
||||||
|
|
||||||
await module.init();
|
await module.init();
|
||||||
|
|
||||||
|
|||||||
@@ -73,9 +73,7 @@ const { MosaicTelemetryService } = await import("./mosaic-telemetry.service");
|
|||||||
/**
|
/**
|
||||||
* Create a ConfigService mock that returns environment values from the provided map.
|
* Create a ConfigService mock that returns environment values from the provided map.
|
||||||
*/
|
*/
|
||||||
function createConfigService(
|
function createConfigService(envMap: Record<string, string | undefined> = {}): ConfigService {
|
||||||
envMap: Record<string, string | undefined> = {},
|
|
||||||
): ConfigService {
|
|
||||||
const configService = {
|
const configService = {
|
||||||
get: vi.fn((key: string, defaultValue?: string): string => {
|
get: vi.fn((key: string, defaultValue?: string): string => {
|
||||||
const value = envMap[key];
|
const value = envMap[key];
|
||||||
@@ -289,7 +287,7 @@ describe("MosaicTelemetryService", () => {
|
|||||||
cost_usd_micros: { median: 5000 },
|
cost_usd_micros: { median: 5000 },
|
||||||
duration_ms: { median: 10000 },
|
duration_ms: { median: 10000 },
|
||||||
correction_factors: { input: 1.0, output: 1.0 },
|
correction_factors: { input: 1.0, output: 1.0 },
|
||||||
quality: { gate_pass_rate: 0.95, success_rate: 0.90 },
|
quality: { gate_pass_rate: 0.95, success_rate: 0.9 },
|
||||||
},
|
},
|
||||||
metadata: {
|
metadata: {
|
||||||
sample_size: 100,
|
sample_size: 100,
|
||||||
@@ -467,7 +465,7 @@ describe("MosaicTelemetryService", () => {
|
|||||||
model: "test",
|
model: "test",
|
||||||
provider: Provider.ANTHROPIC,
|
provider: Provider.ANTHROPIC,
|
||||||
complexity: Complexity.LOW,
|
complexity: Complexity.LOW,
|
||||||
}),
|
})
|
||||||
).toBeNull();
|
).toBeNull();
|
||||||
await expect(service.refreshPredictions([])).resolves.not.toThrow();
|
await expect(service.refreshPredictions([])).resolves.not.toThrow();
|
||||||
expect(service.eventBuilder).toBeNull();
|
expect(service.eventBuilder).toBeNull();
|
||||||
|
|||||||
@@ -1,14 +1,7 @@
|
|||||||
import { describe, it, expect, beforeEach, vi } from "vitest";
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
import { Test, TestingModule } from "@nestjs/testing";
|
import { Test, TestingModule } from "@nestjs/testing";
|
||||||
import {
|
import { TaskType, Complexity, Provider } from "@mosaicstack/telemetry-client";
|
||||||
TaskType,
|
import type { PredictionResponse, PredictionQuery } from "@mosaicstack/telemetry-client";
|
||||||
Complexity,
|
|
||||||
Provider,
|
|
||||||
} from "@mosaicstack/telemetry-client";
|
|
||||||
import type {
|
|
||||||
PredictionResponse,
|
|
||||||
PredictionQuery,
|
|
||||||
} from "@mosaicstack/telemetry-client";
|
|
||||||
import { MosaicTelemetryService } from "./mosaic-telemetry.service";
|
import { MosaicTelemetryService } from "./mosaic-telemetry.service";
|
||||||
import { PredictionService } from "./prediction.service";
|
import { PredictionService } from "./prediction.service";
|
||||||
|
|
||||||
@@ -124,12 +117,7 @@ describe("PredictionService", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it("should pass correct query parameters to telemetry service", () => {
|
it("should pass correct query parameters to telemetry service", () => {
|
||||||
service.getEstimate(
|
service.getEstimate(TaskType.CODE_REVIEW, "gpt-4o", Provider.OPENAI, Complexity.HIGH);
|
||||||
TaskType.CODE_REVIEW,
|
|
||||||
"gpt-4o",
|
|
||||||
Provider.OPENAI,
|
|
||||||
Complexity.HIGH
|
|
||||||
);
|
|
||||||
|
|
||||||
expect(mockTelemetryService.getPrediction).toHaveBeenCalledWith({
|
expect(mockTelemetryService.getPrediction).toHaveBeenCalledWith({
|
||||||
task_type: TaskType.CODE_REVIEW,
|
task_type: TaskType.CODE_REVIEW,
|
||||||
@@ -205,8 +193,7 @@ describe("PredictionService", () => {
|
|||||||
|
|
||||||
expect(mockTelemetryService.refreshPredictions).toHaveBeenCalledTimes(1);
|
expect(mockTelemetryService.refreshPredictions).toHaveBeenCalledTimes(1);
|
||||||
|
|
||||||
const queries: PredictionQuery[] =
|
const queries: PredictionQuery[] = mockTelemetryService.refreshPredictions.mock.calls[0][0];
|
||||||
mockTelemetryService.refreshPredictions.mock.calls[0][0];
|
|
||||||
|
|
||||||
// Should have queries for cross-product of models, task types, and complexities
|
// Should have queries for cross-product of models, task types, and complexities
|
||||||
expect(queries.length).toBeGreaterThan(0);
|
expect(queries.length).toBeGreaterThan(0);
|
||||||
@@ -223,8 +210,7 @@ describe("PredictionService", () => {
|
|||||||
it("should include Anthropic model predictions", async () => {
|
it("should include Anthropic model predictions", async () => {
|
||||||
await service.refreshCommonPredictions();
|
await service.refreshCommonPredictions();
|
||||||
|
|
||||||
const queries: PredictionQuery[] =
|
const queries: PredictionQuery[] = mockTelemetryService.refreshPredictions.mock.calls[0][0];
|
||||||
mockTelemetryService.refreshPredictions.mock.calls[0][0];
|
|
||||||
|
|
||||||
const anthropicQueries = queries.filter(
|
const anthropicQueries = queries.filter(
|
||||||
(q: PredictionQuery) => q.provider === Provider.ANTHROPIC
|
(q: PredictionQuery) => q.provider === Provider.ANTHROPIC
|
||||||
@@ -235,12 +221,9 @@ describe("PredictionService", () => {
|
|||||||
it("should include OpenAI model predictions", async () => {
|
it("should include OpenAI model predictions", async () => {
|
||||||
await service.refreshCommonPredictions();
|
await service.refreshCommonPredictions();
|
||||||
|
|
||||||
const queries: PredictionQuery[] =
|
const queries: PredictionQuery[] = mockTelemetryService.refreshPredictions.mock.calls[0][0];
|
||||||
mockTelemetryService.refreshPredictions.mock.calls[0][0];
|
|
||||||
|
|
||||||
const openaiQueries = queries.filter(
|
const openaiQueries = queries.filter((q: PredictionQuery) => q.provider === Provider.OPENAI);
|
||||||
(q: PredictionQuery) => q.provider === Provider.OPENAI
|
|
||||||
);
|
|
||||||
expect(openaiQueries.length).toBeGreaterThan(0);
|
expect(openaiQueries.length).toBeGreaterThan(0);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -253,9 +236,7 @@ describe("PredictionService", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it("should not throw when refreshPredictions rejects", async () => {
|
it("should not throw when refreshPredictions rejects", async () => {
|
||||||
mockTelemetryService.refreshPredictions.mockRejectedValue(
|
mockTelemetryService.refreshPredictions.mockRejectedValue(new Error("Server unreachable"));
|
||||||
new Error("Server unreachable")
|
|
||||||
);
|
|
||||||
|
|
||||||
// Should not throw
|
// Should not throw
|
||||||
await expect(service.refreshCommonPredictions()).resolves.not.toThrow();
|
await expect(service.refreshCommonPredictions()).resolves.not.toThrow();
|
||||||
@@ -264,8 +245,7 @@ describe("PredictionService", () => {
|
|||||||
it("should include common task types in queries", async () => {
|
it("should include common task types in queries", async () => {
|
||||||
await service.refreshCommonPredictions();
|
await service.refreshCommonPredictions();
|
||||||
|
|
||||||
const queries: PredictionQuery[] =
|
const queries: PredictionQuery[] = mockTelemetryService.refreshPredictions.mock.calls[0][0];
|
||||||
mockTelemetryService.refreshPredictions.mock.calls[0][0];
|
|
||||||
|
|
||||||
const taskTypes = new Set(queries.map((q: PredictionQuery) => q.task_type));
|
const taskTypes = new Set(queries.map((q: PredictionQuery) => q.task_type));
|
||||||
|
|
||||||
@@ -277,8 +257,7 @@ describe("PredictionService", () => {
|
|||||||
it("should include common complexity levels in queries", async () => {
|
it("should include common complexity levels in queries", async () => {
|
||||||
await service.refreshCommonPredictions();
|
await service.refreshCommonPredictions();
|
||||||
|
|
||||||
const queries: PredictionQuery[] =
|
const queries: PredictionQuery[] = mockTelemetryService.refreshPredictions.mock.calls[0][0];
|
||||||
mockTelemetryService.refreshPredictions.mock.calls[0][0];
|
|
||||||
|
|
||||||
const complexities = new Set(queries.map((q: PredictionQuery) => q.complexity));
|
const complexities = new Set(queries.map((q: PredictionQuery) => q.complexity));
|
||||||
|
|
||||||
@@ -309,9 +288,7 @@ describe("PredictionService", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it("should not throw when refresh fails on init", () => {
|
it("should not throw when refresh fails on init", () => {
|
||||||
mockTelemetryService.refreshPredictions.mockRejectedValue(
|
mockTelemetryService.refreshPredictions.mockRejectedValue(new Error("Connection refused"));
|
||||||
new Error("Connection refused")
|
|
||||||
);
|
|
||||||
|
|
||||||
// Should not throw
|
// Should not throw
|
||||||
expect(() => service.onModuleInit()).not.toThrow();
|
expect(() => service.onModuleInit()).not.toThrow();
|
||||||
|
|||||||
@@ -156,7 +156,7 @@ describe("PrismaService", () => {
|
|||||||
it("should set workspace context variables in transaction", async () => {
|
it("should set workspace context variables in transaction", async () => {
|
||||||
const userId = "user-123";
|
const userId = "user-123";
|
||||||
const workspaceId = "workspace-456";
|
const workspaceId = "workspace-456";
|
||||||
const executeRawSpy = vi.spyOn(service, "$executeRaw").mockResolvedValue(0);
|
vi.spyOn(service, "$executeRaw").mockResolvedValue(0);
|
||||||
|
|
||||||
// Mock $transaction to execute the callback with a mock tx client
|
// Mock $transaction to execute the callback with a mock tx client
|
||||||
const mockTx = {
|
const mockTx = {
|
||||||
@@ -195,7 +195,6 @@ describe("PrismaService", () => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
// Mock both methods at the same time to avoid spy issues
|
// Mock both methods at the same time to avoid spy issues
|
||||||
const originalSetContext = service.setWorkspaceContext.bind(service);
|
|
||||||
const setContextCalls: [string, string, unknown][] = [];
|
const setContextCalls: [string, string, unknown][] = [];
|
||||||
service.setWorkspaceContext = vi.fn().mockImplementation((uid, wid, tx) => {
|
service.setWorkspaceContext = vi.fn().mockImplementation((uid, wid, tx) => {
|
||||||
setContextCalls.push([uid, wid, tx]);
|
setContextCalls.push([uid, wid, tx]);
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { PrismaClient } from "@prisma/client";
|
|||||||
import { VaultService } from "../vault/vault.service";
|
import { VaultService } from "../vault/vault.service";
|
||||||
import { createAccountEncryptionExtension } from "./account-encryption.extension";
|
import { createAccountEncryptionExtension } from "./account-encryption.extension";
|
||||||
import { createLlmEncryptionExtension } from "./llm-encryption.extension";
|
import { createLlmEncryptionExtension } from "./llm-encryption.extension";
|
||||||
|
import { getRlsClient } from "./rls-context.provider";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Prisma service that manages database connection lifecycle
|
* Prisma service that manages database connection lifecycle
|
||||||
@@ -177,6 +178,13 @@ export class PrismaService extends PrismaClient implements OnModuleInit, OnModul
|
|||||||
workspaceId: string,
|
workspaceId: string,
|
||||||
fn: (tx: PrismaClient) => Promise<T>
|
fn: (tx: PrismaClient) => Promise<T>
|
||||||
): Promise<T> {
|
): Promise<T> {
|
||||||
|
const rlsClient = getRlsClient();
|
||||||
|
|
||||||
|
if (rlsClient) {
|
||||||
|
await this.setWorkspaceContext(userId, workspaceId, rlsClient as unknown as PrismaClient);
|
||||||
|
return fn(rlsClient as unknown as PrismaClient);
|
||||||
|
}
|
||||||
|
|
||||||
return this.$transaction(async (tx) => {
|
return this.$transaction(async (tx) => {
|
||||||
await this.setWorkspaceContext(userId, workspaceId, tx as PrismaClient);
|
await this.setWorkspaceContext(userId, workspaceId, tx as PrismaClient);
|
||||||
return fn(tx as PrismaClient);
|
return fn(tx as PrismaClient);
|
||||||
|
|||||||
247
apps/api/src/speech/AGENTS.md
Normal file
247
apps/api/src/speech/AGENTS.md
Normal file
@@ -0,0 +1,247 @@
|
|||||||
|
# speech — Agent Context
|
||||||
|
|
||||||
|
> Part of the `apps/api/src` layer. Speech-to-text (STT) and text-to-speech (TTS) services.
|
||||||
|
|
||||||
|
## Module Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
speech/
|
||||||
|
├── speech.module.ts # NestJS module (conditional provider registration)
|
||||||
|
├── speech.config.ts # Environment validation + typed config (registerAs)
|
||||||
|
├── speech.config.spec.ts # 51 config validation tests
|
||||||
|
├── speech.constants.ts # NestJS injection tokens (STT_PROVIDER, TTS_PROVIDERS)
|
||||||
|
├── speech.controller.ts # REST endpoints (transcribe, synthesize, voices, health)
|
||||||
|
├── speech.controller.spec.ts # Controller tests
|
||||||
|
├── speech.service.ts # High-level service with fallback orchestration
|
||||||
|
├── speech.service.spec.ts # Service tests
|
||||||
|
├── speech.gateway.ts # WebSocket gateway (/speech namespace)
|
||||||
|
├── speech.gateway.spec.ts # Gateway tests
|
||||||
|
├── dto/
|
||||||
|
│ ├── transcribe.dto.ts # Transcription request DTO (class-validator)
|
||||||
|
│ ├── synthesize.dto.ts # Synthesis request DTO (class-validator)
|
||||||
|
│ └── index.ts # Barrel export
|
||||||
|
├── interfaces/
|
||||||
|
│ ├── speech-types.ts # Shared types (SpeechTier, AudioFormat, options, results)
|
||||||
|
│ ├── stt-provider.interface.ts # ISTTProvider contract
|
||||||
|
│ ├── tts-provider.interface.ts # ITTSProvider contract
|
||||||
|
│ └── index.ts # Barrel export
|
||||||
|
├── pipes/
|
||||||
|
│ ├── audio-validation.pipe.ts # Validates uploaded audio (MIME type, size)
|
||||||
|
│ ├── audio-validation.pipe.spec.ts
|
||||||
|
│ ├── text-validation.pipe.ts # Validates TTS text input (non-empty, max length)
|
||||||
|
│ ├── text-validation.pipe.spec.ts
|
||||||
|
│ └── index.ts # Barrel export
|
||||||
|
└── providers/
|
||||||
|
├── base-tts.provider.ts # Abstract base class (OpenAI SDK + common logic)
|
||||||
|
├── base-tts.provider.spec.ts
|
||||||
|
├── kokoro-tts.provider.ts # Default tier (CPU, 53 voices, 8 languages)
|
||||||
|
├── kokoro-tts.provider.spec.ts
|
||||||
|
├── chatterbox-tts.provider.ts # Premium tier (GPU, voice cloning, emotion control)
|
||||||
|
├── chatterbox-tts.provider.spec.ts
|
||||||
|
├── piper-tts.provider.ts # Fallback tier (CPU, lightweight, Raspberry Pi)
|
||||||
|
├── piper-tts.provider.spec.ts
|
||||||
|
├── speaches-stt.provider.ts # STT provider (Whisper via Speaches)
|
||||||
|
├── speaches-stt.provider.spec.ts
|
||||||
|
├── tts-provider.factory.ts # Factory: creates providers from config
|
||||||
|
└── tts-provider.factory.spec.ts
|
||||||
|
```
|
||||||
|
|
||||||
|
## Codebase Patterns
|
||||||
|
|
||||||
|
### Provider Pattern (BaseTTSProvider + Factory)
|
||||||
|
|
||||||
|
All TTS providers extend `BaseTTSProvider`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export class MyNewProvider extends BaseTTSProvider {
|
||||||
|
readonly name = "my-provider";
|
||||||
|
readonly tier: SpeechTier = "default"; // or "premium" or "fallback"
|
||||||
|
|
||||||
|
constructor(baseURL: string) {
|
||||||
|
super(baseURL, "default-voice-id", "mp3");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Override listVoices() for custom voice catalog
|
||||||
|
override listVoices(): Promise<VoiceInfo[]> { ... }
|
||||||
|
|
||||||
|
// Override synthesize() only if non-standard API behavior is needed
|
||||||
|
// (see ChatterboxTTSProvider for example with extra body params)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
The base class handles:
|
||||||
|
|
||||||
|
- OpenAI SDK client creation with custom `baseURL` and `apiKey: "not-needed"`
|
||||||
|
- Standard `synthesize()` via `client.audio.speech.create()`
|
||||||
|
- Default `listVoices()` returning just the default voice
|
||||||
|
- `isHealthy()` via GET to the `/v1/models` endpoint
|
||||||
|
|
||||||
|
### Config Pattern
|
||||||
|
|
||||||
|
Config follows the existing pattern (`auth.config.ts`, `federation.config.ts`):
|
||||||
|
|
||||||
|
- Export `isSttEnabled()`, `isTtsEnabled()`, etc. (boolean checks from env)
|
||||||
|
- Export `validateSpeechConfig()` (called at module init, throws on missing required vars)
|
||||||
|
- Export `getSpeechConfig()` (typed config object with defaults)
|
||||||
|
- Export `speechConfig = registerAs("speech", ...)` for NestJS ConfigModule
|
||||||
|
|
||||||
|
Boolean env parsing: `value === "true" || value === "1"`. No default-true.
|
||||||
|
|
||||||
|
### Conditional Provider Registration
|
||||||
|
|
||||||
|
In `speech.module.ts`:
|
||||||
|
|
||||||
|
- STT provider uses `isSttEnabled()` at module definition time to decide whether to register
|
||||||
|
- TTS providers use a factory function injected with `ConfigService`
|
||||||
|
- `@Optional()` decorator on `SpeechService`'s `sttProvider` handles the case where STT is disabled
|
||||||
|
|
||||||
|
### Injection Tokens
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// speech.constants.ts
|
||||||
|
export const STT_PROVIDER = Symbol("STT_PROVIDER"); // ISTTProvider
|
||||||
|
export const TTS_PROVIDERS = Symbol("TTS_PROVIDERS"); // Map<SpeechTier, ITTSProvider>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Fallback Chain
|
||||||
|
|
||||||
|
TTS fallback order: `premium` -> `default` -> `fallback`
|
||||||
|
|
||||||
|
- Chain starts at the requested tier and goes downward
|
||||||
|
- Only tiers that are both enabled AND have a registered provider are attempted
|
||||||
|
- `ServiceUnavailableException` if all providers fail
|
||||||
|
|
||||||
|
### WebSocket Gateway
|
||||||
|
|
||||||
|
- Separate `/speech` namespace (not on the main gateway)
|
||||||
|
- Authentication mirrors the main WS gateway pattern (token extraction from handshake)
|
||||||
|
- One session per client, accumulates audio chunks in memory
|
||||||
|
- Chunks concatenated and transcribed on `stop-transcription`
|
||||||
|
- Session cleanup on disconnect
|
||||||
|
|
||||||
|
## How to Add a New TTS Provider
|
||||||
|
|
||||||
|
1. **Create the provider class** in `providers/`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// providers/my-tts.provider.ts
|
||||||
|
import { BaseTTSProvider } from "./base-tts.provider";
|
||||||
|
import type { SpeechTier } from "../interfaces/speech-types";
|
||||||
|
|
||||||
|
export class MyTtsProvider extends BaseTTSProvider {
|
||||||
|
readonly name = "my-provider";
|
||||||
|
readonly tier: SpeechTier = "default"; // Choose tier
|
||||||
|
|
||||||
|
constructor(baseURL: string) {
|
||||||
|
super(baseURL, "default-voice", "mp3");
|
||||||
|
}
|
||||||
|
|
||||||
|
override listVoices(): Promise<VoiceInfo[]> {
|
||||||
|
// Return your voice catalog
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Add env vars** to `speech.config.ts`:
|
||||||
|
- Add enabled check function
|
||||||
|
- Add URL to validation in `validateSpeechConfig()`
|
||||||
|
- Add config section in `getSpeechConfig()`
|
||||||
|
|
||||||
|
3. **Register in factory** (`tts-provider.factory.ts`):
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
if (config.tts.myTier.enabled) {
|
||||||
|
const provider = new MyTtsProvider(config.tts.myTier.url);
|
||||||
|
providers.set("myTier", provider);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
4. **Add env vars** to `.env.example`
|
||||||
|
|
||||||
|
5. **Write tests** following existing patterns (mock OpenAI SDK, test synthesis + listVoices + isHealthy)
|
||||||
|
|
||||||
|
## How to Add a New STT Provider
|
||||||
|
|
||||||
|
1. **Implement `ISTTProvider`** (does not use a base class -- STT has only one implementation currently)
|
||||||
|
2. **Add config section** similar to `stt` in `speech.config.ts`
|
||||||
|
3. **Register** in `speech.module.ts` providers array with `STT_PROVIDER` token
|
||||||
|
4. **Write tests** following `speaches-stt.provider.spec.ts` pattern
|
||||||
|
|
||||||
|
## Common Gotchas
|
||||||
|
|
||||||
|
- **OpenAI SDK `apiKey`**: Self-hosted services do not require an API key. Use `apiKey: "not-needed"` when creating the OpenAI client.
|
||||||
|
- **`toFile()` import**: The `toFile` helper is imported from `"openai"` (not from a subpath). Used in the STT provider to convert Buffer to a File-like object for multipart upload.
|
||||||
|
- **Health check URL**: `BaseTTSProvider.isHealthy()` calls `GET /v1/models`. The base URL is expected to end with `/v1`.
|
||||||
|
- **Voice ID prefix parsing**: Kokoro voice IDs encode language + gender in first two characters. See `parseVoicePrefix()` in `kokoro-tts.provider.ts`.
|
||||||
|
- **Chatterbox extra body params**: The `reference_audio` (base64) and `exaggeration` fields are passed via the OpenAI SDK by casting the request body. This works because the SDK passes through unknown fields.
|
||||||
|
- **WebSocket auth**: The gateway checks `auth.token`, then `query.token`, then `Authorization` header (in that order). Match this in test setup.
|
||||||
|
- **Config validation timing**: `validateSpeechConfig()` runs at module init (`onModuleInit`), not at provider construction. This means a misconfigured provider will fail at startup, not at first request.
|
||||||
|
|
||||||
|
## Test Patterns
|
||||||
|
|
||||||
|
### Mocking OpenAI SDK
|
||||||
|
|
||||||
|
All provider tests mock the OpenAI SDK. Pattern:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
vi.mock("openai", () => ({
|
||||||
|
default: vi.fn().mockImplementation(() => ({
|
||||||
|
audio: {
|
||||||
|
speech: {
|
||||||
|
create: vi.fn().mockResolvedValue({
|
||||||
|
arrayBuffer: () => Promise.resolve(new ArrayBuffer(10)),
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
transcriptions: {
|
||||||
|
create: vi.fn().mockResolvedValue({
|
||||||
|
text: "transcribed text",
|
||||||
|
language: "en",
|
||||||
|
duration: 3.5,
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
models: { list: vi.fn().mockResolvedValue({ data: [] }) },
|
||||||
|
})),
|
||||||
|
}));
|
||||||
|
```
|
||||||
|
|
||||||
|
### Mocking Config Injection
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const mockConfig: SpeechConfig = {
|
||||||
|
stt: { enabled: true, baseUrl: "http://test:8000/v1", model: "test-model", language: "en" },
|
||||||
|
tts: {
|
||||||
|
default: { enabled: true, url: "http://test:8880/v1", voice: "af_heart", format: "mp3" },
|
||||||
|
premium: { enabled: false, url: "" },
|
||||||
|
fallback: { enabled: false, url: "" },
|
||||||
|
},
|
||||||
|
limits: { maxUploadSize: 25000000, maxDurationSeconds: 600, maxTextLength: 4096 },
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
### Config Test Pattern
|
||||||
|
|
||||||
|
`speech.config.spec.ts` saves and restores `process.env` around each test:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
let savedEnv: NodeJS.ProcessEnv;
|
||||||
|
beforeEach(() => {
|
||||||
|
savedEnv = { ...process.env };
|
||||||
|
});
|
||||||
|
afterEach(() => {
|
||||||
|
process.env = savedEnv;
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Files
|
||||||
|
|
||||||
|
| File | Purpose |
|
||||||
|
| ----------------------------------- | ------------------------------------------------------------------------ |
|
||||||
|
| `speech.module.ts` | Module registration with conditional providers |
|
||||||
|
| `speech.config.ts` | All speech env vars + validation (51 tests) |
|
||||||
|
| `speech.service.ts` | Core service: transcribe, synthesize (with fallback), listVoices |
|
||||||
|
| `speech.controller.ts` | REST endpoints: POST transcribe, POST synthesize, GET voices, GET health |
|
||||||
|
| `speech.gateway.ts` | WebSocket streaming transcription (/speech namespace) |
|
||||||
|
| `providers/base-tts.provider.ts` | Abstract base for all TTS providers (OpenAI SDK wrapper) |
|
||||||
|
| `providers/tts-provider.factory.ts` | Creates provider instances from config |
|
||||||
|
| `interfaces/speech-types.ts` | All shared types: SpeechTier, AudioFormat, options, results |
|
||||||
8
apps/api/src/speech/dto/index.ts
Normal file
8
apps/api/src/speech/dto/index.ts
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
/**
|
||||||
|
* Speech DTOs barrel export
|
||||||
|
*
|
||||||
|
* Issue #398
|
||||||
|
*/
|
||||||
|
|
||||||
|
export { TranscribeDto } from "./transcribe.dto";
|
||||||
|
export { SynthesizeDto } from "./synthesize.dto";
|
||||||
69
apps/api/src/speech/dto/synthesize.dto.ts
Normal file
69
apps/api/src/speech/dto/synthesize.dto.ts
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
/**
|
||||||
|
* SynthesizeDto
|
||||||
|
*
|
||||||
|
* DTO for text-to-speech synthesis requests.
|
||||||
|
* Text and option fields are validated by class-validator decorators.
|
||||||
|
* Additional options control voice, speed, format, and tier selection.
|
||||||
|
*
|
||||||
|
* Issue #398
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { IsString, IsOptional, IsNumber, IsIn, Min, Max, MaxLength } from "class-validator";
|
||||||
|
import { Type } from "class-transformer";
|
||||||
|
import { AUDIO_FORMATS, SPEECH_TIERS } from "../interfaces/speech-types";
|
||||||
|
import type { AudioFormat, SpeechTier } from "../interfaces/speech-types";
|
||||||
|
|
||||||
|
export class SynthesizeDto {
|
||||||
|
/**
|
||||||
|
* Text to convert to speech.
|
||||||
|
* Validated by class-validator decorators for type and maximum length.
|
||||||
|
*/
|
||||||
|
@IsString({ message: "text must be a string" })
|
||||||
|
@MaxLength(4096, { message: "text must not exceed 4096 characters" })
|
||||||
|
text!: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Voice ID to use for synthesis.
|
||||||
|
* Available voices depend on the selected tier and provider.
|
||||||
|
* If omitted, the default voice from speech config is used.
|
||||||
|
*/
|
||||||
|
@IsOptional()
|
||||||
|
@IsString({ message: "voice must be a string" })
|
||||||
|
@MaxLength(100, { message: "voice must not exceed 100 characters" })
|
||||||
|
voice?: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Speech speed multiplier (0.5 to 2.0).
|
||||||
|
* 1.0 is normal speed, <1.0 is slower, >1.0 is faster.
|
||||||
|
*/
|
||||||
|
@IsOptional()
|
||||||
|
@Type(() => Number)
|
||||||
|
@IsNumber({}, { message: "speed must be a number" })
|
||||||
|
@Min(0.5, { message: "speed must be at least 0.5" })
|
||||||
|
@Max(2.0, { message: "speed must not exceed 2.0" })
|
||||||
|
speed?: number;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Desired audio output format.
|
||||||
|
* Supported: mp3, wav, opus, flac, aac, pcm.
|
||||||
|
* If omitted, the default format from speech config is used.
|
||||||
|
*/
|
||||||
|
@IsOptional()
|
||||||
|
@IsString({ message: "format must be a string" })
|
||||||
|
@IsIn(AUDIO_FORMATS, {
|
||||||
|
message: `format must be one of: ${AUDIO_FORMATS.join(", ")}`,
|
||||||
|
})
|
||||||
|
format?: AudioFormat;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* TTS tier to use for synthesis.
|
||||||
|
* Controls which provider is used: default (Kokoro), premium (Chatterbox), or fallback (Piper).
|
||||||
|
* If the selected tier is unavailable, the service falls back to the next available tier.
|
||||||
|
*/
|
||||||
|
@IsOptional()
|
||||||
|
@IsString({ message: "tier must be a string" })
|
||||||
|
@IsIn(SPEECH_TIERS, {
|
||||||
|
message: `tier must be one of: ${SPEECH_TIERS.join(", ")}`,
|
||||||
|
})
|
||||||
|
tier?: SpeechTier;
|
||||||
|
}
|
||||||
54
apps/api/src/speech/dto/transcribe.dto.ts
Normal file
54
apps/api/src/speech/dto/transcribe.dto.ts
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
/**
|
||||||
|
* TranscribeDto
|
||||||
|
*
|
||||||
|
* DTO for speech-to-text transcription requests.
|
||||||
|
* Supports optional language and model overrides.
|
||||||
|
*
|
||||||
|
* The audio file itself is handled by Multer (FileInterceptor)
|
||||||
|
* and validated by AudioValidationPipe.
|
||||||
|
*
|
||||||
|
* Issue #398
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { IsString, IsOptional, IsNumber, Min, Max, MaxLength } from "class-validator";
|
||||||
|
import { Type } from "class-transformer";
|
||||||
|
|
||||||
|
export class TranscribeDto {
|
||||||
|
/**
|
||||||
|
* Language code for transcription (e.g., "en", "fr", "de").
|
||||||
|
* If omitted, the default from speech config is used.
|
||||||
|
*/
|
||||||
|
@IsOptional()
|
||||||
|
@IsString({ message: "language must be a string" })
|
||||||
|
@MaxLength(10, { message: "language must not exceed 10 characters" })
|
||||||
|
language?: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Model override for transcription.
|
||||||
|
* If omitted, the default model from speech config is used.
|
||||||
|
*/
|
||||||
|
@IsOptional()
|
||||||
|
@IsString({ message: "model must be a string" })
|
||||||
|
@MaxLength(200, { message: "model must not exceed 200 characters" })
|
||||||
|
model?: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Optional prompt to guide the transcription model.
|
||||||
|
* Useful for providing context or expected vocabulary.
|
||||||
|
*/
|
||||||
|
@IsOptional()
|
||||||
|
@IsString({ message: "prompt must be a string" })
|
||||||
|
@MaxLength(1000, { message: "prompt must not exceed 1000 characters" })
|
||||||
|
prompt?: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Temperature for transcription (0.0 to 1.0).
|
||||||
|
* Lower values produce more deterministic results.
|
||||||
|
*/
|
||||||
|
@IsOptional()
|
||||||
|
@Type(() => Number)
|
||||||
|
@IsNumber({}, { message: "temperature must be a number" })
|
||||||
|
@Min(0, { message: "temperature must be at least 0" })
|
||||||
|
@Max(1, { message: "temperature must not exceed 1" })
|
||||||
|
temperature?: number;
|
||||||
|
}
|
||||||
19
apps/api/src/speech/interfaces/index.ts
Normal file
19
apps/api/src/speech/interfaces/index.ts
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
/**
|
||||||
|
* Speech interfaces barrel export.
|
||||||
|
*
|
||||||
|
* Issue #389
|
||||||
|
*/
|
||||||
|
|
||||||
|
export type { ISTTProvider } from "./stt-provider.interface";
|
||||||
|
export type { ITTSProvider } from "./tts-provider.interface";
|
||||||
|
export { SPEECH_TIERS, AUDIO_FORMATS } from "./speech-types";
|
||||||
|
export type {
|
||||||
|
SpeechTier,
|
||||||
|
AudioFormat,
|
||||||
|
TranscribeOptions,
|
||||||
|
TranscriptionResult,
|
||||||
|
TranscriptionSegment,
|
||||||
|
SynthesizeOptions,
|
||||||
|
SynthesisResult,
|
||||||
|
VoiceInfo,
|
||||||
|
} from "./speech-types";
|
||||||
178
apps/api/src/speech/interfaces/speech-types.ts
Normal file
178
apps/api/src/speech/interfaces/speech-types.ts
Normal file
@@ -0,0 +1,178 @@
|
|||||||
|
/**
|
||||||
|
* Speech Types
|
||||||
|
*
|
||||||
|
* Shared types for speech-to-text (STT) and text-to-speech (TTS) services.
|
||||||
|
* Used by provider interfaces and the SpeechService.
|
||||||
|
*
|
||||||
|
* Issue #389
|
||||||
|
*/
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Enums / Discriminators
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Canonical array of TTS provider tiers.
|
||||||
|
* Determines which TTS engine is used for synthesis.
|
||||||
|
*
|
||||||
|
* - default: Primary TTS engine (e.g., Kokoro)
|
||||||
|
* - premium: Higher quality TTS engine (e.g., Chatterbox)
|
||||||
|
* - fallback: Backup TTS engine (e.g., Piper/OpenedAI)
|
||||||
|
*/
|
||||||
|
export const SPEECH_TIERS = ["default", "premium", "fallback"] as const;
|
||||||
|
export type SpeechTier = (typeof SPEECH_TIERS)[number];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Canonical array of audio output formats for TTS synthesis.
|
||||||
|
*/
|
||||||
|
export const AUDIO_FORMATS = ["mp3", "wav", "opus", "flac", "aac", "pcm"] as const;
|
||||||
|
export type AudioFormat = (typeof AUDIO_FORMATS)[number];
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// STT Types
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for speech-to-text transcription.
|
||||||
|
*/
|
||||||
|
export interface TranscribeOptions {
|
||||||
|
/** Language code (e.g., "en", "fr", "de") */
|
||||||
|
language?: string;
|
||||||
|
|
||||||
|
/** Model to use for transcription */
|
||||||
|
model?: string;
|
||||||
|
|
||||||
|
/** MIME type of the audio (e.g., "audio/mp3", "audio/wav") */
|
||||||
|
mimeType?: string;
|
||||||
|
|
||||||
|
/** Optional prompt to guide transcription */
|
||||||
|
prompt?: string;
|
||||||
|
|
||||||
|
/** Temperature for transcription (0.0 - 1.0) */
|
||||||
|
temperature?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of a speech-to-text transcription.
|
||||||
|
*/
|
||||||
|
export interface TranscriptionResult {
|
||||||
|
/** Transcribed text */
|
||||||
|
text: string;
|
||||||
|
|
||||||
|
/** Language detected or used */
|
||||||
|
language: string;
|
||||||
|
|
||||||
|
/** Duration of the audio in seconds */
|
||||||
|
durationSeconds?: number;
|
||||||
|
|
||||||
|
/** Confidence score (0.0 - 1.0, if available) */
|
||||||
|
confidence?: number;
|
||||||
|
|
||||||
|
/** Individual word or segment timings (if available) */
|
||||||
|
segments?: TranscriptionSegment[];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A segment within a transcription result.
|
||||||
|
*/
|
||||||
|
export interface TranscriptionSegment {
|
||||||
|
/** Segment text */
|
||||||
|
text: string;
|
||||||
|
|
||||||
|
/** Start time in seconds */
|
||||||
|
start: number;
|
||||||
|
|
||||||
|
/** End time in seconds */
|
||||||
|
end: number;
|
||||||
|
|
||||||
|
/** Confidence for this segment */
|
||||||
|
confidence?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// TTS Types
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for text-to-speech synthesis.
|
||||||
|
*/
|
||||||
|
export interface SynthesizeOptions {
|
||||||
|
/** Voice ID to use */
|
||||||
|
voice?: string;
|
||||||
|
|
||||||
|
/** Desired audio format */
|
||||||
|
format?: AudioFormat;
|
||||||
|
|
||||||
|
/** Speech speed multiplier (0.5 - 2.0) */
|
||||||
|
speed?: number;
|
||||||
|
|
||||||
|
/** Preferred TTS tier */
|
||||||
|
tier?: SpeechTier;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of a text-to-speech synthesis.
|
||||||
|
*/
|
||||||
|
export interface SynthesisResult {
|
||||||
|
/** Synthesized audio data */
|
||||||
|
audio: Buffer;
|
||||||
|
|
||||||
|
/** Audio format of the result */
|
||||||
|
format: AudioFormat;
|
||||||
|
|
||||||
|
/** Voice used for synthesis */
|
||||||
|
voice: string;
|
||||||
|
|
||||||
|
/** Tier that produced the synthesis */
|
||||||
|
tier: SpeechTier;
|
||||||
|
|
||||||
|
/** Duration of the generated audio in seconds (if available) */
|
||||||
|
durationSeconds?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extended options for Chatterbox TTS synthesis.
|
||||||
|
*
|
||||||
|
* Chatterbox supports voice cloning via a reference audio buffer and
|
||||||
|
* emotion exaggeration control. These are passed as extra body parameters
|
||||||
|
* to the OpenAI-compatible API.
|
||||||
|
*
|
||||||
|
* Issue #394
|
||||||
|
*/
|
||||||
|
export interface ChatterboxSynthesizeOptions extends SynthesizeOptions {
|
||||||
|
/**
|
||||||
|
* Reference audio buffer for voice cloning.
|
||||||
|
* When provided, Chatterbox will clone the voice from this audio sample.
|
||||||
|
* Should be a WAV or MP3 file of 5-30 seconds for best results.
|
||||||
|
*/
|
||||||
|
referenceAudio?: Buffer;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Emotion exaggeration factor (0.0 to 1.0).
|
||||||
|
* Controls how much emotional expression is applied to the synthesized speech.
|
||||||
|
* - 0.0: Neutral, minimal emotion
|
||||||
|
* - 0.5: Moderate emotion (default when not specified)
|
||||||
|
* - 1.0: Maximum emotion exaggeration
|
||||||
|
*/
|
||||||
|
emotionExaggeration?: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Information about an available TTS voice.
|
||||||
|
*/
|
||||||
|
export interface VoiceInfo {
|
||||||
|
/** Voice identifier */
|
||||||
|
id: string;
|
||||||
|
|
||||||
|
/** Human-readable voice name */
|
||||||
|
name: string;
|
||||||
|
|
||||||
|
/** Language code */
|
||||||
|
language?: string;
|
||||||
|
|
||||||
|
/** Tier this voice belongs to */
|
||||||
|
tier: SpeechTier;
|
||||||
|
|
||||||
|
/** Whether this is the default voice for its tier */
|
||||||
|
isDefault?: boolean;
|
||||||
|
}
|
||||||
52
apps/api/src/speech/interfaces/stt-provider.interface.ts
Normal file
52
apps/api/src/speech/interfaces/stt-provider.interface.ts
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
/**
|
||||||
|
* STT Provider Interface
|
||||||
|
*
|
||||||
|
* Defines the contract for speech-to-text provider implementations.
|
||||||
|
* All STT providers (e.g., Speaches/faster-whisper) must implement this interface.
|
||||||
|
*
|
||||||
|
* Issue #389
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { TranscribeOptions, TranscriptionResult } from "./speech-types";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Interface for speech-to-text providers.
|
||||||
|
*
|
||||||
|
* Implementations wrap an OpenAI-compatible API endpoint for transcription.
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* ```typescript
|
||||||
|
* class SpeachesSttProvider implements ISTTProvider {
|
||||||
|
* readonly name = "speaches";
|
||||||
|
*
|
||||||
|
* async transcribe(audio: Buffer, options?: TranscribeOptions): Promise<TranscriptionResult> {
|
||||||
|
* // Call speaches API via OpenAI SDK
|
||||||
|
* }
|
||||||
|
*
|
||||||
|
* async isHealthy(): Promise<boolean> {
|
||||||
|
* // Check endpoint health
|
||||||
|
* }
|
||||||
|
* }
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export interface ISTTProvider {
|
||||||
|
/** Provider name for logging and identification */
|
||||||
|
readonly name: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Transcribe audio data to text.
|
||||||
|
*
|
||||||
|
* @param audio - Raw audio data as a Buffer
|
||||||
|
* @param options - Optional transcription parameters
|
||||||
|
* @returns Transcription result with text and metadata
|
||||||
|
* @throws {Error} If transcription fails
|
||||||
|
*/
|
||||||
|
transcribe(audio: Buffer, options?: TranscribeOptions): Promise<TranscriptionResult>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the provider is healthy and available.
|
||||||
|
*
|
||||||
|
* @returns true if the provider endpoint is reachable and ready
|
||||||
|
*/
|
||||||
|
isHealthy(): Promise<boolean>;
|
||||||
|
}
|
||||||
68
apps/api/src/speech/interfaces/tts-provider.interface.ts
Normal file
68
apps/api/src/speech/interfaces/tts-provider.interface.ts
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
/**
|
||||||
|
* TTS Provider Interface
|
||||||
|
*
|
||||||
|
* Defines the contract for text-to-speech provider implementations.
|
||||||
|
* All TTS providers (e.g., Kokoro, Chatterbox, Piper/OpenedAI) must implement this interface.
|
||||||
|
*
|
||||||
|
* Issue #389
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { SynthesizeOptions, SynthesisResult, VoiceInfo, SpeechTier } from "./speech-types";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Interface for text-to-speech providers.
|
||||||
|
*
|
||||||
|
* Implementations wrap an OpenAI-compatible API endpoint for speech synthesis.
|
||||||
|
* Each provider is associated with a SpeechTier (default, premium, fallback).
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* ```typescript
|
||||||
|
* class KokoroProvider implements ITTSProvider {
|
||||||
|
* readonly name = "kokoro";
|
||||||
|
* readonly tier = "default";
|
||||||
|
*
|
||||||
|
* async synthesize(text: string, options?: SynthesizeOptions): Promise<SynthesisResult> {
|
||||||
|
* // Call Kokoro API via OpenAI SDK
|
||||||
|
* }
|
||||||
|
*
|
||||||
|
* async listVoices(): Promise<VoiceInfo[]> {
|
||||||
|
* // Return available voices
|
||||||
|
* }
|
||||||
|
*
|
||||||
|
* async isHealthy(): Promise<boolean> {
|
||||||
|
* // Check endpoint health
|
||||||
|
* }
|
||||||
|
* }
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export interface ITTSProvider {
|
||||||
|
/** Provider name for logging and identification */
|
||||||
|
readonly name: string;
|
||||||
|
|
||||||
|
/** Tier this provider serves (default, premium, fallback) */
|
||||||
|
readonly tier: SpeechTier;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Synthesize text to audio.
|
||||||
|
*
|
||||||
|
* @param text - Text to convert to speech
|
||||||
|
* @param options - Optional synthesis parameters (voice, format, speed)
|
||||||
|
* @returns Synthesis result with audio buffer and metadata
|
||||||
|
* @throws {Error} If synthesis fails
|
||||||
|
*/
|
||||||
|
synthesize(text: string, options?: SynthesizeOptions): Promise<SynthesisResult>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List available voices for this provider.
|
||||||
|
*
|
||||||
|
* @returns Array of voice information objects
|
||||||
|
*/
|
||||||
|
listVoices(): Promise<VoiceInfo[]>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the provider is healthy and available.
|
||||||
|
*
|
||||||
|
* @returns true if the provider endpoint is reachable and ready
|
||||||
|
*/
|
||||||
|
isHealthy(): Promise<boolean>;
|
||||||
|
}
|
||||||
205
apps/api/src/speech/pipes/audio-validation.pipe.spec.ts
Normal file
205
apps/api/src/speech/pipes/audio-validation.pipe.spec.ts
Normal file
@@ -0,0 +1,205 @@
|
|||||||
|
/**
|
||||||
|
* AudioValidationPipe Tests
|
||||||
|
*
|
||||||
|
* Issue #398: Validates uploaded audio files for MIME type and file size.
|
||||||
|
* Tests cover valid types, invalid types, size limits, and edge cases.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, expect, beforeEach } from "vitest";
|
||||||
|
import { BadRequestException } from "@nestjs/common";
|
||||||
|
import { AudioValidationPipe } from "./audio-validation.pipe";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper to create a mock Express.Multer.File object.
|
||||||
|
*/
|
||||||
|
function createMockFile(overrides: Partial<Express.Multer.File> = {}): Express.Multer.File {
|
||||||
|
return {
|
||||||
|
fieldname: "file",
|
||||||
|
originalname: "test.mp3",
|
||||||
|
encoding: "7bit",
|
||||||
|
mimetype: "audio/mpeg",
|
||||||
|
size: 1024,
|
||||||
|
destination: "",
|
||||||
|
filename: "",
|
||||||
|
path: "",
|
||||||
|
buffer: Buffer.from("fake-audio-data"),
|
||||||
|
stream: undefined as never,
|
||||||
|
...overrides,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("AudioValidationPipe", () => {
  // ==========================================
  // Default config (25MB max)
  // ==========================================
  describe("with default config", () => {
    let pipe: AudioValidationPipe;

    beforeEach(() => {
      pipe = new AudioValidationPipe();
    });

    // ==========================================
    // MIME type validation
    // ==========================================
    describe("MIME type validation", () => {
      it("should accept audio/wav", () => {
        const file = createMockFile({ mimetype: "audio/wav" });
        expect(pipe.transform(file)).toBe(file);
      });

      it("should accept audio/mp3", () => {
        const file = createMockFile({ mimetype: "audio/mp3" });
        expect(pipe.transform(file)).toBe(file);
      });

      it("should accept audio/mpeg", () => {
        const file = createMockFile({ mimetype: "audio/mpeg" });
        expect(pipe.transform(file)).toBe(file);
      });

      it("should accept audio/webm", () => {
        const file = createMockFile({ mimetype: "audio/webm" });
        expect(pipe.transform(file)).toBe(file);
      });

      it("should accept audio/ogg", () => {
        const file = createMockFile({ mimetype: "audio/ogg" });
        expect(pipe.transform(file)).toBe(file);
      });

      it("should accept audio/flac", () => {
        const file = createMockFile({ mimetype: "audio/flac" });
        expect(pipe.transform(file)).toBe(file);
      });

      it("should accept audio/x-m4a", () => {
        const file = createMockFile({ mimetype: "audio/x-m4a" });
        expect(pipe.transform(file)).toBe(file);
      });

      it("should reject unsupported MIME types with descriptive error", () => {
        const file = createMockFile({ mimetype: "video/mp4" });
        expect(() => pipe.transform(file)).toThrow(BadRequestException);
        expect(() => pipe.transform(file)).toThrow(/Unsupported audio format.*video\/mp4/);
      });

      it("should reject application/octet-stream", () => {
        const file = createMockFile({ mimetype: "application/octet-stream" });
        expect(() => pipe.transform(file)).toThrow(BadRequestException);
      });

      it("should reject text/plain", () => {
        const file = createMockFile({ mimetype: "text/plain" });
        expect(() => pipe.transform(file)).toThrow(BadRequestException);
      });

      it("should reject image/png", () => {
        const file = createMockFile({ mimetype: "image/png" });
        expect(() => pipe.transform(file)).toThrow(BadRequestException);
      });

      it("should include supported formats in error message", () => {
        const file = createMockFile({ mimetype: "video/mp4" });
        try {
          pipe.transform(file);
          expect.fail("Expected BadRequestException");
        } catch (error) {
          expect(error).toBeInstanceOf(BadRequestException);
          // Nest exception responses may be a plain string or an object with
          // a `message` field; unwrap both shapes before asserting.
          const response = (error as BadRequestException).getResponse();
          const message =
            typeof response === "string" ? response : (response as Record<string, unknown>).message;
          expect(message).toContain("audio/wav");
          expect(message).toContain("audio/mpeg");
        }
      });
    });

    // ==========================================
    // File size validation
    // ==========================================
    describe("file size validation", () => {
      it("should accept files under the size limit", () => {
        const file = createMockFile({ size: 1024 * 1024 }); // 1MB
        expect(pipe.transform(file)).toBe(file);
      });

      it("should accept files exactly at the size limit", () => {
        // Limit check is strictly greater-than, so the boundary value passes.
        const file = createMockFile({ size: 25_000_000 }); // 25MB (default)
        expect(pipe.transform(file)).toBe(file);
      });

      it("should reject files exceeding the size limit", () => {
        const file = createMockFile({ size: 25_000_001 }); // 1 byte over
        expect(() => pipe.transform(file)).toThrow(BadRequestException);
        expect(() => pipe.transform(file)).toThrow(/exceeds maximum/);
      });

      it("should include human-readable sizes in error message", () => {
        const file = createMockFile({ size: 30_000_000 }); // 30MB
        try {
          pipe.transform(file);
          expect.fail("Expected BadRequestException");
        } catch (error) {
          expect(error).toBeInstanceOf(BadRequestException);
          const response = (error as BadRequestException).getResponse();
          const message =
            typeof response === "string" ? response : (response as Record<string, unknown>).message;
          // Should show something like "28.6 MB" and "23.8 MB"
          expect(message).toContain("MB");
        }
      });

      it("should accept zero-size files (MIME check still applies)", () => {
        const file = createMockFile({ size: 0 });
        expect(pipe.transform(file)).toBe(file);
      });
    });

    // ==========================================
    // Edge cases
    // ==========================================
    describe("edge cases", () => {
      it("should throw if no file is provided (null)", () => {
        expect(() => pipe.transform(null as unknown as Express.Multer.File)).toThrow(
          BadRequestException
        );
        expect(() => pipe.transform(null as unknown as Express.Multer.File)).toThrow(
          /No audio file provided/
        );
      });

      it("should throw if no file is provided (undefined)", () => {
        expect(() => pipe.transform(undefined as unknown as Express.Multer.File)).toThrow(
          BadRequestException
        );
      });
    });
  });

  // ==========================================
  // Custom config
  // ==========================================
  describe("with custom config", () => {
    it("should use custom max file size", () => {
      const pipe = new AudioValidationPipe({ maxFileSize: 1_000_000 }); // 1MB
      const smallFile = createMockFile({ size: 500_000 });
      expect(pipe.transform(smallFile)).toBe(smallFile);

      const largeFile = createMockFile({ size: 1_000_001 });
      expect(() => pipe.transform(largeFile)).toThrow(BadRequestException);
    });

    it("should allow overriding accepted MIME types", () => {
      const pipe = new AudioValidationPipe({
        allowedMimeTypes: ["audio/wav"],
      });

      const wavFile = createMockFile({ mimetype: "audio/wav" });
      expect(pipe.transform(wavFile)).toBe(wavFile);

      // audio/mpeg is in the defaults but not in the override list.
      const mp3File = createMockFile({ mimetype: "audio/mpeg" });
      expect(() => pipe.transform(mp3File)).toThrow(BadRequestException);
    });
  });
});
|
||||||
102
apps/api/src/speech/pipes/audio-validation.pipe.ts
Normal file
102
apps/api/src/speech/pipes/audio-validation.pipe.ts
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
/**
|
||||||
|
* AudioValidationPipe
|
||||||
|
*
|
||||||
|
* NestJS PipeTransform that validates uploaded audio files.
|
||||||
|
* Checks MIME type against an allow-list and file size against a configurable maximum.
|
||||||
|
*
|
||||||
|
* Usage:
|
||||||
|
* ```typescript
|
||||||
|
* @Post('transcribe')
|
||||||
|
* @UseInterceptors(FileInterceptor('file'))
|
||||||
|
* async transcribe(
|
||||||
|
* @UploadedFile(new AudioValidationPipe()) file: Express.Multer.File,
|
||||||
|
* ) { ... }
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* Issue #398
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { BadRequestException } from "@nestjs/common";
|
||||||
|
import type { PipeTransform } from "@nestjs/common";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Default accepted MIME types for audio uploads.
|
||||||
|
*/
|
||||||
|
/**
 * Default accepted MIME types for audio uploads.
 * Covers the common container/codec types browsers and mobile clients emit.
 */
const DEFAULT_ALLOWED_MIME_TYPES: readonly string[] = [
  "audio/wav",
  "audio/mp3",
  "audio/mpeg",
  "audio/webm",
  "audio/ogg",
  "audio/flac",
  "audio/x-m4a",
] as const;

/**
 * Default maximum upload size in bytes (25 MB, decimal).
 * NOTE: error messages format sizes with binary units (1024-based), so this
 * limit renders as roughly "23.8 MB" in user-facing text.
 */
const DEFAULT_MAX_FILE_SIZE = 25_000_000;

/**
 * Options for customizing AudioValidationPipe behavior.
 */
export interface AudioValidationPipeOptions {
  /** Maximum file size in bytes. Defaults to 25 MB. */
  maxFileSize?: number;

  /** List of accepted MIME types. Defaults to common audio formats. */
  allowedMimeTypes?: string[];
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format bytes into a human-readable string (e.g., "25.0 MB").
|
||||||
|
*/
|
||||||
|
function formatBytes(bytes: number): string {
|
||||||
|
if (bytes < 1024) {
|
||||||
|
return `${String(bytes)} B`;
|
||||||
|
}
|
||||||
|
if (bytes < 1024 * 1024) {
|
||||||
|
return `${(bytes / 1024).toFixed(1)} KB`;
|
||||||
|
}
|
||||||
|
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class AudioValidationPipe implements PipeTransform<Express.Multer.File | undefined> {
|
||||||
|
private readonly maxFileSize: number;
|
||||||
|
private readonly allowedMimeTypes: readonly string[];
|
||||||
|
|
||||||
|
constructor(options?: AudioValidationPipeOptions) {
|
||||||
|
this.maxFileSize = options?.maxFileSize ?? DEFAULT_MAX_FILE_SIZE;
|
||||||
|
this.allowedMimeTypes = options?.allowedMimeTypes ?? DEFAULT_ALLOWED_MIME_TYPES;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate the uploaded file's MIME type and size.
|
||||||
|
*
|
||||||
|
* @param file - The uploaded file from Multer
|
||||||
|
* @returns The validated file, unchanged
|
||||||
|
* @throws {BadRequestException} If the file is missing, has an unsupported MIME type, or exceeds the size limit
|
||||||
|
*/
|
||||||
|
transform(file: Express.Multer.File | undefined): Express.Multer.File {
|
||||||
|
if (!file) {
|
||||||
|
throw new BadRequestException("No audio file provided");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate MIME type
|
||||||
|
if (!this.allowedMimeTypes.includes(file.mimetype)) {
|
||||||
|
throw new BadRequestException(
|
||||||
|
`Unsupported audio format: ${file.mimetype}. ` +
|
||||||
|
`Supported formats: ${this.allowedMimeTypes.join(", ")}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate file size
|
||||||
|
if (file.size > this.maxFileSize) {
|
||||||
|
throw new BadRequestException(
|
||||||
|
`File size ${formatBytes(file.size)} exceeds maximum allowed size of ${formatBytes(this.maxFileSize)}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return file;
|
||||||
|
}
|
||||||
|
}
|
||||||
10
apps/api/src/speech/pipes/index.ts
Normal file
10
apps/api/src/speech/pipes/index.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
/**
 * Speech Pipes barrel export
 *
 * Re-exports the validation pipes (and their option types) so consumers can
 * import from "speech/pipes" instead of individual files.
 *
 * Issue #398
 */

// Audio upload validation (MIME type + size)
export { AudioValidationPipe } from "./audio-validation.pipe";
export type { AudioValidationPipeOptions } from "./audio-validation.pipe";
// TTS text input validation (non-empty + max length)
export { TextValidationPipe } from "./text-validation.pipe";
export type { TextValidationPipeOptions } from "./text-validation.pipe";
|
||||||
136
apps/api/src/speech/pipes/text-validation.pipe.spec.ts
Normal file
136
apps/api/src/speech/pipes/text-validation.pipe.spec.ts
Normal file
@@ -0,0 +1,136 @@
|
|||||||
|
/**
|
||||||
|
* TextValidationPipe Tests
|
||||||
|
*
|
||||||
|
* Issue #398: Validates text input for TTS synthesis.
|
||||||
|
* Tests cover text length, empty text, whitespace, and configurable limits.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, expect, beforeEach } from "vitest";
|
||||||
|
import { BadRequestException } from "@nestjs/common";
|
||||||
|
import { TextValidationPipe } from "./text-validation.pipe";
|
||||||
|
|
||||||
|
describe("TextValidationPipe", () => {
  // ==========================================
  // Default config (4096 max length)
  // ==========================================
  describe("with default config", () => {
    let pipe: TextValidationPipe;

    beforeEach(() => {
      pipe = new TextValidationPipe();
    });

    // ==========================================
    // Valid text
    // ==========================================
    describe("valid text", () => {
      it("should accept normal text", () => {
        const text = "Hello, world!";
        expect(pipe.transform(text)).toBe(text);
      });

      it("should accept text at exactly the max length", () => {
        // Length check is strictly greater-than, so the boundary value passes.
        const text = "a".repeat(4096);
        expect(pipe.transform(text)).toBe(text);
      });

      it("should accept single character text", () => {
        expect(pipe.transform("a")).toBe("a");
      });

      it("should accept text with unicode characters", () => {
        const text = "Hello, world! 你好世界";
        expect(pipe.transform(text)).toBe(text);
      });

      it("should accept multi-line text", () => {
        const text = "Line one.\nLine two.\nLine three.";
        expect(pipe.transform(text)).toBe(text);
      });
    });

    // ==========================================
    // Text length validation
    // ==========================================
    describe("text length validation", () => {
      it("should reject text exceeding max length", () => {
        const text = "a".repeat(4097);
        expect(() => pipe.transform(text)).toThrow(BadRequestException);
        expect(() => pipe.transform(text)).toThrow(/exceeds maximum/);
      });

      it("should include length details in error message", () => {
        const text = "a".repeat(5000);
        try {
          pipe.transform(text);
          expect.fail("Expected BadRequestException");
        } catch (error) {
          expect(error).toBeInstanceOf(BadRequestException);
          // Nest exception responses may be a plain string or an object with
          // a `message` field; unwrap both shapes before asserting.
          const response = (error as BadRequestException).getResponse();
          const message =
            typeof response === "string" ? response : (response as Record<string, unknown>).message;
          expect(message).toContain("5000");
          expect(message).toContain("4096");
        }
      });
    });

    // ==========================================
    // Empty text validation
    // ==========================================
    describe("empty text validation", () => {
      it("should reject empty string", () => {
        expect(() => pipe.transform("")).toThrow(BadRequestException);
        expect(() => pipe.transform("")).toThrow(/Text cannot be empty/);
      });

      it("should reject whitespace-only string", () => {
        expect(() => pipe.transform(" ")).toThrow(BadRequestException);
        expect(() => pipe.transform(" ")).toThrow(/Text cannot be empty/);
      });

      it("should reject tabs and newlines only", () => {
        expect(() => pipe.transform("\t\n\r")).toThrow(BadRequestException);
      });

      it("should reject null", () => {
        expect(() => pipe.transform(null as unknown as string)).toThrow(BadRequestException);
      });

      it("should reject undefined", () => {
        expect(() => pipe.transform(undefined as unknown as string)).toThrow(BadRequestException);
      });
    });

    // ==========================================
    // Text with leading/trailing whitespace
    // ==========================================
    describe("whitespace handling", () => {
      it("should accept text with leading/trailing whitespace (preserves it)", () => {
        // trim() is only used for the emptiness check; the original text is returned.
        const text = " Hello, world! ";
        expect(pipe.transform(text)).toBe(text);
      });
    });
  });

  // ==========================================
  // Custom config
  // ==========================================
  describe("with custom config", () => {
    it("should use custom max text length", () => {
      const pipe = new TextValidationPipe({ maxTextLength: 100 });

      const shortText = "Hello";
      expect(pipe.transform(shortText)).toBe(shortText);

      const longText = "a".repeat(101);
      expect(() => pipe.transform(longText)).toThrow(BadRequestException);
    });

    it("should accept text at exact custom limit", () => {
      const pipe = new TextValidationPipe({ maxTextLength: 50 });
      const text = "a".repeat(50);
      expect(pipe.transform(text)).toBe(text);
    });
  });
});
|
||||||
65
apps/api/src/speech/pipes/text-validation.pipe.ts
Normal file
65
apps/api/src/speech/pipes/text-validation.pipe.ts
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
/**
|
||||||
|
* TextValidationPipe
|
||||||
|
*
|
||||||
|
* NestJS PipeTransform that validates text input for TTS synthesis.
|
||||||
|
* Checks that text is non-empty and within the configurable maximum length.
|
||||||
|
*
|
||||||
|
* Usage:
|
||||||
|
* ```typescript
|
||||||
|
* @Post('synthesize')
|
||||||
|
* async synthesize(
|
||||||
|
* @Body('text', new TextValidationPipe()) text: string,
|
||||||
|
* ) { ... }
|
||||||
|
* ```
|
||||||
|
*
|
||||||
|
* Issue #398
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { BadRequestException } from "@nestjs/common";
|
||||||
|
import type { PipeTransform } from "@nestjs/common";
|
||||||
|
|
||||||
|
/**
 * Default maximum text length for TTS input (4096 characters).
 * Length is measured with JavaScript string .length, i.e. UTF-16 code units.
 */
const DEFAULT_MAX_TEXT_LENGTH = 4096;

/**
 * Options for customizing TextValidationPipe behavior.
 */
export interface TextValidationPipeOptions {
  /** Maximum text length in characters (UTF-16 code units). Defaults to 4096. */
  maxTextLength?: number;
}
|
||||||
|
|
||||||
|
export class TextValidationPipe implements PipeTransform<string | null | undefined> {
|
||||||
|
private readonly maxTextLength: number;
|
||||||
|
|
||||||
|
constructor(options?: TextValidationPipeOptions) {
|
||||||
|
this.maxTextLength = options?.maxTextLength ?? DEFAULT_MAX_TEXT_LENGTH;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate the text input for TTS synthesis.
|
||||||
|
*
|
||||||
|
* @param text - The text to validate
|
||||||
|
* @returns The validated text, unchanged
|
||||||
|
* @throws {BadRequestException} If text is empty, whitespace-only, or exceeds the max length
|
||||||
|
*/
|
||||||
|
transform(text: string | null | undefined): string {
|
||||||
|
if (text === null || text === undefined) {
|
||||||
|
throw new BadRequestException("Text cannot be empty");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (text.trim().length === 0) {
|
||||||
|
throw new BadRequestException("Text cannot be empty");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (text.length > this.maxTextLength) {
|
||||||
|
throw new BadRequestException(
|
||||||
|
`Text length ${String(text.length)} exceeds maximum allowed length of ${String(this.maxTextLength)} characters`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return text;
|
||||||
|
}
|
||||||
|
}
|
||||||
329
apps/api/src/speech/providers/base-tts.provider.spec.ts
Normal file
329
apps/api/src/speech/providers/base-tts.provider.spec.ts
Normal file
@@ -0,0 +1,329 @@
|
|||||||
|
/**
|
||||||
|
* BaseTTSProvider Unit Tests
|
||||||
|
*
|
||||||
|
* Tests the abstract base class for OpenAI-compatible TTS providers.
|
||||||
|
* Uses a concrete test implementation to exercise the base class logic.
|
||||||
|
*
|
||||||
|
* Issue #391
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, expect, beforeEach, vi, type Mock } from "vitest";
|
||||||
|
import { BaseTTSProvider } from "./base-tts.provider";
|
||||||
|
import type { SpeechTier, SynthesizeOptions, AudioFormat } from "../interfaces/speech-types";
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Mock OpenAI SDK
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
const mockCreate = vi.fn();
|
||||||
|
|
||||||
|
vi.mock("openai", () => {
|
||||||
|
class MockOpenAI {
|
||||||
|
audio = {
|
||||||
|
speech: {
|
||||||
|
create: mockCreate,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return { default: MockOpenAI };
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Concrete test implementation
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
class TestTTSProvider extends BaseTTSProvider {
|
||||||
|
readonly name = "test-provider";
|
||||||
|
readonly tier: SpeechTier = "default";
|
||||||
|
|
||||||
|
constructor(baseURL: string, defaultVoice?: string, defaultFormat?: AudioFormat) {
|
||||||
|
super(baseURL, defaultVoice, defaultFormat);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Test helpers
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a mock Response-like object that mimics OpenAI SDK's audio.speech.create() return.
|
||||||
|
* The OpenAI SDK returns a Response object with arrayBuffer() method.
|
||||||
|
*/
|
||||||
|
function createMockAudioResponse(audioData: Uint8Array): { arrayBuffer: Mock } {
|
||||||
|
return {
|
||||||
|
arrayBuffer: vi.fn().mockResolvedValue(audioData.buffer),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("BaseTTSProvider", () => {
|
||||||
|
let provider: TestTTSProvider;
|
||||||
|
|
||||||
|
const testBaseURL = "http://localhost:8880/v1";
|
||||||
|
const testVoice = "af_heart";
|
||||||
|
const testFormat: AudioFormat = "mp3";
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
provider = new TestTTSProvider(testBaseURL, testVoice, testFormat);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Constructor
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("constructor", () => {
|
||||||
|
it("should create an instance with provided configuration", () => {
|
||||||
|
expect(provider).toBeDefined();
|
||||||
|
expect(provider.name).toBe("test-provider");
|
||||||
|
expect(provider.tier).toBe("default");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use default voice 'alloy' when none provided", () => {
|
||||||
|
const defaultProvider = new TestTTSProvider(testBaseURL);
|
||||||
|
expect(defaultProvider).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use default format 'mp3' when none provided", () => {
|
||||||
|
const defaultProvider = new TestTTSProvider(testBaseURL, "voice-1");
|
||||||
|
expect(defaultProvider).toBeDefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// synthesize()
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("synthesize", () => {
|
||||||
|
it("should synthesize text and return a SynthesisResult with audio buffer", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x49, 0x44, 0x33, 0x04, 0x00]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const result = await provider.synthesize("Hello, world!");
|
||||||
|
|
||||||
|
expect(result).toBeDefined();
|
||||||
|
expect(result.audio).toBeInstanceOf(Buffer);
|
||||||
|
expect(result.audio.length).toBe(audioBytes.length);
|
||||||
|
expect(result.format).toBe("mp3");
|
||||||
|
expect(result.voice).toBe("af_heart");
|
||||||
|
expect(result.tier).toBe("default");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should pass correct parameters to OpenAI SDK", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01, 0x02]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
await provider.synthesize("Test text");
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith({
|
||||||
|
model: "tts-1",
|
||||||
|
input: "Test text",
|
||||||
|
voice: "af_heart",
|
||||||
|
response_format: "mp3",
|
||||||
|
speed: 1.0,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use custom voice from options", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const options: SynthesizeOptions = { voice: "custom_voice" };
|
||||||
|
const result = await provider.synthesize("Hello", options);
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith(expect.objectContaining({ voice: "custom_voice" }));
|
||||||
|
expect(result.voice).toBe("custom_voice");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use custom format from options", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const options: SynthesizeOptions = { format: "wav" };
|
||||||
|
const result = await provider.synthesize("Hello", options);
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith(expect.objectContaining({ response_format: "wav" }));
|
||||||
|
expect(result.format).toBe("wav");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use custom speed from options", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const options: SynthesizeOptions = { speed: 1.5 };
|
||||||
|
await provider.synthesize("Hello", options);
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith(expect.objectContaining({ speed: 1.5 }));
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw an error when synthesis fails", async () => {
|
||||||
|
mockCreate.mockRejectedValue(new Error("Connection refused"));
|
||||||
|
|
||||||
|
await expect(provider.synthesize("Hello")).rejects.toThrow(
|
||||||
|
"TTS synthesis failed for test-provider: Connection refused"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw an error when response arrayBuffer fails", async () => {
|
||||||
|
const mockResponse = {
|
||||||
|
arrayBuffer: vi.fn().mockRejectedValue(new Error("Read error")),
|
||||||
|
};
|
||||||
|
mockCreate.mockResolvedValue(mockResponse);
|
||||||
|
|
||||||
|
await expect(provider.synthesize("Hello")).rejects.toThrow(
|
||||||
|
"TTS synthesis failed for test-provider: Read error"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle empty text input gracefully", async () => {
|
||||||
|
const audioBytes = new Uint8Array([]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const result = await provider.synthesize("");
|
||||||
|
|
||||||
|
expect(result.audio).toBeInstanceOf(Buffer);
|
||||||
|
expect(result.audio.length).toBe(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle non-Error exceptions", async () => {
|
||||||
|
mockCreate.mockRejectedValue("string error");
|
||||||
|
|
||||||
|
await expect(provider.synthesize("Hello")).rejects.toThrow(
|
||||||
|
"TTS synthesis failed for test-provider: string error"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// listVoices()
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("listVoices", () => {
|
||||||
|
it("should return default voice list with the configured default voice", async () => {
|
||||||
|
const voices = await provider.listVoices();
|
||||||
|
|
||||||
|
expect(voices).toBeInstanceOf(Array);
|
||||||
|
expect(voices.length).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
const defaultVoice = voices.find((v) => v.isDefault === true);
|
||||||
|
expect(defaultVoice).toBeDefined();
|
||||||
|
expect(defaultVoice?.id).toBe("af_heart");
|
||||||
|
expect(defaultVoice?.tier).toBe("default");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set tier correctly on all returned voices", async () => {
|
||||||
|
const voices = await provider.listVoices();
|
||||||
|
|
||||||
|
for (const voice of voices) {
|
||||||
|
expect(voice.tier).toBe("default");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// isHealthy()
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("isHealthy", () => {
|
||||||
|
it("should return true when the TTS server is reachable", async () => {
|
||||||
|
// Mock global fetch for health check
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({
|
||||||
|
ok: true,
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const healthy = await provider.isHealthy();
|
||||||
|
|
||||||
|
expect(healthy).toBe(true);
|
||||||
|
expect(mockFetch).toHaveBeenCalled();
|
||||||
|
|
||||||
|
vi.unstubAllGlobals();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return false when the TTS server is unreachable", async () => {
|
||||||
|
const mockFetch = vi.fn().mockRejectedValue(new Error("ECONNREFUSED"));
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const healthy = await provider.isHealthy();
|
||||||
|
|
||||||
|
expect(healthy).toBe(false);
|
||||||
|
|
||||||
|
vi.unstubAllGlobals();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return false when the TTS server returns an error status", async () => {
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({
|
||||||
|
ok: false,
|
||||||
|
status: 503,
|
||||||
|
});
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const healthy = await provider.isHealthy();
|
||||||
|
|
||||||
|
expect(healthy).toBe(false);
|
||||||
|
|
||||||
|
vi.unstubAllGlobals();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use the base URL for the health check", async () => {
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({ ok: true, status: 200 });
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
await provider.isHealthy();
|
||||||
|
|
||||||
|
// Should call a health-related endpoint at the base URL
|
||||||
|
const calledUrl = mockFetch.mock.calls[0][0] as string;
|
||||||
|
expect(calledUrl).toContain("localhost:8880");
|
||||||
|
|
||||||
|
vi.unstubAllGlobals();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set a timeout for the health check", async () => {
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({ ok: true, status: 200 });
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
await provider.isHealthy();
|
||||||
|
|
||||||
|
// Should pass an AbortSignal for timeout
|
||||||
|
const fetchOptions = mockFetch.mock.calls[0][1] as RequestInit;
|
||||||
|
expect(fetchOptions.signal).toBeDefined();
|
||||||
|
|
||||||
|
vi.unstubAllGlobals();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Default values
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("default values", () => {
|
||||||
|
it("should use 'alloy' as default voice when none specified", async () => {
|
||||||
|
const defaultProvider = new TestTTSProvider(testBaseURL);
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
await defaultProvider.synthesize("Hello");
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith(expect.objectContaining({ voice: "alloy" }));
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use 'mp3' as default format when none specified", async () => {
|
||||||
|
const defaultProvider = new TestTTSProvider(testBaseURL);
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
await defaultProvider.synthesize("Hello");
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith(expect.objectContaining({ response_format: "mp3" }));
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use speed 1.0 as default speed", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
await provider.synthesize("Hello");
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith(expect.objectContaining({ speed: 1.0 }));
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
189
apps/api/src/speech/providers/base-tts.provider.ts
Normal file
189
apps/api/src/speech/providers/base-tts.provider.ts
Normal file
@@ -0,0 +1,189 @@
|
|||||||
|
/**
|
||||||
|
* Base TTS Provider
|
||||||
|
*
|
||||||
|
* Abstract base class implementing common OpenAI-compatible TTS logic.
|
||||||
|
* All concrete TTS providers (Kokoro, Chatterbox, Piper) extend this class.
|
||||||
|
*
|
||||||
|
* Uses the OpenAI SDK with a configurable baseURL to communicate with
|
||||||
|
* OpenAI-compatible speech synthesis endpoints.
|
||||||
|
*
|
||||||
|
* Issue #391
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Logger } from "@nestjs/common";
|
||||||
|
import OpenAI from "openai";
|
||||||
|
import type { ITTSProvider } from "../interfaces/tts-provider.interface";
|
||||||
|
import type {
|
||||||
|
SpeechTier,
|
||||||
|
SynthesizeOptions,
|
||||||
|
SynthesisResult,
|
||||||
|
VoiceInfo,
|
||||||
|
AudioFormat,
|
||||||
|
} from "../interfaces/speech-types";
|
||||||
|
|
||||||
|
/** Default TTS model identifier used for OpenAI-compatible APIs */
|
||||||
|
const DEFAULT_MODEL = "tts-1";
|
||||||
|
|
||||||
|
/** Default voice when none is configured */
|
||||||
|
const DEFAULT_VOICE = "alloy";
|
||||||
|
|
||||||
|
/** Default audio format */
|
||||||
|
const DEFAULT_FORMAT: AudioFormat = "mp3";
|
||||||
|
|
||||||
|
/** Default speech speed multiplier */
|
||||||
|
const DEFAULT_SPEED = 1.0;
|
||||||
|
|
||||||
|
/** Health check timeout in milliseconds */
|
||||||
|
const HEALTH_CHECK_TIMEOUT_MS = 5000;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Abstract base class for OpenAI-compatible TTS providers.
|
||||||
|
*
|
||||||
|
* Provides common logic for:
|
||||||
|
* - Synthesizing text to audio via OpenAI SDK's audio.speech.create()
|
||||||
|
* - Listing available voices (with a default implementation)
|
||||||
|
* - Health checking the TTS endpoint
|
||||||
|
*
|
||||||
|
* Subclasses must set `name` and `tier` properties and may override
|
||||||
|
* `listVoices()` to provide provider-specific voice lists.
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* ```typescript
|
||||||
|
* class KokoroProvider extends BaseTTSProvider {
|
||||||
|
* readonly name = "kokoro";
|
||||||
|
* readonly tier: SpeechTier = "default";
|
||||||
|
*
|
||||||
|
* constructor(baseURL: string) {
|
||||||
|
* super(baseURL, "af_heart", "mp3");
|
||||||
|
* }
|
||||||
|
* }
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export abstract class BaseTTSProvider implements ITTSProvider {
|
||||||
|
abstract readonly name: string;
|
||||||
|
abstract readonly tier: SpeechTier;
|
||||||
|
|
||||||
|
protected readonly logger: Logger;
|
||||||
|
protected readonly client: OpenAI;
|
||||||
|
protected readonly baseURL: string;
|
||||||
|
protected readonly defaultVoice: string;
|
||||||
|
protected readonly defaultFormat: AudioFormat;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new BaseTTSProvider.
|
||||||
|
*
|
||||||
|
* @param baseURL - The base URL for the OpenAI-compatible TTS endpoint
|
||||||
|
* @param defaultVoice - Default voice ID to use when none is specified in options
|
||||||
|
* @param defaultFormat - Default audio format to use when none is specified in options
|
||||||
|
*/
|
||||||
|
constructor(
|
||||||
|
baseURL: string,
|
||||||
|
defaultVoice: string = DEFAULT_VOICE,
|
||||||
|
defaultFormat: AudioFormat = DEFAULT_FORMAT
|
||||||
|
) {
|
||||||
|
this.baseURL = baseURL;
|
||||||
|
this.defaultVoice = defaultVoice;
|
||||||
|
this.defaultFormat = defaultFormat;
|
||||||
|
this.logger = new Logger(this.constructor.name);
|
||||||
|
|
||||||
|
this.client = new OpenAI({
|
||||||
|
baseURL,
|
||||||
|
apiKey: "not-needed", // Self-hosted services don't require an API key
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Synthesize text to audio using the OpenAI-compatible TTS endpoint.
|
||||||
|
*
|
||||||
|
* Calls `client.audio.speech.create()` with the provided text and options,
|
||||||
|
* then converts the response to a Buffer.
|
||||||
|
*
|
||||||
|
* @param text - Text to convert to speech
|
||||||
|
* @param options - Optional synthesis parameters (voice, format, speed)
|
||||||
|
* @returns Synthesis result with audio buffer and metadata
|
||||||
|
* @throws {Error} If synthesis fails
|
||||||
|
*/
|
||||||
|
async synthesize(text: string, options?: SynthesizeOptions): Promise<SynthesisResult> {
|
||||||
|
const voice = options?.voice ?? this.defaultVoice;
|
||||||
|
const format = options?.format ?? this.defaultFormat;
|
||||||
|
const speed = options?.speed ?? DEFAULT_SPEED;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await this.client.audio.speech.create({
|
||||||
|
model: DEFAULT_MODEL,
|
||||||
|
input: text,
|
||||||
|
voice,
|
||||||
|
response_format: format,
|
||||||
|
speed,
|
||||||
|
});
|
||||||
|
|
||||||
|
const arrayBuffer = await response.arrayBuffer();
|
||||||
|
const audio = Buffer.from(arrayBuffer);
|
||||||
|
|
||||||
|
return {
|
||||||
|
audio,
|
||||||
|
format,
|
||||||
|
voice,
|
||||||
|
tier: this.tier,
|
||||||
|
};
|
||||||
|
} catch (error: unknown) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
|
this.logger.error(`TTS synthesis failed: ${message}`);
|
||||||
|
throw new Error(`TTS synthesis failed for ${this.name}: ${message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List available voices for this provider.
|
||||||
|
*
|
||||||
|
* Default implementation returns the configured default voice.
|
||||||
|
* Subclasses should override this to provide a full voice list
|
||||||
|
* from their specific TTS engine.
|
||||||
|
*
|
||||||
|
* @returns Array of voice information objects
|
||||||
|
*/
|
||||||
|
listVoices(): Promise<VoiceInfo[]> {
|
||||||
|
return Promise.resolve([
|
||||||
|
{
|
||||||
|
id: this.defaultVoice,
|
||||||
|
name: this.defaultVoice,
|
||||||
|
tier: this.tier,
|
||||||
|
isDefault: true,
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the TTS server is reachable and healthy.
|
||||||
|
*
|
||||||
|
* Performs a simple HTTP request to the base URL's models endpoint
|
||||||
|
* to verify the server is running and responding.
|
||||||
|
*
|
||||||
|
* @returns true if the server is reachable, false otherwise
|
||||||
|
*/
|
||||||
|
async isHealthy(): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
// Extract the base URL without the /v1 path for health checking
|
||||||
|
const healthUrl = this.baseURL.replace(/\/v1\/?$/, "/v1/models");
|
||||||
|
const controller = new AbortController();
|
||||||
|
const timeoutId = setTimeout(() => {
|
||||||
|
controller.abort();
|
||||||
|
}, HEALTH_CHECK_TIMEOUT_MS);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(healthUrl, {
|
||||||
|
method: "GET",
|
||||||
|
signal: controller.signal,
|
||||||
|
});
|
||||||
|
|
||||||
|
return response.ok;
|
||||||
|
} finally {
|
||||||
|
clearTimeout(timeoutId);
|
||||||
|
}
|
||||||
|
} catch (error: unknown) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
|
this.logger.warn(`Health check failed for ${this.name}: ${message}`);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
436
apps/api/src/speech/providers/chatterbox-tts.provider.spec.ts
Normal file
436
apps/api/src/speech/providers/chatterbox-tts.provider.spec.ts
Normal file
@@ -0,0 +1,436 @@
|
|||||||
|
/**
|
||||||
|
* ChatterboxTTSProvider Unit Tests
|
||||||
|
*
|
||||||
|
* Tests the premium-tier TTS provider with voice cloning and
|
||||||
|
* emotion exaggeration support for Chatterbox.
|
||||||
|
*
|
||||||
|
* Issue #394
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, expect, beforeEach, vi, type Mock } from "vitest";
|
||||||
|
import { ChatterboxTTSProvider } from "./chatterbox-tts.provider";
|
||||||
|
import type { ChatterboxSynthesizeOptions, AudioFormat } from "../interfaces/speech-types";
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Mock OpenAI SDK
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
const mockCreate = vi.fn();
|
||||||
|
|
||||||
|
vi.mock("openai", () => {
|
||||||
|
class MockOpenAI {
|
||||||
|
audio = {
|
||||||
|
speech: {
|
||||||
|
create: mockCreate,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return { default: MockOpenAI };
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Test helpers
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a mock Response-like object that mimics OpenAI SDK's audio.speech.create() return.
|
||||||
|
*/
|
||||||
|
function createMockAudioResponse(audioData: Uint8Array): { arrayBuffer: Mock } {
|
||||||
|
return {
|
||||||
|
arrayBuffer: vi.fn().mockResolvedValue(audioData.buffer),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("ChatterboxTTSProvider", () => {
|
||||||
|
let provider: ChatterboxTTSProvider;
|
||||||
|
|
||||||
|
const testBaseURL = "http://chatterbox-tts:8881/v1";
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
provider = new ChatterboxTTSProvider(testBaseURL);
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Provider identity
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("provider identity", () => {
|
||||||
|
it("should have name 'chatterbox'", () => {
|
||||||
|
expect(provider.name).toBe("chatterbox");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should have tier 'premium'", () => {
|
||||||
|
expect(provider.tier).toBe("premium");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Constructor
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("constructor", () => {
|
||||||
|
it("should create an instance with the provided baseURL", () => {
|
||||||
|
expect(provider).toBeDefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use 'default' as the default voice", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01, 0x02]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const result = await provider.synthesize("Hello");
|
||||||
|
|
||||||
|
expect(result.voice).toBe("default");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use 'wav' as the default format", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01, 0x02]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const result = await provider.synthesize("Hello");
|
||||||
|
|
||||||
|
expect(result.format).toBe("wav");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// synthesize() — basic (no Chatterbox-specific options)
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("synthesize (basic)", () => {
|
||||||
|
it("should synthesize text and return a SynthesisResult", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x49, 0x44, 0x33, 0x04, 0x00]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const result = await provider.synthesize("Hello, world!");
|
||||||
|
|
||||||
|
expect(result).toBeDefined();
|
||||||
|
expect(result.audio).toBeInstanceOf(Buffer);
|
||||||
|
expect(result.audio.length).toBe(audioBytes.length);
|
||||||
|
expect(result.format).toBe("wav");
|
||||||
|
expect(result.voice).toBe("default");
|
||||||
|
expect(result.tier).toBe("premium");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should pass correct base parameters to OpenAI SDK when no extra options", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
await provider.synthesize("Test text");
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith({
|
||||||
|
model: "tts-1",
|
||||||
|
input: "Test text",
|
||||||
|
voice: "default",
|
||||||
|
response_format: "wav",
|
||||||
|
speed: 1.0,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use custom voice from options", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const options: ChatterboxSynthesizeOptions = { voice: "cloned_voice_1" };
|
||||||
|
const result = await provider.synthesize("Hello", options);
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith(expect.objectContaining({ voice: "cloned_voice_1" }));
|
||||||
|
expect(result.voice).toBe("cloned_voice_1");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use custom format from options", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const options: ChatterboxSynthesizeOptions = { format: "mp3" as AudioFormat };
|
||||||
|
const result = await provider.synthesize("Hello", options);
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith(expect.objectContaining({ response_format: "mp3" }));
|
||||||
|
expect(result.format).toBe("mp3");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw on synthesis failure", async () => {
|
||||||
|
mockCreate.mockRejectedValue(new Error("GPU out of memory"));
|
||||||
|
|
||||||
|
await expect(provider.synthesize("Hello")).rejects.toThrow(
|
||||||
|
"TTS synthesis failed for chatterbox: GPU out of memory"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// synthesize() — voice cloning (referenceAudio)
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("synthesize (voice cloning)", () => {
|
||||||
|
it("should pass referenceAudio as base64 in extra body params", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01, 0x02]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const referenceAudio = Buffer.from("fake-audio-data-for-cloning");
|
||||||
|
const options: ChatterboxSynthesizeOptions = {
|
||||||
|
referenceAudio,
|
||||||
|
};
|
||||||
|
|
||||||
|
await provider.synthesize("Clone my voice", options);
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
input: "Clone my voice",
|
||||||
|
reference_audio: referenceAudio.toString("base64"),
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not include reference_audio when referenceAudio is not provided", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
await provider.synthesize("No cloning");
|
||||||
|
|
||||||
|
const callArgs = mockCreate.mock.calls[0][0] as Record<string, unknown>;
|
||||||
|
expect(callArgs).not.toHaveProperty("reference_audio");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// synthesize() — emotion exaggeration
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("synthesize (emotion exaggeration)", () => {
|
||||||
|
it("should pass emotionExaggeration as exaggeration in extra body params", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01, 0x02]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const options: ChatterboxSynthesizeOptions = {
|
||||||
|
emotionExaggeration: 0.7,
|
||||||
|
};
|
||||||
|
|
||||||
|
await provider.synthesize("Very emotional text", options);
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
exaggeration: 0.7,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not include exaggeration when emotionExaggeration is not provided", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
await provider.synthesize("Neutral text");
|
||||||
|
|
||||||
|
const callArgs = mockCreate.mock.calls[0][0] as Record<string, unknown>;
|
||||||
|
expect(callArgs).not.toHaveProperty("exaggeration");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should accept emotionExaggeration of 0.0", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const options: ChatterboxSynthesizeOptions = {
|
||||||
|
emotionExaggeration: 0.0,
|
||||||
|
};
|
||||||
|
|
||||||
|
await provider.synthesize("Minimal emotion", options);
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
exaggeration: 0.0,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should accept emotionExaggeration of 1.0", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const options: ChatterboxSynthesizeOptions = {
|
||||||
|
emotionExaggeration: 1.0,
|
||||||
|
};
|
||||||
|
|
||||||
|
await provider.synthesize("Maximum emotion", options);
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
exaggeration: 1.0,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should clamp emotionExaggeration above 1.0 to 1.0", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const options: ChatterboxSynthesizeOptions = {
|
||||||
|
emotionExaggeration: 1.5,
|
||||||
|
};
|
||||||
|
|
||||||
|
await provider.synthesize("Over the top", options);
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
exaggeration: 1.0,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should clamp emotionExaggeration below 0.0 to 0.0", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const options: ChatterboxSynthesizeOptions = {
|
||||||
|
emotionExaggeration: -0.5,
|
||||||
|
};
|
||||||
|
|
||||||
|
await provider.synthesize("Negative emotion", options);
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
exaggeration: 0.0,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// synthesize() — combined options
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("synthesize (combined options)", () => {
|
||||||
|
it("should handle referenceAudio and emotionExaggeration together", async () => {
|
||||||
|
const audioBytes = new Uint8Array([0x01, 0x02, 0x03]);
|
||||||
|
mockCreate.mockResolvedValue(createMockAudioResponse(audioBytes));
|
||||||
|
|
||||||
|
const referenceAudio = Buffer.from("reference-audio-sample");
|
||||||
|
const options: ChatterboxSynthesizeOptions = {
|
||||||
|
voice: "custom_voice",
|
||||||
|
format: "mp3",
|
||||||
|
speed: 0.9,
|
||||||
|
referenceAudio,
|
||||||
|
emotionExaggeration: 0.6,
|
||||||
|
};
|
||||||
|
|
||||||
|
const result = await provider.synthesize("Full options test", options);
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledWith({
|
||||||
|
model: "tts-1",
|
||||||
|
input: "Full options test",
|
||||||
|
voice: "custom_voice",
|
||||||
|
response_format: "mp3",
|
||||||
|
speed: 0.9,
|
||||||
|
reference_audio: referenceAudio.toString("base64"),
|
||||||
|
exaggeration: 0.6,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.audio).toBeInstanceOf(Buffer);
|
||||||
|
expect(result.voice).toBe("custom_voice");
|
||||||
|
expect(result.format).toBe("mp3");
|
||||||
|
expect(result.tier).toBe("premium");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// isHealthy() — graceful degradation
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("isHealthy (graceful degradation)", () => {
|
||||||
|
it("should return true when the Chatterbox server is reachable", async () => {
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({
|
||||||
|
ok: true,
|
||||||
|
status: 200,
|
||||||
|
});
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const healthy = await provider.isHealthy();
|
||||||
|
|
||||||
|
expect(healthy).toBe(true);
|
||||||
|
|
||||||
|
vi.unstubAllGlobals();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return false when GPU is unavailable (server unreachable)", async () => {
|
||||||
|
const mockFetch = vi.fn().mockRejectedValue(new Error("ECONNREFUSED"));
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const healthy = await provider.isHealthy();
|
||||||
|
|
||||||
|
expect(healthy).toBe(false);
|
||||||
|
|
||||||
|
vi.unstubAllGlobals();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return false when the server returns 503 (GPU overloaded)", async () => {
|
||||||
|
const mockFetch = vi.fn().mockResolvedValue({
|
||||||
|
ok: false,
|
||||||
|
status: 503,
|
||||||
|
});
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const healthy = await provider.isHealthy();
|
||||||
|
|
||||||
|
expect(healthy).toBe(false);
|
||||||
|
|
||||||
|
vi.unstubAllGlobals();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return false on timeout (slow GPU response)", async () => {
|
||||||
|
const mockFetch = vi
|
||||||
|
.fn()
|
||||||
|
.mockRejectedValue(new Error("AbortError: The operation was aborted"));
|
||||||
|
vi.stubGlobal("fetch", mockFetch);
|
||||||
|
|
||||||
|
const healthy = await provider.isHealthy();
|
||||||
|
|
||||||
|
expect(healthy).toBe(false);
|
||||||
|
|
||||||
|
vi.unstubAllGlobals();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// listVoices()
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("listVoices", () => {
|
||||||
|
it("should return the default voice in the premium tier", async () => {
|
||||||
|
const voices = await provider.listVoices();
|
||||||
|
|
||||||
|
expect(voices).toBeInstanceOf(Array);
|
||||||
|
expect(voices.length).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
const defaultVoice = voices.find((v) => v.isDefault === true);
|
||||||
|
expect(defaultVoice).toBeDefined();
|
||||||
|
expect(defaultVoice?.id).toBe("default");
|
||||||
|
expect(defaultVoice?.tier).toBe("premium");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set tier to 'premium' on all voices", async () => {
|
||||||
|
const voices = await provider.listVoices();
|
||||||
|
|
||||||
|
for (const voice of voices) {
|
||||||
|
expect(voice.tier).toBe("premium");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// supportedLanguages
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("supportedLanguages", () => {
|
||||||
|
it("should expose a list of supported languages for cross-language transfer", () => {
|
||||||
|
const languages = provider.supportedLanguages;
|
||||||
|
|
||||||
|
expect(languages).toBeInstanceOf(Array);
|
||||||
|
expect(languages.length).toBe(23);
|
||||||
|
expect(languages).toContain("en");
|
||||||
|
expect(languages).toContain("fr");
|
||||||
|
expect(languages).toContain("de");
|
||||||
|
expect(languages).toContain("es");
|
||||||
|
expect(languages).toContain("ja");
|
||||||
|
expect(languages).toContain("zh");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
169
apps/api/src/speech/providers/chatterbox-tts.provider.ts
Normal file
169
apps/api/src/speech/providers/chatterbox-tts.provider.ts
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
/**
|
||||||
|
* Chatterbox TTS Provider
|
||||||
|
*
|
||||||
|
* Premium-tier TTS provider with voice cloning and emotion exaggeration support.
|
||||||
|
* Uses the Chatterbox TTS Server's OpenAI-compatible endpoint with extra body
|
||||||
|
* parameters for voice cloning (reference_audio) and emotion control (exaggeration).
|
||||||
|
*
|
||||||
|
* Key capabilities:
|
||||||
|
* - Voice cloning via reference audio sample
|
||||||
|
* - Emotion exaggeration control (0.0 - 1.0)
|
||||||
|
* - Cross-language voice transfer (23 languages)
|
||||||
|
* - Graceful degradation when GPU is unavailable (isHealthy returns false)
|
||||||
|
*
|
||||||
|
* The provider is optional and only instantiated when TTS_PREMIUM_ENABLED=true.
|
||||||
|
*
|
||||||
|
* Issue #394
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { SpeechCreateParams } from "openai/resources/audio/speech";
|
||||||
|
import { BaseTTSProvider } from "./base-tts.provider";
|
||||||
|
import type { SpeechTier, SynthesizeOptions, SynthesisResult } from "../interfaces/speech-types";
|
||||||
|
import type { ChatterboxSynthesizeOptions } from "../interfaces/speech-types";
|
||||||
|
|
||||||
|
/** Default voice for Chatterbox */
|
||||||
|
const CHATTERBOX_DEFAULT_VOICE = "default";
|
||||||
|
|
||||||
|
/** Default audio format for Chatterbox (WAV for highest quality) */
|
||||||
|
const CHATTERBOX_DEFAULT_FORMAT = "wav" as const;
|
||||||
|
|
||||||
|
/** Default TTS model identifier */
|
||||||
|
const DEFAULT_MODEL = "tts-1";
|
||||||
|
|
||||||
|
/** Default speech speed multiplier */
|
||||||
|
const DEFAULT_SPEED = 1.0;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Languages supported by Chatterbox for cross-language voice transfer.
|
||||||
|
* Chatterbox supports 23 languages for voice cloning and synthesis.
|
||||||
|
*/
|
||||||
|
const SUPPORTED_LANGUAGES: readonly string[] = [
|
||||||
|
"en", // English
|
||||||
|
"fr", // French
|
||||||
|
"de", // German
|
||||||
|
"es", // Spanish
|
||||||
|
"it", // Italian
|
||||||
|
"pt", // Portuguese
|
||||||
|
"nl", // Dutch
|
||||||
|
"pl", // Polish
|
||||||
|
"ru", // Russian
|
||||||
|
"uk", // Ukrainian
|
||||||
|
"ja", // Japanese
|
||||||
|
"zh", // Chinese
|
||||||
|
"ko", // Korean
|
||||||
|
"ar", // Arabic
|
||||||
|
"hi", // Hindi
|
||||||
|
"tr", // Turkish
|
||||||
|
"sv", // Swedish
|
||||||
|
"da", // Danish
|
||||||
|
"fi", // Finnish
|
||||||
|
"no", // Norwegian
|
||||||
|
"cs", // Czech
|
||||||
|
"el", // Greek
|
||||||
|
"ro", // Romanian
|
||||||
|
] as const;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Chatterbox TTS provider (premium tier).
|
||||||
|
*
|
||||||
|
* Extends BaseTTSProvider with voice cloning and emotion exaggeration support.
|
||||||
|
* The Chatterbox TTS Server uses an OpenAI-compatible API but accepts additional
|
||||||
|
* body parameters for its advanced features.
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* ```typescript
|
||||||
|
* const provider = new ChatterboxTTSProvider("http://chatterbox:8881/v1");
|
||||||
|
*
|
||||||
|
* // Basic synthesis
|
||||||
|
* const result = await provider.synthesize("Hello!");
|
||||||
|
*
|
||||||
|
* // Voice cloning with emotion
|
||||||
|
* const clonedResult = await provider.synthesize("Hello!", {
|
||||||
|
* referenceAudio: myAudioBuffer,
|
||||||
|
* emotionExaggeration: 0.7,
|
||||||
|
* });
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export class ChatterboxTTSProvider extends BaseTTSProvider {
|
||||||
|
readonly name = "chatterbox";
|
||||||
|
readonly tier: SpeechTier = "premium";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Languages supported for cross-language voice transfer.
|
||||||
|
*/
|
||||||
|
readonly supportedLanguages: readonly string[] = SUPPORTED_LANGUAGES;
|
||||||
|
|
||||||
|
constructor(baseURL: string) {
|
||||||
|
super(baseURL, CHATTERBOX_DEFAULT_VOICE, CHATTERBOX_DEFAULT_FORMAT);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Synthesize text to audio with optional voice cloning and emotion control.
|
||||||
|
*
|
||||||
|
* Overrides the base synthesize() to support Chatterbox-specific options:
|
||||||
|
* - `referenceAudio`: Buffer of audio to clone the voice from (sent as base64)
|
||||||
|
* - `emotionExaggeration`: Emotion intensity factor (0.0 - 1.0, clamped)
|
||||||
|
*
|
||||||
|
* These are passed as extra body parameters to the OpenAI-compatible endpoint,
|
||||||
|
* which Chatterbox's API accepts alongside the standard parameters.
|
||||||
|
*
|
||||||
|
* @param text - Text to convert to speech
|
||||||
|
* @param options - Synthesis options, optionally including Chatterbox-specific params
|
||||||
|
* @returns Synthesis result with audio buffer and metadata
|
||||||
|
* @throws {Error} If synthesis fails (e.g., GPU unavailable)
|
||||||
|
*/
|
||||||
|
async synthesize(
|
||||||
|
text: string,
|
||||||
|
options?: SynthesizeOptions | ChatterboxSynthesizeOptions
|
||||||
|
): Promise<SynthesisResult> {
|
||||||
|
const voice = options?.voice ?? this.defaultVoice;
|
||||||
|
const format = options?.format ?? this.defaultFormat;
|
||||||
|
const speed = options?.speed ?? DEFAULT_SPEED;
|
||||||
|
|
||||||
|
// Build the request body with standard OpenAI-compatible params
|
||||||
|
const requestBody: Record<string, unknown> = {
|
||||||
|
model: DEFAULT_MODEL,
|
||||||
|
input: text,
|
||||||
|
voice,
|
||||||
|
response_format: format,
|
||||||
|
speed,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Add Chatterbox-specific params if provided
|
||||||
|
const chatterboxOptions = options as ChatterboxSynthesizeOptions | undefined;
|
||||||
|
|
||||||
|
if (chatterboxOptions?.referenceAudio) {
|
||||||
|
requestBody.reference_audio = chatterboxOptions.referenceAudio.toString("base64");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (chatterboxOptions?.emotionExaggeration !== undefined) {
|
||||||
|
// Clamp to valid range [0.0, 1.0]
|
||||||
|
requestBody.exaggeration = Math.max(
|
||||||
|
0.0,
|
||||||
|
Math.min(1.0, chatterboxOptions.emotionExaggeration)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Use the OpenAI SDK's create method, passing extra params
|
||||||
|
// The OpenAI SDK allows additional body params to be passed through
|
||||||
|
const response = await this.client.audio.speech.create(
|
||||||
|
requestBody as unknown as SpeechCreateParams
|
||||||
|
);
|
||||||
|
|
||||||
|
const arrayBuffer = await response.arrayBuffer();
|
||||||
|
const audio = Buffer.from(arrayBuffer);
|
||||||
|
|
||||||
|
return {
|
||||||
|
audio,
|
||||||
|
format,
|
||||||
|
voice,
|
||||||
|
tier: this.tier,
|
||||||
|
};
|
||||||
|
} catch (error: unknown) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error);
|
||||||
|
this.logger.error(`TTS synthesis failed: ${message}`);
|
||||||
|
throw new Error(`TTS synthesis failed for ${this.name}: ${message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
316
apps/api/src/speech/providers/kokoro-tts.provider.spec.ts
Normal file
316
apps/api/src/speech/providers/kokoro-tts.provider.spec.ts
Normal file
@@ -0,0 +1,316 @@
|
|||||||
|
/**
|
||||||
|
* KokoroTtsProvider Unit Tests
|
||||||
|
*
|
||||||
|
* Tests the Kokoro-FastAPI TTS provider with full voice catalog,
|
||||||
|
* voice metadata parsing, and Kokoro-specific feature constants.
|
||||||
|
*
|
||||||
|
* Issue #393
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, expect, vi, beforeEach } from "vitest";
|
||||||
|
import {
|
||||||
|
KokoroTtsProvider,
|
||||||
|
KOKORO_SUPPORTED_FORMATS,
|
||||||
|
KOKORO_SPEED_RANGE,
|
||||||
|
KOKORO_VOICES,
|
||||||
|
parseVoicePrefix,
|
||||||
|
} from "./kokoro-tts.provider";
|
||||||
|
import type { VoiceInfo } from "../interfaces/speech-types";
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Mock OpenAI SDK
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
vi.mock("openai", () => {
|
||||||
|
class MockOpenAI {
|
||||||
|
audio = {
|
||||||
|
speech: {
|
||||||
|
create: vi.fn(),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return { default: MockOpenAI };
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Provider identity
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
// Test suite for the KokoroTtsProvider class: identity, voice listing,
// catalog completeness, prefix-derived metadata, and constructor defaults.
describe("KokoroTtsProvider", () => {
  const testBaseURL = "http://kokoro-tts:8880/v1";
  let provider: KokoroTtsProvider;

  // Fresh provider per test so cases cannot leak state into each other.
  beforeEach(() => {
    provider = new KokoroTtsProvider(testBaseURL);
  });

  describe("provider identity", () => {
    it("should have name 'kokoro'", () => {
      expect(provider.name).toBe("kokoro");
    });

    it("should have tier 'default'", () => {
      expect(provider.tier).toBe("default");
    });
  });

  // ==========================================
  // listVoices()
  // ==========================================

  describe("listVoices", () => {
    let voices: VoiceInfo[];

    // listVoices() is async; resolve once per test from the fresh provider.
    beforeEach(async () => {
      voices = await provider.listVoices();
    });

    it("should return an array of VoiceInfo objects", () => {
      expect(voices).toBeInstanceOf(Array);
      expect(voices.length).toBeGreaterThan(0);
    });

    it("should return at least 10 voices", () => {
      // The issue specifies at least: af_heart, af_bella, af_nicole, af_sarah, af_sky,
      // am_adam, am_michael, bf_emma, bf_isabella, bm_george, bm_lewis
      expect(voices.length).toBeGreaterThanOrEqual(10);
    });

    it("should set tier to 'default' on all voices", () => {
      for (const voice of voices) {
        expect(voice.tier).toBe("default");
      }
    });

    it("should have exactly one default voice", () => {
      const defaults = voices.filter((v) => v.isDefault === true);
      expect(defaults.length).toBe(1);
    });

    it("should mark af_heart as the default voice", () => {
      const defaultVoice = voices.find((v) => v.isDefault === true);
      expect(defaultVoice).toBeDefined();
      expect(defaultVoice?.id).toBe("af_heart");
    });

    it("should have an id and name for every voice", () => {
      for (const voice of voices) {
        expect(voice.id).toBeTruthy();
        expect(voice.name).toBeTruthy();
      }
    });

    it("should set language on every voice", () => {
      for (const voice of voices) {
        expect(voice.language).toBeTruthy();
      }
    });

    // ==========================================
    // Required voices from the issue
    // ==========================================

    describe("required voices", () => {
      const requiredVoiceIds = [
        "af_heart",
        "af_bella",
        "af_nicole",
        "af_sarah",
        "af_sky",
        "am_adam",
        "am_michael",
        "bf_emma",
        "bf_isabella",
        "bm_george",
        "bm_lewis",
      ];

      it.each(requiredVoiceIds)("should include voice '%s'", (voiceId) => {
        const voice = voices.find((v) => v.id === voiceId);
        expect(voice).toBeDefined();
      });
    });

    // ==========================================
    // Voice metadata from prefix
    // ==========================================

    describe("voice metadata from prefix", () => {
      it("should set language to 'en-US' for af_ prefix voices", () => {
        const voice = voices.find((v) => v.id === "af_heart");
        expect(voice?.language).toBe("en-US");
      });

      it("should set language to 'en-US' for am_ prefix voices", () => {
        const voice = voices.find((v) => v.id === "am_adam");
        expect(voice?.language).toBe("en-US");
      });

      it("should set language to 'en-GB' for bf_ prefix voices", () => {
        const voice = voices.find((v) => v.id === "bf_emma");
        expect(voice?.language).toBe("en-GB");
      });

      it("should set language to 'en-GB' for bm_ prefix voices", () => {
        const voice = voices.find((v) => v.id === "bm_george");
        expect(voice?.language).toBe("en-GB");
      });

      it("should include gender in voice name for af_ prefix", () => {
        const voice = voices.find((v) => v.id === "af_heart");
        expect(voice?.name).toContain("Female");
      });

      it("should include gender in voice name for am_ prefix", () => {
        const voice = voices.find((v) => v.id === "am_adam");
        expect(voice?.name).toContain("Male");
      });

      it("should include gender in voice name for bf_ prefix", () => {
        const voice = voices.find((v) => v.id === "bf_emma");
        expect(voice?.name).toContain("Female");
      });

      it("should include gender in voice name for bm_ prefix", () => {
        const voice = voices.find((v) => v.id === "bm_george");
        expect(voice?.name).toContain("Male");
      });
    });

    // ==========================================
    // Voice name formatting
    // ==========================================

    describe("voice name formatting", () => {
      it("should capitalize the voice name portion", () => {
        const voice = voices.find((v) => v.id === "af_heart");
        expect(voice?.name).toContain("Heart");
      });

      it("should include the accent/language label in the name", () => {
        const afVoice = voices.find((v) => v.id === "af_heart");
        expect(afVoice?.name).toContain("American");

        const bfVoice = voices.find((v) => v.id === "bf_emma");
        expect(bfVoice?.name).toContain("British");
      });
    });
  });

  // ==========================================
  // Custom constructor
  // ==========================================

  describe("constructor", () => {
    it("should accept custom default voice", () => {
      const customProvider = new KokoroTtsProvider(testBaseURL, "af_bella");
      expect(customProvider).toBeDefined();
    });

    it("should accept custom default format", () => {
      const customProvider = new KokoroTtsProvider(testBaseURL, "af_heart", "wav");
      expect(customProvider).toBeDefined();
    });

    it("should use af_heart as default voice when none specified", () => {
      const defaultProvider = new KokoroTtsProvider(testBaseURL);
      expect(defaultProvider).toBeDefined();
    });
  });
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// parseVoicePrefix utility
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
// Unit tests for the exported parseVoicePrefix() utility: one case per
// English prefix plus the unrecognized-prefix fallback.
describe("parseVoicePrefix", () => {
  it("should parse af_ as American English Female", () => {
    const result = parseVoicePrefix("af_heart");
    expect(result.language).toBe("en-US");
    expect(result.gender).toBe("female");
    expect(result.accent).toBe("American");
  });

  it("should parse am_ as American English Male", () => {
    const result = parseVoicePrefix("am_adam");
    expect(result.language).toBe("en-US");
    expect(result.gender).toBe("male");
    expect(result.accent).toBe("American");
  });

  it("should parse bf_ as British English Female", () => {
    const result = parseVoicePrefix("bf_emma");
    expect(result.language).toBe("en-GB");
    expect(result.gender).toBe("female");
    expect(result.accent).toBe("British");
  });

  it("should parse bm_ as British English Male", () => {
    const result = parseVoicePrefix("bm_george");
    expect(result.language).toBe("en-GB");
    expect(result.gender).toBe("male");
    expect(result.accent).toBe("British");
  });

  it("should return unknown for unrecognized prefix", () => {
    // Fallback path: prefix "xx" is not in the provider's prefix map.
    const result = parseVoicePrefix("xx_unknown");
    expect(result.language).toBe("unknown");
    expect(result.gender).toBe("unknown");
    expect(result.accent).toBe("Unknown");
  });
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Exported constants
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("KOKORO_SUPPORTED_FORMATS", () => {
|
||||||
|
it("should include mp3", () => {
|
||||||
|
expect(KOKORO_SUPPORTED_FORMATS).toContain("mp3");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should include wav", () => {
|
||||||
|
expect(KOKORO_SUPPORTED_FORMATS).toContain("wav");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should include opus", () => {
|
||||||
|
expect(KOKORO_SUPPORTED_FORMATS).toContain("opus");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should include flac", () => {
|
||||||
|
expect(KOKORO_SUPPORTED_FORMATS).toContain("flac");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should be a readonly array", () => {
|
||||||
|
expect(Array.isArray(KOKORO_SUPPORTED_FORMATS)).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("KOKORO_SPEED_RANGE", () => {
|
||||||
|
it("should have min speed of 0.25", () => {
|
||||||
|
expect(KOKORO_SPEED_RANGE.min).toBe(0.25);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should have max speed of 4.0", () => {
|
||||||
|
expect(KOKORO_SPEED_RANGE.max).toBe(4.0);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe("KOKORO_VOICES", () => {
|
||||||
|
it("should be a non-empty array", () => {
|
||||||
|
expect(Array.isArray(KOKORO_VOICES)).toBe(true);
|
||||||
|
expect(KOKORO_VOICES.length).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should contain voice entries with id and label", () => {
|
||||||
|
for (const voice of KOKORO_VOICES) {
|
||||||
|
expect(voice.id).toBeTruthy();
|
||||||
|
expect(voice.label).toBeTruthy();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should include voices from multiple language prefixes", () => {
|
||||||
|
const prefixes = new Set(KOKORO_VOICES.map((v) => v.id.substring(0, 2)));
|
||||||
|
expect(prefixes.size).toBeGreaterThanOrEqual(4);
|
||||||
|
});
|
||||||
|
});
|
||||||
278
apps/api/src/speech/providers/kokoro-tts.provider.ts
Normal file
278
apps/api/src/speech/providers/kokoro-tts.provider.ts
Normal file
@@ -0,0 +1,278 @@
|
|||||||
|
/**
|
||||||
|
* Kokoro-FastAPI TTS Provider
|
||||||
|
*
|
||||||
|
* Default-tier TTS provider backed by Kokoro-FastAPI.
|
||||||
|
* CPU-based, always available, Apache 2.0 license.
|
||||||
|
*
|
||||||
|
* Features:
|
||||||
|
* - 53 built-in voices across 8 languages
|
||||||
|
* - Speed control: 0.25x to 4.0x
|
||||||
|
* - Output formats: mp3, wav, opus, flac
|
||||||
|
* - Voice metadata derived from ID prefix (language, gender, accent)
|
||||||
|
*
|
||||||
|
* Voice ID format: {prefix}_{name}
|
||||||
|
* - First character: language/accent code (a=American, b=British, etc.)
|
||||||
|
* - Second character: gender code (f=Female, m=Male)
|
||||||
|
*
|
||||||
|
* Issue #393
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { BaseTTSProvider } from "./base-tts.provider";
|
||||||
|
import type { SpeechTier, VoiceInfo, AudioFormat } from "../interfaces/speech-types";
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Constants
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/** Audio formats supported by Kokoro-FastAPI */
|
||||||
|
export const KOKORO_SUPPORTED_FORMATS: readonly AudioFormat[] = [
|
||||||
|
"mp3",
|
||||||
|
"wav",
|
||||||
|
"opus",
|
||||||
|
"flac",
|
||||||
|
] as const;
|
||||||
|
|
||||||
|
/** Speed range supported by Kokoro-FastAPI */
|
||||||
|
export const KOKORO_SPEED_RANGE = {
|
||||||
|
min: 0.25,
|
||||||
|
max: 4.0,
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
/** Default voice for Kokoro */
|
||||||
|
const KOKORO_DEFAULT_VOICE = "af_heart";
|
||||||
|
|
||||||
|
/** Default audio format for Kokoro */
|
||||||
|
const KOKORO_DEFAULT_FORMAT: AudioFormat = "mp3";
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Voice prefix mapping
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/**
 * Mapping of voice ID prefix (first two characters) to language/accent/gender metadata.
 *
 * Kokoro voice IDs follow the pattern: {lang}{gender}_{name}
 * - lang: a=American, b=British, e=Spanish, f=French, h=Hindi, j=Japanese, p=Portuguese, z=Chinese
 * - gender: f=Female, m=Male
 */
const VOICE_PREFIX_MAP: Record<string, { language: string; gender: string; accent: string }> = {
  // English (American / British)
  af: { language: "en-US", gender: "female", accent: "American" },
  am: { language: "en-US", gender: "male", accent: "American" },
  bf: { language: "en-GB", gender: "female", accent: "British" },
  bm: { language: "en-GB", gender: "male", accent: "British" },
  // Spanish
  ef: { language: "es", gender: "female", accent: "Spanish" },
  em: { language: "es", gender: "male", accent: "Spanish" },
  // French
  ff: { language: "fr", gender: "female", accent: "French" },
  fm: { language: "fr", gender: "male", accent: "French" },
  // Hindi
  hf: { language: "hi", gender: "female", accent: "Hindi" },
  hm: { language: "hi", gender: "male", accent: "Hindi" },
  // Japanese
  jf: { language: "ja", gender: "female", accent: "Japanese" },
  jm: { language: "ja", gender: "male", accent: "Japanese" },
  // Portuguese (Brazilian, per the "pt-BR" code)
  pf: { language: "pt-BR", gender: "female", accent: "Portuguese" },
  pm: { language: "pt-BR", gender: "male", accent: "Portuguese" },
  // Chinese
  zf: { language: "zh", gender: "female", accent: "Chinese" },
  zm: { language: "zh", gender: "male", accent: "Chinese" },
};
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Voice catalog
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/** Raw voice catalog entry: the minimal id/label pair stored in KOKORO_VOICES. */
interface KokoroVoiceEntry {
  /** Voice ID in {prefix}_{name} form (e.g. "af_heart") */
  id: string;
  /** Human-readable label, the capitalized name portion (e.g. "Heart") */
  label: string;
}
|
||||||
|
|
||||||
|
/**
 * Complete catalog of Kokoro built-in voices (53 entries total).
 *
 * Organized by language/accent prefix:
 * - af_: American English Female
 * - am_: American English Male
 * - bf_: British English Female
 * - bm_: British English Male
 * - ef_: Spanish Female
 * - em_: Spanish Male
 * - ff_: French Female
 * - hf_: Hindi Female
 * - jf_: Japanese Female
 * - jm_: Japanese Male
 * - pf_: Portuguese Female
 * - zf_: Chinese Female
 * - zm_: Chinese Male
 */
export const KOKORO_VOICES: readonly KokoroVoiceEntry[] = [
  // American English Female (af_) — 11 voices
  { id: "af_heart", label: "Heart" },
  { id: "af_alloy", label: "Alloy" },
  { id: "af_aoede", label: "Aoede" },
  { id: "af_bella", label: "Bella" },
  { id: "af_jessica", label: "Jessica" },
  { id: "af_kore", label: "Kore" },
  { id: "af_nicole", label: "Nicole" },
  { id: "af_nova", label: "Nova" },
  { id: "af_river", label: "River" },
  { id: "af_sarah", label: "Sarah" },
  { id: "af_sky", label: "Sky" },
  // American English Male (am_) — 9 voices
  { id: "am_adam", label: "Adam" },
  { id: "am_echo", label: "Echo" },
  { id: "am_eric", label: "Eric" },
  { id: "am_fenrir", label: "Fenrir" },
  { id: "am_liam", label: "Liam" },
  { id: "am_michael", label: "Michael" },
  { id: "am_onyx", label: "Onyx" },
  { id: "am_puck", label: "Puck" },
  { id: "am_santa", label: "Santa" },
  // British English Female (bf_) — 4 voices
  { id: "bf_alice", label: "Alice" },
  { id: "bf_emma", label: "Emma" },
  { id: "bf_isabella", label: "Isabella" },
  { id: "bf_lily", label: "Lily" },
  // British English Male (bm_) — 5 voices
  { id: "bm_daniel", label: "Daniel" },
  { id: "bm_fable", label: "Fable" },
  { id: "bm_george", label: "George" },
  { id: "bm_lewis", label: "Lewis" },
  { id: "bm_oscar", label: "Oscar" },
  // Spanish Female (ef_) — 3 voices
  { id: "ef_dora", label: "Dora" },
  { id: "ef_elena", label: "Elena" },
  { id: "ef_maria", label: "Maria" },
  // Spanish Male (em_) — 3 voices
  { id: "em_alex", label: "Alex" },
  { id: "em_carlos", label: "Carlos" },
  { id: "em_santa", label: "Santa" },
  // French Female (ff_) — 2 voices
  { id: "ff_camille", label: "Camille" },
  { id: "ff_siwis", label: "Siwis" },
  // Hindi Female (hf_) — 2 voices
  { id: "hf_alpha", label: "Alpha" },
  { id: "hf_beta", label: "Beta" },
  // Japanese Female (jf_) — 4 voices
  { id: "jf_alpha", label: "Alpha" },
  { id: "jf_gongitsune", label: "Gongitsune" },
  { id: "jf_nezumi", label: "Nezumi" },
  { id: "jf_tebukuro", label: "Tebukuro" },
  // Japanese Male (jm_) — 1 voice
  { id: "jm_kumo", label: "Kumo" },
  // Portuguese Female (pf_) — 1 voice
  { id: "pf_dora", label: "Dora" },
  // Chinese Female (zf_) — 4 voices
  { id: "zf_xiaobei", label: "Xiaobei" },
  { id: "zf_xiaoni", label: "Xiaoni" },
  { id: "zf_xiaoxiao", label: "Xiaoxiao" },
  { id: "zf_xiaoyi", label: "Xiaoyi" },
  // Chinese Male (zm_) — 4 voices
  { id: "zm_yunjian", label: "Yunjian" },
  { id: "zm_yunxi", label: "Yunxi" },
  { id: "zm_yunxia", label: "Yunxia" },
  { id: "zm_yunyang", label: "Yunyang" },
] as const;
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Prefix parser
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/** Parsed voice prefix metadata, returned by parseVoicePrefix(). */
export interface VoicePrefixMetadata {
  /** BCP 47 language code (e.g. "en-US", "en-GB", "ja"), or "unknown" */
  language: string;
  /** Gender: "female", "male", or "unknown" */
  gender: string;
  /** Human-readable accent label (e.g. "American", "British"), or "Unknown" */
  accent: string;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a Kokoro voice ID to extract language, gender, and accent metadata.
|
||||||
|
*
|
||||||
|
* Voice IDs follow the pattern: {lang}{gender}_{name}
|
||||||
|
* The first two characters encode language/accent and gender.
|
||||||
|
*
|
||||||
|
* @param voiceId - Kokoro voice ID (e.g. "af_heart")
|
||||||
|
* @returns Parsed metadata with language, gender, and accent
|
||||||
|
*/
|
||||||
|
export function parseVoicePrefix(voiceId: string): VoicePrefixMetadata {
|
||||||
|
const prefix = voiceId.substring(0, 2);
|
||||||
|
const mapping = VOICE_PREFIX_MAP[prefix];
|
||||||
|
|
||||||
|
if (mapping) {
|
||||||
|
return {
|
||||||
|
language: mapping.language,
|
||||||
|
gender: mapping.gender,
|
||||||
|
accent: mapping.accent,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
language: "unknown",
|
||||||
|
gender: "unknown",
|
||||||
|
accent: "Unknown",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Provider class
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Kokoro-FastAPI TTS provider (default tier).
|
||||||
|
*
|
||||||
|
* CPU-based text-to-speech engine with 53 built-in voices across 8 languages.
|
||||||
|
* Uses the OpenAI-compatible API exposed by Kokoro-FastAPI.
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* ```typescript
|
||||||
|
* const kokoro = new KokoroTtsProvider("http://kokoro-tts:8880/v1");
|
||||||
|
* const voices = await kokoro.listVoices();
|
||||||
|
* const result = await kokoro.synthesize("Hello!", { voice: "af_heart" });
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export class KokoroTtsProvider extends BaseTTSProvider {
|
||||||
|
readonly name = "kokoro";
|
||||||
|
readonly tier: SpeechTier = "default";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new Kokoro TTS provider.
|
||||||
|
*
|
||||||
|
* @param baseURL - Base URL for the Kokoro-FastAPI endpoint (e.g. "http://kokoro-tts:8880/v1")
|
||||||
|
* @param defaultVoice - Default voice ID (defaults to "af_heart")
|
||||||
|
* @param defaultFormat - Default audio format (defaults to "mp3")
|
||||||
|
*/
|
||||||
|
constructor(
|
||||||
|
baseURL: string,
|
||||||
|
defaultVoice: string = KOKORO_DEFAULT_VOICE,
|
||||||
|
defaultFormat: AudioFormat = KOKORO_DEFAULT_FORMAT
|
||||||
|
) {
|
||||||
|
super(baseURL, defaultVoice, defaultFormat);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List all available Kokoro voices with metadata.
|
||||||
|
*
|
||||||
|
* Returns the full catalog of 53 built-in voices with language, gender,
|
||||||
|
* and accent information derived from voice ID prefixes.
|
||||||
|
*
|
||||||
|
* @returns Array of VoiceInfo objects for all Kokoro voices
|
||||||
|
*/
|
||||||
|
override listVoices(): Promise<VoiceInfo[]> {
|
||||||
|
const voices: VoiceInfo[] = KOKORO_VOICES.map((entry) => {
|
||||||
|
const metadata = parseVoicePrefix(entry.id);
|
||||||
|
const genderLabel = metadata.gender === "female" ? "Female" : "Male";
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: entry.id,
|
||||||
|
name: `${entry.label} (${metadata.accent} ${genderLabel})`,
|
||||||
|
language: metadata.language,
|
||||||
|
tier: this.tier,
|
||||||
|
isDefault: entry.id === this.defaultVoice,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
return Promise.resolve(voices);
|
||||||
|
}
|
||||||
|
}
|
||||||
266
apps/api/src/speech/providers/piper-tts.provider.spec.ts
Normal file
266
apps/api/src/speech/providers/piper-tts.provider.spec.ts
Normal file
@@ -0,0 +1,266 @@
|
|||||||
|
/**
|
||||||
|
* PiperTtsProvider Unit Tests
|
||||||
|
*
|
||||||
|
* Tests the Piper TTS provider via OpenedAI Speech (fallback tier).
|
||||||
|
* Validates provider identity, OpenAI voice name mapping, voice listing,
|
||||||
|
* and ultra-lightweight CPU-only design characteristics.
|
||||||
|
*
|
||||||
|
* Issue #395
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, expect, vi, beforeEach } from "vitest";
|
||||||
|
import {
|
||||||
|
PiperTtsProvider,
|
||||||
|
PIPER_VOICE_MAP,
|
||||||
|
PIPER_SUPPORTED_FORMATS,
|
||||||
|
OPENAI_STANDARD_VOICES,
|
||||||
|
} from "./piper-tts.provider";
|
||||||
|
import type { VoiceInfo } from "../interfaces/speech-types";
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Mock OpenAI SDK
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
vi.mock("openai", () => {
|
||||||
|
class MockOpenAI {
|
||||||
|
audio = {
|
||||||
|
speech: {
|
||||||
|
create: vi.fn(),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return { default: MockOpenAI };
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Provider identity
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
// Test suite for the PiperTtsProvider class: identity, constructor defaults,
// and the fixed six-voice OpenAI-compatible catalog with its metadata.
describe("PiperTtsProvider", () => {
  const testBaseURL = "http://openedai-speech:8000/v1";
  let provider: PiperTtsProvider;

  // Fresh provider per test so cases cannot leak state into each other.
  beforeEach(() => {
    provider = new PiperTtsProvider(testBaseURL);
  });

  describe("provider identity", () => {
    it("should have name 'piper'", () => {
      expect(provider.name).toBe("piper");
    });

    it("should have tier 'fallback'", () => {
      expect(provider.tier).toBe("fallback");
    });
  });

  // ==========================================
  // Constructor
  // ==========================================

  describe("constructor", () => {
    it("should use 'alloy' as default voice", () => {
      const newProvider = new PiperTtsProvider(testBaseURL);
      expect(newProvider).toBeDefined();
    });

    it("should accept a custom default voice", () => {
      const customProvider = new PiperTtsProvider(testBaseURL, "nova");
      expect(customProvider).toBeDefined();
    });

    it("should accept a custom default format", () => {
      const customProvider = new PiperTtsProvider(testBaseURL, "alloy", "wav");
      expect(customProvider).toBeDefined();
    });
  });

  // ==========================================
  // listVoices()
  // ==========================================

  describe("listVoices", () => {
    let voices: VoiceInfo[];

    // listVoices() is async; resolve once per test from the fresh provider.
    beforeEach(async () => {
      voices = await provider.listVoices();
    });

    it("should return an array of VoiceInfo objects", () => {
      expect(voices).toBeInstanceOf(Array);
      expect(voices.length).toBeGreaterThan(0);
    });

    it("should return exactly 6 voices (OpenAI standard set)", () => {
      expect(voices.length).toBe(6);
    });

    it("should set tier to 'fallback' on all voices", () => {
      for (const voice of voices) {
        expect(voice.tier).toBe("fallback");
      }
    });

    it("should have exactly one default voice", () => {
      const defaults = voices.filter((v) => v.isDefault === true);
      expect(defaults.length).toBe(1);
    });

    it("should mark 'alloy' as the default voice", () => {
      const defaultVoice = voices.find((v) => v.isDefault === true);
      expect(defaultVoice).toBeDefined();
      expect(defaultVoice?.id).toBe("alloy");
    });

    it("should have an id and name for every voice", () => {
      for (const voice of voices) {
        expect(voice.id).toBeTruthy();
        expect(voice.name).toBeTruthy();
      }
    });

    it("should set language on every voice", () => {
      for (const voice of voices) {
        expect(voice.language).toBeTruthy();
      }
    });

    // ==========================================
    // All 6 OpenAI standard voices present
    // ==========================================

    describe("OpenAI standard voices", () => {
      const standardVoiceIds = ["alloy", "echo", "fable", "onyx", "nova", "shimmer"];

      it.each(standardVoiceIds)("should include voice '%s'", (voiceId) => {
        const voice = voices.find((v) => v.id === voiceId);
        expect(voice).toBeDefined();
      });
    });

    // ==========================================
    // Voice metadata
    // ==========================================

    describe("voice metadata", () => {
      it("should include gender info in voice names", () => {
        const alloy = voices.find((v) => v.id === "alloy");
        expect(alloy?.name).toMatch(/Female|Male/);
      });

      it("should map alloy to a female voice", () => {
        const alloy = voices.find((v) => v.id === "alloy");
        expect(alloy?.name).toContain("Female");
      });

      it("should map echo to a male voice", () => {
        const echo = voices.find((v) => v.id === "echo");
        expect(echo?.name).toContain("Male");
      });

      it("should map fable to a British voice", () => {
        const fable = voices.find((v) => v.id === "fable");
        expect(fable?.language).toBe("en-GB");
      });

      it("should map onyx to a male voice", () => {
        const onyx = voices.find((v) => v.id === "onyx");
        expect(onyx?.name).toContain("Male");
      });

      it("should map nova to a female voice", () => {
        const nova = voices.find((v) => v.id === "nova");
        expect(nova?.name).toContain("Female");
      });

      it("should map shimmer to a female voice", () => {
        const shimmer = voices.find((v) => v.id === "shimmer");
        expect(shimmer?.name).toContain("Female");
      });
    });
  });
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// PIPER_VOICE_MAP
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("PIPER_VOICE_MAP", () => {
|
||||||
|
it("should contain all 6 OpenAI standard voice names", () => {
|
||||||
|
const expectedKeys = ["alloy", "echo", "fable", "onyx", "nova", "shimmer"];
|
||||||
|
for (const key of expectedKeys) {
|
||||||
|
expect(PIPER_VOICE_MAP).toHaveProperty(key);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should map each voice to a Piper voice ID", () => {
|
||||||
|
for (const entry of Object.values(PIPER_VOICE_MAP)) {
|
||||||
|
expect(entry.piperVoice).toBeTruthy();
|
||||||
|
expect(typeof entry.piperVoice).toBe("string");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should have gender for each voice entry", () => {
|
||||||
|
for (const entry of Object.values(PIPER_VOICE_MAP)) {
|
||||||
|
expect(entry.gender).toMatch(/^(female|male)$/);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should have a language for each voice entry", () => {
|
||||||
|
for (const entry of Object.values(PIPER_VOICE_MAP)) {
|
||||||
|
expect(entry.language).toBeTruthy();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should have a description for each voice entry", () => {
|
||||||
|
for (const entry of Object.values(PIPER_VOICE_MAP)) {
|
||||||
|
expect(entry.description).toBeTruthy();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// OPENAI_STANDARD_VOICES
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("OPENAI_STANDARD_VOICES", () => {
|
||||||
|
it("should be an array of 6 voice IDs", () => {
|
||||||
|
expect(Array.isArray(OPENAI_STANDARD_VOICES)).toBe(true);
|
||||||
|
expect(OPENAI_STANDARD_VOICES.length).toBe(6);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should contain all standard OpenAI voice names", () => {
|
||||||
|
expect(OPENAI_STANDARD_VOICES).toContain("alloy");
|
||||||
|
expect(OPENAI_STANDARD_VOICES).toContain("echo");
|
||||||
|
expect(OPENAI_STANDARD_VOICES).toContain("fable");
|
||||||
|
expect(OPENAI_STANDARD_VOICES).toContain("onyx");
|
||||||
|
expect(OPENAI_STANDARD_VOICES).toContain("nova");
|
||||||
|
expect(OPENAI_STANDARD_VOICES).toContain("shimmer");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// PIPER_SUPPORTED_FORMATS
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
describe("PIPER_SUPPORTED_FORMATS", () => {
|
||||||
|
it("should include mp3", () => {
|
||||||
|
expect(PIPER_SUPPORTED_FORMATS).toContain("mp3");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should include wav", () => {
|
||||||
|
expect(PIPER_SUPPORTED_FORMATS).toContain("wav");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should include opus", () => {
|
||||||
|
expect(PIPER_SUPPORTED_FORMATS).toContain("opus");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should include flac", () => {
|
||||||
|
expect(PIPER_SUPPORTED_FORMATS).toContain("flac");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should be a readonly array", () => {
|
||||||
|
expect(Array.isArray(PIPER_SUPPORTED_FORMATS)).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
212
apps/api/src/speech/providers/piper-tts.provider.ts
Normal file
212
apps/api/src/speech/providers/piper-tts.provider.ts
Normal file
@@ -0,0 +1,212 @@
|
|||||||
|
/**
|
||||||
|
* Piper TTS Provider via OpenedAI Speech
|
||||||
|
*
|
||||||
|
* Fallback-tier TTS provider using Piper via OpenedAI Speech for
|
||||||
|
* ultra-lightweight CPU-only synthesis. Designed for low-resource
|
||||||
|
* environments including Raspberry Pi.
|
||||||
|
*
|
||||||
|
* Features:
|
||||||
|
* - OpenAI-compatible API via OpenedAI Speech server
|
||||||
|
* - 100+ Piper voices across 40+ languages
|
||||||
|
* - 6 standard OpenAI voice names mapped to Piper voices
|
||||||
|
* - Output formats: mp3, wav, opus, flac
|
||||||
|
* - CPU-only, no GPU required
|
||||||
|
* - GPL license (via OpenedAI Speech)
|
||||||
|
*
|
||||||
|
* Voice names use the OpenAI standard set (alloy, echo, fable, onyx,
|
||||||
|
* nova, shimmer) which OpenedAI Speech maps to configured Piper voices.
|
||||||
|
*
|
||||||
|
* Issue #395
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { BaseTTSProvider } from "./base-tts.provider";
|
||||||
|
import type { SpeechTier, VoiceInfo, AudioFormat } from "../interfaces/speech-types";
|
||||||
|
|
||||||
|
// ==========================================
// Constants
// ==========================================

/**
 * Audio formats supported by OpenedAI Speech with Piper backend.
 * Exported so callers and tests can validate a requested output
 * format before invoking synthesis.
 */
export const PIPER_SUPPORTED_FORMATS: readonly AudioFormat[] = [
  "mp3",
  "wav",
  "opus",
  "flac",
] as const;

/**
 * Default voice for Piper (via OpenedAI Speech).
 * This is one of the OPENAI_STANDARD_VOICES entries below.
 */
const PIPER_DEFAULT_VOICE = "alloy";

/** Default audio format for Piper synthesis output. */
const PIPER_DEFAULT_FORMAT: AudioFormat = "mp3";

// ==========================================
// OpenAI standard voice names
// ==========================================

/**
 * The 6 standard OpenAI TTS voice names.
 * OpenedAI Speech accepts these names and routes them to configured Piper voices.
 * listVoices() in PiperTtsProvider enumerates exactly this list.
 */
export const OPENAI_STANDARD_VOICES: readonly string[] = [
  "alloy",
  "echo",
  "fable",
  "onyx",
  "nova",
  "shimmer",
] as const;
|
||||||
|
|
||||||
|
// ==========================================
// Voice mapping
// ==========================================

/** Metadata for a Piper voice mapped from an OpenAI voice name. */
export interface PiperVoiceMapping {
  /** The underlying Piper voice ID configured in OpenedAI Speech */
  piperVoice: string;
  /** Human-readable description of the voice character */
  description: string;
  /** Gender of the voice */
  gender: "female" | "male";
  /** BCP 47 language code */
  language: string;
}

/**
 * Fallback mapping used when a voice ID is not found in PIPER_VOICE_MAP.
 * Uses the same underlying Piper voice as "alloy" (en_US-amy-medium) with
 * a generic description, so unknown IDs degrade gracefully.
 */
const DEFAULT_MAPPING: PiperVoiceMapping = {
  piperVoice: "en_US-amy-medium",
  description: "Default voice",
  gender: "female",
  language: "en-US",
};

/**
 * Mapping of OpenAI standard voice names to their default Piper voice
 * configuration in OpenedAI Speech.
 *
 * These are the default mappings that OpenedAI Speech uses when configured
 * with Piper as the TTS backend. The actual Piper voice used can be
 * customized in the OpenedAI Speech configuration file, so treat these
 * entries as display metadata rather than a guarantee of server behavior.
 *
 * Default Piper voice assignments:
 * - alloy: en_US-amy-medium (warm, balanced female)
 * - echo: en_US-ryan-medium (clear, articulate male)
 * - fable: en_GB-alan-medium (British male narrator)
 * - onyx: en_US-danny-low (deep, resonant male)
 * - nova: en_US-lessac-medium (expressive female)
 * - shimmer: en_US-kristin-medium (bright, energetic female)
 */
export const PIPER_VOICE_MAP: Record<string, PiperVoiceMapping> = {
  alloy: {
    piperVoice: "en_US-amy-medium",
    description: "Warm, balanced voice",
    gender: "female",
    language: "en-US",
  },
  echo: {
    piperVoice: "en_US-ryan-medium",
    description: "Clear, articulate voice",
    gender: "male",
    language: "en-US",
  },
  fable: {
    piperVoice: "en_GB-alan-medium",
    description: "British narrator voice",
    gender: "male",
    language: "en-GB",
  },
  onyx: {
    piperVoice: "en_US-danny-low",
    description: "Deep, resonant voice",
    gender: "male",
    language: "en-US",
  },
  nova: {
    piperVoice: "en_US-lessac-medium",
    description: "Expressive, versatile voice",
    gender: "female",
    language: "en-US",
  },
  shimmer: {
    piperVoice: "en_US-kristin-medium",
    description: "Bright, energetic voice",
    gender: "female",
    language: "en-US",
  },
};
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Provider class
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Piper TTS provider via OpenedAI Speech (fallback tier).
|
||||||
|
*
|
||||||
|
* Ultra-lightweight CPU-only text-to-speech engine using Piper voices
|
||||||
|
* through the OpenedAI Speech server's OpenAI-compatible API.
|
||||||
|
*
|
||||||
|
* Designed for:
|
||||||
|
* - CPU-only environments (no GPU required)
|
||||||
|
* - Low-resource devices (Raspberry Pi, ARM SBCs)
|
||||||
|
* - Fallback when primary TTS engines are unavailable
|
||||||
|
* - High-volume, low-latency synthesis needs
|
||||||
|
*
|
||||||
|
* The provider exposes the 6 standard OpenAI voice names (alloy, echo,
|
||||||
|
* fable, onyx, nova, shimmer) which OpenedAI Speech maps to configured
|
||||||
|
* Piper voices. Additional Piper voices (100+ across 40+ languages)
|
||||||
|
* can be accessed by passing the Piper voice ID directly.
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* ```typescript
|
||||||
|
* const piper = new PiperTtsProvider("http://openedai-speech:8000/v1");
|
||||||
|
* const voices = await piper.listVoices();
|
||||||
|
* const result = await piper.synthesize("Hello!", { voice: "alloy" });
|
||||||
|
* ```
|
||||||
|
*/
|
||||||
|
export class PiperTtsProvider extends BaseTTSProvider {
|
||||||
|
readonly name = "piper";
|
||||||
|
readonly tier: SpeechTier = "fallback";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new Piper TTS provider.
|
||||||
|
*
|
||||||
|
* @param baseURL - Base URL for the OpenedAI Speech endpoint (e.g. "http://openedai-speech:8000/v1")
|
||||||
|
* @param defaultVoice - Default OpenAI voice name (defaults to "alloy")
|
||||||
|
* @param defaultFormat - Default audio format (defaults to "mp3")
|
||||||
|
*/
|
||||||
|
constructor(
|
||||||
|
baseURL: string,
|
||||||
|
defaultVoice: string = PIPER_DEFAULT_VOICE,
|
||||||
|
defaultFormat: AudioFormat = PIPER_DEFAULT_FORMAT
|
||||||
|
) {
|
||||||
|
super(baseURL, defaultVoice, defaultFormat);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List available voices with OpenAI-to-Piper mapping metadata.
|
||||||
|
*
|
||||||
|
* Returns the 6 standard OpenAI voice names with information about
|
||||||
|
* the underlying Piper voice, gender, and language. These are the
|
||||||
|
* voices that can be specified in the `voice` parameter of synthesize().
|
||||||
|
*
|
||||||
|
* @returns Array of VoiceInfo objects for all mapped Piper voices
|
||||||
|
*/
|
||||||
|
override listVoices(): Promise<VoiceInfo[]> {
|
||||||
|
const voices: VoiceInfo[] = OPENAI_STANDARD_VOICES.map((voiceId) => {
|
||||||
|
const mapping = PIPER_VOICE_MAP[voiceId] ?? DEFAULT_MAPPING;
|
||||||
|
const genderLabel = mapping.gender === "female" ? "Female" : "Male";
|
||||||
|
const label = voiceId.charAt(0).toUpperCase() + voiceId.slice(1);
|
||||||
|
|
||||||
|
return {
|
||||||
|
id: voiceId,
|
||||||
|
name: `${label} (${genderLabel} - ${mapping.description})`,
|
||||||
|
language: mapping.language,
|
||||||
|
tier: this.tier,
|
||||||
|
isDefault: voiceId === this.defaultVoice,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
return Promise.resolve(voices);
|
||||||
|
}
|
||||||
|
}
|
||||||
468
apps/api/src/speech/providers/speaches-stt.provider.spec.ts
Normal file
468
apps/api/src/speech/providers/speaches-stt.provider.spec.ts
Normal file
@@ -0,0 +1,468 @@
|
|||||||
|
/**
|
||||||
|
* SpeachesSttProvider Tests
|
||||||
|
*
|
||||||
|
* TDD tests for the Speaches/faster-whisper STT provider.
|
||||||
|
* Tests cover transcription, error handling, health checks, and config injection.
|
||||||
|
*
|
||||||
|
* Issue #390
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, expect, beforeEach, vi } from "vitest";
|
||||||
|
import { SpeachesSttProvider } from "./speaches-stt.provider";
|
||||||
|
import type { SpeechConfig } from "../speech.config";
|
||||||
|
import type { TranscribeOptions } from "../interfaces/speech-types";
|
||||||
|
|
||||||
|
// ==========================================
// Mock OpenAI SDK
// ==========================================

// vi.hoisted() runs this factory before the hoisted vi.mock() factory below,
// so the mock functions already exist when the module mock is evaluated.
const { mockCreate, mockModelsList, mockToFile, mockOpenAIConstructorCalls } = vi.hoisted(() => {
  const mockCreate = vi.fn();
  const mockModelsList = vi.fn();
  // Mirrors the SDK's toFile() helper: wraps the buffer in a File with the given name.
  const mockToFile = vi.fn().mockImplementation(async (buffer: Buffer, name: string) => {
    return new File([buffer], name);
  });
  // Records every config object passed to the mocked OpenAI constructor,
  // so tests can assert on baseURL / apiKey injection.
  const mockOpenAIConstructorCalls: Array<Record<string, unknown>> = [];
  return { mockCreate, mockModelsList, mockToFile, mockOpenAIConstructorCalls };
});

// Replace the real OpenAI SDK with a stub exposing only the surfaces the
// provider touches: audio.transcriptions.create and models.list.
vi.mock("openai", () => {
  class MockOpenAI {
    audio = {
      transcriptions: {
        create: mockCreate,
      },
    };
    models = {
      list: mockModelsList,
    };
    constructor(config: Record<string, unknown>) {
      mockOpenAIConstructorCalls.push(config);
    }
  }
  return {
    default: MockOpenAI,
    toFile: mockToFile,
  };
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Test helpers
|
||||||
|
// ==========================================
|
||||||
|
|
||||||
|
function createTestConfig(overrides?: Partial<SpeechConfig["stt"]>): SpeechConfig {
|
||||||
|
return {
|
||||||
|
stt: {
|
||||||
|
enabled: true,
|
||||||
|
baseUrl: "http://speaches:8000/v1",
|
||||||
|
model: "Systran/faster-whisper-large-v3-turbo",
|
||||||
|
language: "en",
|
||||||
|
...overrides,
|
||||||
|
},
|
||||||
|
tts: {
|
||||||
|
default: { enabled: false, url: "", voice: "", format: "" },
|
||||||
|
premium: { enabled: false, url: "" },
|
||||||
|
fallback: { enabled: false, url: "" },
|
||||||
|
},
|
||||||
|
limits: {
|
||||||
|
maxUploadSize: 25_000_000,
|
||||||
|
maxDurationSeconds: 600,
|
||||||
|
maxTextLength: 4096,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function createMockVerboseResponse(overrides?: Record<string, unknown>): Record<string, unknown> {
|
||||||
|
return {
|
||||||
|
text: "Hello, world!",
|
||||||
|
language: "en",
|
||||||
|
duration: 3.5,
|
||||||
|
segments: [
|
||||||
|
{
|
||||||
|
id: 0,
|
||||||
|
text: "Hello, world!",
|
||||||
|
start: 0.0,
|
||||||
|
end: 3.5,
|
||||||
|
avg_logprob: -0.25,
|
||||||
|
compression_ratio: 1.2,
|
||||||
|
no_speech_prob: 0.01,
|
||||||
|
seek: 0,
|
||||||
|
temperature: 0.0,
|
||||||
|
tokens: [1, 2, 3],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
...overrides,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("SpeachesSttProvider", () => {
|
||||||
|
let provider: SpeachesSttProvider;
let config: SpeechConfig;

beforeEach(() => {
  // Reset mock call history between tests, and empty the array that the
  // mocked OpenAI constructor pushes its config objects into (clearAllMocks
  // does not touch a plain array).
  vi.clearAllMocks();
  mockOpenAIConstructorCalls.length = 0;
  // Fresh provider per test so constructor-recording tests see exactly one call.
  config = createTestConfig();
  provider = new SpeachesSttProvider(config);
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Provider identity
|
||||||
|
// ==========================================
|
||||||
|
describe("name", () => {
|
||||||
|
it("should have the name 'speaches'", () => {
|
||||||
|
expect(provider.name).toBe("speaches");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// transcribe
|
||||||
|
// ==========================================
|
||||||
|
describe("transcribe", () => {
|
||||||
|
it("should call OpenAI audio.transcriptions.create with correct parameters", async () => {
|
||||||
|
const mockResponse = createMockVerboseResponse();
|
||||||
|
mockCreate.mockResolvedValueOnce(mockResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
await provider.transcribe(audio);
|
||||||
|
|
||||||
|
expect(mockCreate).toHaveBeenCalledOnce();
|
||||||
|
const callArgs = mockCreate.mock.calls[0][0];
|
||||||
|
expect(callArgs.model).toBe("Systran/faster-whisper-large-v3-turbo");
|
||||||
|
expect(callArgs.language).toBe("en");
|
||||||
|
expect(callArgs.response_format).toBe("verbose_json");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should convert Buffer to File using toFile", async () => {
|
||||||
|
const mockResponse = createMockVerboseResponse();
|
||||||
|
mockCreate.mockResolvedValueOnce(mockResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
await provider.transcribe(audio);
|
||||||
|
|
||||||
|
expect(mockToFile).toHaveBeenCalledWith(audio, "audio.wav", {
|
||||||
|
type: "audio/wav",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return TranscriptionResult with text and language", async () => {
|
||||||
|
const mockResponse = createMockVerboseResponse();
|
||||||
|
mockCreate.mockResolvedValueOnce(mockResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
const result = await provider.transcribe(audio);
|
||||||
|
|
||||||
|
expect(result.text).toBe("Hello, world!");
|
||||||
|
expect(result.language).toBe("en");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return durationSeconds from verbose response", async () => {
|
||||||
|
const mockResponse = createMockVerboseResponse({ duration: 5.25 });
|
||||||
|
mockCreate.mockResolvedValueOnce(mockResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
const result = await provider.transcribe(audio);
|
||||||
|
|
||||||
|
expect(result.durationSeconds).toBe(5.25);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should map segments from verbose response", async () => {
|
||||||
|
const mockResponse = createMockVerboseResponse({
|
||||||
|
segments: [
|
||||||
|
{
|
||||||
|
id: 0,
|
||||||
|
text: "Hello,",
|
||||||
|
start: 0.0,
|
||||||
|
end: 1.5,
|
||||||
|
avg_logprob: -0.2,
|
||||||
|
compression_ratio: 1.1,
|
||||||
|
no_speech_prob: 0.01,
|
||||||
|
seek: 0,
|
||||||
|
temperature: 0.0,
|
||||||
|
tokens: [1, 2],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 1,
|
||||||
|
text: " world!",
|
||||||
|
start: 1.5,
|
||||||
|
end: 3.5,
|
||||||
|
avg_logprob: -0.3,
|
||||||
|
compression_ratio: 1.3,
|
||||||
|
no_speech_prob: 0.02,
|
||||||
|
seek: 0,
|
||||||
|
temperature: 0.0,
|
||||||
|
tokens: [3, 4],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
mockCreate.mockResolvedValueOnce(mockResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
const result = await provider.transcribe(audio);
|
||||||
|
|
||||||
|
expect(result.segments).toHaveLength(2);
|
||||||
|
expect(result.segments?.[0]).toEqual({
|
||||||
|
text: "Hello,",
|
||||||
|
start: 0.0,
|
||||||
|
end: 1.5,
|
||||||
|
});
|
||||||
|
expect(result.segments?.[1]).toEqual({
|
||||||
|
text: " world!",
|
||||||
|
start: 1.5,
|
||||||
|
end: 3.5,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle response without segments gracefully", async () => {
|
||||||
|
const mockResponse = createMockVerboseResponse({ segments: undefined });
|
||||||
|
mockCreate.mockResolvedValueOnce(mockResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
const result = await provider.transcribe(audio);
|
||||||
|
|
||||||
|
expect(result.text).toBe("Hello, world!");
|
||||||
|
expect(result.segments).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle response without duration gracefully", async () => {
|
||||||
|
const mockResponse = createMockVerboseResponse({ duration: undefined });
|
||||||
|
mockCreate.mockResolvedValueOnce(mockResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
const result = await provider.transcribe(audio);
|
||||||
|
|
||||||
|
expect(result.text).toBe("Hello, world!");
|
||||||
|
expect(result.durationSeconds).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
// ------------------------------------------
|
||||||
|
// Options override
|
||||||
|
// ------------------------------------------
|
||||||
|
describe("options override", () => {
|
||||||
|
it("should use custom model from options when provided", async () => {
|
||||||
|
const mockResponse = createMockVerboseResponse();
|
||||||
|
mockCreate.mockResolvedValueOnce(mockResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
const options: TranscribeOptions = { model: "custom-whisper-model" };
|
||||||
|
await provider.transcribe(audio, options);
|
||||||
|
|
||||||
|
const callArgs = mockCreate.mock.calls[0][0];
|
||||||
|
expect(callArgs.model).toBe("custom-whisper-model");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use custom language from options when provided", async () => {
|
||||||
|
const mockResponse = createMockVerboseResponse({ language: "fr" });
|
||||||
|
mockCreate.mockResolvedValueOnce(mockResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
const options: TranscribeOptions = { language: "fr" };
|
||||||
|
await provider.transcribe(audio, options);
|
||||||
|
|
||||||
|
const callArgs = mockCreate.mock.calls[0][0];
|
||||||
|
expect(callArgs.language).toBe("fr");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should pass through prompt option", async () => {
|
||||||
|
const mockResponse = createMockVerboseResponse();
|
||||||
|
mockCreate.mockResolvedValueOnce(mockResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
const options: TranscribeOptions = { prompt: "This is a meeting about project planning." };
|
||||||
|
await provider.transcribe(audio, options);
|
||||||
|
|
||||||
|
const callArgs = mockCreate.mock.calls[0][0];
|
||||||
|
expect(callArgs.prompt).toBe("This is a meeting about project planning.");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should pass through temperature option", async () => {
|
||||||
|
const mockResponse = createMockVerboseResponse();
|
||||||
|
mockCreate.mockResolvedValueOnce(mockResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
const options: TranscribeOptions = { temperature: 0.3 };
|
||||||
|
await provider.transcribe(audio, options);
|
||||||
|
|
||||||
|
const callArgs = mockCreate.mock.calls[0][0];
|
||||||
|
expect(callArgs.temperature).toBe(0.3);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use custom mimeType for file conversion when provided", async () => {
|
||||||
|
const mockResponse = createMockVerboseResponse();
|
||||||
|
mockCreate.mockResolvedValueOnce(mockResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
const options: TranscribeOptions = { mimeType: "audio/mp3" };
|
||||||
|
await provider.transcribe(audio, options);
|
||||||
|
|
||||||
|
expect(mockToFile).toHaveBeenCalledWith(audio, "audio.mp3", {
|
||||||
|
type: "audio/mp3",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ------------------------------------------
|
||||||
|
// Simple response fallback
|
||||||
|
// ------------------------------------------
|
||||||
|
describe("simple response fallback", () => {
|
||||||
|
it("should handle simple Transcription response (text only, no verbose fields)", async () => {
|
||||||
|
// Some configurations may return just { text: "..." } without verbose fields
|
||||||
|
const simpleResponse = { text: "Simple transcription result." };
|
||||||
|
mockCreate.mockResolvedValueOnce(simpleResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
const result = await provider.transcribe(audio);
|
||||||
|
|
||||||
|
expect(result.text).toBe("Simple transcription result.");
|
||||||
|
expect(result.language).toBe("en"); // Falls back to config language
|
||||||
|
expect(result.durationSeconds).toBeUndefined();
|
||||||
|
expect(result.segments).toBeUndefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Error handling
|
||||||
|
// ==========================================
|
||||||
|
describe("error handling", () => {
|
||||||
|
it("should throw a descriptive error on connection refused", async () => {
|
||||||
|
const connectionError = new Error("connect ECONNREFUSED 127.0.0.1:8000");
|
||||||
|
mockCreate.mockRejectedValueOnce(connectionError);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
await expect(provider.transcribe(audio)).rejects.toThrow(
|
||||||
|
"STT transcription failed: connect ECONNREFUSED 127.0.0.1:8000"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw a descriptive error on timeout", async () => {
|
||||||
|
const timeoutError = new Error("Request timed out");
|
||||||
|
mockCreate.mockRejectedValueOnce(timeoutError);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
await expect(provider.transcribe(audio)).rejects.toThrow(
|
||||||
|
"STT transcription failed: Request timed out"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should throw a descriptive error on API error", async () => {
|
||||||
|
const apiError = new Error("Invalid model: nonexistent-model");
|
||||||
|
mockCreate.mockRejectedValueOnce(apiError);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
await expect(provider.transcribe(audio)).rejects.toThrow(
|
||||||
|
"STT transcription failed: Invalid model: nonexistent-model"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should handle non-Error thrown values", async () => {
|
||||||
|
mockCreate.mockRejectedValueOnce("unexpected string error");
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
await expect(provider.transcribe(audio)).rejects.toThrow(
|
||||||
|
"STT transcription failed: unexpected string error"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// isHealthy
|
||||||
|
// ==========================================
|
||||||
|
describe("isHealthy", () => {
|
||||||
|
it("should return true when the server is reachable", async () => {
|
||||||
|
mockModelsList.mockResolvedValueOnce({ data: [{ id: "whisper-1" }] });
|
||||||
|
|
||||||
|
const healthy = await provider.isHealthy();
|
||||||
|
expect(healthy).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return false when the server is unreachable", async () => {
|
||||||
|
mockModelsList.mockRejectedValueOnce(new Error("connect ECONNREFUSED"));
|
||||||
|
|
||||||
|
const healthy = await provider.isHealthy();
|
||||||
|
expect(healthy).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should not throw on health check failure", async () => {
|
||||||
|
mockModelsList.mockRejectedValueOnce(new Error("Network error"));
|
||||||
|
|
||||||
|
await expect(provider.isHealthy()).resolves.toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should return false on unexpected error types", async () => {
|
||||||
|
mockModelsList.mockRejectedValueOnce("string error");
|
||||||
|
|
||||||
|
const healthy = await provider.isHealthy();
|
||||||
|
expect(healthy).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Config injection
|
||||||
|
// ==========================================
|
||||||
|
describe("config injection", () => {
|
||||||
|
it("should create OpenAI client with baseURL from config", () => {
|
||||||
|
// The constructor was called in beforeEach
|
||||||
|
expect(mockOpenAIConstructorCalls).toHaveLength(1);
|
||||||
|
expect(mockOpenAIConstructorCalls[0]).toEqual(
|
||||||
|
expect.objectContaining({
|
||||||
|
baseURL: "http://speaches:8000/v1",
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use custom baseURL from config", () => {
|
||||||
|
mockOpenAIConstructorCalls.length = 0;
|
||||||
|
const customConfig = createTestConfig({
|
||||||
|
baseUrl: "http://custom-speaches:9000/v1",
|
||||||
|
});
|
||||||
|
new SpeachesSttProvider(customConfig);
|
||||||
|
|
||||||
|
expect(mockOpenAIConstructorCalls).toHaveLength(1);
|
||||||
|
expect(mockOpenAIConstructorCalls[0]).toEqual(
|
||||||
|
expect.objectContaining({
|
||||||
|
baseURL: "http://custom-speaches:9000/v1",
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use default model from config for transcription", async () => {
|
||||||
|
const customConfig = createTestConfig({
|
||||||
|
model: "Systran/faster-whisper-small",
|
||||||
|
});
|
||||||
|
const customProvider = new SpeachesSttProvider(customConfig);
|
||||||
|
|
||||||
|
const mockResponse = createMockVerboseResponse();
|
||||||
|
mockCreate.mockResolvedValueOnce(mockResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
await customProvider.transcribe(audio);
|
||||||
|
|
||||||
|
const callArgs = mockCreate.mock.calls[0][0];
|
||||||
|
expect(callArgs.model).toBe("Systran/faster-whisper-small");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should use default language from config for transcription", async () => {
|
||||||
|
const customConfig = createTestConfig({ language: "de" });
|
||||||
|
const customProvider = new SpeachesSttProvider(customConfig);
|
||||||
|
|
||||||
|
const mockResponse = createMockVerboseResponse({ language: "de" });
|
||||||
|
mockCreate.mockResolvedValueOnce(mockResponse);
|
||||||
|
|
||||||
|
const audio = Buffer.from("fake-audio-data");
|
||||||
|
await customProvider.transcribe(audio);
|
||||||
|
|
||||||
|
const callArgs = mockCreate.mock.calls[0][0];
|
||||||
|
expect(callArgs.language).toBe("de");
|
||||||
|
});
|
||||||
|
|
||||||
|
it("should set a dummy API key for local Speaches server", () => {
|
||||||
|
expect(mockOpenAIConstructorCalls).toHaveLength(1);
|
||||||
|
expect(mockOpenAIConstructorCalls[0]).toEqual(
|
||||||
|
expect.objectContaining({
|
||||||
|
apiKey: "not-needed",
|
||||||
|
})
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user