# ==============================================
# Mosaic Stack Environment Configuration
# ==============================================
# Copy this file to .env and customize for your environment

# ======================
# Application Ports
# ======================
API_PORT=3001
API_HOST=0.0.0.0
WEB_PORT=3000

# ======================
# Web Configuration
# ======================
NEXT_PUBLIC_APP_URL=http://localhost:3000
NEXT_PUBLIC_API_URL=http://localhost:3001

# ======================
# PostgreSQL Database
# ======================
# Bundled PostgreSQL (when database profile enabled)
# SECURITY: Change POSTGRES_PASSWORD to a strong random password in production
DATABASE_URL=postgresql://mosaic:REPLACE_WITH_SECURE_PASSWORD@postgres:5432/mosaic
POSTGRES_USER=mosaic
POSTGRES_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
POSTGRES_DB=mosaic
POSTGRES_PORT=5432

# External PostgreSQL (managed service)
# Disable 'database' profile and point DATABASE_URL to your external instance
# Example: DATABASE_URL=postgresql://user:pass@rds.amazonaws.com:5432/mosaic

# PostgreSQL Performance Tuning (Optional)
POSTGRES_SHARED_BUFFERS=256MB
POSTGRES_EFFECTIVE_CACHE_SIZE=1GB
POSTGRES_MAX_CONNECTIONS=100

# ======================
# Valkey Cache (Redis-compatible)
# ======================
# Bundled Valkey (when cache profile enabled)
VALKEY_URL=redis://valkey:6379
VALKEY_HOST=valkey
VALKEY_PORT=6379
# Optional: Password for Valkey authentication
# VALKEY_PASSWORD=
VALKEY_MAXMEMORY=256mb

# External Redis/Valkey (managed service)
# Disable 'cache' profile and point VALKEY_URL to your external instance
# Example: VALKEY_URL=redis://elasticache.amazonaws.com:6379
# Example with auth: VALKEY_URL=redis://:password@redis.example.com:6379

# Knowledge Module Cache Configuration
# Set KNOWLEDGE_CACHE_ENABLED=false to disable caching (useful for development)
KNOWLEDGE_CACHE_ENABLED=true
# Cache TTL in seconds (default: 300 = 5 minutes)
KNOWLEDGE_CACHE_TTL=300

# ======================
# Authentication (Authentik OIDC)
# ======================
# Set to 'true' to enable OIDC authentication with Authentik
# When enabled, OIDC_ISSUER, OIDC_CLIENT_ID, and OIDC_CLIENT_SECRET are required
OIDC_ENABLED=false

# Authentik Server URLs (required when OIDC_ENABLED=true)
# OIDC_ISSUER must end with a trailing slash (/)
OIDC_ISSUER=https://auth.example.com/application/o/mosaic-stack/
OIDC_CLIENT_ID=your-client-id-here
OIDC_CLIENT_SECRET=your-client-secret-here

# Redirect URI must match what's configured in Authentik
# Development: http://localhost:3001/auth/callback/authentik
# Production: https://api.mosaicstack.dev/auth/callback/authentik
OIDC_REDIRECT_URI=http://localhost:3001/auth/callback/authentik

# Authentik PostgreSQL Database
AUTHENTIK_POSTGRES_USER=authentik
AUTHENTIK_POSTGRES_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
AUTHENTIK_POSTGRES_DB=authentik

# Authentik Configuration
# CRITICAL: Generate a random secret key with at least 50 characters
# Example: openssl rand -base64 50
AUTHENTIK_SECRET_KEY=REPLACE_WITH_RANDOM_SECRET_MINIMUM_50_CHARS
AUTHENTIK_ERROR_REPORTING=false
# SECURITY: Change bootstrap password immediately after first login
AUTHENTIK_BOOTSTRAP_PASSWORD=REPLACE_WITH_SECURE_PASSWORD
AUTHENTIK_BOOTSTRAP_EMAIL=admin@localhost
AUTHENTIK_COOKIE_DOMAIN=.localhost

# Authentik Ports
AUTHENTIK_PORT_HTTP=9000
AUTHENTIK_PORT_HTTPS=9443

# ======================
# CSRF Protection
# ======================
# CRITICAL: Generate a random secret for CSRF token signing
# Required in production; auto-generated in development (not persistent across restarts)
# Command to generate: node -e "console.log(require('crypto').randomBytes(32).toString('hex'))"
CSRF_SECRET=REPLACE_WITH_64_CHAR_HEX_STRING

# ======================
# JWT Configuration
# ======================
# CRITICAL: Generate a random secret key with at least 32 characters
# Example: openssl rand -base64 32
JWT_SECRET=REPLACE_WITH_RANDOM_SECRET_MINIMUM_32_CHARS
JWT_EXPIRATION=24h

# ======================
# BetterAuth Configuration
# ======================
# CRITICAL: Generate a random secret key with at least 32 characters
# This is used by BetterAuth for session management and CSRF protection
# Example: openssl rand -base64 32
BETTER_AUTH_SECRET=REPLACE_WITH_RANDOM_SECRET_MINIMUM_32_CHARS

# ======================
# Encryption (Credential Security)
# ======================
# CRITICAL: Generate a random 32-byte (256-bit) encryption key
# This key is used for AES-256-GCM encryption of OAuth tokens and sensitive data
# Command to generate: openssl rand -hex 32
# SECURITY: Never commit this key to version control
# SECURITY: Use different keys for development, staging, and production
# SECURITY: Store production keys in a secure secrets manager (see docs/design/credential-security.md)
ENCRYPTION_KEY=REPLACE_WITH_64_CHAR_HEX_STRING_GENERATE_WITH_OPENSSL_RAND_HEX_32

# ======================
# OpenBao Secrets Management
# ======================
# OpenBao provides Transit encryption for sensitive credentials
# Enable with: COMPOSE_PROFILES=openbao or COMPOSE_PROFILES=full
# Auto-initialized on first run via openbao-init sidecar

# Bundled OpenBao (when openbao profile enabled)
OPENBAO_ADDR=http://openbao:8200
OPENBAO_PORT=8200

# External OpenBao/Vault (managed service)
# Disable 'openbao' profile and set OPENBAO_ADDR to your external instance
# Example: OPENBAO_ADDR=https://vault.example.com:8200
# Example: OPENBAO_ADDR=https://vault.hashicorp.com:8200

# AppRole Authentication (Optional)
# If not set, credentials are read from /openbao/init/approle-credentials volume
# Required when using external OpenBao
# OPENBAO_ROLE_ID=your-role-id-here
# OPENBAO_SECRET_ID=your-secret-id-here

# Fallback Mode
# When OpenBao is unavailable, API automatically falls back to AES-256-GCM
# encryption using ENCRYPTION_KEY. This provides graceful degradation.
# ======================
# Ollama (Optional AI Service)
# ======================
# Set OLLAMA_ENDPOINT to use local or remote Ollama
# For bundled Docker service: http://ollama:11434
# For external service: http://your-ollama-server:11434
OLLAMA_ENDPOINT=http://ollama:11434
OLLAMA_PORT=11434

# Embedding Model Configuration
# Model used for generating knowledge entry embeddings
# Default: mxbai-embed-large (1024-dim, padded to 1536)
# Alternative: nomic-embed-text (768-dim, padded to 1536)
# Note: Embeddings are padded/truncated to 1536 dimensions to match schema
OLLAMA_EMBEDDING_MODEL=mxbai-embed-large

# Semantic Search Configuration
# Similarity threshold for semantic search (0.0 to 1.0, where 1.0 is identical)
# Lower values return more results but may be less relevant
# Default: 0.5 (50% similarity)
SEMANTIC_SEARCH_SIMILARITY_THRESHOLD=0.5

# ======================
# OpenAI API (For Semantic Search)
# ======================
# OPTIONAL: Semantic search requires an OpenAI API key
# Get your API key from: https://platform.openai.com/api-keys
# If not configured, semantic search endpoints will return an error
# OPENAI_API_KEY=sk-...

# ======================
# Application Environment
# ======================
NODE_ENV=development

# ======================
# Docker Image Configuration
# ======================
# Docker image tag for pulling pre-built images from git.mosaicstack.dev registry
# Used by docker-compose.yml (pulls images) and docker-swarm.yml
# For local builds, use docker-compose.build.yml instead
# Options:
# - dev: Pull development images from registry (default, built from develop branch)
# - latest: Pull latest stable images from registry (built from main branch)
# - <sha>: Use specific commit SHA tag (e.g., 658ec077)
# - <version>: Use specific version tag (e.g., v1.0.0)
IMAGE_TAG=dev

# ======================
# Docker Compose Profiles
# ======================
# Enable optional services via profiles. Combine multiple profiles with commas.
#
# Available profiles:
# - database: PostgreSQL database (disable to use external database)
# - cache: Valkey cache (disable to use external Redis)
# - openbao: OpenBao secrets management (disable to use external vault or fallback encryption)
# - authentik: Authentik OIDC authentication (disable to use external auth provider)
# - ollama: Ollama AI/LLM service (disable to use external LLM service)
# - traefik-bundled: Bundled Traefik reverse proxy (disable to use external proxy)
# - full: Enable all optional services (turnkey deployment)
#
# Examples:
# COMPOSE_PROFILES=full                     # Everything bundled (development)
# COMPOSE_PROFILES=database,cache,openbao   # Core services only
# COMPOSE_PROFILES=                         # All external services (production)
COMPOSE_PROFILES=full

# ======================
# Traefik Reverse Proxy
# ======================
# TRAEFIK_MODE options:
# - bundled: Use bundled Traefik (requires traefik-bundled profile)
# - upstream: Connect to external Traefik instance
# - none: Direct port exposure without reverse proxy (default)
TRAEFIK_MODE=none

# Domain configuration for Traefik routing
MOSAIC_API_DOMAIN=api.mosaic.local
MOSAIC_WEB_DOMAIN=mosaic.local
MOSAIC_AUTH_DOMAIN=auth.mosaic.local

# External Traefik network name (for upstream mode)
# Must match the network name of your existing Traefik instance
TRAEFIK_NETWORK=traefik-public

# TLS/SSL Configuration
TRAEFIK_TLS_ENABLED=true
# For Let's Encrypt (production):
TRAEFIK_ACME_EMAIL=admin@example.com
# For self-signed certificates (development), leave TRAEFIK_ACME_EMAIL empty

# Traefik Dashboard (bundled mode only)
TRAEFIK_DASHBOARD_ENABLED=true
TRAEFIK_DASHBOARD_PORT=8080

# ======================
# Gitea Integration (Coordinator)
# ======================
# Gitea instance URL
GITEA_URL=https://git.mosaicstack.dev

# Coordinator bot credentials (see docs/1-getting-started/3-configuration/4-gitea-coordinator.md)
# SECURITY: Store GITEA_BOT_TOKEN in secrets vault, not in version control
GITEA_BOT_USERNAME=mosaic
GITEA_BOT_TOKEN=REPLACE_WITH_COORDINATOR_BOT_API_TOKEN
GITEA_BOT_PASSWORD=REPLACE_WITH_COORDINATOR_BOT_PASSWORD

# Repository configuration
GITEA_REPO_OWNER=mosaic
GITEA_REPO_NAME=stack

# Webhook secret for coordinator (HMAC SHA256 signature verification)
# SECURITY: Generate random secret with: openssl rand -hex 32
# Configure in Gitea: Repository Settings → Webhooks → Add Webhook
GITEA_WEBHOOK_SECRET=REPLACE_WITH_RANDOM_WEBHOOK_SECRET

# Coordinator API Key (service-to-service authentication)
# CRITICAL: Generate a random API key with at least 32 characters
# Example: openssl rand -base64 32
# The coordinator service uses this key to authenticate with the API
COORDINATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS

# ======================
# Rate Limiting
# ======================
# Rate limiting prevents DoS attacks on webhook and API endpoints
# TTL is in seconds, limits are per TTL window

# Global rate limit (applies to all endpoints unless overridden)
# Time window in seconds
RATE_LIMIT_TTL=60
# Requests per window
RATE_LIMIT_GLOBAL_LIMIT=100

# Webhook endpoints (/stitcher/webhook, /stitcher/dispatch)
# Requests per minute
RATE_LIMIT_WEBHOOK_LIMIT=60

# Coordinator endpoints (/coordinator/*)
# Requests per minute
RATE_LIMIT_COORDINATOR_LIMIT=100

# Health check endpoints (/coordinator/health)
# Requests per minute (higher for monitoring)
RATE_LIMIT_HEALTH_LIMIT=300

# Storage backend for rate limiting (redis or memory)
# redis: Uses Valkey for distributed rate limiting (recommended for production)
# memory: Uses in-memory storage (single instance only, for development)
RATE_LIMIT_STORAGE=redis

# ======================
# Discord Bridge (Optional)
# ======================
# Discord bot integration for chat-based control
# Get bot token from: https://discord.com/developers/applications
# DISCORD_BOT_TOKEN=your-discord-bot-token-here
# DISCORD_GUILD_ID=your-discord-server-id
# DISCORD_CONTROL_CHANNEL_ID=channel-id-for-commands
# DISCORD_WORKSPACE_ID=your-workspace-uuid
#
# SECURITY: DISCORD_WORKSPACE_ID must be a valid workspace UUID from your database.
# All Discord commands will execute within this workspace context for proper
# multi-tenant isolation. Each Discord bot instance should be configured for
# a single workspace.

# ======================
# Matrix Bridge (Optional)
# ======================
# Matrix bot integration for chat-based control via Matrix protocol
# Requires a Matrix account with an access token for the bot user
# MATRIX_HOMESERVER_URL=https://matrix.example.com
# MATRIX_ACCESS_TOKEN=
# MATRIX_BOT_USER_ID=@mosaic-bot:example.com
# MATRIX_CONTROL_ROOM_ID=!roomid:example.com
# MATRIX_WORKSPACE_ID=your-workspace-uuid
#
# SECURITY: MATRIX_WORKSPACE_ID must be a valid workspace UUID from your database.
# All Matrix commands will execute within this workspace context for proper
# multi-tenant isolation. Each Matrix bot instance should be configured for
# a single workspace.

# ======================
# Orchestrator Configuration
# ======================
# API Key for orchestrator agent management endpoints
# CRITICAL: Generate a random API key with at least 32 characters
# Example: openssl rand -base64 32
# Required for all /agents/* endpoints (spawn, kill, kill-all, status)
# Health endpoints (/health/*) remain unauthenticated
ORCHESTRATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS

# ======================
# AI Provider Configuration
# ======================
# Choose the AI provider for orchestrator agents
# Options: ollama, claude, openai
# Default: ollama (no API key required)
AI_PROVIDER=ollama

# Ollama Configuration (when AI_PROVIDER=ollama)
# For local Ollama: http://localhost:11434
# For remote Ollama: http://your-ollama-server:11434
OLLAMA_MODEL=llama3.1:latest

# Claude API Configuration (when AI_PROVIDER=claude)
# OPTIONAL: Only required if AI_PROVIDER=claude
# Get your API key from: https://console.anthropic.com/
# Note: Claude Max subscription users should use AI_PROVIDER=ollama instead
# CLAUDE_API_KEY=sk-ant-...

# OpenAI API Configuration (when AI_PROVIDER=openai)
# OPTIONAL: Only required if AI_PROVIDER=openai
# Get your API key from: https://platform.openai.com/api-keys
# OPENAI_API_KEY=sk-...

# ======================
# Speech Services (STT / TTS)
# ======================
# Speech-to-Text (STT) - Whisper via Speaches
# Set STT_ENABLED=true to enable speech-to-text transcription
# STT_BASE_URL is required when STT_ENABLED=true
STT_ENABLED=true
STT_BASE_URL=http://speaches:8000/v1
STT_MODEL=Systran/faster-whisper-large-v3-turbo
STT_LANGUAGE=en

# Text-to-Speech (TTS) - Default Engine (Kokoro)
# Set TTS_ENABLED=true to enable text-to-speech synthesis
# TTS_DEFAULT_URL is required when TTS_ENABLED=true
TTS_ENABLED=true
TTS_DEFAULT_URL=http://kokoro-tts:8880/v1
TTS_DEFAULT_VOICE=af_heart
TTS_DEFAULT_FORMAT=mp3

# Text-to-Speech (TTS) - Premium Engine (Chatterbox) - Optional
# Higher quality voice cloning engine, disabled by default
# TTS_PREMIUM_URL is required when TTS_PREMIUM_ENABLED=true
TTS_PREMIUM_ENABLED=false
TTS_PREMIUM_URL=http://chatterbox-tts:8881/v1

# Text-to-Speech (TTS) - Fallback Engine (Piper/OpenedAI) - Optional
# Lightweight fallback engine, disabled by default
# TTS_FALLBACK_URL is required when TTS_FALLBACK_ENABLED=true
TTS_FALLBACK_ENABLED=false
TTS_FALLBACK_URL=http://openedai-speech:8000/v1

# Speech Service Limits
# Maximum upload file size in bytes (default: 25MB)
SPEECH_MAX_UPLOAD_SIZE=25000000
# Maximum audio duration in seconds (default: 600 = 10 minutes)
SPEECH_MAX_DURATION_SECONDS=600
# Maximum text length for TTS in characters (default: 4096)
SPEECH_MAX_TEXT_LENGTH=4096

# ======================
# Mosaic Telemetry (Task Completion Tracking & Predictions)
# ======================
# Telemetry tracks task completion patterns to provide time estimates and predictions.
# Data is sent to the Mosaic Telemetry API (a separate service).
# Master switch: set to false to completely disable telemetry (no HTTP calls will be made)
MOSAIC_TELEMETRY_ENABLED=true

# URL of the telemetry API server
# For Docker Compose (internal): http://telemetry-api:8000
# For production/swarm: https://tel-api.mosaicstack.dev
MOSAIC_TELEMETRY_SERVER_URL=http://telemetry-api:8000

# API key for authenticating with the telemetry server
# Generate with: openssl rand -hex 32
MOSAIC_TELEMETRY_API_KEY=your-64-char-hex-api-key-here

# Unique identifier for this Mosaic Stack instance
# Generate with: uuidgen or python -c "import uuid; print(uuid.uuid4())"
MOSAIC_TELEMETRY_INSTANCE_ID=your-instance-uuid-here

# Dry run mode: set to true to log telemetry events to console instead of sending HTTP requests
# Useful for development and debugging telemetry payloads
MOSAIC_TELEMETRY_DRY_RUN=false

# ======================
# Matrix Dev Environment (docker-compose.matrix.yml overlay)
# ======================
# These variables configure the local Matrix dev environment.
# Only used when running: docker compose -f docker/docker-compose.yml -f docker/docker-compose.matrix.yml up
#
# Synapse homeserver
# SYNAPSE_CLIENT_PORT=8008
# SYNAPSE_FEDERATION_PORT=8448
# SYNAPSE_POSTGRES_DB=synapse
# SYNAPSE_POSTGRES_USER=synapse
# SYNAPSE_POSTGRES_PASSWORD=synapse_dev_password
#
# Element Web client
# ELEMENT_PORT=8501
#
# Matrix bridge connection (set after running docker/matrix/scripts/setup-bot.sh)
# MATRIX_HOMESERVER_URL=http://localhost:8008
# MATRIX_ACCESS_TOKEN=
# MATRIX_BOT_USER_ID=@mosaic-bot:localhost
# MATRIX_SERVER_NAME=localhost

# ======================
# Logging & Debugging
# ======================
LOG_LEVEL=info
DEBUG=false