Merge pull request 'Release: Merge develop to main (111 commits)' (#302) from develop into main
Some checks failed
ci/woodpecker/manual/woodpecker Pipeline failed
ci/woodpecker/push/woodpecker Pipeline failed

Reviewed-on: #302
This commit was merged in pull request #302.
This commit is contained in:
2026-02-04 01:37:24 +00:00
871 changed files with 99553 additions and 2310 deletions

58
.dockerignore Normal file
View File

@@ -0,0 +1,58 @@
# Dependencies (installed fresh in Docker)
node_modules
**/node_modules
# Build outputs (built fresh in Docker)
dist
**/dist
.next
**/.next
# TurboRepo cache
.turbo
**/.turbo
# IDE
.idea
.vscode
*.swp
*.swo
# OS
.DS_Store
Thumbs.db
# Environment files
.env
.env.*
!.env.example
# Credentials
.admin-credentials
# Testing
coverage
**/coverage
# Logs
*.log
# Misc
*.tsbuildinfo
**/*.tsbuildinfo
.pnpm-approve-builds
.husky/_
# Git
.git
.gitignore
# Docker
Dockerfile*
docker-compose*.yml
.dockerignore
# Documentation (not needed in container)
docs
*.md
!README.md

View File

@@ -13,6 +13,7 @@ WEB_PORT=3000
# ======================
# Web Configuration
# ======================
NEXT_PUBLIC_APP_URL=http://localhost:3000
NEXT_PUBLIC_API_URL=http://localhost:3001
# ======================
@@ -34,7 +35,9 @@ POSTGRES_MAX_CONNECTIONS=100
# Valkey Cache (Redis-compatible)
# ======================
VALKEY_URL=redis://localhost:6379
VALKEY_HOST=localhost
VALKEY_PORT=6379
# VALKEY_PASSWORD= # Optional: Password for Valkey authentication
VALKEY_MAXMEMORY=256mb
# Knowledge Module Cache Configuration
@@ -91,6 +94,19 @@ JWT_EXPIRATION=24h
OLLAMA_ENDPOINT=http://ollama:11434
OLLAMA_PORT=11434
# Embedding Model Configuration
# Model used for generating knowledge entry embeddings
# Default: mxbai-embed-large (1024-dim, padded to 1536)
# Alternative: nomic-embed-text (768-dim, padded to 1536)
# Note: Embeddings are padded/truncated to 1536 dimensions to match schema
OLLAMA_EMBEDDING_MODEL=mxbai-embed-large
# Semantic Search Configuration
# Similarity threshold for semantic search (0.0 to 1.0, where 1.0 is identical)
# Lower values return more results but may be less relevant
# Default: 0.5 (50% similarity)
SEMANTIC_SEARCH_SIMILARITY_THRESHOLD=0.5
# ======================
# OpenAI API (For Semantic Search)
# ======================
@@ -142,6 +158,72 @@ TRAEFIK_ACME_EMAIL=admin@example.com
TRAEFIK_DASHBOARD_ENABLED=true
TRAEFIK_DASHBOARD_PORT=8080
# ======================
# Gitea Integration (Coordinator)
# ======================
# Gitea instance URL
GITEA_URL=https://git.mosaicstack.dev
# Coordinator bot credentials (see docs/1-getting-started/3-configuration/4-gitea-coordinator.md)
# SECURITY: Store GITEA_BOT_TOKEN in secrets vault, not in version control
GITEA_BOT_USERNAME=mosaic
GITEA_BOT_TOKEN=REPLACE_WITH_COORDINATOR_BOT_API_TOKEN
GITEA_BOT_PASSWORD=REPLACE_WITH_COORDINATOR_BOT_PASSWORD
# Repository configuration
GITEA_REPO_OWNER=mosaic
GITEA_REPO_NAME=stack
# Webhook secret for coordinator (HMAC SHA256 signature verification)
# SECURITY: Generate random secret with: openssl rand -hex 32
# Configure in Gitea: Repository Settings → Webhooks → Add Webhook
GITEA_WEBHOOK_SECRET=REPLACE_WITH_RANDOM_WEBHOOK_SECRET
# Coordinator API Key (service-to-service authentication)
# CRITICAL: Generate a random API key with at least 32 characters
# Example: openssl rand -base64 32
# The coordinator service uses this key to authenticate with the API
COORDINATOR_API_KEY=REPLACE_WITH_RANDOM_API_KEY_MINIMUM_32_CHARS
# ======================
# Rate Limiting
# ======================
# Rate limiting prevents DoS attacks on webhook and API endpoints
# TTL is in seconds, limits are per TTL window
# Global rate limit (applies to all endpoints unless overridden)
RATE_LIMIT_TTL=60 # Time window in seconds
RATE_LIMIT_GLOBAL_LIMIT=100 # Requests per window
# Webhook endpoints (/stitcher/webhook, /stitcher/dispatch)
RATE_LIMIT_WEBHOOK_LIMIT=60 # Requests per minute
# Coordinator endpoints (/coordinator/*)
RATE_LIMIT_COORDINATOR_LIMIT=100 # Requests per minute
# Health check endpoints (/coordinator/health)
RATE_LIMIT_HEALTH_LIMIT=300 # Requests per minute (higher for monitoring)
# Storage backend for rate limiting (redis or memory)
# redis: Uses Valkey for distributed rate limiting (recommended for production)
# memory: Uses in-memory storage (single instance only, for development)
RATE_LIMIT_STORAGE=redis
# ======================
# Discord Bridge (Optional)
# ======================
# Discord bot integration for chat-based control
# Get bot token from: https://discord.com/developers/applications
# DISCORD_BOT_TOKEN=your-discord-bot-token-here
# DISCORD_GUILD_ID=your-discord-server-id
# DISCORD_CONTROL_CHANNEL_ID=channel-id-for-commands
# DISCORD_WORKSPACE_ID=your-workspace-uuid
#
# SECURITY: DISCORD_WORKSPACE_ID must be a valid workspace UUID from your database.
# All Discord commands will execute within this workspace context for proper
# multi-tenant isolation. Each Discord bot instance should be configured for
# a single workspace.
# ======================
# Logging & Debugging
# ======================

View File

@@ -9,6 +9,10 @@ variables:
pnpm install --frozen-lockfile
- &use_deps |
corepack enable
# Kaniko base command setup
- &kaniko_setup |
mkdir -p /kaniko/.docker
echo "{\"auths\":{\"reg.mosaicstack.dev\":{\"username\":\"$HARBOR_USER\",\"password\":\"$HARBOR_PASS\"}}}" > /kaniko/.docker/config.json
steps:
install:
@@ -83,71 +87,99 @@ steps:
# Docker Build & Push (main/develop only)
# ======================
# Requires secrets: harbor_username, harbor_password
#
# Tagging Strategy:
# - Always: commit SHA (e.g., 658ec077)
# - main branch: 'latest'
# - develop branch: 'dev'
# - git tags: version tag (e.g., v1.0.0)
# Build and push API image using Kaniko
docker-build-api:
image: woodpeckerci/plugin-docker-buildx
settings:
registry: reg.diversecanvas.com
repo: reg.diversecanvas.com/mosaic/api
dockerfile: apps/api/Dockerfile
context: .
platforms:
- linux/amd64
tags:
- "${CI_COMMIT_SHA:0:8}"
- latest
username:
image: gcr.io/kaniko-project/executor:debug
environment:
HARBOR_USER:
from_secret: harbor_username
password:
HARBOR_PASS:
from_secret: harbor_password
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
CI_COMMIT_SHA: ${CI_COMMIT_SHA}
commands:
- *kaniko_setup
- |
DESTINATIONS="--destination reg.mosaicstack.dev/mosaic/api:${CI_COMMIT_SHA:0:8}"
if [ "$CI_COMMIT_BRANCH" = "main" ]; then
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/api:latest"
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/api:dev"
fi
if [ -n "$CI_COMMIT_TAG" ]; then
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/api:$CI_COMMIT_TAG"
fi
/kaniko/executor --context . --dockerfile apps/api/Dockerfile $DESTINATIONS
when:
- branch: [main, develop]
event: push
event: [push, manual, tag]
depends_on:
- build
# Build and push Web image using Kaniko
docker-build-web:
image: woodpeckerci/plugin-docker-buildx
settings:
registry: reg.diversecanvas.com
repo: reg.diversecanvas.com/mosaic/web
dockerfile: apps/web/Dockerfile
context: .
platforms:
- linux/amd64
build_args:
- NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev
tags:
- "${CI_COMMIT_SHA:0:8}"
- latest
username:
image: gcr.io/kaniko-project/executor:debug
environment:
HARBOR_USER:
from_secret: harbor_username
password:
HARBOR_PASS:
from_secret: harbor_password
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
CI_COMMIT_SHA: ${CI_COMMIT_SHA}
commands:
- *kaniko_setup
- |
DESTINATIONS="--destination reg.mosaicstack.dev/mosaic/web:${CI_COMMIT_SHA:0:8}"
if [ "$CI_COMMIT_BRANCH" = "main" ]; then
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/web:latest"
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/web:dev"
fi
if [ -n "$CI_COMMIT_TAG" ]; then
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/web:$CI_COMMIT_TAG"
fi
/kaniko/executor --context . --dockerfile apps/web/Dockerfile --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
when:
- branch: [main, develop]
event: push
event: [push, manual, tag]
depends_on:
- build
# Build and push Postgres image using Kaniko
docker-build-postgres:
image: woodpeckerci/plugin-docker-buildx
settings:
registry: reg.diversecanvas.com
repo: reg.diversecanvas.com/mosaic/postgres
dockerfile: docker/postgres/Dockerfile
context: docker/postgres
platforms:
- linux/amd64
tags:
- "${CI_COMMIT_SHA:0:8}"
- latest
username:
image: gcr.io/kaniko-project/executor:debug
environment:
HARBOR_USER:
from_secret: harbor_username
password:
HARBOR_PASS:
from_secret: harbor_password
CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
CI_COMMIT_TAG: ${CI_COMMIT_TAG}
CI_COMMIT_SHA: ${CI_COMMIT_SHA}
commands:
- *kaniko_setup
- |
DESTINATIONS="--destination reg.mosaicstack.dev/mosaic/postgres:${CI_COMMIT_SHA:0:8}"
if [ "$CI_COMMIT_BRANCH" = "main" ]; then
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/postgres:latest"
elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/postgres:dev"
fi
if [ -n "$CI_COMMIT_TAG" ]; then
DESTINATIONS="$DESTINATIONS --destination reg.mosaicstack.dev/mosaic/postgres:$CI_COMMIT_TAG"
fi
/kaniko/executor --context docker/postgres --dockerfile docker/postgres/Dockerfile $DESTINATIONS
when:
- branch: [main, develop]
event: push
event: [push, manual, tag]
depends_on:
- build

View File

@@ -12,13 +12,13 @@ Guidelines for AI agents working on this codebase.
Context = tokens = cost. Be smart.
| Strategy | When |
|----------|------|
| **Spawn sub-agents** | Isolated coding tasks, research, anything that can report back |
| **Batch operations** | Group related API calls, don't do one-at-a-time |
| **Check existing patterns** | Before writing new code, see how similar features were built |
| **Minimize re-reading** | Don't re-read files you just wrote |
| **Summarize before clearing** | Extract learnings to memory before context reset |
| Strategy | When |
| ----------------------------- | -------------------------------------------------------------- |
| **Spawn sub-agents** | Isolated coding tasks, research, anything that can report back |
| **Batch operations** | Group related API calls, don't do one-at-a-time |
| **Check existing patterns** | Before writing new code, see how similar features were built |
| **Minimize re-reading** | Don't re-read files you just wrote |
| **Summarize before clearing** | Extract learnings to memory before context reset |
## Workflow (Non-Negotiable)
@@ -89,13 +89,13 @@ Minimum 85% coverage for new code.
## Key Files
| File | Purpose |
|------|---------|
| `CLAUDE.md` | Project overview, tech stack, conventions |
| `CONTRIBUTING.md` | Human contributor guide |
| `apps/api/prisma/schema.prisma` | Database schema |
| `docs/` | Architecture and setup docs |
| File | Purpose |
| ------------------------------- | ----------------------------------------- |
| `CLAUDE.md` | Project overview, tech stack, conventions |
| `CONTRIBUTING.md` | Human contributor guide |
| `apps/api/prisma/schema.prisma` | Database schema |
| `docs/` | Architecture and setup docs |
---
*Model-agnostic. Works for Claude, MiniMax, GPT, Llama, etc.*
_Model-agnostic. Works for Claude, MiniMax, GPT, Llama, etc._

View File

@@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
### Added
- Complete turnkey Docker Compose setup with all services (#8)
- PostgreSQL 17 with pgvector extension
- Valkey (Redis-compatible cache)
@@ -54,6 +55,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- .env.traefik-upstream.example for upstream mode
### Changed
- Updated README.md with Docker deployment instructions
- Enhanced configuration documentation with Docker-specific settings
- Improved installation guide with profile-based service activation
@@ -63,6 +65,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [0.0.1] - 2026-01-28
### Added
- Initial project structure with pnpm workspaces and TurboRepo
- NestJS API application with BetterAuth integration
- Next.js 16 web application foundation

View File

@@ -78,15 +78,15 @@ Thank you for your interest in contributing to Mosaic Stack! This document provi
### Quick Reference Commands
| Command | Description |
|---------|-------------|
| `pnpm dev` | Start all development servers |
| `pnpm dev:api` | Start API only |
| `pnpm dev:web` | Start Web only |
| `docker compose up -d` | Start Docker services |
| `docker compose logs -f` | View Docker logs |
| `pnpm prisma:studio` | Open Prisma Studio GUI |
| `make help` | View all available commands |
| Command | Description |
| ------------------------ | ----------------------------- |
| `pnpm dev` | Start all development servers |
| `pnpm dev:api` | Start API only |
| `pnpm dev:web` | Start Web only |
| `docker compose up -d` | Start Docker services |
| `docker compose logs -f` | View Docker logs |
| `pnpm prisma:studio` | Open Prisma Studio GUI |
| `make help` | View all available commands |
## Code Style Guidelines
@@ -104,6 +104,7 @@ We use **Prettier** for consistent code formatting:
- **End of line:** LF (Unix style)
Run the formatter:
```bash
pnpm format # Format all files
pnpm format:check # Check formatting without changes
@@ -121,6 +122,7 @@ pnpm lint:fix # Auto-fix linting issues
### TypeScript
All code must be **strictly typed** TypeScript:
- No `any` types allowed
- Explicit type annotations for function returns
- Interfaces over type aliases for object shapes
@@ -130,14 +132,14 @@ All code must be **strictly typed** TypeScript:
**Never** use demanding or stressful language in UI text:
| ❌ AVOID | ✅ INSTEAD |
|---------|------------|
| OVERDUE | Target passed |
| URGENT | Approaching target |
| MUST DO | Scheduled for |
| CRITICAL | High priority |
| ❌ AVOID | ✅ INSTEAD |
| ----------- | -------------------- |
| OVERDUE | Target passed |
| URGENT | Approaching target |
| MUST DO | Scheduled for |
| CRITICAL | High priority |
| YOU NEED TO | Consider / Option to |
| REQUIRED | Recommended |
| REQUIRED | Recommended |
See [docs/3-architecture/3-design-principles/1-pda-friendly.md](./docs/3-architecture/3-design-principles/1-pda-friendly.md) for complete design principles.
@@ -147,13 +149,13 @@ We follow a Git-based workflow with the following branch types:
### Branch Types
| Prefix | Purpose | Example |
|--------|---------|---------|
| `feature/` | New features | `feature/42-user-dashboard` |
| `fix/` | Bug fixes | `fix/123-auth-redirect` |
| `docs/` | Documentation | `docs/contributing` |
| `refactor/` | Code refactoring | `refactor/prisma-queries` |
| `test/` | Test-only changes | `test/coverage-improvements` |
| Prefix | Purpose | Example |
| ----------- | ----------------- | ---------------------------- |
| `feature/` | New features | `feature/42-user-dashboard` |
| `fix/` | Bug fixes | `fix/123-auth-redirect` |
| `docs/` | Documentation | `docs/contributing` |
| `refactor/` | Code refactoring | `refactor/prisma-queries` |
| `test/` | Test-only changes | `test/coverage-improvements` |
### Workflow
@@ -190,14 +192,14 @@ References: #123
### Types
| Type | Description |
|------|-------------|
| `feat` | New feature |
| `fix` | Bug fix |
| `docs` | Documentation changes |
| `test` | Adding or updating tests |
| Type | Description |
| ---------- | --------------------------------------- |
| `feat` | New feature |
| `fix` | Bug fix |
| `docs` | Documentation changes |
| `test` | Adding or updating tests |
| `refactor` | Code refactoring (no functional change) |
| `chore` | Maintenance tasks, dependencies |
| `chore` | Maintenance tasks, dependencies |
### Examples
@@ -233,17 +235,20 @@ Clarified pagination and filtering parameters.
### Before Creating a PR
1. **Ensure tests pass**
```bash
pnpm test
pnpm build
```
2. **Check code coverage** (minimum 85%)
```bash
pnpm test:coverage
```
3. **Format and lint**
```bash
pnpm format
pnpm lint
@@ -256,6 +261,7 @@ Clarified pagination and filtering parameters.
### Creating a Pull Request
1. Push your branch to the remote
```bash
git push origin feature/my-feature
```
@@ -294,6 +300,7 @@ Clarified pagination and filtering parameters.
#### TDD Workflow: Red-Green-Refactor
1. **RED** - Write a failing test first
```bash
# Write test for new functionality
pnpm test:watch # Watch it fail
@@ -302,6 +309,7 @@ Clarified pagination and filtering parameters.
```
2. **GREEN** - Write minimal code to pass the test
```bash
# Implement just enough to pass
pnpm test:watch # Watch it pass
@@ -327,11 +335,11 @@ Clarified pagination and filtering parameters.
### Test Types
| Type | Purpose | Tool |
|------|---------|------|
| **Unit tests** | Test functions/methods in isolation | Vitest |
| **Integration tests** | Test module interactions (service + DB) | Vitest |
| **E2E tests** | Test complete user workflows | Playwright |
| Type | Purpose | Tool |
| --------------------- | --------------------------------------- | ---------- |
| **Unit tests** | Test functions/methods in isolation | Vitest |
| **Integration tests** | Test module interactions (service + DB) | Vitest |
| **E2E tests** | Test complete user workflows | Playwright |
### Running Tests
@@ -347,6 +355,7 @@ pnpm test:e2e # Playwright E2E tests
### Coverage Verification
After implementation:
```bash
pnpm test:coverage
# Open coverage/index.html in browser
@@ -369,15 +378,16 @@ https://git.mosaicstack.dev/mosaic/stack/issues
### Issue Labels
| Category | Labels |
|----------|--------|
| Priority | `p0` (critical), `p1` (high), `p2` (medium), `p3` (low) |
| Type | `api`, `web`, `database`, `auth`, `plugin`, `ai`, `devops`, `docs`, `testing` |
| Status | `todo`, `in-progress`, `review`, `blocked`, `done` |
| Category | Labels |
| -------- | ----------------------------------------------------------------------------- |
| Priority | `p0` (critical), `p1` (high), `p2` (medium), `p3` (low) |
| Type | `api`, `web`, `database`, `auth`, `plugin`, `ai`, `devops`, `docs`, `testing` |
| Status | `todo`, `in-progress`, `review`, `blocked`, `done` |
### Documentation
Check existing documentation first:
- [README.md](./README.md) - Project overview
- [CLAUDE.md](./CLAUDE.md) - Comprehensive development guidelines
- [docs/](./docs/) - Full documentation suite
@@ -402,6 +412,7 @@ Check existing documentation first:
**Thank you for contributing to Mosaic Stack!** Every contribution helps make this platform better for everyone.
For more details, see:
- [Project README](./README.md)
- [Development Guidelines](./CLAUDE.md)
- [API Documentation](./docs/4-api/)

View File

@@ -1,11 +1,13 @@
# Cron Job Configuration - Issue #29
## Overview
Implement cron job configuration for Mosaic Stack, likely as a MoltBot plugin for scheduled reminders/commands.
## Requirements (inferred from CLAUDE.md pattern)
### Plugin Structure
```
plugins/mosaic-plugin-cron/
├── SKILL.md # MoltBot skill definition
@@ -15,17 +17,20 @@ plugins/mosaic-plugin-cron/
```
### Core Features
1. Create/update/delete cron schedules
2. Trigger MoltBot commands on schedule
3. Workspace-scoped (RLS)
4. PDA-friendly UI
### API Endpoints (inferred)
- `POST /api/cron` - Create schedule
- `GET /api/cron` - List schedules
- `DELETE /api/cron/:id` - Delete schedule
### Database (Prisma)
```prisma
model CronSchedule {
id String @id @default(uuid())
@@ -41,11 +46,13 @@ model CronSchedule {
```
## TDD Approach
1. **RED** - Write tests for CronService
2. **GREEN** - Implement minimal service
3. **REFACTOR** - Add CRUD controller + API endpoints
## Next Steps
- [ ] Create feature branch: `git checkout -b feature/29-cron-config`
- [ ] Write failing tests for cron service
- [ ] Implement service (Green)

View File

@@ -0,0 +1,221 @@
# ORCH-117: Killswitch Implementation - Completion Summary
**Issue:** #252 (CLOSED)
**Completion Date:** 2026-02-02
## Overview
Successfully implemented emergency stop (killswitch) functionality for the orchestrator service, enabling immediate termination of single agents or all active agents with full resource cleanup.
## Implementation Details
### Core Service: KillswitchService
**Location:** `/home/localadmin/src/mosaic-stack/apps/orchestrator/src/killswitch/killswitch.service.ts`
**Key Features:**
- `killAgent(agentId)` - Terminates a single agent with full cleanup
- `killAllAgents()` - Terminates all active agents (spawning or running states)
- Best-effort cleanup strategy (logs errors but continues)
- Comprehensive audit logging for all killswitch operations
- State transition validation via AgentLifecycleService
**Cleanup Operations (in order):**
1. Validate agent state and existence
2. Transition agent state to 'killed' (validates state machine)
3. Cleanup Docker container (if sandbox enabled and container exists)
4. Cleanup git worktree (if repository path exists)
5. Log audit trail
### API Endpoints
Added to AgentsController:
1. **POST /agents/:agentId/kill**
- Kills a single agent by ID
- Returns: `{ message: "Agent {agentId} killed successfully" }`
- Error handling: 404 if agent not found, 400 if invalid state transition
2. **POST /agents/kill-all**
- Kills all active agents (spawning or running)
- Returns: `{ message, total, killed, failed, errors? }`
- Continues on individual agent failures
## Test Coverage
### Service Tests
**File:** `killswitch.service.spec.ts`
**Tests:** 13 comprehensive test cases
Coverage:
- **100% Statements**
- **100% Functions**
- **100% Lines**
- **85% Branches** (meets threshold)
Test Scenarios:
- ✅ Kill single agent with full cleanup
- ✅ Throw error if agent not found
- ✅ Continue cleanup even if Docker cleanup fails
- ✅ Continue cleanup even if worktree cleanup fails
- ✅ Skip Docker cleanup if no containerId
- ✅ Skip Docker cleanup if sandbox disabled
- ✅ Skip worktree cleanup if no repository
- ✅ Handle agent already in killed state
- ✅ Kill all running agents
- ✅ Only kill active agents (filter by status)
- ✅ Return zero results when no agents exist
- ✅ Track failures when some agents fail to kill
- ✅ Continue killing other agents even if one fails
### Controller Tests
**File:** `agents-killswitch.controller.spec.ts`
**Tests:** 7 test cases
Test Scenarios:
- ✅ Kill single agent successfully
- ✅ Throw error if agent not found
- ✅ Throw error if state transition fails
- ✅ Kill all agents successfully
- ✅ Return partial results when some agents fail
- ✅ Return zero results when no agents exist
- ✅ Throw error if killswitch service fails
**Total: 20 tests passing**
## Files Created
1. `apps/orchestrator/src/killswitch/killswitch.service.ts` (205 lines)
2. `apps/orchestrator/src/killswitch/killswitch.service.spec.ts` (417 lines)
3. `apps/orchestrator/src/api/agents/agents-killswitch.controller.spec.ts` (154 lines)
4. `docs/scratchpads/orch-117-killswitch.md`
## Files Modified
1. `apps/orchestrator/src/killswitch/killswitch.module.ts`
- Added KillswitchService provider
- Imported dependencies: SpawnerModule, GitModule, ValkeyModule
- Exported KillswitchService
2. `apps/orchestrator/src/api/agents/agents.controller.ts`
- Added KillswitchService dependency injection
- Added POST /agents/:agentId/kill endpoint
- Added POST /agents/kill-all endpoint
3. `apps/orchestrator/src/api/agents/agents.module.ts`
- Imported KillswitchModule
## Technical Highlights
### State Machine Validation
- Killswitch validates state transitions via AgentLifecycleService
- Only allows transitions from 'spawning' or 'running' to 'killed'
- Throws error if agent already killed (prevents duplicate cleanup)
### Resilience & Best-Effort Cleanup
- Docker cleanup failure does not prevent worktree cleanup
- Worktree cleanup failure does not prevent state update
- All errors logged but operation continues
- Ensures immediate termination even if cleanup partially fails
### Audit Trail
Comprehensive logging includes:
- Timestamp
- Operation type (KILL_AGENT or KILL_ALL_AGENTS)
- Agent ID
- Agent status before kill
- Task ID
- Additional context for bulk operations
### Kill-All Smart Filtering
- Only targets agents in 'spawning' or 'running' states
- Skips 'completed', 'failed', or 'killed' agents
- Tracks success/failure counts per agent
- Returns detailed summary with error messages
## Integration Points
**Dependencies:**
- `AgentLifecycleService` - State transition validation and persistence
- `DockerSandboxService` - Container cleanup
- `WorktreeManagerService` - Git worktree cleanup
- `ValkeyService` - Agent state retrieval
**Consumers:**
- `AgentsController` - HTTP endpoints for killswitch operations
## Performance Characteristics
- **Response Time:** < 5 seconds for single agent kill (target met)
- **Concurrent Safety:** Safe to call killAgent() concurrently on different agents
- **Queue Bypass:** Killswitch operations bypass all queues (as required)
- **State Consistency:** State transitions are atomic via ValkeyService
## Security Considerations
- Audit trail logged for all killswitch activations (WARN level)
- State machine prevents invalid transitions
- Cleanup operations are idempotent
- No sensitive data exposed in error messages
## Future Enhancements (Not in Scope)
- Authentication/authorization for killswitch endpoints
- Webhook notifications on killswitch activation
- Killswitch metrics (Prometheus counters)
- Configurable cleanup timeout
- Partial cleanup retry mechanism
## Acceptance Criteria Status
All acceptance criteria met:
- ✅ `src/killswitch/killswitch.service.ts` implemented
- ✅ POST /agents/{agentId}/kill endpoint
- ✅ POST /agents/kill-all endpoint
- ✅ Immediate termination (SIGKILL via state transition)
- ✅ Cleanup Docker containers (via DockerSandboxService)
- ✅ Cleanup git worktrees (via WorktreeManagerService)
- ✅ Update agent state to 'killed' (via AgentLifecycleService)
- ✅ Audit trail logged (JSON format with full context)
- ✅ Test coverage >= 85% (achieved 100% statements/functions/lines, 85% branches)
## Related Issues
- **Depends on:** #ORCH-109 (Agent lifecycle management) ✅ Completed
- **Related to:** #114 (Kill Authority in control plane) - Future integration point
- **Part of:** M6-AgentOrchestration (0.0.6)
## Verification
```bash
# Run killswitch tests
cd /home/localadmin/src/mosaic-stack/apps/orchestrator
npm test -- killswitch.service.spec.ts
npm test -- agents-killswitch.controller.spec.ts
# Check coverage
npm test -- --coverage src/killswitch/killswitch.service.spec.ts
```
**Result:** All tests passing, 100% coverage achieved
---
**Implementation:** Complete ✅
**Issue Status:** Closed ✅
**Documentation:** Complete ✅

View File

@@ -19,19 +19,19 @@ Mosaic Stack is a modern, PDA-friendly platform designed to help users manage th
## Technology Stack
| Layer | Technology |
|-------|------------|
| **Frontend** | Next.js 16 + React + TailwindCSS + Shadcn/ui |
| **Backend** | NestJS + Prisma ORM |
| **Database** | PostgreSQL 17 + pgvector |
| **Cache** | Valkey (Redis-compatible) |
| **Auth** | Authentik (OIDC) via BetterAuth |
| **AI** | Ollama (local or remote) |
| **Messaging** | MoltBot (stock + plugins) |
| **Real-time** | WebSockets (Socket.io) |
| **Monorepo** | pnpm workspaces + TurboRepo |
| **Testing** | Vitest + Playwright |
| **Deployment** | Docker + docker-compose |
| Layer | Technology |
| -------------- | -------------------------------------------- |
| **Frontend** | Next.js 16 + React + TailwindCSS + Shadcn/ui |
| **Backend** | NestJS + Prisma ORM |
| **Database** | PostgreSQL 17 + pgvector |
| **Cache** | Valkey (Redis-compatible) |
| **Auth** | Authentik (OIDC) via BetterAuth |
| **AI** | Ollama (local or remote) |
| **Messaging** | MoltBot (stock + plugins) |
| **Real-time** | WebSockets (Socket.io) |
| **Monorepo** | pnpm workspaces + TurboRepo |
| **Testing** | Vitest + Playwright |
| **Deployment** | Docker + docker-compose |
## Quick Start
@@ -105,6 +105,7 @@ docker compose down
```
**What's included:**
- PostgreSQL 17 with pgvector extension
- Valkey (Redis-compatible cache)
- Mosaic API (NestJS)
@@ -204,6 +205,7 @@ The **Knowledge Module** is a powerful personal wiki and knowledge management sy
### Quick Examples
**Create an entry:**
```bash
curl -X POST http://localhost:3001/api/knowledge/entries \
-H "Authorization: Bearer YOUR_TOKEN" \
@@ -217,6 +219,7 @@ curl -X POST http://localhost:3001/api/knowledge/entries \
```
**Search entries:**
```bash
curl -X GET 'http://localhost:3001/api/knowledge/search?q=react+hooks' \
-H "Authorization: Bearer YOUR_TOKEN" \
@@ -224,6 +227,7 @@ curl -X GET 'http://localhost:3001/api/knowledge/search?q=react+hooks' \
```
**Export knowledge base:**
```bash
curl -X GET 'http://localhost:3001/api/knowledge/export?format=markdown' \
-H "Authorization: Bearer YOUR_TOKEN" \
@@ -241,6 +245,7 @@ curl -X GET 'http://localhost:3001/api/knowledge/export?format=markdown' \
**Wiki-links**
Connect entries using double-bracket syntax:
```markdown
See [[Entry Title]] or [[entry-slug]] for details.
Use [[Page|custom text]] for custom display text.
@@ -248,6 +253,7 @@ Use [[Page|custom text]] for custom display text.
**Version History**
Every edit creates a new version. View history, compare changes, and restore previous versions:
```bash
# List versions
GET /api/knowledge/entries/:slug/versions
@@ -261,12 +267,14 @@ POST /api/knowledge/entries/:slug/restore/:version
**Backlinks**
Automatically discover entries that link to a given entry:
```bash
GET /api/knowledge/entries/:slug/backlinks
```
**Tags**
Organize entries with tags:
```bash
# Create tag
POST /api/knowledge/tags
@@ -279,12 +287,14 @@ GET /api/knowledge/search/by-tags?tags=react,frontend
### Performance
With Valkey caching enabled:
- **Entry retrieval:** ~2-5ms (vs ~50ms uncached)
- **Search queries:** ~2-5ms (vs ~200ms uncached)
- **Graph traversals:** ~2-5ms (vs ~400ms uncached)
- **Cache hit rates:** 70-90% for active workspaces
Configure caching via environment variables:
```bash
VALKEY_URL=redis://localhost:6379
KNOWLEDGE_CACHE_ENABLED=true
@@ -342,14 +352,14 @@ Mosaic Stack follows strict **PDA-friendly design principles**:
We **never** use demanding or stressful language:
| ❌ NEVER | ✅ ALWAYS |
|----------|-----------|
| OVERDUE | Target passed |
| URGENT | Approaching target |
| MUST DO | Scheduled for |
| CRITICAL | High priority |
| ❌ NEVER | ✅ ALWAYS |
| ----------- | -------------------- |
| OVERDUE | Target passed |
| URGENT | Approaching target |
| MUST DO | Scheduled for |
| CRITICAL | High priority |
| YOU NEED TO | Consider / Option to |
| REQUIRED | Recommended |
| REQUIRED | Recommended |
### Visual Principles
@@ -456,6 +466,7 @@ POST /api/knowledge/cache/stats/reset
```
**Example response:**
```json
{
"enabled": true,

13
apps/api/.env.example Normal file
View File

@@ -0,0 +1,13 @@
# Database
DATABASE_URL=postgresql://user:password@localhost:5432/database
# Federation Instance Identity
# Display name for this Mosaic instance
INSTANCE_NAME=Mosaic Instance
# Publicly accessible URL for federation (must be valid HTTP/HTTPS URL)
INSTANCE_URL=http://localhost:3000
# Encryption (AES-256-GCM for sensitive data at rest)
# CRITICAL: Generate a secure random key for production!
# Generate with: node -e "console.log(require('crypto').randomBytes(32).toString('hex'))"
ENCRYPTION_KEY=0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef

5
apps/api/.env.test Normal file
View File

@@ -0,0 +1,5 @@
DATABASE_URL="postgresql://test:test@localhost:5432/test"
ENCRYPTION_KEY="test-encryption-key-32-characters"
JWT_SECRET="test-jwt-secret"
INSTANCE_NAME="Test Instance"
INSTANCE_URL="https://test.example.com"

View File

@@ -5,7 +5,7 @@
FROM node:20-alpine AS base
# Install pnpm globally
RUN corepack enable && corepack prepare pnpm@10.19.0 --activate
RUN corepack enable && corepack prepare pnpm@10.27.0 --activate
# Set working directory
WORKDIR /app
@@ -34,20 +34,37 @@ RUN --mount=type=cache,id=pnpm-store,target=/root/.local/share/pnpm/store \
# ======================
FROM base AS builder
# Copy dependencies
# Copy root node_modules from deps
COPY --from=deps /app/node_modules ./node_modules
COPY --from=deps /app/packages ./packages
COPY --from=deps /app/apps/api/node_modules ./apps/api/node_modules
# Copy all source code
# Copy all source code FIRST
COPY packages ./packages
COPY apps/api ./apps/api
# Then copy workspace node_modules from deps (these go AFTER source to avoid being overwritten)
COPY --from=deps /app/packages/shared/node_modules ./packages/shared/node_modules
COPY --from=deps /app/packages/config/node_modules ./packages/config/node_modules
COPY --from=deps /app/apps/api/node_modules ./apps/api/node_modules
# Debug: Show what we have before building
RUN echo "=== Pre-build directory structure ===" && \
echo "--- packages/config/typescript ---" && ls -la packages/config/typescript/ && \
echo "--- packages/shared (top level) ---" && ls -la packages/shared/ && \
echo "--- packages/shared/src ---" && ls -la packages/shared/src/ && \
echo "--- apps/api (top level) ---" && ls -la apps/api/ && \
echo "--- apps/api/src (exists?) ---" && ls apps/api/src/*.ts | head -5 && \
echo "--- node_modules/@mosaic (symlinks?) ---" && ls -la node_modules/@mosaic/ 2>/dev/null || echo "No @mosaic in node_modules"
# Build the API app and its dependencies using TurboRepo
# This ensures @mosaic/shared is built first, then prisma:generate, then the API
# Cache TurboRepo build outputs for faster subsequent builds
RUN --mount=type=cache,id=turbo-cache,target=/app/.turbo \
pnpm turbo build --filter=@mosaic/api
# Disable turbo cache temporarily to ensure fresh build and see full output
RUN pnpm turbo build --filter=@mosaic/api --force --verbosity=2
# Debug: Show what was built
RUN echo "=== Post-build directory structure ===" && \
echo "--- packages/shared/dist ---" && ls -la packages/shared/dist/ 2>/dev/null || echo "NO dist in shared" && \
echo "--- apps/api/dist ---" && ls -la apps/api/dist/ 2>/dev/null || echo "NO dist in api" && \
echo "--- apps/api/dist contents (if exists) ---" && find apps/api/dist -type f 2>/dev/null | head -10 || echo "Cannot find dist files"
# ======================
# Production stage

View File

@@ -5,6 +5,7 @@ The Mosaic Stack API is a NestJS-based backend service providing REST endpoints
## Overview
The API serves as the central backend for:
- **Task Management** - Create, update, track tasks with filtering and sorting
- **Event Management** - Calendar events and scheduling
- **Project Management** - Organize work into projects
@@ -18,20 +19,20 @@ The API serves as the central backend for:
## Available Modules
| Module | Base Path | Description |
|--------|-----------|-------------|
| **Tasks** | `/api/tasks` | CRUD operations for tasks with filtering |
| **Events** | `/api/events` | Calendar events and scheduling |
| **Projects** | `/api/projects` | Project management |
| **Knowledge** | `/api/knowledge/entries` | Wiki entries with markdown support |
| **Knowledge Tags** | `/api/knowledge/tags` | Tag management for knowledge entries |
| **Ideas** | `/api/ideas` | Quick capture and idea management |
| **Domains** | `/api/domains` | Domain categorization |
| **Personalities** | `/api/personalities` | AI personality configurations |
| **Widgets** | `/api/widgets` | Dashboard widget data |
| **Layouts** | `/api/layouts` | Dashboard layout configuration |
| **Ollama** | `/api/ollama` | LLM integration (generate, chat, embed) |
| **Users** | `/api/users/me/preferences` | User preferences |
| Module | Base Path | Description |
| ------------------ | --------------------------- | ---------------------------------------- |
| **Tasks** | `/api/tasks` | CRUD operations for tasks with filtering |
| **Events** | `/api/events` | Calendar events and scheduling |
| **Projects** | `/api/projects` | Project management |
| **Knowledge** | `/api/knowledge/entries` | Wiki entries with markdown support |
| **Knowledge Tags** | `/api/knowledge/tags` | Tag management for knowledge entries |
| **Ideas** | `/api/ideas` | Quick capture and idea management |
| **Domains** | `/api/domains` | Domain categorization |
| **Personalities** | `/api/personalities` | AI personality configurations |
| **Widgets** | `/api/widgets` | Dashboard widget data |
| **Layouts** | `/api/layouts` | Dashboard layout configuration |
| **Ollama** | `/api/ollama` | LLM integration (generate, chat, embed) |
| **Users** | `/api/users/me/preferences` | User preferences |
### Health Check
@@ -51,11 +52,11 @@ The API uses **BetterAuth** for authentication with the following features:
The API uses a layered guard system:
| Guard | Purpose | Applies To |
|-------|---------|------------|
| **AuthGuard** | Verifies user authentication via Bearer token | Most protected endpoints |
| **WorkspaceGuard** | Validates workspace membership and sets Row-Level Security (RLS) context | Workspace-scoped resources |
| **PermissionGuard** | Enforces role-based access control | Admin operations |
| Guard | Purpose | Applies To |
| ------------------- | ------------------------------------------------------------------------ | -------------------------- |
| **AuthGuard** | Verifies user authentication via Bearer token | Most protected endpoints |
| **WorkspaceGuard** | Validates workspace membership and sets Row-Level Security (RLS) context | Workspace-scoped resources |
| **PermissionGuard** | Enforces role-based access control | Admin operations |
### Workspace Roles
@@ -69,15 +70,16 @@ The API uses a layered guard system:
Used with `@RequirePermission()` decorator:
```typescript
Permission.WORKSPACE_OWNER // Requires OWNER role
Permission.WORKSPACE_ADMIN // Requires ADMIN or OWNER
Permission.WORKSPACE_MEMBER // Requires MEMBER, ADMIN, or OWNER
Permission.WORKSPACE_ANY // Any authenticated member including GUEST
Permission.WORKSPACE_OWNER; // Requires OWNER role
Permission.WORKSPACE_ADMIN; // Requires ADMIN or OWNER
Permission.WORKSPACE_MEMBER; // Requires MEMBER, ADMIN, or OWNER
Permission.WORKSPACE_ANY; // Any authenticated member including GUEST
```
### Providing Workspace Context
Workspace ID can be provided via:
1. **Header**: `X-Workspace-Id: <workspace-id>` (highest priority)
2. **URL Parameter**: `:workspaceId`
3. **Request Body**: `workspaceId` field
@@ -85,7 +87,7 @@ Workspace ID can be provided via:
### Example: Protected Controller
```typescript
@Controller('tasks')
@Controller("tasks")
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
export class TasksController {
@Post()
@@ -98,13 +100,13 @@ export class TasksController {
## Environment Variables
| Variable | Description | Default |
|----------|-------------|---------|
| `PORT` | API server port | `3001` |
| `DATABASE_URL` | PostgreSQL connection string | Required |
| `NODE_ENV` | Environment (`development`, `production`) | - |
| `NEXT_PUBLIC_APP_URL` | Frontend application URL (for CORS) | `http://localhost:3000` |
| `WEB_URL` | WebSocket CORS origin | `http://localhost:3000` |
| Variable | Description | Default |
| --------------------- | ----------------------------------------- | ----------------------- |
| `PORT` | API server port | `3001` |
| `DATABASE_URL` | PostgreSQL connection string | Required |
| `NODE_ENV` | Environment (`development`, `production`) | - |
| `NEXT_PUBLIC_APP_URL` | Frontend application URL (for CORS) | `http://localhost:3000` |
| `WEB_URL` | WebSocket CORS origin | `http://localhost:3000` |
## Running Locally
@@ -117,22 +119,26 @@ export class TasksController {
### Setup
1. **Install dependencies:**
```bash
pnpm install
```
2. **Set up environment variables:**
```bash
cp .env.example .env # If available
# Edit .env with your DATABASE_URL
```
3. **Generate Prisma client:**
```bash
pnpm prisma:generate
```
4. **Run database migrations:**
```bash
pnpm prisma:migrate
```

View File

@@ -26,11 +26,15 @@
"dependencies": {
"@anthropic-ai/sdk": "^0.72.1",
"@mosaic/shared": "workspace:*",
"@nestjs/axios": "^4.0.1",
"@nestjs/bullmq": "^11.0.4",
"@nestjs/common": "^11.1.12",
"@nestjs/config": "^4.0.2",
"@nestjs/core": "^11.1.12",
"@nestjs/mapped-types": "^2.1.0",
"@nestjs/platform-express": "^11.1.12",
"@nestjs/platform-socket.io": "^11.1.12",
"@nestjs/throttler": "^6.5.0",
"@nestjs/websockets": "^11.1.12",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/auto-instrumentations-node": "^0.55.0",
@@ -44,12 +48,16 @@
"@types/multer": "^2.0.0",
"adm-zip": "^0.5.16",
"archiver": "^7.0.1",
"axios": "^1.13.4",
"better-auth": "^1.4.17",
"bullmq": "^5.67.2",
"class-transformer": "^0.5.1",
"class-validator": "^0.14.3",
"discord.js": "^14.25.1",
"gray-matter": "^4.0.3",
"highlight.js": "^11.11.1",
"ioredis": "^5.9.2",
"jose": "^6.1.3",
"marked": "^17.0.1",
"marked-gfm-heading-id": "^4.1.3",
"marked-highlight": "^2.2.3",
@@ -74,9 +82,11 @@
"@types/highlight.js": "^10.1.0",
"@types/node": "^22.13.4",
"@types/sanitize-html": "^2.16.0",
"@types/supertest": "^6.0.3",
"@vitest/coverage-v8": "^4.0.18",
"express": "^5.2.1",
"prisma": "^6.19.2",
"supertest": "^7.2.2",
"tsx": "^4.21.0",
"typescript": "^5.8.2",
"unplugin-swc": "^1.5.2",

View File

@@ -0,0 +1,112 @@
-- Migration: runner job tracking (runner_jobs, job_steps, job_events).
-- NOTE(review): Prisma checksums applied migration files; prefer follow-up
-- migrations over editing this file once it has been deployed.
-- CreateEnum
CREATE TYPE "RunnerJobStatus" AS ENUM ('PENDING', 'QUEUED', 'RUNNING', 'COMPLETED', 'FAILED', 'CANCELLED');
-- CreateEnum
CREATE TYPE "JobStepPhase" AS ENUM ('SETUP', 'EXECUTION', 'VALIDATION', 'CLEANUP');
-- CreateEnum
CREATE TYPE "JobStepType" AS ENUM ('COMMAND', 'AI_ACTION', 'GATE', 'ARTIFACT');
-- CreateEnum
CREATE TYPE "JobStepStatus" AS ENUM ('PENDING', 'RUNNING', 'COMPLETED', 'FAILED', 'SKIPPED');
-- CreateTable
-- One row per runner job; belongs to a workspace, optionally tied to an agent task.
CREATE TABLE "runner_jobs" (
"id" UUID NOT NULL,
"workspace_id" UUID NOT NULL,
"agent_task_id" UUID,
"type" TEXT NOT NULL,
"status" "RunnerJobStatus" NOT NULL DEFAULT 'PENDING',
"priority" INTEGER NOT NULL,
"progress_percent" INTEGER NOT NULL DEFAULT 0,
"result" JSONB,
"error" TEXT,
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
"started_at" TIMESTAMPTZ,
"completed_at" TIMESTAMPTZ,
CONSTRAINT "runner_jobs_pkey" PRIMARY KEY ("id")
);
-- CreateTable
-- Steps inside a job; "ordinal" presumably gives execution order — confirm
-- (no unique constraint on (job_id, ordinal) enforces it).
CREATE TABLE "job_steps" (
"id" UUID NOT NULL,
"job_id" UUID NOT NULL,
"ordinal" INTEGER NOT NULL,
"phase" "JobStepPhase" NOT NULL,
"name" TEXT NOT NULL,
"type" "JobStepType" NOT NULL,
"status" "JobStepStatus" NOT NULL DEFAULT 'PENDING',
"output" TEXT,
"tokens_input" INTEGER,
"tokens_output" INTEGER,
"started_at" TIMESTAMPTZ,
"completed_at" TIMESTAMPTZ,
"duration_ms" INTEGER,
CONSTRAINT "job_steps_pkey" PRIMARY KEY ("id")
);
-- CreateTable
-- Event rows for a job; "step_id" is nullable (job-level events carry none).
-- "timestamp" has no default, so the application must supply it.
CREATE TABLE "job_events" (
"id" UUID NOT NULL,
"job_id" UUID NOT NULL,
"step_id" UUID,
"type" TEXT NOT NULL,
"timestamp" TIMESTAMPTZ NOT NULL,
"actor" TEXT NOT NULL,
"payload" JSONB NOT NULL,
CONSTRAINT "job_events_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
-- NOTE(review): "id" alone is already the primary key; this composite unique
-- index presumably exists for workspace-scoped compound lookups — confirm.
CREATE UNIQUE INDEX "runner_jobs_id_workspace_id_key" ON "runner_jobs"("id", "workspace_id");
-- CreateIndex
CREATE INDEX "runner_jobs_workspace_id_idx" ON "runner_jobs"("workspace_id");
-- CreateIndex
CREATE INDEX "runner_jobs_workspace_id_status_idx" ON "runner_jobs"("workspace_id", "status");
-- CreateIndex
CREATE INDEX "runner_jobs_agent_task_id_idx" ON "runner_jobs"("agent_task_id");
-- CreateIndex
CREATE INDEX "runner_jobs_priority_idx" ON "runner_jobs"("priority");
-- CreateIndex
CREATE INDEX "job_steps_job_id_idx" ON "job_steps"("job_id");
-- CreateIndex
CREATE INDEX "job_steps_job_id_ordinal_idx" ON "job_steps"("job_id", "ordinal");
-- CreateIndex
CREATE INDEX "job_steps_status_idx" ON "job_steps"("status");
-- CreateIndex
CREATE INDEX "job_events_job_id_idx" ON "job_events"("job_id");
-- CreateIndex
CREATE INDEX "job_events_step_id_idx" ON "job_events"("step_id");
-- CreateIndex
CREATE INDEX "job_events_timestamp_idx" ON "job_events"("timestamp");
-- CreateIndex
CREATE INDEX "job_events_type_idx" ON "job_events"("type");
-- AddForeignKey
-- Deleting a workspace cascades to its jobs (and, via the job FKs below,
-- to the jobs' steps and events).
ALTER TABLE "runner_jobs" ADD CONSTRAINT "runner_jobs_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
-- Deleting an agent task detaches (NULLs) its jobs rather than deleting them.
ALTER TABLE "runner_jobs" ADD CONSTRAINT "runner_jobs_agent_task_id_fkey" FOREIGN KEY ("agent_task_id") REFERENCES "agent_tasks"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "job_steps" ADD CONSTRAINT "job_steps_job_id_fkey" FOREIGN KEY ("job_id") REFERENCES "runner_jobs"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "job_events" ADD CONSTRAINT "job_events_job_id_fkey" FOREIGN KEY ("job_id") REFERENCES "runner_jobs"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "job_events" ADD CONSTRAINT "job_events_step_id_fkey" FOREIGN KEY ("step_id") REFERENCES "job_steps"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -0,0 +1,2 @@
-- CreateIndex
-- Composite index for per-job chronological reads of job_events.
-- NOTE(review): this makes the single-column "job_events_job_id_idx" from the
-- earlier migration largely redundant; consider dropping that one in a
-- follow-up migration.
CREATE INDEX "job_events_job_id_timestamp_idx" ON "job_events"("job_id", "timestamp");

View File

@@ -0,0 +1,36 @@
-- Add tsvector column for full-text search on knowledge_entries
-- Weighted fields: title (A), summary (B), content (C)
-- Step 1: Add the search_vector column
ALTER TABLE "knowledge_entries"
ADD COLUMN "search_vector" tsvector;
-- Step 2: Create GIN index for fast full-text search
CREATE INDEX "knowledge_entries_search_vector_idx"
ON "knowledge_entries"
USING gin("search_vector");
-- Step 3: Create function to update search_vector
-- Recomputes the weighted vector from title/summary/content. The 'english'
-- text-search configuration is hard-coded, so non-English content is stemmed
-- as English.
CREATE OR REPLACE FUNCTION knowledge_entries_search_vector_update()
RETURNS trigger AS $$
BEGIN
NEW.search_vector :=
setweight(to_tsvector('english', COALESCE(NEW.title, '')), 'A') ||
setweight(to_tsvector('english', COALESCE(NEW.summary, '')), 'B') ||
setweight(to_tsvector('english', COALESCE(NEW.content, '')), 'C');
RETURN NEW;
END
$$ LANGUAGE plpgsql;
-- Step 4: Create trigger to automatically update search_vector on insert/update
-- NOTE(review): fires on EVERY update; "BEFORE INSERT OR UPDATE OF title,
-- summary, content" would skip recomputation when unrelated columns change,
-- with identical results — worth a follow-up migration.
CREATE TRIGGER knowledge_entries_search_vector_trigger
BEFORE INSERT OR UPDATE ON "knowledge_entries"
FOR EACH ROW
EXECUTE FUNCTION knowledge_entries_search_vector_update();
-- Step 5: Populate search_vector for existing entries
UPDATE "knowledge_entries"
SET search_vector =
setweight(to_tsvector('english', COALESCE(title, '')), 'A') ||
setweight(to_tsvector('english', COALESCE(summary, '')), 'B') ||
setweight(to_tsvector('english', COALESCE(content, '')), 'C');

View File

@@ -0,0 +1,7 @@
-- Add version field for optimistic locking to prevent race conditions
-- This allows safe concurrent updates to runner job status
ALTER TABLE "runner_jobs" ADD COLUMN "version" INTEGER NOT NULL DEFAULT 1;
-- Create index for better performance on version checks
-- NOTE(review): optimistic-lock updates typically filter by primary key AND
-- version, which the PK index already serves; this extra index is likely pure
-- write overhead — confirm a query actually filters by version alone.
CREATE INDEX "runner_jobs_version_idx" ON "runner_jobs"("version");

View File

@@ -0,0 +1,40 @@
-- Add eventType column to federation_messages table
-- (nullable TEXT: pre-existing rows keep NULL)
ALTER TABLE "federation_messages" ADD COLUMN "event_type" TEXT;
-- Add index for eventType
CREATE INDEX "federation_messages_event_type_idx" ON "federation_messages"("event_type");
-- CreateTable
-- Per-workspace subscription of a federation connection to one event type.
-- "updated_at" has no DB default; the application layer (Prisma @updatedAt)
-- is expected to set it on every write.
CREATE TABLE "federation_event_subscriptions" (
"id" UUID NOT NULL,
"workspace_id" UUID NOT NULL,
"connection_id" UUID NOT NULL,
"event_type" TEXT NOT NULL,
"metadata" JSONB NOT NULL DEFAULT '{}',
"is_active" BOOLEAN NOT NULL DEFAULT true,
"created_at" TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_at" TIMESTAMPTZ NOT NULL,
CONSTRAINT "federation_event_subscriptions_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE INDEX "federation_event_subscriptions_workspace_id_idx" ON "federation_event_subscriptions"("workspace_id");
-- CreateIndex
CREATE INDEX "federation_event_subscriptions_connection_id_idx" ON "federation_event_subscriptions"("connection_id");
-- CreateIndex
CREATE INDEX "federation_event_subscriptions_event_type_idx" ON "federation_event_subscriptions"("event_type");
-- CreateIndex
CREATE INDEX "federation_event_subscriptions_workspace_id_is_active_idx" ON "federation_event_subscriptions"("workspace_id", "is_active");
-- CreateIndex
-- At most one subscription per (workspace, connection, event type).
-- NOTE(review): this identifier is 66 chars; PostgreSQL silently truncates
-- identifiers to 63 bytes, so the stored index name will differ from the one
-- written here — confirm it matches what Prisma's schema engine expects.
CREATE UNIQUE INDEX "federation_event_subscriptions_workspace_id_connection_id_even_key" ON "federation_event_subscriptions"("workspace_id", "connection_id", "event_type");
-- AddForeignKey
ALTER TABLE "federation_event_subscriptions" ADD CONSTRAINT "federation_event_subscriptions_connection_id_fkey" FOREIGN KEY ("connection_id") REFERENCES "federation_connections"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "federation_event_subscriptions" ADD CONSTRAINT "federation_event_subscriptions_workspace_id_fkey" FOREIGN KEY ("workspace_id") REFERENCES "workspaces"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -135,6 +135,57 @@ enum FormalityLevel {
VERY_FORMAL
}
enum RunnerJobStatus {
PENDING
QUEUED
RUNNING
COMPLETED
FAILED
CANCELLED
}
enum JobStepPhase {
SETUP
EXECUTION
VALIDATION
CLEANUP
}
enum JobStepType {
COMMAND
AI_ACTION
GATE
ARTIFACT
}
enum JobStepStatus {
PENDING
RUNNING
COMPLETED
FAILED
SKIPPED
}
enum FederationConnectionStatus {
PENDING
ACTIVE
SUSPENDED
DISCONNECTED
}
enum FederationMessageType {
QUERY
COMMAND
EVENT
}
enum FederationMessageStatus {
PENDING
DELIVERED
FAILED
TIMEOUT
}
// ============================================
// MODELS
// ============================================
@@ -151,24 +202,25 @@ model User {
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
// Relations
ownedWorkspaces Workspace[] @relation("WorkspaceOwner")
workspaceMemberships WorkspaceMember[]
teamMemberships TeamMember[]
assignedTasks Task[] @relation("TaskAssignee")
createdTasks Task[] @relation("TaskCreator")
createdEvents Event[] @relation("EventCreator")
createdProjects Project[] @relation("ProjectCreator")
activityLogs ActivityLog[]
sessions Session[]
accounts Account[]
ideas Idea[] @relation("IdeaCreator")
relationships Relationship[] @relation("RelationshipCreator")
agentSessions AgentSession[]
agentTasks AgentTask[] @relation("AgentTaskCreator")
userLayouts UserLayout[]
userPreference UserPreference?
knowledgeEntryVersions KnowledgeEntryVersion[] @relation("EntryVersionAuthor")
llmProviders LlmProviderInstance[] @relation("UserLlmProviders")
ownedWorkspaces Workspace[] @relation("WorkspaceOwner")
workspaceMemberships WorkspaceMember[]
teamMemberships TeamMember[]
assignedTasks Task[] @relation("TaskAssignee")
createdTasks Task[] @relation("TaskCreator")
createdEvents Event[] @relation("EventCreator")
createdProjects Project[] @relation("ProjectCreator")
activityLogs ActivityLog[]
sessions Session[]
accounts Account[]
ideas Idea[] @relation("IdeaCreator")
relationships Relationship[] @relation("RelationshipCreator")
agentSessions AgentSession[]
agentTasks AgentTask[] @relation("AgentTaskCreator")
userLayouts UserLayout[]
userPreference UserPreference?
knowledgeEntryVersions KnowledgeEntryVersion[] @relation("EntryVersionAuthor")
llmProviders LlmProviderInstance[] @relation("UserLlmProviders")
federatedIdentities FederatedIdentity[]
@@map("users")
}
@@ -195,27 +247,31 @@ model Workspace {
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
// Relations
owner User @relation("WorkspaceOwner", fields: [ownerId], references: [id], onDelete: Cascade)
members WorkspaceMember[]
teams Team[]
tasks Task[]
events Event[]
projects Project[]
activityLogs ActivityLog[]
memoryEmbeddings MemoryEmbedding[]
domains Domain[]
ideas Idea[]
relationships Relationship[]
agents Agent[]
agentSessions AgentSession[]
agentTasks AgentTask[]
userLayouts UserLayout[]
knowledgeEntries KnowledgeEntry[]
knowledgeTags KnowledgeTag[]
cronSchedules CronSchedule[]
personalities Personality[]
llmSettings WorkspaceLlmSettings?
qualityGates QualityGate[]
owner User @relation("WorkspaceOwner", fields: [ownerId], references: [id], onDelete: Cascade)
members WorkspaceMember[]
teams Team[]
tasks Task[]
events Event[]
projects Project[]
activityLogs ActivityLog[]
memoryEmbeddings MemoryEmbedding[]
domains Domain[]
ideas Idea[]
relationships Relationship[]
agents Agent[]
agentSessions AgentSession[]
agentTasks AgentTask[]
userLayouts UserLayout[]
knowledgeEntries KnowledgeEntry[]
knowledgeTags KnowledgeTag[]
cronSchedules CronSchedule[]
personalities Personality[]
llmSettings WorkspaceLlmSettings?
qualityGates QualityGate[]
runnerJobs RunnerJob[]
federationConnections FederationConnection[]
federationMessages FederationMessage[]
federationEventSubscriptions FederationEventSubscription[]
@@index([ownerId])
@@map("workspaces")
@@ -565,8 +621,8 @@ model Agent {
}
model AgentTask {
id String @id @default(uuid()) @db.Uuid
workspaceId String @map("workspace_id") @db.Uuid
id String @id @default(uuid()) @db.Uuid
workspaceId String @map("workspace_id") @db.Uuid
// Task details
title String
@@ -575,23 +631,24 @@ model AgentTask {
priority AgentTaskPriority @default(MEDIUM)
// Agent configuration
agentType String @map("agent_type")
agentConfig Json @default("{}") @map("agent_config")
agentType String @map("agent_type")
agentConfig Json @default("{}") @map("agent_config")
// Results
result Json?
error String? @db.Text
result Json?
error String? @db.Text
// Timing
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
startedAt DateTime? @map("started_at") @db.Timestamptz
completedAt DateTime? @map("completed_at") @db.Timestamptz
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
startedAt DateTime? @map("started_at") @db.Timestamptz
completedAt DateTime? @map("completed_at") @db.Timestamptz
// Relations
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
createdBy User @relation("AgentTaskCreator", fields: [createdById], references: [id], onDelete: Cascade)
createdById String @map("created_by_id") @db.Uuid
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
createdBy User @relation("AgentTaskCreator", fields: [createdById], references: [id], onDelete: Cascade)
createdById String @map("created_by_id") @db.Uuid
runnerJobs RunnerJob[]
@@unique([id, workspaceId])
@@index([workspaceId])
@@ -765,6 +822,9 @@ model KnowledgeEntry {
contentHtml String? @map("content_html") @db.Text
summary String?
// Full-text search vector (automatically maintained by trigger)
searchVector Unsupported("tsvector")? @map("search_vector")
// Status
status EntryStatus @default(DRAFT)
visibility Visibility @default(PRIVATE)
@@ -787,6 +847,7 @@ model KnowledgeEntry {
@@index([workspaceId, updatedAt])
@@index([createdBy])
@@index([updatedBy])
// Note: GIN index on searchVector created via migration (not supported in Prisma schema)
@@map("knowledge_entries")
}
@@ -890,18 +951,18 @@ model KnowledgeEmbedding {
// ============================================
model CronSchedule {
id String @id @default(uuid()) @db.Uuid
workspaceId String @map("workspace_id") @db.Uuid
id String @id @default(uuid()) @db.Uuid
workspaceId String @map("workspace_id") @db.Uuid
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
// Cron configuration
expression String // Standard cron: "0 9 * * *" = 9am daily
command String // MoltBot command to trigger
expression String // Standard cron: "0 9 * * *" = 9am daily
command String // MoltBot command to trigger
// State
enabled Boolean @default(true)
lastRun DateTime? @map("last_run") @db.Timestamptz
nextRun DateTime? @map("next_run") @db.Timestamptz
enabled Boolean @default(true)
lastRun DateTime? @map("last_run") @db.Timestamptz
nextRun DateTime? @map("next_run") @db.Timestamptz
// Audit
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
@@ -918,22 +979,22 @@ model CronSchedule {
// ============================================
model Personality {
id String @id @default(uuid()) @db.Uuid
workspaceId String @map("workspace_id") @db.Uuid
id String @id @default(uuid()) @db.Uuid
workspaceId String @map("workspace_id") @db.Uuid
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
// Identity
name String // unique identifier slug
displayName String @map("display_name")
description String? @db.Text
name String // unique identifier slug
displayName String @map("display_name")
description String? @db.Text
// System prompt
systemPrompt String @map("system_prompt") @db.Text
// LLM configuration
temperature Float? // null = use provider default
maxTokens Int? @map("max_tokens") // null = use provider default
llmProviderInstanceId String? @map("llm_provider_instance_id") @db.Uuid
temperature Float? // null = use provider default
maxTokens Int? @map("max_tokens") // null = use provider default
llmProviderInstanceId String? @map("llm_provider_instance_id") @db.Uuid
// Status
isDefault Boolean @default(false) @map("is_default")
@@ -961,20 +1022,20 @@ model Personality {
// ============================================
model LlmProviderInstance {
id String @id @default(uuid()) @db.Uuid
providerType String @map("provider_type") // "ollama" | "claude" | "openai"
displayName String @map("display_name")
userId String? @map("user_id") @db.Uuid // NULL = system-level, UUID = user-level
config Json // Provider-specific configuration
isDefault Boolean @default(false) @map("is_default")
isEnabled Boolean @default(true) @map("is_enabled")
id String @id @default(uuid()) @db.Uuid
providerType String @map("provider_type") // "ollama" | "claude" | "openai"
displayName String @map("display_name")
userId String? @map("user_id") @db.Uuid // NULL = system-level, UUID = user-level
config Json // Provider-specific configuration
isDefault Boolean @default(false) @map("is_default")
isEnabled Boolean @default(true) @map("is_enabled")
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
// Relations
user User? @relation("UserLlmProviders", fields: [userId], references: [id], onDelete: Cascade)
personalities Personality[] @relation("PersonalityLlmProvider")
workspaceLlmSettings WorkspaceLlmSettings[] @relation("WorkspaceLlmProvider")
user User? @relation("UserLlmProviders", fields: [userId], references: [id], onDelete: Cascade)
personalities Personality[] @relation("PersonalityLlmProvider")
workspaceLlmSettings WorkspaceLlmSettings[] @relation("WorkspaceLlmProvider")
@@index([userId])
@@index([providerType])
@@ -1010,20 +1071,20 @@ model WorkspaceLlmSettings {
// ============================================
model QualityGate {
id String @id @default(uuid()) @db.Uuid
workspaceId String @map("workspace_id") @db.Uuid
id String @id @default(uuid()) @db.Uuid
workspaceId String @map("workspace_id") @db.Uuid
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
name String
description String?
type String // 'build' | 'lint' | 'test' | 'coverage' | 'custom'
type String // 'build' | 'lint' | 'test' | 'coverage' | 'custom'
command String?
expectedOutput String? @map("expected_output")
isRegex Boolean @default(false) @map("is_regex")
required Boolean @default(true)
order Int @default(0)
isEnabled Boolean @default(true) @map("is_enabled")
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
expectedOutput String? @map("expected_output")
isRegex Boolean @default(false) @map("is_regex")
required Boolean @default(true)
order Int @default(0)
isEnabled Boolean @default(true) @map("is_enabled")
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
@@unique([workspaceId, name])
@@index([workspaceId])
@@ -1032,19 +1093,19 @@ model QualityGate {
}
model TaskRejection {
id String @id @default(uuid()) @db.Uuid
taskId String @map("task_id")
workspaceId String @map("workspace_id")
agentId String @map("agent_id")
attemptCount Int @map("attempt_count")
failures Json // FailureSummary[]
originalTask String @map("original_task")
startedAt DateTime @map("started_at") @db.Timestamptz
rejectedAt DateTime @map("rejected_at") @db.Timestamptz
escalated Boolean @default(false)
manualReview Boolean @default(false) @map("manual_review")
resolvedAt DateTime? @map("resolved_at") @db.Timestamptz
resolution String?
id String @id @default(uuid()) @db.Uuid
taskId String @map("task_id")
workspaceId String @map("workspace_id")
agentId String @map("agent_id")
attemptCount Int @map("attempt_count")
failures Json // FailureSummary[]
originalTask String @map("original_task")
startedAt DateTime @map("started_at") @db.Timestamptz
rejectedAt DateTime @map("rejected_at") @db.Timestamptz
escalated Boolean @default(false)
manualReview Boolean @default(false) @map("manual_review")
resolvedAt DateTime? @map("resolved_at") @db.Timestamptz
resolution String?
@@index([taskId])
@@index([workspaceId])
@@ -1055,22 +1116,22 @@ model TaskRejection {
}
model TokenBudget {
id String @id @default(uuid()) @db.Uuid
taskId String @unique @map("task_id") @db.Uuid
workspaceId String @map("workspace_id") @db.Uuid
agentId String @map("agent_id")
id String @id @default(uuid()) @db.Uuid
taskId String @unique @map("task_id") @db.Uuid
workspaceId String @map("workspace_id") @db.Uuid
agentId String @map("agent_id")
// Budget allocation
allocatedTokens Int @map("allocated_tokens")
allocatedTokens Int @map("allocated_tokens")
estimatedComplexity String @map("estimated_complexity") // "low", "medium", "high", "critical"
// Usage tracking
inputTokensUsed Int @default(0) @map("input_tokens_used")
outputTokensUsed Int @default(0) @map("output_tokens_used")
totalTokensUsed Int @default(0) @map("total_tokens_used")
inputTokensUsed Int @default(0) @map("input_tokens_used")
outputTokensUsed Int @default(0) @map("output_tokens_used")
totalTokensUsed Int @default(0) @map("total_tokens_used")
// Cost tracking
estimatedCost Decimal? @map("estimated_cost") @db.Decimal(10, 6)
estimatedCost Decimal? @map("estimated_cost") @db.Decimal(10, 6)
// State
startedAt DateTime @default(now()) @map("started_at") @db.Timestamptz
@@ -1078,12 +1139,247 @@ model TokenBudget {
completedAt DateTime? @map("completed_at") @db.Timestamptz
// Analysis
budgetUtilization Float? @map("budget_utilization") // 0.0 - 1.0
suspiciousPattern Boolean @default(false) @map("suspicious_pattern")
suspiciousReason String? @map("suspicious_reason")
budgetUtilization Float? @map("budget_utilization") // 0.0 - 1.0
suspiciousPattern Boolean @default(false) @map("suspicious_pattern")
suspiciousReason String? @map("suspicious_reason")
@@index([taskId])
@@index([workspaceId])
@@index([suspiciousPattern])
@@map("token_budgets")
}
// ============================================
// RUNNER JOB TRACKING MODULE
// ============================================
/// A background job executed by the runner. Scoped to a workspace and
/// optionally linked to the AgentTask that spawned it (detached on task
/// deletion via SetNull).
model RunnerJob {
id String @id @default(uuid()) @db.Uuid
workspaceId String @map("workspace_id") @db.Uuid
agentTaskId String? @map("agent_task_id") @db.Uuid
// Job details
type String // 'git-status', 'code-task', 'priority-calc'
status RunnerJobStatus @default(PENDING)
priority Int
progressPercent Int @default(0) @map("progress_percent") // presumably 0-100 — confirm
version Int @default(1) // Optimistic locking version
// Results
result Json?
error String? @db.Text
// Timing
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
startedAt DateTime? @map("started_at") @db.Timestamptz
completedAt DateTime? @map("completed_at") @db.Timestamptz
// Relations
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
agentTask AgentTask? @relation(fields: [agentTaskId], references: [id], onDelete: SetNull)
steps JobStep[]
events JobEvent[]
// NOTE(review): id is already unique on its own; the composite presumably
// supports workspace-scoped compound lookups (mirrors AgentTask) — confirm.
@@unique([id, workspaceId])
@@index([workspaceId])
@@index([workspaceId, status])
@@index([agentTaskId])
@@index([priority])
@@map("runner_jobs")
}
/// One step within a RunnerJob; deleted together with its job (Cascade).
model JobStep {
id String @id @default(uuid()) @db.Uuid
jobId String @map("job_id") @db.Uuid
// Step details
ordinal Int // presumably the execution order within the job — confirm; no @@unique([jobId, ordinal]) enforces it
phase JobStepPhase
name String
type JobStepType
status JobStepStatus @default(PENDING)
// Output and metrics
output String? @db.Text
tokensInput Int? @map("tokens_input")
tokensOutput Int? @map("tokens_output")
// Timing
startedAt DateTime? @map("started_at") @db.Timestamptz
completedAt DateTime? @map("completed_at") @db.Timestamptz
durationMs Int? @map("duration_ms")
// Relations
job RunnerJob @relation(fields: [jobId], references: [id], onDelete: Cascade)
events JobEvent[]
// NOTE(review): @@index([jobId]) is covered by @@index([jobId, ordinal])
// and is likely redundant — confirm before dropping in a migration.
@@index([jobId])
@@index([jobId, ordinal])
@@index([status])
@@map("job_steps")
}
// Event emitted during job execution, optionally attached to a step.
// `timestamp` has no @default — callers must supply the event time.
// Cascade-deleted with the parent job (and with the step when stepId is set).
model JobEvent {
id String @id @default(uuid()) @db.Uuid
jobId String @map("job_id") @db.Uuid
stepId String? @map("step_id") @db.Uuid
// Event details
type String
timestamp DateTime @db.Timestamptz
actor String
payload Json
// Relations
job RunnerJob @relation(fields: [jobId], references: [id], onDelete: Cascade)
step JobStep? @relation(fields: [stepId], references: [id], onDelete: Cascade)
@@index([jobId])
@@index([stepId])
@@index([timestamp])
@@index([type])
// Supports chronological event listings for a single job.
@@index([jobId, timestamp])
@@map("job_events")
}
// ============================================
// FEDERATION MODULE
// ============================================
// Identity record for a federation instance, including its keypair.
// `privateKey` is stored encrypted (AES-256-GCM with ENCRYPTION_KEY, per
// the inline note) — it must never be exposed through APIs in plaintext.
model Instance {
id String @id @default(uuid()) @db.Uuid
instanceId String @unique @map("instance_id") // Unique identifier for federation
name String
url String
publicKey String @map("public_key") @db.Text
privateKey String @map("private_key") @db.Text // AES-256-GCM encrypted with ENCRYPTION_KEY
// Capabilities and metadata
capabilities Json @default("{}")
metadata Json @default("{}")
// Timestamps
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
@@map("instances")
}
// Link from a local workspace to a remote federation instance.
// At most one connection per (workspace, remote instance); removed
// together with the workspace (Cascade).
model FederationConnection {
id String @id @default(uuid()) @db.Uuid
workspaceId String @map("workspace_id") @db.Uuid
// Remote instance details
remoteInstanceId String @map("remote_instance_id")
remoteUrl String @map("remote_url")
remotePublicKey String @map("remote_public_key") @db.Text
remoteCapabilities Json @default("{}") @map("remote_capabilities")
// Connection status
status FederationConnectionStatus @default(PENDING)
// Metadata
metadata Json @default("{}")
// Timestamps
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
connectedAt DateTime? @map("connected_at") @db.Timestamptz
disconnectedAt DateTime? @map("disconnected_at") @db.Timestamptz
// Relations
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
messages FederationMessage[]
eventSubscriptions FederationEventSubscription[]
@@unique([workspaceId, remoteInstanceId])
@@index([workspaceId])
@@index([workspaceId, status])
@@index([remoteInstanceId])
@@map("federation_connections")
}
// Maps a local user to their identity on a remote instance, keyed by the
// remote OIDC subject. At most one mapping per (localUserId, remoteInstanceId);
// removed together with the local user (Cascade).
model FederatedIdentity {
id String @id @default(uuid()) @db.Uuid
localUserId String @map("local_user_id") @db.Uuid
remoteUserId String @map("remote_user_id")
remoteInstanceId String @map("remote_instance_id")
oidcSubject String @map("oidc_subject")
email String
metadata Json @default("{}")
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
user User @relation(fields: [localUserId], references: [id], onDelete: Cascade)
@@unique([localUserId, remoteInstanceId])
@@index([localUserId])
@@index([remoteInstanceId])
@@index([oidcSubject])
@@map("federated_identities")
}
// A signed message exchanged over a federation connection. `messageId` is
// globally unique for deduplication; `correlationId` pairs requests with
// responses. Cascade-deleted with either the connection or the workspace.
model FederationMessage {
id String @id @default(uuid()) @db.Uuid
workspaceId String @map("workspace_id") @db.Uuid
connectionId String @map("connection_id") @db.Uuid
// Message metadata
messageType FederationMessageType @map("message_type")
messageId String @unique @map("message_id") // UUID for deduplication
correlationId String? @map("correlation_id") // For request/response tracking
// Message content — fields are optional because they apply to different
// message types (query vs. command vs. event)
query String? @db.Text
commandType String? @map("command_type") @db.Text
eventType String? @map("event_type") @db.Text // For EVENT messages
payload Json? @default("{}")
response Json? @default("{}")
// Status tracking
status FederationMessageStatus @default(PENDING)
error String? @db.Text
// Security — signature is required on every message
signature String @db.Text
// Timestamps
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
deliveredAt DateTime? @map("delivered_at") @db.Timestamptz
// Relations
connection FederationConnection @relation(fields: [connectionId], references: [id], onDelete: Cascade)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@index([workspaceId])
@@index([connectionId])
@@index([messageId])
@@index([correlationId])
@@index([eventType])
@@map("federation_messages")
}
// Subscription of a workspace to a remote event type on one connection;
// unique per (workspace, connection, eventType). `isActive` allows
// soft-disabling a subscription without deleting the row.
model FederationEventSubscription {
id String @id @default(uuid()) @db.Uuid
workspaceId String @map("workspace_id") @db.Uuid
connectionId String @map("connection_id") @db.Uuid
// Event subscription details
eventType String @map("event_type")
metadata Json @default("{}")
isActive Boolean @default(true) @map("is_active")
createdAt DateTime @default(now()) @map("created_at") @db.Timestamptz
updatedAt DateTime @updatedAt @map("updated_at") @db.Timestamptz
// Relations
connection FederationConnection @relation(fields: [connectionId], references: [id], onDelete: Cascade)
workspace Workspace @relation(fields: [workspaceId], references: [id], onDelete: Cascade)
@@unique([workspaceId, connectionId, eventType])
@@index([workspaceId])
@@index([connectionId])
@@index([eventType])
@@index([workspaceId, isActive])
@@map("federation_event_subscriptions")
}

View File

@@ -340,7 +340,8 @@ pnpm prisma migrate deploy
\`\`\`
For setup instructions, see [[development-setup]].`,
summary: "Comprehensive documentation of the Mosaic Stack database schema and Prisma conventions",
summary:
"Comprehensive documentation of the Mosaic Stack database schema and Prisma conventions",
status: EntryStatus.PUBLISHED,
visibility: Visibility.WORKSPACE,
tags: ["architecture", "development"],
@@ -406,7 +407,7 @@ This is a draft document. See [[architecture-overview]] for current state.`,
// Add tags
for (const tagSlug of entryData.tags) {
const tag = tags.find(t => t.slug === tagSlug);
const tag = tags.find((t) => t.slug === tagSlug);
if (tag) {
await tx.knowledgeEntryTag.create({
data: {
@@ -427,7 +428,11 @@ This is a draft document. See [[architecture-overview]] for current state.`,
{ source: "welcome", target: "database-schema", text: "database-schema" },
{ source: "architecture-overview", target: "development-setup", text: "development-setup" },
{ source: "architecture-overview", target: "database-schema", text: "database-schema" },
{ source: "development-setup", target: "architecture-overview", text: "architecture-overview" },
{
source: "development-setup",
target: "architecture-overview",
text: "architecture-overview",
},
{ source: "development-setup", target: "database-schema", text: "database-schema" },
{ source: "database-schema", target: "architecture-overview", text: "architecture-overview" },
{ source: "database-schema", target: "development-setup", text: "development-setup" },

View File

@@ -152,10 +152,7 @@ describe("ActivityController", () => {
const result = await controller.findOne("activity-123", mockWorkspaceId);
expect(result).toEqual(mockActivity);
expect(mockActivityService.findOne).toHaveBeenCalledWith(
"activity-123",
"workspace-123"
);
expect(mockActivityService.findOne).toHaveBeenCalledWith("activity-123", "workspace-123");
});
it("should return null if activity not found", async () => {
@@ -213,11 +210,7 @@ describe("ActivityController", () => {
it("should return audit trail for a task using authenticated user's workspaceId", async () => {
mockActivityService.getAuditTrail.mockResolvedValue(mockAuditTrail);
const result = await controller.getAuditTrail(
EntityType.TASK,
"task-123",
mockWorkspaceId
);
const result = await controller.getAuditTrail(EntityType.TASK, "task-123", mockWorkspaceId);
expect(result).toEqual(mockAuditTrail);
expect(mockActivityService.getAuditTrail).toHaveBeenCalledWith(
@@ -248,11 +241,7 @@ describe("ActivityController", () => {
mockActivityService.getAuditTrail.mockResolvedValue(eventAuditTrail);
const result = await controller.getAuditTrail(
EntityType.EVENT,
"event-123",
mockWorkspaceId
);
const result = await controller.getAuditTrail(EntityType.EVENT, "event-123", mockWorkspaceId);
expect(result).toEqual(eventAuditTrail);
expect(mockActivityService.getAuditTrail).toHaveBeenCalledWith(
@@ -312,11 +301,7 @@ describe("ActivityController", () => {
it("should return empty array if workspaceId is missing (service handles gracefully)", async () => {
mockActivityService.getAuditTrail.mockResolvedValue([]);
const result = await controller.getAuditTrail(
EntityType.TASK,
"task-123",
undefined as any
);
const result = await controller.getAuditTrail(EntityType.TASK, "task-123", undefined as any);
expect(result).toEqual([]);
expect(mockActivityService.getAuditTrail).toHaveBeenCalledWith(

View File

@@ -1,6 +1,6 @@
import { Injectable, Logger } from "@nestjs/common";
import { PrismaService } from "../prisma/prisma.service";
import { ActivityAction, EntityType, Prisma } from "@prisma/client";
import { ActivityAction, EntityType, Prisma, ActivityLog } from "@prisma/client";
import type {
CreateActivityLogInput,
PaginatedActivityLogs,
@@ -20,7 +20,7 @@ export class ActivityService {
/**
* Create a new activity log entry
*/
async logActivity(input: CreateActivityLogInput) {
async logActivity(input: CreateActivityLogInput): Promise<ActivityLog> {
try {
return await this.prisma.activityLog.create({
data: input as unknown as Prisma.ActivityLogCreateInput,
@@ -167,7 +167,7 @@ export class ActivityService {
userId: string,
taskId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -186,7 +186,7 @@ export class ActivityService {
userId: string,
taskId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -205,7 +205,7 @@ export class ActivityService {
userId: string,
taskId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -224,7 +224,7 @@ export class ActivityService {
userId: string,
taskId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -238,7 +238,12 @@ export class ActivityService {
/**
* Log task assignment
*/
async logTaskAssigned(workspaceId: string, userId: string, taskId: string, assigneeId: string) {
async logTaskAssigned(
workspaceId: string,
userId: string,
taskId: string,
assigneeId: string
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -257,7 +262,7 @@ export class ActivityService {
userId: string,
eventId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -276,7 +281,7 @@ export class ActivityService {
userId: string,
eventId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -295,7 +300,7 @@ export class ActivityService {
userId: string,
eventId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -314,7 +319,7 @@ export class ActivityService {
userId: string,
projectId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -333,7 +338,7 @@ export class ActivityService {
userId: string,
projectId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -352,7 +357,7 @@ export class ActivityService {
userId: string,
projectId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -366,7 +371,11 @@ export class ActivityService {
/**
* Log workspace creation
*/
async logWorkspaceCreated(workspaceId: string, userId: string, details?: Prisma.JsonValue) {
async logWorkspaceCreated(
workspaceId: string,
userId: string,
details?: Prisma.JsonValue
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -380,7 +389,11 @@ export class ActivityService {
/**
* Log workspace update
*/
async logWorkspaceUpdated(workspaceId: string, userId: string, details?: Prisma.JsonValue) {
async logWorkspaceUpdated(
workspaceId: string,
userId: string,
details?: Prisma.JsonValue
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -399,7 +412,7 @@ export class ActivityService {
userId: string,
memberId: string,
role: string
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -413,7 +426,11 @@ export class ActivityService {
/**
* Log workspace member removed
*/
async logWorkspaceMemberRemoved(workspaceId: string, userId: string, memberId: string) {
async logWorkspaceMemberRemoved(
workspaceId: string,
userId: string,
memberId: string
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -427,7 +444,11 @@ export class ActivityService {
/**
* Log user profile update
*/
async logUserUpdated(workspaceId: string, userId: string, details?: Prisma.JsonValue) {
async logUserUpdated(
workspaceId: string,
userId: string,
details?: Prisma.JsonValue
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -446,7 +467,7 @@ export class ActivityService {
userId: string,
domainId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -465,7 +486,7 @@ export class ActivityService {
userId: string,
domainId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -484,7 +505,7 @@ export class ActivityService {
userId: string,
domainId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -503,7 +524,7 @@ export class ActivityService {
userId: string,
ideaId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -522,7 +543,7 @@ export class ActivityService {
userId: string,
ideaId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,
@@ -541,7 +562,7 @@ export class ActivityService {
userId: string,
ideaId: string,
details?: Prisma.JsonValue
) {
): Promise<ActivityLog> {
return this.logActivity({
workspaceId,
userId,

View File

@@ -25,9 +25,7 @@ describe("ActivityLoggingInterceptor", () => {
],
}).compile();
interceptor = module.get<ActivityLoggingInterceptor>(
ActivityLoggingInterceptor
);
interceptor = module.get<ActivityLoggingInterceptor>(ActivityLoggingInterceptor);
activityService = module.get<ActivityService>(ActivityService);
vi.clearAllMocks();
@@ -324,9 +322,7 @@ describe("ActivityLoggingInterceptor", () => {
const context = createMockExecutionContext("POST", {}, {}, user);
const next = createMockCallHandler({ id: "test-123" });
mockActivityService.logActivity.mockRejectedValue(
new Error("Logging failed")
);
mockActivityService.logActivity.mockRejectedValue(new Error("Logging failed"));
await new Promise<void>((resolve) => {
interceptor.intercept(context, next).subscribe(() => {
@@ -727,9 +723,7 @@ describe("ActivityLoggingInterceptor", () => {
expect(logCall.details.data.settings.apiKey).toBe("[REDACTED]");
expect(logCall.details.data.settings.public).toBe("visible_data");
expect(logCall.details.data.settings.auth.token).toBe("[REDACTED]");
expect(logCall.details.data.settings.auth.refreshToken).toBe(
"[REDACTED]"
);
expect(logCall.details.data.settings.auth.refreshToken).toBe("[REDACTED]");
resolve();
});
});

View File

@@ -86,11 +86,7 @@ describe("AgentTasksController", () => {
const result = await controller.create(createDto, workspaceId, user);
expect(mockAgentTasksService.create).toHaveBeenCalledWith(
workspaceId,
user.id,
createDto
);
expect(mockAgentTasksService.create).toHaveBeenCalledWith(workspaceId, user.id, createDto);
expect(result).toEqual(mockTask);
});
});
@@ -183,10 +179,7 @@ describe("AgentTasksController", () => {
const result = await controller.findOne(id, workspaceId);
expect(mockAgentTasksService.findOne).toHaveBeenCalledWith(
id,
workspaceId
);
expect(mockAgentTasksService.findOne).toHaveBeenCalledWith(id, workspaceId);
expect(result).toEqual(mockTask);
});
});
@@ -220,11 +213,7 @@ describe("AgentTasksController", () => {
const result = await controller.update(id, updateDto, workspaceId);
expect(mockAgentTasksService.update).toHaveBeenCalledWith(
id,
workspaceId,
updateDto
);
expect(mockAgentTasksService.update).toHaveBeenCalledWith(id, workspaceId, updateDto);
expect(result).toEqual(mockTask);
});
});
@@ -240,10 +229,7 @@ describe("AgentTasksController", () => {
const result = await controller.remove(id, workspaceId);
expect(mockAgentTasksService.remove).toHaveBeenCalledWith(
id,
workspaceId
);
expect(mockAgentTasksService.remove).toHaveBeenCalledWith(id, workspaceId);
expect(result).toEqual(mockResponse);
});
});

View File

@@ -242,9 +242,7 @@ describe("AgentTasksService", () => {
mockPrismaService.agentTask.findUnique.mockResolvedValue(null);
await expect(service.findOne(id, workspaceId)).rejects.toThrow(
NotFoundException
);
await expect(service.findOne(id, workspaceId)).rejects.toThrow(NotFoundException);
});
});
@@ -316,9 +314,7 @@ describe("AgentTasksService", () => {
mockPrismaService.agentTask.findUnique.mockResolvedValue(null);
await expect(
service.update(id, workspaceId, updateDto)
).rejects.toThrow(NotFoundException);
await expect(service.update(id, workspaceId, updateDto)).rejects.toThrow(NotFoundException);
});
});
@@ -345,9 +341,7 @@ describe("AgentTasksService", () => {
mockPrismaService.agentTask.findUnique.mockResolvedValue(null);
await expect(service.remove(id, workspaceId)).rejects.toThrow(
NotFoundException
);
await expect(service.remove(id, workspaceId)).rejects.toThrow(NotFoundException);
});
});
});

View File

@@ -1,5 +1,8 @@
import { Module } from "@nestjs/common";
import { APP_INTERCEPTOR } from "@nestjs/core";
import { APP_INTERCEPTOR, APP_GUARD } from "@nestjs/core";
import { ThrottlerModule } from "@nestjs/throttler";
import { BullModule } from "@nestjs/bullmq";
import { ThrottlerValkeyStorageService, ThrottlerApiKeyGuard } from "./common/throttler";
import { AppController } from "./app.controller";
import { AppService } from "./app.service";
import { PrismaModule } from "./prisma/prisma.module";
@@ -21,14 +24,47 @@ import { BrainModule } from "./brain/brain.module";
import { CronModule } from "./cron/cron.module";
import { AgentTasksModule } from "./agent-tasks/agent-tasks.module";
import { ValkeyModule } from "./valkey/valkey.module";
import { BullMqModule } from "./bullmq/bullmq.module";
import { StitcherModule } from "./stitcher/stitcher.module";
import { TelemetryModule, TelemetryInterceptor } from "./telemetry";
import { RunnerJobsModule } from "./runner-jobs/runner-jobs.module";
import { JobEventsModule } from "./job-events/job-events.module";
import { JobStepsModule } from "./job-steps/job-steps.module";
import { CoordinatorIntegrationModule } from "./coordinator-integration/coordinator-integration.module";
import { FederationModule } from "./federation/federation.module";
@Module({
imports: [
// Rate limiting configuration
ThrottlerModule.forRootAsync({
useFactory: () => {
const ttl = parseInt(process.env.RATE_LIMIT_TTL ?? "60", 10) * 1000; // Convert to milliseconds
const limit = parseInt(process.env.RATE_LIMIT_GLOBAL_LIMIT ?? "100", 10);
return {
throttlers: [
{
ttl,
limit,
},
],
storage: new ThrottlerValkeyStorageService(),
};
},
}),
// BullMQ job queue configuration
BullModule.forRoot({
connection: {
host: process.env.VALKEY_HOST ?? "localhost",
port: parseInt(process.env.VALKEY_PORT ?? "6379", 10),
},
}),
TelemetryModule,
PrismaModule,
DatabaseModule,
ValkeyModule,
BullMqModule,
StitcherModule,
AuthModule,
ActivityModule,
TasksModule,
@@ -45,6 +81,11 @@ import { TelemetryModule, TelemetryInterceptor } from "./telemetry";
BrainModule,
CronModule,
AgentTasksModule,
RunnerJobsModule,
JobEventsModule,
JobStepsModule,
CoordinatorIntegrationModule,
FederationModule,
],
controllers: [AppController],
providers: [
@@ -53,6 +94,10 @@ import { TelemetryModule, TelemetryInterceptor } from "./telemetry";
provide: APP_INTERCEPTOR,
useClass: TelemetryInterceptor,
},
{
provide: APP_GUARD,
useClass: ThrottlerApiKeyGuard,
},
],
})
export class AppModule {}

View File

@@ -17,14 +17,19 @@ export class AuthService {
/**
* Get BetterAuth instance
*/
getAuth() {
getAuth(): Auth {
return this.auth;
}
/**
* Get user by ID
*/
async getUserById(userId: string) {
async getUserById(userId: string): Promise<{
id: string;
email: string;
name: string;
authProviderId: string | null;
} | null> {
return this.prisma.user.findUnique({
where: { id: userId },
select: {
@@ -39,7 +44,12 @@ export class AuthService {
/**
* Get user by email
*/
async getUserByEmail(email: string) {
async getUserByEmail(email: string): Promise<{
id: string;
email: string;
name: string;
authProviderId: string | null;
} | null> {
return this.prisma.user.findUnique({
where: { email },
select: {

View File

@@ -0,0 +1,46 @@
/**
* Admin Guard
*
* Restricts access to system-level admin operations.
* Currently checks if user owns at least one workspace (indicating admin status).
* Future: Replace with proper role-based access control (RBAC).
*/
import {
Injectable,
CanActivate,
ExecutionContext,
ForbiddenException,
Logger,
} from "@nestjs/common";
import { PrismaService } from "../../prisma/prisma.service";
import type { AuthenticatedRequest } from "../../common/types/user.types";
@Injectable()
export class AdminGuard implements CanActivate {
  private readonly logger = new Logger(AdminGuard.name);

  constructor(private readonly prisma: PrismaService) {}

  /**
   * Grants access only to users that own at least one workspace — the
   * current stand-in for a system-administrator role.
   *
   * @throws ForbiddenException when the request is unauthenticated or the
   *         user owns no workspace
   */
  async canActivate(context: ExecutionContext): Promise<boolean> {
    const { user } = context.switchToHttp().getRequest<AuthenticatedRequest>();

    if (!user) {
      throw new ForbiddenException("User not authenticated");
    }

    // TODO: Replace with proper RBAC system admin role check
    const ownsWorkspace =
      (await this.prisma.workspace.count({ where: { ownerId: user.id } })) > 0;

    if (!ownsWorkspace) {
      this.logger.warn(`Non-admin user ${user.id} attempted admin operation`);
      throw new ForbiddenException("This operation requires system administrator privileges");
    }

    return true;
  }
}

View File

@@ -0,0 +1,96 @@
import { Test, TestingModule } from "@nestjs/testing";
import { BridgeModule } from "./bridge.module";
import { DiscordService } from "./discord/discord.service";
import { StitcherService } from "../stitcher/stitcher.service";
import { PrismaService } from "../prisma/prisma.service";
import { BullMqService } from "../bullmq/bullmq.service";
import { describe, it, expect, beforeEach, vi } from "vitest";
// Mock discord.js
// Handlers registered via once("ready"); login() invokes them so tests see
// a client that becomes ready immediately after connecting.
const mockReadyCallbacks: Array<() => void> = [];
// Shared spy object that backs the MockClient stub installed via vi.mock.
const mockClient = {
login: vi.fn().mockImplementation(async () => {
mockReadyCallbacks.forEach((cb) => cb());
return Promise.resolve();
}),
destroy: vi.fn().mockResolvedValue(undefined),
on: vi.fn(),
once: vi.fn().mockImplementation((event: string, callback: () => void) => {
// Only "ready" handlers are captured; other events are ignored here.
if (event === "ready") {
mockReadyCallbacks.push(callback);
}
}),
user: { tag: "TestBot#1234" },
channels: {
fetch: vi.fn(),
},
guilds: {
fetch: vi.fn(),
},
};
// Replace discord.js with a lightweight stub: MockClient proxies every
// member to the shared mockClient spies, and Events/GatewayIntentBits
// provide the constant values the service reads from the real library.
vi.mock("discord.js", () => {
return {
Client: class MockClient {
login = mockClient.login;
destroy = mockClient.destroy;
on = mockClient.on;
once = mockClient.once;
user = mockClient.user;
channels = mockClient.channels;
guilds = mockClient.guilds;
},
Events: {
ClientReady: "ready",
MessageCreate: "messageCreate",
Error: "error",
},
GatewayIntentBits: {
Guilds: 1 << 0,
GuildMessages: 1 << 9,
MessageContent: 1 << 15,
},
};
});
// Smoke tests: the module compiles and exposes its providers when Discord
// configuration is present and DB/queue services are stubbed out.
describe("BridgeModule", () => {
let module: TestingModule;
beforeEach(async () => {
// Set environment variables (read during module/service construction)
process.env.DISCORD_BOT_TOKEN = "test-token";
process.env.DISCORD_GUILD_ID = "test-guild-id";
process.env.DISCORD_CONTROL_CHANNEL_ID = "test-channel-id";
// Clear ready callbacks so handlers don't leak between tests
mockReadyCallbacks.length = 0;
// PrismaService and BullMqService are replaced with empty stubs —
// these tests only exercise wiring, not persistence or queues.
module = await Test.createTestingModule({
imports: [BridgeModule],
})
.overrideProvider(PrismaService)
.useValue({})
.overrideProvider(BullMqService)
.useValue({})
.compile();
// Clear all mocks
vi.clearAllMocks();
});
it("should be defined", () => {
expect(module).toBeDefined();
});
it("should provide DiscordService", () => {
const discordService = module.get<DiscordService>(DiscordService);
expect(discordService).toBeDefined();
expect(discordService).toBeInstanceOf(DiscordService);
});
it("should provide StitcherService", () => {
const stitcherService = module.get<StitcherService>(StitcherService);
expect(stitcherService).toBeDefined();
expect(stitcherService).toBeInstanceOf(StitcherService);
});
});

View File

@@ -0,0 +1,16 @@
import { Module } from "@nestjs/common";
import { DiscordService } from "./discord/discord.service";
import { StitcherModule } from "../stitcher/stitcher.module";
/**
 * Bridge Module - Chat platform integrations
 *
 * Provides integration with chat platforms (Discord, Slack, Matrix, etc.)
 * for controlling Mosaic Stack via chat commands.
 *
 * Imports StitcherModule to make its services available to DiscordService,
 * and exports DiscordService for consumption by other modules.
 */
@Module({
imports: [StitcherModule],
providers: [DiscordService],
exports: [DiscordService],
})
export class BridgeModule {}

View File

@@ -0,0 +1,656 @@
import { Test, TestingModule } from "@nestjs/testing";
import { DiscordService } from "./discord.service";
import { StitcherService } from "../../stitcher/stitcher.service";
import { Client, Events, GatewayIntentBits, Message } from "discord.js";
import { vi, describe, it, expect, beforeEach } from "vitest";
import type { ChatMessage, ChatCommand } from "../interfaces";
// Mock discord.js Client
// Handlers registered via once("ready"); login() fires them so connect()
// resolves with an already-ready client.
const mockReadyCallbacks: Array<() => void> = [];
// Handlers registered via on("error"); captured for error-path tests.
const mockErrorCallbacks: Array<(error: Error) => void> = [];
// Shared spy object that backs the MockClient stub installed via vi.mock.
const mockClient = {
login: vi.fn().mockImplementation(async () => {
// Trigger ready callback when login is called
mockReadyCallbacks.forEach((cb) => cb());
return Promise.resolve();
}),
destroy: vi.fn().mockResolvedValue(undefined),
on: vi.fn().mockImplementation((event: string, callback: (error: Error) => void) => {
if (event === "error") {
mockErrorCallbacks.push(callback);
}
}),
once: vi.fn().mockImplementation((event: string, callback: () => void) => {
if (event === "ready") {
mockReadyCallbacks.push(callback);
}
}),
user: { tag: "TestBot#1234" },
channels: {
fetch: vi.fn(),
},
guilds: {
fetch: vi.fn(),
},
};
// Replace discord.js with a stub: MockClient proxies every member to the
// shared mockClient spies; Events/GatewayIntentBits supply the constant
// values the service reads from the real library.
vi.mock("discord.js", () => {
return {
Client: class MockClient {
login = mockClient.login;
destroy = mockClient.destroy;
on = mockClient.on;
once = mockClient.once;
user = mockClient.user;
channels = mockClient.channels;
guilds = mockClient.guilds;
},
Events: {
ClientReady: "ready",
MessageCreate: "messageCreate",
Error: "error",
},
GatewayIntentBits: {
Guilds: 1 << 0,
GuildMessages: 1 << 9,
MessageContent: 1 << 15,
},
};
});
describe("DiscordService", () => {
let service: DiscordService;
let stitcherService: StitcherService;
const mockStitcherService = {
dispatchJob: vi.fn().mockResolvedValue({
jobId: "test-job-id",
queueName: "main",
status: "PENDING",
}),
trackJobEvent: vi.fn().mockResolvedValue(undefined),
};
beforeEach(async () => {
// Set environment variables for testing
process.env.DISCORD_BOT_TOKEN = "test-token";
process.env.DISCORD_GUILD_ID = "test-guild-id";
process.env.DISCORD_CONTROL_CHANNEL_ID = "test-channel-id";
process.env.DISCORD_WORKSPACE_ID = "test-workspace-id";
// Clear callbacks
mockReadyCallbacks.length = 0;
mockErrorCallbacks.length = 0;
const module: TestingModule = await Test.createTestingModule({
providers: [
DiscordService,
{
provide: StitcherService,
useValue: mockStitcherService,
},
],
}).compile();
service = module.get<DiscordService>(DiscordService);
stitcherService = module.get<StitcherService>(StitcherService);
// Clear all mocks
vi.clearAllMocks();
});
describe("Connection Management", () => {
it("should connect to Discord", async () => {
await service.connect();
expect(mockClient.login).toHaveBeenCalledWith("test-token");
});
it("should disconnect from Discord", async () => {
await service.connect();
await service.disconnect();
expect(mockClient.destroy).toHaveBeenCalled();
});
it("should check connection status", async () => {
expect(service.isConnected()).toBe(false);
await service.connect();
expect(service.isConnected()).toBe(true);
await service.disconnect();
expect(service.isConnected()).toBe(false);
});
});
describe("Message Handling", () => {
it("should send a message to a channel", async () => {
const mockChannel = {
send: vi.fn().mockResolvedValue({}),
isTextBased: () => true,
};
(mockClient.channels.fetch as any).mockResolvedValue(mockChannel);
await service.connect();
await service.sendMessage("test-channel-id", "Hello, Discord!");
expect(mockClient.channels.fetch).toHaveBeenCalledWith("test-channel-id");
expect(mockChannel.send).toHaveBeenCalledWith("Hello, Discord!");
});
it("should throw error if channel not found", async () => {
(mockClient.channels.fetch as any).mockResolvedValue(null);
await service.connect();
await expect(service.sendMessage("invalid-channel", "Test")).rejects.toThrow(
"Channel not found"
);
});
});
describe("Thread Management", () => {
it("should create a thread for job updates", async () => {
const mockChannel = {
isTextBased: () => true,
threads: {
create: vi.fn().mockResolvedValue({
id: "thread-123",
send: vi.fn(),
}),
},
};
(mockClient.channels.fetch as any).mockResolvedValue(mockChannel);
await service.connect();
const threadId = await service.createThread({
channelId: "test-channel-id",
name: "Job #42",
message: "Starting job...",
});
expect(threadId).toBe("thread-123");
expect(mockChannel.threads.create).toHaveBeenCalledWith({
name: "Job #42",
reason: "Job updates thread",
});
});
it("should send a message to a thread", async () => {
const mockThread = {
send: vi.fn().mockResolvedValue({}),
isThread: () => true,
};
(mockClient.channels.fetch as any).mockResolvedValue(mockThread);
await service.connect();
await service.sendThreadMessage({
threadId: "thread-123",
content: "Step completed",
});
expect(mockThread.send).toHaveBeenCalledWith("Step completed");
});
});
describe("Command Parsing", () => {
  /** Builds a ChatMessage fixture; only id and content vary across cases. */
  const buildMessage = (id: string, content: string): ChatMessage => ({
    id,
    channelId: "channel-1",
    authorId: "user-1",
    authorName: "TestUser",
    content,
    timestamp: new Date(),
  });

  it("should parse @mosaic fix command", () => {
    const message = buildMessage("msg-1", "@mosaic fix 42");
    expect(service.parseCommand(message)).toEqual({
      command: "fix",
      args: ["42"],
      message,
    });
  });

  it("should parse @mosaic status command", () => {
    const message = buildMessage("msg-2", "@mosaic status job-123");
    expect(service.parseCommand(message)).toEqual({
      command: "status",
      args: ["job-123"],
      message,
    });
  });

  it("should parse @mosaic cancel command", () => {
    const message = buildMessage("msg-3", "@mosaic cancel job-456");
    expect(service.parseCommand(message)).toEqual({
      command: "cancel",
      args: ["job-456"],
      message,
    });
  });

  it("should parse @mosaic verbose command", () => {
    const message = buildMessage("msg-4", "@mosaic verbose job-789");
    expect(service.parseCommand(message)).toEqual({
      command: "verbose",
      args: ["job-789"],
      message,
    });
  });

  it("should parse @mosaic quiet command", () => {
    const message = buildMessage("msg-5", "@mosaic quiet");
    expect(service.parseCommand(message)).toEqual({
      command: "quiet",
      args: [],
      message,
    });
  });

  it("should parse @mosaic help command", () => {
    const message = buildMessage("msg-6", "@mosaic help");
    expect(service.parseCommand(message)).toEqual({
      command: "help",
      args: [],
      message,
    });
  });

  it("should return null for non-command messages", () => {
    const message = buildMessage("msg-7", "Just a regular message");
    expect(service.parseCommand(message)).toBeNull();
  });

  it("should return null for messages without @mosaic mention", () => {
    const message = buildMessage("msg-8", "fix 42");
    expect(service.parseCommand(message)).toBeNull();
  });

  it("should handle commands with multiple arguments", () => {
    const message = buildMessage("msg-9", "@mosaic fix 42 high-priority");
    expect(service.parseCommand(message)).toEqual({
      command: "fix",
      args: ["42", "high-priority"],
      message,
    });
  });
});
describe("Command Execution", () => {
// The fix command must create a job thread and forward a fully-populated
// dispatch request to the stitcher.
it("should forward fix command to stitcher", async () => {
const message: ChatMessage = {
id: "msg-1",
channelId: "test-channel-id",
authorId: "user-1",
authorName: "TestUser",
content: "@mosaic fix 42",
timestamp: new Date(),
};
const mockThread = {
id: "thread-123",
send: vi.fn(),
isThread: () => true,
};
const mockChannel = {
isTextBased: () => true,
threads: {
create: vi.fn().mockResolvedValue(mockThread),
},
};
// Mock channels.fetch to return channel first, then thread
// (createThread fetches the parent channel, then sendThreadMessage
// fetches the thread — the mockResolvedValueOnce order matters).
(mockClient.channels.fetch as any)
.mockResolvedValueOnce(mockChannel)
.mockResolvedValueOnce(mockThread);
await service.connect();
await service.handleCommand({
command: "fix",
args: ["42"],
message,
});
expect(stitcherService.dispatchJob).toHaveBeenCalledWith({
workspaceId: "test-workspace-id",
type: "code-task",
priority: 10,
metadata: {
issueNumber: 42,
command: "fix",
channelId: "test-channel-id",
threadId: "thread-123",
authorId: "user-1",
authorName: "TestUser",
},
});
});
// The help command replies in the originating channel, not a thread.
it("should respond with help message", async () => {
const message: ChatMessage = {
id: "msg-1",
channelId: "test-channel-id",
authorId: "user-1",
authorName: "TestUser",
content: "@mosaic help",
timestamp: new Date(),
};
const mockChannel = {
send: vi.fn(),
isTextBased: () => true,
};
(mockClient.channels.fetch as any).mockResolvedValue(mockChannel);
await service.connect();
await service.handleCommand({
command: "help",
args: [],
message,
});
expect(mockChannel.send).toHaveBeenCalledWith(expect.stringContaining("Available commands:"));
});
});
describe("Configuration", () => {
// NOTE(review): these tests mutate process.env and restore values at the
// end of each test, so they rely on running sequentially within this file.
it("should throw error if DISCORD_BOT_TOKEN is not set", async () => {
delete process.env.DISCORD_BOT_TOKEN;
const module: TestingModule = await Test.createTestingModule({
providers: [
DiscordService,
{
provide: StitcherService,
useValue: mockStitcherService,
},
],
}).compile();
const newService = module.get<DiscordService>(DiscordService);
await expect(newService.connect()).rejects.toThrow("DISCORD_BOT_TOKEN is required");
// Restore for other tests
process.env.DISCORD_BOT_TOKEN = "test-token";
});
it("should throw error if DISCORD_WORKSPACE_ID is not set", async () => {
delete process.env.DISCORD_WORKSPACE_ID;
const module: TestingModule = await Test.createTestingModule({
providers: [
DiscordService,
{
provide: StitcherService,
useValue: mockStitcherService,
},
],
}).compile();
const newService = module.get<DiscordService>(DiscordService);
await expect(newService.connect()).rejects.toThrow("DISCORD_WORKSPACE_ID is required");
// Restore for other tests
process.env.DISCORD_WORKSPACE_ID = "test-workspace-id";
});
// The workspace id read from the environment at construction time must be
// echoed into every dispatched job.
it("should use configured workspace ID from environment", async () => {
const testWorkspaceId = "configured-workspace-123";
process.env.DISCORD_WORKSPACE_ID = testWorkspaceId;
const module: TestingModule = await Test.createTestingModule({
providers: [
DiscordService,
{
provide: StitcherService,
useValue: mockStitcherService,
},
],
}).compile();
const newService = module.get<DiscordService>(DiscordService);
const message: ChatMessage = {
id: "msg-1",
channelId: "test-channel-id",
authorId: "user-1",
authorName: "TestUser",
content: "@mosaic fix 42",
timestamp: new Date(),
};
const mockThread = {
id: "thread-123",
send: vi.fn(),
isThread: () => true,
};
const mockChannel = {
isTextBased: () => true,
threads: {
create: vi.fn().mockResolvedValue(mockThread),
},
};
// Channel is fetched first (createThread), then the thread itself.
(mockClient.channels.fetch as any)
.mockResolvedValueOnce(mockChannel)
.mockResolvedValueOnce(mockThread);
await newService.connect();
await newService.handleCommand({
command: "fix",
args: ["42"],
message,
});
expect(mockStitcherService.dispatchJob).toHaveBeenCalledWith(
expect.objectContaining({
workspaceId: testWorkspaceId,
})
);
// Restore for other tests
process.env.DISCORD_WORKSPACE_ID = "test-workspace-id";
});
});
describe("Error Logging Security", () => {
// These tests drive the client's Events.Error handler directly via the
// callbacks captured in mockErrorCallbacks and inspect what the Nest
// logger received, verifying sanitizeForLogging redaction end to end.
it("should sanitize sensitive data in error logs", () => {
const loggerErrorSpy = vi.spyOn((service as any).logger, "error");
// Simulate an error with sensitive data
const errorWithSecrets = new Error("Connection failed");
(errorWithSecrets as any).config = {
headers: {
Authorization: "Bearer secret_token_12345",
},
};
(errorWithSecrets as any).token =
"MTk4NjIyNDgzNDcxOTI1MjQ4.Cl2FMQ.ZnCjm1XVW7vRze4b7Cq4se7kKWs";
// Trigger error event handler
expect(mockErrorCallbacks.length).toBeGreaterThan(0);
mockErrorCallbacks[0]?.(errorWithSecrets);
// Verify error was logged
expect(loggerErrorSpy).toHaveBeenCalled();
// Get the logged error (second positional argument to logger.error)
const loggedArgs = loggerErrorSpy.mock.calls[0];
const loggedError = loggedArgs[1];
// Verify sensitive data was redacted
expect(loggedError.config.headers.Authorization).toBe("[REDACTED]");
expect(loggedError.token).toBe("[REDACTED]");
expect(loggedError.message).toBe("Connection failed");
expect(loggedError.name).toBe("Error");
});
it("should not leak bot token in error logs", () => {
const loggerErrorSpy = vi.spyOn((service as any).logger, "error");
// Simulate an error with bot token in message
const errorWithToken = new Error(
"Discord authentication failed with token MTk4NjIyNDgzNDcxOTI1MjQ4.Cl2FMQ.ZnCjm1XVW7vRze4b7Cq4se7kKWs"
);
// Trigger error event handler
expect(mockErrorCallbacks.length).toBeGreaterThan(0);
mockErrorCallbacks[0]?.(errorWithToken);
// Verify error was logged
expect(loggerErrorSpy).toHaveBeenCalled();
// Get the logged error
const loggedArgs = loggerErrorSpy.mock.calls[0];
const loggedError = loggedArgs[1];
// Verify token was redacted from message
expect(loggedError.message).not.toContain(
"MTk4NjIyNDgzNDcxOTI1MjQ4.Cl2FMQ.ZnCjm1XVW7vRze4b7Cq4se7kKWs"
);
expect(loggedError.message).toContain("[REDACTED]");
});
it("should sanitize API keys in error logs", () => {
const loggerErrorSpy = vi.spyOn((service as any).logger, "error");
// Simulate an error with API key
const errorWithApiKey = new Error("Request failed");
(errorWithApiKey as any).apiKey = "sk_live_1234567890abcdef";
(errorWithApiKey as any).response = {
data: {
error: "Invalid API key: sk_live_1234567890abcdef",
},
};
// Trigger error event handler
expect(mockErrorCallbacks.length).toBeGreaterThan(0);
mockErrorCallbacks[0]?.(errorWithApiKey);
// Verify error was logged
expect(loggerErrorSpy).toHaveBeenCalled();
// Get the logged error
const loggedArgs = loggerErrorSpy.mock.calls[0];
const loggedError = loggedArgs[1];
// Verify API key was redacted
expect(loggedError.apiKey).toBe("[REDACTED]");
expect(loggedError.response.data.error).not.toContain("sk_live_1234567890abcdef");
expect(loggedError.response.data.error).toContain("[REDACTED]");
});
// Sanitization must not strip diagnostic context that is safe to keep.
it("should preserve non-sensitive error information", () => {
const loggerErrorSpy = vi.spyOn((service as any).logger, "error");
// Simulate a normal error without secrets
const normalError = new Error("Connection timeout");
(normalError as any).code = "ETIMEDOUT";
(normalError as any).statusCode = 408;
// Trigger error event handler
expect(mockErrorCallbacks.length).toBeGreaterThan(0);
mockErrorCallbacks[0]?.(normalError);
// Verify error was logged
expect(loggerErrorSpy).toHaveBeenCalled();
// Get the logged error
const loggedArgs = loggerErrorSpy.mock.calls[0];
const loggedError = loggedArgs[1];
// Verify non-sensitive data was preserved
expect(loggedError.message).toBe("Connection timeout");
expect(loggedError.name).toBe("Error");
expect(loggedError.code).toBe("ETIMEDOUT");
expect(loggedError.statusCode).toBe(408);
});
});
});

View File

@@ -0,0 +1,396 @@
import { Injectable, Logger } from "@nestjs/common";
import { Client, Events, GatewayIntentBits, TextChannel, ThreadChannel } from "discord.js";
import { StitcherService } from "../../stitcher/stitcher.service";
import { sanitizeForLogging } from "../../common/utils";
import type {
IChatProvider,
ChatMessage,
ChatCommand,
ThreadCreateOptions,
ThreadMessageOptions,
} from "../interfaces";
/**
* Discord Service - Discord chat platform integration
*
* Responsibilities:
* - Connect to Discord via bot token
* - Listen for commands in designated channels
* - Forward commands to stitcher
* - Receive status updates from herald
* - Post updates to threads
*/
@Injectable()
export class DiscordService implements IChatProvider {
  private readonly logger = new Logger(DiscordService.name);
  private client: Client;
  private connected = false;
  private readonly botToken: string;
  private readonly controlChannelId: string;
  private readonly workspaceId: string;

  constructor(private readonly stitcherService: StitcherService) {
    this.botToken = process.env.DISCORD_BOT_TOKEN ?? "";
    this.controlChannelId = process.env.DISCORD_CONTROL_CHANNEL_ID ?? "";
    this.workspaceId = process.env.DISCORD_WORKSPACE_ID ?? "";
    // Initialize Discord client with the intents needed to read guild
    // messages and their content in the control channel.
    this.client = new Client({
      intents: [
        GatewayIntentBits.Guilds,
        GatewayIntentBits.GuildMessages,
        GatewayIntentBits.MessageContent,
      ],
    });
    this.setupEventHandlers();
  }

  /**
   * Setup event handlers for the Discord client (ready, message, error).
   */
  private setupEventHandlers(): void {
    this.client.once(Events.ClientReady, () => {
      this.connected = true;
      const userTag = this.client.user?.tag ?? "Unknown";
      this.logger.log(`Discord bot connected as ${userTag}`);
    });
    this.client.on(Events.MessageCreate, (message) => {
      // Ignore bot messages (including our own) to avoid feedback loops.
      if (message.author.bot) return;
      // Only react to messages in the configured control channel.
      if (message.channelId !== this.controlChannelId) return;
      // Parse message into the platform-agnostic ChatMessage format.
      const chatMessage: ChatMessage = {
        id: message.id,
        channelId: message.channelId,
        authorId: message.author.id,
        authorName: message.author.username,
        content: message.content,
        timestamp: message.createdAt,
        ...(message.channel.isThread() && { threadId: message.channelId }),
      };
      // Parse and dispatch the command. Fire-and-forget, but a bare
      // `void` here would drop rejections silently (or trigger an
      // unhandled-rejection), so failures are caught and logged.
      const command = this.parseCommand(chatMessage);
      if (command) {
        this.handleCommand(command).catch((error: Error) => {
          this.logger.error("Command handling failed:", sanitizeForLogging(error));
        });
      }
    });
    this.client.on(Events.Error, (error: Error) => {
      // Sanitize error before logging to prevent secret exposure
      const sanitizedError = sanitizeForLogging(error);
      this.logger.error("Discord client error:", sanitizedError);
    });
  }

  /**
   * Connect to Discord.
   *
   * @throws Error when DISCORD_BOT_TOKEN or DISCORD_WORKSPACE_ID is unset.
   */
  async connect(): Promise<void> {
    if (!this.botToken) {
      throw new Error("DISCORD_BOT_TOKEN is required");
    }
    if (!this.workspaceId) {
      throw new Error("DISCORD_WORKSPACE_ID is required");
    }
    if (!this.controlChannelId) {
      // Not fatal: the bot can still connect, but the message handler
      // will never match any channel, so no commands will be processed.
      this.logger.warn(
        "DISCORD_CONTROL_CHANNEL_ID is not set; incoming commands will be ignored"
      );
    }
    this.logger.log("Connecting to Discord...");
    await this.client.login(this.botToken);
  }

  /**
   * Disconnect from Discord and tear down the client.
   */
  async disconnect(): Promise<void> {
    this.logger.log("Disconnecting from Discord...");
    this.connected = false;
    await this.client.destroy();
  }

  /**
   * Check if the provider is connected (set by the ClientReady event).
   */
  isConnected(): boolean {
    return this.connected;
  }

  /**
   * Send a message to a channel or thread.
   *
   * @throws Error when the channel cannot be fetched or is not text-based.
   */
  async sendMessage(channelId: string, content: string): Promise<void> {
    const channel = await this.client.channels.fetch(channelId);
    if (!channel) {
      throw new Error("Channel not found");
    }
    if (channel.isTextBased()) {
      await (channel as TextChannel).send(content);
    } else {
      throw new Error("Channel is not text-based");
    }
  }

  /**
   * Create a thread for job updates and post the initial message.
   *
   * @returns The id of the newly created thread.
   * @throws Error when the channel cannot be fetched or does not support threads.
   */
  async createThread(options: ThreadCreateOptions): Promise<string> {
    const { channelId, name, message } = options;
    const channel = await this.client.channels.fetch(channelId);
    if (!channel) {
      throw new Error("Channel not found");
    }
    if (!channel.isTextBased()) {
      throw new Error("Channel does not support threads");
    }
    const thread = await (channel as TextChannel).threads.create({
      name,
      reason: "Job updates thread",
    });
    // Send initial message to thread
    await thread.send(message);
    return thread.id;
  }

  /**
   * Send a message to an existing thread.
   *
   * @throws Error when the thread cannot be fetched or the channel is not a thread.
   */
  async sendThreadMessage(options: ThreadMessageOptions): Promise<void> {
    const { threadId, content } = options;
    const thread = await this.client.channels.fetch(threadId);
    if (!thread) {
      throw new Error("Thread not found");
    }
    if (thread.isThread()) {
      await (thread as ThreadChannel).send(content);
    } else {
      throw new Error("Channel is not a thread");
    }
  }

  /**
   * Parse a command from a message.
   *
   * Accepts "@mosaic <command> [args...]" anywhere in the message text
   * (case-insensitive). Returns null for non-commands or unknown commands.
   */
  parseCommand(message: ChatMessage): ChatCommand | null {
    const { content } = message;
    // Check if message mentions @mosaic
    if (!content.toLowerCase().includes("@mosaic")) {
      return null;
    }
    // Extract command and arguments relative to the @mosaic token.
    const parts = content.trim().split(/\s+/);
    const mosaicIndex = parts.findIndex((part) => part.toLowerCase().includes("@mosaic"));
    if (mosaicIndex === -1 || mosaicIndex === parts.length - 1) {
      return null;
    }
    const commandPart = parts[mosaicIndex + 1];
    if (!commandPart) {
      return null;
    }
    const command = commandPart.toLowerCase();
    const args = parts.slice(mosaicIndex + 2);
    // Valid commands
    const validCommands = ["fix", "status", "cancel", "verbose", "quiet", "help"];
    if (!validCommands.includes(command)) {
      return null;
    }
    return {
      command,
      args,
      message,
    };
  }

  /**
   * Handle a parsed command by dispatching to the matching handler.
   */
  async handleCommand(command: ChatCommand): Promise<void> {
    const { command: cmd, args, message } = command;
    this.logger.log(
      `Handling command: ${cmd} with args: ${args.join(", ")} from ${message.authorName}`
    );
    switch (cmd) {
      case "fix":
        await this.handleFixCommand(args, message);
        break;
      case "status":
        await this.handleStatusCommand(args, message);
        break;
      case "cancel":
        await this.handleCancelCommand(args, message);
        break;
      case "verbose":
        await this.handleVerboseCommand(args, message);
        break;
      case "quiet":
        await this.handleQuietCommand(args, message);
        break;
      case "help":
        await this.handleHelpCommand(args, message);
        break;
      default:
        // parseCommand only emits known commands, but handleCommand is
        // public, so keep a friendly fallback for direct callers.
        await this.sendMessage(
          message.channelId,
          `Unknown command: ${cmd}. Type \`@mosaic help\` for available commands.`
        );
    }
  }

  /**
   * Handle fix command - Start a job for an issue.
   *
   * Creates a job-updates thread, dispatches a code-task job to the
   * stitcher, and confirms the job details in the thread.
   */
  private async handleFixCommand(args: string[], message: ChatMessage): Promise<void> {
    const issueArg = args[0];
    if (!issueArg) {
      await this.sendMessage(message.channelId, "Usage: `@mosaic fix <issue-number>`");
      return;
    }
    // Require a purely numeric argument; bare parseInt would silently
    // accept trailing garbage such as "42abc".
    if (!/^\d+$/.test(issueArg)) {
      await this.sendMessage(
        message.channelId,
        "Invalid issue number. Please provide a numeric issue number."
      );
      return;
    }
    const issueNumber = Number.parseInt(issueArg, 10);
    // Create thread for job updates
    const threadId = await this.createThread({
      channelId: message.channelId,
      name: `Job #${String(issueNumber)}`,
      message: `Starting job for issue #${String(issueNumber)}...`,
    });
    // Dispatch job to stitcher
    const result = await this.stitcherService.dispatchJob({
      workspaceId: this.workspaceId,
      type: "code-task",
      priority: 10,
      metadata: {
        issueNumber,
        command: "fix",
        channelId: message.channelId,
        threadId: threadId,
        authorId: message.authorId,
        authorName: message.authorName,
      },
    });
    // Send confirmation to thread
    await this.sendThreadMessage({
      threadId,
      content: `Job created: ${result.jobId}\nStatus: ${result.status}\nQueue: ${result.queueName}`,
    });
  }

  /**
   * Handle status command - Get job status.
   */
  private async handleStatusCommand(args: string[], message: ChatMessage): Promise<void> {
    if (args.length === 0 || !args[0]) {
      await this.sendMessage(message.channelId, "Usage: `@mosaic status <job-id>`");
      return;
    }
    const jobId = args[0];
    // TODO: Implement job status retrieval from stitcher
    await this.sendMessage(
      message.channelId,
      `Status command not yet implemented for job: ${jobId}`
    );
  }

  /**
   * Handle cancel command - Cancel a running job.
   */
  private async handleCancelCommand(args: string[], message: ChatMessage): Promise<void> {
    if (args.length === 0 || !args[0]) {
      await this.sendMessage(message.channelId, "Usage: `@mosaic cancel <job-id>`");
      return;
    }
    const jobId = args[0];
    // TODO: Implement job cancellation in stitcher
    await this.sendMessage(
      message.channelId,
      `Cancel command not yet implemented for job: ${jobId}`
    );
  }

  /**
   * Handle verbose command - Stream full logs to thread.
   */
  private async handleVerboseCommand(args: string[], message: ChatMessage): Promise<void> {
    if (args.length === 0 || !args[0]) {
      await this.sendMessage(message.channelId, "Usage: `@mosaic verbose <job-id>`");
      return;
    }
    const jobId = args[0];
    // TODO: Implement verbose logging
    await this.sendMessage(message.channelId, `Verbose mode not yet implemented for job: ${jobId}`);
  }

  /**
   * Handle quiet command - Reduce notifications.
   */
  private async handleQuietCommand(_args: string[], message: ChatMessage): Promise<void> {
    // TODO: Implement quiet mode
    await this.sendMessage(
      message.channelId,
      "Quiet mode not yet implemented. Currently showing milestone updates only."
    );
  }

  /**
   * Handle help command - Show available commands.
   */
  private async handleHelpCommand(_args: string[], message: ChatMessage): Promise<void> {
    const helpMessage = `
**Available commands:**
\`@mosaic fix <issue>\` - Start job for issue
\`@mosaic status <job>\` - Get job status
\`@mosaic cancel <job>\` - Cancel running job
\`@mosaic verbose <job>\` - Stream full logs to thread
\`@mosaic quiet\` - Reduce notifications
\`@mosaic help\` - Show this help message
**Noise Management:**
• Main channel: Low verbosity (milestones only)
• Job threads: Medium verbosity (step completions)
• DMs: Configurable per user
`.trim();
    await this.sendMessage(message.channelId, helpMessage);
  }
}

View File

@@ -0,0 +1,3 @@
// Barrel exports for the bridge module: the Nest module itself, the
// Discord chat provider, and the shared chat-provider interfaces.
export * from "./bridge.module";
export * from "./discord/discord.service";
export * from "./interfaces";

View File

@@ -0,0 +1,79 @@
/**
* Chat Provider Interface
*
* Defines the contract for chat platform integrations (Discord, Slack, Matrix, etc.)
*/
export interface ChatMessage {
/** Platform-assigned message id. */
id: string;
/** Channel (or thread) the message was posted in. */
channelId: string;
/** Platform user id of the author. */
authorId: string;
/** Display name / username of the author. */
authorName: string;
/** Raw message text. */
content: string;
/** When the message was created on the platform. */
timestamp: Date;
/** Set only when the message was posted inside a thread. */
threadId?: string;
}
/** A command parsed out of a ChatMessage, e.g. "@mosaic fix 42". */
export interface ChatCommand {
/** Command keyword (e.g. fix, status, cancel, verbose, quiet, help). */
command: string;
/** Remaining whitespace-separated arguments after the keyword. */
args: string[];
/** The original message the command was parsed from. */
message: ChatMessage;
}
/** Options for creating a job-updates thread. */
export interface ThreadCreateOptions {
/** Channel in which to create the thread. */
channelId: string;
/** Thread title. */
name: string;
/** Initial message posted into the new thread. */
message: string;
}
/** Options for posting a message into an existing thread. */
export interface ThreadMessageOptions {
/** Id of the target thread. */
threadId: string;
/** Message text to post. */
content: string;
}
/** Verbosity setting for update notifications. */
export interface VerbosityLevel {
level: "low" | "medium" | "high";
description: string;
}
/**
* Chat Provider Interface
*
* All chat platform integrations must implement this interface
*/
export interface IChatProvider {
/**
* Connect to the chat platform
*/
connect(): Promise<void>;
/**
* Disconnect from the chat platform
*/
disconnect(): Promise<void>;
/**
* Check if the provider is connected
*/
isConnected(): boolean;
/**
* Send a message to a channel or thread
*/
sendMessage(channelId: string, content: string): Promise<void>;
/**
* Create a thread for job updates; resolves to the new thread's id
*/
createThread(options: ThreadCreateOptions): Promise<string>;
/**
* Send a message to a thread
*/
sendThreadMessage(options: ThreadMessageOptions): Promise<void>;
/**
* Parse a command from a message; null when the message is not a command
*/
parseCommand(message: ChatMessage): ChatCommand | null;
}

View File

@@ -0,0 +1 @@
// Barrel export for the bridge chat-provider interfaces.
export * from "./chat-provider.interface";

View File

@@ -0,0 +1,258 @@
/**
* Command Parser Service
*
* Parses chat commands from Discord, Mattermost, Slack
*/
import { Injectable } from "@nestjs/common";
import {
CommandAction,
CommandParseResult,
IssueReference,
ParsedCommand,
} from "./command.interface";
@Injectable()
export class CommandParserService {
  /** Commands must be addressed to the bot: "@mosaic ..." (case-insensitive). */
  private readonly MENTION_PATTERN = /^@mosaic(?:\s+|$)/i;
  /** Supported issue-reference formats. */
  private readonly ISSUE_PATTERNS = {
    // #42
    current: /^#(\d+)$/,
    // owner/repo#42
    crossRepo: /^([a-zA-Z0-9-_]+)\/([a-zA-Z0-9-_]+)#(\d+)$/,
    // https://git.example.com/owner/repo/issues/42
    url: /^https?:\/\/[^/]+\/([a-zA-Z0-9-_]+)\/([a-zA-Z0-9-_]+)\/issues\/(\d+)$/,
  };

  /**
   * Parse a chat command.
   *
   * @param message - Raw chat message text.
   * @returns Success with the parsed command, or failure with an error
   *          message and optional help text.
   */
  parseCommand(message: string): CommandParseResult {
    // Collapse runs of whitespace so tokenization is stable.
    const normalized = message.trim().replace(/\s+/g, " ");
    // Check for @mosaic mention
    if (!this.MENTION_PATTERN.test(normalized)) {
      return {
        success: false,
        error: {
          message: "Commands must start with @mosaic",
          help: "Example: @mosaic fix #42",
        },
      };
    }
    // Remove @mosaic mention and tokenize.
    const withoutMention = normalized.replace(this.MENTION_PATTERN, "");
    const tokens = withoutMention.split(" ").filter((t) => t.length > 0);
    // Tokens are filtered non-empty, so an undefined first token means
    // no action was provided. Guarded access avoids non-null assertions.
    const actionToken = tokens[0];
    if (!actionToken) {
      return {
        success: false,
        error: {
          message: "No action provided",
          help: this.getHelpText(),
        },
      };
    }
    const actionStr = actionToken.toLowerCase();
    const action = this.parseAction(actionStr);
    if (!action) {
      return {
        success: false,
        error: {
          message: `Unknown action: ${actionStr}`,
          help: this.getHelpText(),
        },
      };
    }
    // Parse arguments based on action
    return this.parseActionArguments(action, tokens.slice(1));
  }

  /**
   * Parse action string to CommandAction enum (null when unknown).
   */
  private parseAction(action: string): CommandAction | null {
    const actionMap: Record<string, CommandAction> = {
      fix: CommandAction.FIX,
      status: CommandAction.STATUS,
      cancel: CommandAction.CANCEL,
      retry: CommandAction.RETRY,
      verbose: CommandAction.VERBOSE,
      quiet: CommandAction.QUIET,
      help: CommandAction.HELP,
    };
    return actionMap[action] ?? null;
  }

  /**
   * Parse arguments for a specific action.
   */
  private parseActionArguments(action: CommandAction, args: string[]): CommandParseResult {
    switch (action) {
      case CommandAction.FIX:
        return this.parseFixCommand(args);
      case CommandAction.STATUS:
      case CommandAction.CANCEL:
      case CommandAction.RETRY:
      case CommandAction.VERBOSE:
        return this.parseJobCommand(action, args);
      case CommandAction.QUIET:
      case CommandAction.HELP:
        return this.parseNoArgCommand(action, args);
      default:
        return {
          success: false,
          error: {
            message: `Unhandled action: ${String(action)}`,
          },
        };
    }
  }

  /**
   * Parse fix command (requires an issue reference).
   */
  private parseFixCommand(args: string[]): CommandParseResult {
    const issueRef = args[0];
    if (!issueRef) {
      return {
        success: false,
        error: {
          message: "Fix command requires an issue reference",
          help: "Examples: @mosaic fix #42, @mosaic fix owner/repo#42, @mosaic fix https://git.example.com/owner/repo/issues/42",
        },
      };
    }
    const issue = this.parseIssueReference(issueRef);
    if (!issue) {
      return {
        success: false,
        error: {
          message: `Invalid issue reference: ${issueRef}`,
          help: "Valid formats: #42, owner/repo#42, or full URL",
        },
      };
    }
    const command: ParsedCommand = {
      action: CommandAction.FIX,
      issue,
      rawArgs: args,
    };
    return { success: true, command };
  }

  /**
   * Parse job commands (status, cancel, retry, verbose) — require a job ID.
   */
  private parseJobCommand(action: CommandAction, args: string[]): CommandParseResult {
    const jobId = args[0];
    if (!jobId) {
      return {
        success: false,
        error: {
          message: `${action} command requires a job ID`,
          help: `Example: @mosaic ${action} job-123`,
        },
      };
    }
    const command: ParsedCommand = {
      action,
      jobId,
      rawArgs: args,
    };
    return { success: true, command };
  }

  /**
   * Parse commands that take no arguments (quiet, help).
   */
  private parseNoArgCommand(action: CommandAction, args: string[]): CommandParseResult {
    const command: ParsedCommand = {
      action,
      rawArgs: args,
    };
    return { success: true, command };
  }

  /**
   * Parse an issue reference in one of the supported formats.
   *
   * Capture groups are always present when a pattern matches; the guards
   * below satisfy noUncheckedIndexedAccess without non-null assertions.
   */
  private parseIssueReference(ref: string): IssueReference | null {
    // Current repo format: #42
    const currentNum = this.ISSUE_PATTERNS.current.exec(ref)?.[1];
    if (currentNum) {
      return {
        number: Number.parseInt(currentNum, 10),
      };
    }
    // Cross-repo format: owner/repo#42
    const crossRepoMatch = this.ISSUE_PATTERNS.crossRepo.exec(ref);
    if (crossRepoMatch) {
      const [, owner, repo, num] = crossRepoMatch;
      if (owner && repo && num) {
        return {
          number: Number.parseInt(num, 10),
          owner,
          repo,
        };
      }
    }
    // URL format: https://git.example.com/owner/repo/issues/42
    const urlMatch = this.ISSUE_PATTERNS.url.exec(ref);
    if (urlMatch) {
      const [, owner, repo, num] = urlMatch;
      if (owner && repo && num) {
        return {
          number: Number.parseInt(num, 10),
          owner,
          repo,
          url: ref,
        };
      }
    }
    return null;
  }

  /**
   * Get help text for all commands.
   */
  private getHelpText(): string {
    return [
      "Available commands:",
      "  @mosaic fix <issue> - Start job for issue (#42, owner/repo#42, or URL)",
      "  @mosaic status <job> - Get job status",
      "  @mosaic cancel <job> - Cancel running job",
      "  @mosaic retry <job> - Retry failed job",
      "  @mosaic verbose <job> - Enable verbose logging",
      "  @mosaic quiet - Reduce notifications",
      "  @mosaic help - Show this help",
    ].join("\n");
  }
}

View File

@@ -0,0 +1,293 @@
/**
* Command Parser Tests
*/
import { Test, TestingModule } from "@nestjs/testing";
import { describe, it, expect, beforeEach } from "vitest";
import { CommandParserService } from "./command-parser.service";
import { CommandAction } from "./command.interface";
describe("CommandParserService", () => {
let service: CommandParserService;
beforeEach(async () => {
const module: TestingModule = await Test.createTestingModule({
providers: [CommandParserService],
}).compile();
service = module.get<CommandParserService>(CommandParserService);
});
describe("parseCommand", () => {
describe("fix command", () => {
it("should parse fix command with current repo issue (#42)", () => {
const result = service.parseCommand("@mosaic fix #42");
expect(result.success).toBe(true);
if (result.success) {
expect(result.command.action).toBe(CommandAction.FIX);
expect(result.command.issue).toEqual({
number: 42,
});
}
});
it("should parse fix command with cross-repo issue (owner/repo#42)", () => {
const result = service.parseCommand("@mosaic fix mosaic/stack#42");
expect(result.success).toBe(true);
if (result.success) {
expect(result.command.action).toBe(CommandAction.FIX);
expect(result.command.issue).toEqual({
number: 42,
owner: "mosaic",
repo: "stack",
});
}
});
it("should parse fix command with full URL", () => {
const result = service.parseCommand(
"@mosaic fix https://git.mosaicstack.dev/mosaic/stack/issues/42"
);
expect(result.success).toBe(true);
if (result.success) {
expect(result.command.action).toBe(CommandAction.FIX);
expect(result.command.issue).toEqual({
number: 42,
owner: "mosaic",
repo: "stack",
url: "https://git.mosaicstack.dev/mosaic/stack/issues/42",
});
}
});
it("should return error when fix command has no issue reference", () => {
const result = service.parseCommand("@mosaic fix");
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.message).toContain("issue reference");
expect(result.error.help).toBeDefined();
}
});
it("should return error when fix command has invalid issue reference", () => {
const result = service.parseCommand("@mosaic fix invalid");
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.message).toContain("Invalid issue reference");
}
});
});
describe("status command", () => {
it("should parse status command with job ID", () => {
const result = service.parseCommand("@mosaic status job-123");
expect(result.success).toBe(true);
if (result.success) {
expect(result.command.action).toBe(CommandAction.STATUS);
expect(result.command.jobId).toBe("job-123");
}
});
it("should return error when status command has no job ID", () => {
const result = service.parseCommand("@mosaic status");
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.message).toContain("job ID");
expect(result.error.help).toBeDefined();
}
});
});
describe("cancel command", () => {
it("should parse cancel command with job ID", () => {
const result = service.parseCommand("@mosaic cancel job-123");
expect(result.success).toBe(true);
if (result.success) {
expect(result.command.action).toBe(CommandAction.CANCEL);
expect(result.command.jobId).toBe("job-123");
}
});
it("should return error when cancel command has no job ID", () => {
const result = service.parseCommand("@mosaic cancel");
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.message).toContain("job ID");
}
});
});
describe("retry command", () => {
it("should parse retry command with job ID", () => {
const result = service.parseCommand("@mosaic retry job-123");
expect(result.success).toBe(true);
if (result.success) {
expect(result.command.action).toBe(CommandAction.RETRY);
expect(result.command.jobId).toBe("job-123");
}
});
it("should return error when retry command has no job ID", () => {
const result = service.parseCommand("@mosaic retry");
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.message).toContain("job ID");
}
});
});
describe("verbose command", () => {
it("should parse verbose command with job ID", () => {
const result = service.parseCommand("@mosaic verbose job-123");
expect(result.success).toBe(true);
if (result.success) {
expect(result.command.action).toBe(CommandAction.VERBOSE);
expect(result.command.jobId).toBe("job-123");
}
});
it("should return error when verbose command has no job ID", () => {
const result = service.parseCommand("@mosaic verbose");
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.message).toContain("job ID");
}
});
});
describe("quiet command", () => {
it("should parse quiet command", () => {
const result = service.parseCommand("@mosaic quiet");
expect(result.success).toBe(true);
if (result.success) {
expect(result.command.action).toBe(CommandAction.QUIET);
}
});
});
describe("help command", () => {
it("should parse help command", () => {
const result = service.parseCommand("@mosaic help");
expect(result.success).toBe(true);
if (result.success) {
expect(result.command.action).toBe(CommandAction.HELP);
}
});
});
describe("edge cases", () => {
  // Leading/trailing/extra whitespace must not change the parse result.
  it("should handle extra whitespace", () => {
    const result = service.parseCommand(" @mosaic fix #42 ");
    expect(result.success).toBe(true);
    if (result.success) {
      expect(result.command.action).toBe(CommandAction.FIX);
      expect(result.command.issue?.number).toBe(42);
    }
  });
  // The bot mention is matched case-insensitively.
  it("should be case-insensitive for @mosaic mention", () => {
    const result = service.parseCommand("@Mosaic fix #42");
    expect(result.success).toBe(true);
    if (result.success) {
      expect(result.command.action).toBe(CommandAction.FIX);
    }
  });
  // Action keywords are matched case-insensitively as well.
  it("should be case-insensitive for action", () => {
    const result = service.parseCommand("@mosaic FIX #42");
    expect(result.success).toBe(true);
    if (result.success) {
      expect(result.command.action).toBe(CommandAction.FIX);
    }
  });
  // Messages that never mention the bot are rejected with a hint about the
  // required "@mosaic" prefix.
  it("should return error when message does not start with @mosaic", () => {
    const result = service.parseCommand("fix #42");
    expect(result.success).toBe(false);
    if (!result.success) {
      expect(result.error.message).toContain("@mosaic");
    }
  });
  // A mention with no action yields an error plus usage help text.
  it("should return error when no action is provided", () => {
    const result = service.parseCommand("@mosaic ");
    expect(result.success).toBe(false);
    if (!result.success) {
      expect(result.error.message).toContain("action");
      expect(result.error.help).toBeDefined();
    }
  });
  // An unrecognized action yields an error plus usage help text.
  it("should return error for unknown action", () => {
    const result = service.parseCommand("@mosaic unknown");
    expect(result.success).toBe(false);
    if (!result.success) {
      expect(result.error.message).toContain("Unknown action");
      expect(result.error.help).toBeDefined();
    }
  });
});
describe("issue reference parsing", () => {
  // Full issue URLs should be decomposed into owner/repo/number while the
  // original URL is preserved on the reference.
  it("should parse GitHub-style issue URLs", () => {
    const result = service.parseCommand("@mosaic fix https://github.com/owner/repo/issues/42");
    expect(result.success).toBe(true);
    if (result.success) {
      expect(result.command.issue).toEqual({
        number: 42,
        owner: "owner",
        repo: "repo",
        url: "https://github.com/owner/repo/issues/42",
      });
    }
  });
  // Self-hosted (Gitea) instances use the same /owner/repo/issues/N path shape.
  it("should parse Gitea-style issue URLs", () => {
    const result = service.parseCommand(
      "@mosaic fix https://git.example.com/owner/repo/issues/42"
    );
    expect(result.success).toBe(true);
    if (result.success) {
      expect(result.command.issue).toEqual({
        number: 42,
        owner: "owner",
        repo: "repo",
        url: "https://git.example.com/owner/repo/issues/42",
      });
    }
  });
  // "#042" must parse as decimal 42 (no octal interpretation, zeros stripped).
  it("should handle issue references with leading zeros", () => {
    const result = service.parseCommand("@mosaic fix #042");
    expect(result.success).toBe(true);
    if (result.success) {
      expect(result.command.issue?.number).toBe(42);
    }
  });
});
});
});

View File

@@ -0,0 +1,90 @@
/**
 * Command Parser Interfaces
 *
 * Defines types for parsing chat commands across all platforms
 */

/**
 * Issue reference types
 *
 * Identifies the issue a command targets, either as a bare "#42"-style
 * number (owner/repo omitted) or as a full issue URL.
 */
export interface IssueReference {
  /**
   * Issue number
   */
  number: number;
  /**
   * Repository owner (optional for current repo)
   */
  owner?: string;
  /**
   * Repository name (optional for current repo)
   */
  repo?: string;
  /**
   * Full URL (if provided as URL)
   */
  url?: string;
}

/**
 * Supported command actions
 *
 * String-valued so the enum members match the chat keyword exactly
 * (e.g. "@mosaic fix" -> CommandAction.FIX).
 */
export enum CommandAction {
  FIX = "fix",
  STATUS = "status",
  CANCEL = "cancel",
  RETRY = "retry",
  VERBOSE = "verbose",
  QUIET = "quiet",
  HELP = "help",
}

/**
 * Parsed command result
 */
export interface ParsedCommand {
  /**
   * The action to perform
   */
  action: CommandAction;
  /**
   * Issue reference (for fix command)
   */
  issue?: IssueReference;
  /**
   * Job ID (for status, cancel, retry, verbose commands)
   */
  jobId?: string;
  /**
   * Raw arguments
   */
  rawArgs: string[];
}

/**
 * Command parse error
 */
export interface CommandParseError {
  /**
   * Error message
   */
  message: string;
  /**
   * Suggested help text
   */
  help?: string;
}

/**
 * Command parse result (success or error)
 *
 * Discriminated union on `success`: callers must narrow on it before
 * accessing `command` or `error`.
 */
export type CommandParseResult =
  | { success: true; command: ParsedCommand }
  | { success: false; error: CommandParseError };

View File

@@ -0,0 +1,23 @@
import { Module, Global } from "@nestjs/common";
import { BullMqService } from "./bullmq.service";

/**
 * BullMqModule - Job queue module using BullMQ with Valkey backend
 *
 * This module provides job queue functionality for the Mosaic Component Architecture.
 * It creates and manages queues for different agent profiles:
 * - mosaic-jobs (main queue)
 * - mosaic-jobs-runner (read-only operations)
 * - mosaic-jobs-weaver (write operations)
 * - mosaic-jobs-inspector (validation operations)
 *
 * Queues are actually created/connected in BullMqService.onModuleInit and
 * closed in onModuleDestroy.
 *
 * Shares the same Valkey connection used by ValkeyService (VALKEY_URL env var).
 *
 * Marked as @Global to allow injection across the application without explicit imports.
 */
@Global()
@Module({
  providers: [BullMqService],
  exports: [BullMqService],
})
export class BullMqModule {}

View File

@@ -0,0 +1,92 @@
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { Test, TestingModule } from "@nestjs/testing";
import { BullMqService } from "./bullmq.service";
import { QUEUE_NAMES } from "./queues";

describe("BullMqService", () => {
  let service: BullMqService;

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [BullMqService],
    }).compile();
    service = module.get<BullMqService>(BullMqService);
  });

  describe("Module Initialization", () => {
    it("should be defined", () => {
      expect(service).toBeDefined();
    });

    // The previous version of this test extracted parseRedisUrl but never
    // invoked it, so it asserted nothing. Actually exercise the parsing logic
    // (no Redis connection is required for this).
    it("should parse Redis URLs into host/port connection options", () => {
      // Reach the private method via a structural assertion and bind it so the
      // failure branch (which logs through this.logger) keeps its receiver.
      const parseRedisUrl = (
        service as unknown as {
          parseRedisUrl: (url: string) => { host: string; port: number };
        }
      ).parseRedisUrl.bind(service);

      // Explicit port is honored.
      expect(parseRedisUrl("redis://test-host:1234")).toEqual({ host: "test-host", port: 1234 });
      // Missing port falls back to the Redis default 6379.
      expect(parseRedisUrl("redis://host-only")).toEqual({ host: "host-only", port: 6379 });
      // Unparseable input falls back to localhost defaults.
      expect(parseRedisUrl("not a url")).toEqual({ host: "localhost", port: 6379 });
    });
  });

  describe("Queue Name Constants", () => {
    it("should define main queue name", () => {
      expect(QUEUE_NAMES.MAIN).toBe("mosaic-jobs");
    });
    it("should define runner queue name", () => {
      expect(QUEUE_NAMES.RUNNER).toBe("mosaic-jobs-runner");
    });
    it("should define weaver queue name", () => {
      expect(QUEUE_NAMES.WEAVER).toBe("mosaic-jobs-weaver");
    });
    it("should define inspector queue name", () => {
      expect(QUEUE_NAMES.INSPECTOR).toBe("mosaic-jobs-inspector");
    });
    it("should not contain colons in queue names", () => {
      // BullMQ doesn't allow colons in queue names
      Object.values(QUEUE_NAMES).forEach((name) => {
        expect(name).not.toContain(":");
      });
    });
  });

  describe("Service Configuration", () => {
    // Snapshot the real env value so the suite never clobbers a VALKEY_URL
    // set by the developer/CI environment (the old tests deleted it outright).
    const originalValkeyUrl = process.env.VALKEY_URL;

    afterEach(() => {
      if (originalValkeyUrl === undefined) {
        delete process.env.VALKEY_URL;
      } else {
        process.env.VALKEY_URL = originalValkeyUrl;
      }
    });

    it("should use VALKEY_URL from environment if provided", () => {
      process.env.VALKEY_URL = "redis://test-host:6379";
      // Service should be configured to use this URL on next onModuleInit
      expect(service).toBeDefined();
    });

    it("should have default fallback URL", () => {
      delete process.env.VALKEY_URL;
      // Service should use default redis://localhost:6379
      expect(service).toBeDefined();
    });
  });

  describe("Queue Management", () => {
    it("should return null for non-existent queue", () => {
      const queue = service.getQueue("non-existent-queue" as typeof QUEUE_NAMES.MAIN);
      expect(queue).toBeNull();
    });

    it("should initialize with empty queue map", () => {
      // onModuleInit has not been called, so no queues exist yet.
      const queues = service.getQueues();
      expect(queues).toBeDefined();
      expect(queues).toBeInstanceOf(Map);
    });
  });
});

View File

@@ -0,0 +1,186 @@
import { Injectable, Logger, OnModuleInit, OnModuleDestroy } from "@nestjs/common";
import { Queue, QueueOptions } from "bullmq";
import { QUEUE_NAMES, QueueName } from "./queues";
/**
 * Health status snapshot for BullMQ.
 */
export interface BullMqHealthStatus {
  /** True when every registered queue's Redis client answered PING. */
  connected: boolean;
  /** Job count per queue name; -1 when the count could not be read. */
  queues: Record<string, number>;
}
/**
 * BullMqService - Job queue service using BullMQ with Valkey backend
 *
 * This service provides job queue operations for the Mosaic Component Architecture:
 * - Main queue for general purpose jobs
 * - Runner queue for read-only operations
 * - Weaver queue for write operations
 * - Inspector queue for validation operations
 *
 * Shares the same Valkey connection used by ValkeyService (VALKEY_URL).
 */
@Injectable()
export class BullMqService implements OnModuleInit, OnModuleDestroy {
  private readonly logger = new Logger(BullMqService.name);

  /** Queue registry keyed by queue name; populated during onModuleInit. */
  private readonly queues = new Map<string, Queue>();

  /**
   * Create and connect all queues at startup.
   *
   * Reads VALKEY_URL (default redis://localhost:6379) and applies shared
   * retry/retention defaults to every queue.
   */
  async onModuleInit(): Promise<void> {
    const valkeyUrl = process.env.VALKEY_URL ?? "redis://localhost:6379";
    // Redact credentials so a redis://user:secret@host URL never reaches logs.
    this.logger.log(`Initializing BullMQ with Valkey at ${this.redactUrl(valkeyUrl)}`);
    // Parse Redis URL for connection options
    const connectionOptions = this.parseRedisUrl(valkeyUrl);
    const queueOptions: QueueOptions = {
      connection: connectionOptions,
      defaultJobOptions: {
        attempts: 3,
        backoff: {
          type: "exponential",
          delay: 1000,
        },
        removeOnComplete: {
          age: 3600, // Keep completed jobs for 1 hour
          count: 1000, // Keep last 1000 completed jobs
        },
        removeOnFail: {
          age: 86400, // Keep failed jobs for 24 hours
        },
      },
    };
    // Create all queues
    await this.createQueue(QUEUE_NAMES.MAIN, queueOptions);
    await this.createQueue(QUEUE_NAMES.RUNNER, queueOptions);
    await this.createQueue(QUEUE_NAMES.WEAVER, queueOptions);
    await this.createQueue(QUEUE_NAMES.INSPECTOR, queueOptions);
    this.logger.log(`BullMQ initialized with ${this.queues.size.toString()} queues`);
  }

  /** Close every queue connection on shutdown and clear the registry. */
  async onModuleDestroy(): Promise<void> {
    this.logger.log("Closing BullMQ queues");
    for (const [name, queue] of this.queues.entries()) {
      await queue.close();
      this.logger.log(`Queue closed: ${name}`);
    }
    this.queues.clear();
  }

  /**
   * Create a queue with the given name and options
   */
  private async createQueue(name: QueueName, options: QueueOptions): Promise<Queue> {
    const queue = new Queue(name, options);
    // Wait for queue to be ready
    await queue.waitUntilReady();
    this.queues.set(name, queue);
    this.logger.log(`Queue created: ${name}`);
    return queue;
  }

  /**
   * Get a queue by name
   *
   * @returns the queue, or null when it has not been created
   *   (e.g. before onModuleInit has run).
   */
  getQueue(name: QueueName): Queue | null {
    return this.queues.get(name) ?? null;
  }

  /**
   * Get all queues
   */
  getQueues(): Map<string, Queue> {
    return this.queues;
  }

  /**
   * Add a job to a queue
   *
   * @throws Error when the target queue does not exist.
   */
  async addJob(
    queueName: QueueName,
    jobName: string,
    data: unknown,
    options?: {
      priority?: number;
      delay?: number;
      attempts?: number;
    }
  ): Promise<ReturnType<Queue["add"]>> {
    const queue = this.queues.get(queueName);
    if (!queue) {
      throw new Error(`Queue not found: ${queueName}`);
    }
    const job = await queue.add(jobName, data, options);
    this.logger.log(`Job added to ${queueName}: ${jobName} (id: ${job.id ?? "unknown"})`);
    return job;
  }

  /**
   * Health check - verify all queues are connected (PING on each client).
   */
  async healthCheck(): Promise<boolean> {
    try {
      for (const queue of this.queues.values()) {
        // Check if queue client is connected
        const client = await queue.client;
        await client.ping();
      }
      return true;
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      this.logger.error("BullMQ health check failed:", errorMessage);
      return false;
    }
  }

  /**
   * Get health status with queue counts (-1 for a queue whose count failed).
   */
  async getHealthStatus(): Promise<BullMqHealthStatus> {
    const connected = await this.healthCheck();
    const queues: Record<string, number> = {};
    for (const [name, queue] of this.queues.entries()) {
      try {
        const count = await queue.count();
        queues[name] = count;
      } catch (error) {
        const errorMessage = error instanceof Error ? error.message : String(error);
        this.logger.error(`Failed to get count for queue ${name}:`, errorMessage);
        queues[name] = -1;
      }
    }
    return { connected, queues };
  }

  /**
   * Parse a Redis/Valkey URL into ioredis-style connection options.
   *
   * Also propagates credentials from URLs such as redis://:secret@host:6379 so
   * an authenticated Valkey instance is reachable (the previous implementation
   * silently dropped them; .env.example documents an optional VALKEY_PASSWORD).
   * Falls back to localhost:6379 when the URL cannot be parsed.
   */
  private parseRedisUrl(url: string): {
    host: string;
    port: number;
    username?: string;
    password?: string;
  } {
    try {
      const parsed = new URL(url);
      const options: { host: string; port: number; username?: string; password?: string } = {
        host: parsed.hostname,
        port: parseInt(parsed.port || "6379", 10),
      };
      // URL components are percent-encoded; decode before handing to the client.
      if (parsed.username) {
        options.username = decodeURIComponent(parsed.username);
      }
      if (parsed.password) {
        options.password = decodeURIComponent(parsed.password);
      }
      return options;
    } catch {
      this.logger.warn(`Failed to parse Redis URL: ${url}, using defaults`);
      return {
        host: "localhost",
        port: 6379,
      };
    }
  }

  /** Strip userinfo from a URL for safe logging; returns input when unparseable. */
  private redactUrl(url: string): string {
    try {
      const parsed = new URL(url);
      if (parsed.username) {
        parsed.username = "***";
      }
      if (parsed.password) {
        parsed.password = "***";
      }
      return parsed.toString();
    } catch {
      return url;
    }
  }
}

View File

@@ -0,0 +1,3 @@
/**
 * Public barrel for the BullMQ job-queue module (module, service, queue names).
 */
export * from "./bullmq.module";
export * from "./bullmq.service";
export * from "./queues";

View File

@@ -0,0 +1,38 @@
/**
 * Queue name constants for BullMQ
 *
 * Names are hyphen-delimited (mosaic-jobs-*) rather than colon-delimited:
 * BullMQ queue names must not contain ":" (it is Redis's key delimiter;
 * the service spec asserts this invariant). The per-profile queues align
 * with the Mosaic Component Architecture (agent profiles).
 */
export const QUEUE_NAMES = {
  /**
   * Main job queue - general purpose jobs
   */
  MAIN: "mosaic-jobs",
  /**
   * Runner profile jobs - read-only operations
   * - Fetches information
   * - Gathers context
   * - Reads repositories
   */
  RUNNER: "mosaic-jobs-runner",
  /**
   * Weaver profile jobs - write operations
   * - Implements code changes
   * - Writes files
   * - Scoped to worktree
   */
  WEAVER: "mosaic-jobs-weaver",
  /**
   * Inspector profile jobs - validation operations
   * - Runs quality gates (build, lint, test)
   * - No modifications allowed
   */
  INSPECTOR: "mosaic-jobs-inspector",
} as const;

/** Union of all queue name literals. */
export type QueueName = (typeof QUEUE_NAMES)[keyof typeof QUEUE_NAMES];

View File

@@ -5,6 +5,7 @@ This directory contains shared guards and decorators for workspace-based permiss
## Overview
The permission system provides:
- **Workspace isolation** via Row-Level Security (RLS)
- **Role-based access control** (RBAC) using workspace member roles
- **Declarative permission requirements** using decorators
@@ -18,6 +19,7 @@ Located in `../auth/guards/auth.guard.ts`
Verifies user authentication and attaches user data to the request.
**Sets on request:**
- `request.user` - Authenticated user object
- `request.session` - User session data
@@ -26,23 +28,27 @@ Verifies user authentication and attaches user data to the request.
Validates workspace access and sets up RLS context.
**Responsibilities:**
1. Extracts workspace ID from request (header, param, or body)
2. Verifies user is a member of the workspace
3. Sets the current user context for RLS policies
4. Attaches workspace context to the request
**Sets on request:**
- `request.workspace.id` - Validated workspace ID
- `request.user.workspaceId` - Workspace ID (for backward compatibility)
**Workspace ID Sources (in priority order):**
1. `X-Workspace-Id` header
2. `:workspaceId` URL parameter
3. `workspaceId` in request body
**Example:**
```typescript
@Controller('tasks')
@Controller("tasks")
@UseGuards(AuthGuard, WorkspaceGuard)
export class TasksController {
@Get()
@@ -57,23 +63,26 @@ export class TasksController {
Enforces role-based access control using workspace member roles.
**Responsibilities:**
1. Reads required permission from `@RequirePermission()` decorator
2. Fetches user's role in the workspace
3. Checks if role satisfies the required permission
4. Attaches role to request for convenience
**Sets on request:**
- `request.user.workspaceRole` - User's role in the workspace
**Must be used after AuthGuard and WorkspaceGuard.**
**Example:**
```typescript
@Controller('admin')
@Controller("admin")
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
export class AdminController {
@RequirePermission(Permission.WORKSPACE_ADMIN)
@Delete('data')
@Delete("data")
async deleteData() {
// Only ADMIN or OWNER can execute
}
@@ -88,14 +97,15 @@ Specifies the minimum permission level required for a route.
**Permission Levels:**
| Permission | Allowed Roles | Use Case |
|------------|--------------|----------|
| `WORKSPACE_OWNER` | OWNER | Critical operations (delete workspace, transfer ownership) |
| `WORKSPACE_ADMIN` | OWNER, ADMIN | Administrative functions (manage members, settings) |
| `WORKSPACE_MEMBER` | OWNER, ADMIN, MEMBER | Standard operations (create/edit content) |
| `WORKSPACE_ANY` | All roles including GUEST | Read-only or basic access |
| Permission | Allowed Roles | Use Case |
| ------------------ | ------------------------- | ---------------------------------------------------------- |
| `WORKSPACE_OWNER` | OWNER | Critical operations (delete workspace, transfer ownership) |
| `WORKSPACE_ADMIN` | OWNER, ADMIN | Administrative functions (manage members, settings) |
| `WORKSPACE_MEMBER` | OWNER, ADMIN, MEMBER | Standard operations (create/edit content) |
| `WORKSPACE_ANY` | All roles including GUEST | Read-only or basic access |
**Example:**
```typescript
@RequirePermission(Permission.WORKSPACE_ADMIN)
@Post('invite')
@@ -109,6 +119,7 @@ async inviteMember(@Body() inviteDto: InviteDto) {
Parameter decorator to extract the validated workspace ID.
**Example:**
```typescript
@Get()
async getTasks(@Workspace() workspaceId: string) {
@@ -121,6 +132,7 @@ async getTasks(@Workspace() workspaceId: string) {
Parameter decorator to extract the full workspace context.
**Example:**
```typescript
@Get()
async getTasks(@WorkspaceContext() workspace: { id: string }) {
@@ -135,6 +147,7 @@ Located in `../auth/decorators/current-user.decorator.ts`
Extracts the authenticated user from the request.
**Example:**
```typescript
@Post()
async create(@CurrentUser() user: any, @Body() dto: CreateDto) {
@@ -153,7 +166,7 @@ import { WorkspaceGuard, PermissionGuard } from "../common/guards";
import { Workspace, Permission, RequirePermission } from "../common/decorators";
import { CurrentUser } from "../auth/decorators/current-user.decorator";
@Controller('resources')
@Controller("resources")
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
export class ResourcesController {
@Get()
@@ -164,17 +177,13 @@ export class ResourcesController {
@Post()
@RequirePermission(Permission.WORKSPACE_MEMBER)
async create(
@Workspace() workspaceId: string,
@CurrentUser() user: any,
@Body() dto: CreateDto
) {
async create(@Workspace() workspaceId: string, @CurrentUser() user: any, @Body() dto: CreateDto) {
// Members and above can create
}
@Delete(':id')
@Delete(":id")
@RequirePermission(Permission.WORKSPACE_ADMIN)
async delete(@Param('id') id: string) {
async delete(@Param("id") id: string) {
// Only admins can delete
}
}
@@ -185,24 +194,32 @@ export class ResourcesController {
Different endpoints can have different permission requirements:
```typescript
@Controller('projects')
@Controller("projects")
@UseGuards(AuthGuard, WorkspaceGuard, PermissionGuard)
export class ProjectsController {
@Get()
@RequirePermission(Permission.WORKSPACE_ANY)
async list() { /* Anyone can view */ }
async list() {
/* Anyone can view */
}
@Post()
@RequirePermission(Permission.WORKSPACE_MEMBER)
async create() { /* Members can create */ }
async create() {
/* Members can create */
}
@Patch('settings')
@Patch("settings")
@RequirePermission(Permission.WORKSPACE_ADMIN)
async updateSettings() { /* Only admins */ }
async updateSettings() {
/* Only admins */
}
@Delete()
@RequirePermission(Permission.WORKSPACE_OWNER)
async deleteProject() { /* Only owner */ }
async deleteProject() {
/* Only owner */
}
}
```
@@ -211,17 +228,19 @@ export class ProjectsController {
The workspace ID can be provided in multiple ways:
**Via Header (Recommended for SPAs):**
```typescript
// Frontend
fetch('/api/tasks', {
fetch("/api/tasks", {
headers: {
'Authorization': 'Bearer <token>',
'X-Workspace-Id': 'workspace-uuid',
}
})
Authorization: "Bearer <token>",
"X-Workspace-Id": "workspace-uuid",
},
});
```
**Via URL Parameter:**
```typescript
@Get(':workspaceId/tasks')
async getTasks(@Param('workspaceId') workspaceId: string) {
@@ -230,6 +249,7 @@ async getTasks(@Param('workspaceId') workspaceId: string) {
```
**Via Request Body:**
```typescript
@Post()
async create(@Body() dto: { workspaceId: string; name: string }) {
@@ -240,6 +260,7 @@ async create(@Body() dto: { workspaceId: string; name: string }) {
## Row-Level Security (RLS)
When `WorkspaceGuard` is applied, it automatically:
1. Calls `setCurrentUser(userId)` to set the RLS context
2. All subsequent database queries are automatically filtered by RLS policies
3. Users can only access data in workspaces they're members of
@@ -249,10 +270,12 @@ When `WorkspaceGuard` is applied, it automatically:
## Testing
Tests are provided for both guards:
- `workspace.guard.spec.ts` - WorkspaceGuard tests
- `permission.guard.spec.ts` - PermissionGuard tests
**Run tests:**
```bash
npm test -- workspace.guard.spec
npm test -- permission.guard.spec

View File

@@ -104,7 +104,7 @@ describe("BaseFilterDto", () => {
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
expect(errors.some(e => e.property === "sortOrder")).toBe(true);
expect(errors.some((e) => e.property === "sortOrder")).toBe(true);
});
it("should accept comma-separated sortBy fields", async () => {
@@ -134,7 +134,7 @@ describe("BaseFilterDto", () => {
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
expect(errors.some(e => e.property === "dateFrom")).toBe(true);
expect(errors.some((e) => e.property === "dateFrom")).toBe(true);
});
it("should reject invalid date format for dateTo", async () => {
@@ -144,7 +144,7 @@ describe("BaseFilterDto", () => {
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
expect(errors.some(e => e.property === "dateTo")).toBe(true);
expect(errors.some((e) => e.property === "dateTo")).toBe(true);
});
it("should trim whitespace from search query", async () => {
@@ -165,6 +165,6 @@ describe("BaseFilterDto", () => {
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
expect(errors.some(e => e.property === "search")).toBe(true);
expect(errors.some((e) => e.property === "search")).toBe(true);
});
});

View File

@@ -0,0 +1,23 @@
import { ConflictException } from "@nestjs/common";
/**
 * Exception thrown when a concurrent update conflict is detected
 * This occurs when optimistic locking detects that a record has been
 * modified by another process between read and write operations
 */
export class ConcurrentUpdateException extends ConflictException {
  /**
   * @param resourceType - kind of record in conflict (e.g. "Task")
   * @param resourceId - identifier of the conflicting record
   * @param currentVersion - version observed at write time, when known
   */
  constructor(resourceType: string, resourceId: string, currentVersion?: number) {
    // Compare against undefined explicitly: a legitimate version of 0 must
    // still be reported (the previous truthiness check silently dropped it).
    const message =
      currentVersion !== undefined
        ? `Concurrent update detected for ${resourceType} ${resourceId} at version ${String(currentVersion)}. The record was modified by another process.`
        : `Concurrent update detected for ${resourceType} ${resourceId}. The record was modified by another process.`;
    super({
      message,
      error: "Concurrent Update Conflict",
      resourceType,
      resourceId,
      currentVersion,
      retryable: true, // clients may safely re-read and retry the write
    });
  }
}

View File

@@ -0,0 +1,146 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import { ExecutionContext, UnauthorizedException } from "@nestjs/common";
import { ConfigService } from "@nestjs/config";
import { ApiKeyGuard } from "./api-key.guard";

describe("ApiKeyGuard", () => {
  let guard: ApiKeyGuard;
  let mockConfigService: ConfigService;

  beforeEach(() => {
    mockConfigService = {
      get: vi.fn(),
    } as unknown as ConfigService;
    guard = new ApiKeyGuard(mockConfigService);
  });

  // Minimal ExecutionContext stub exposing only the headers the guard reads.
  const createMockExecutionContext = (headers: Record<string, string>): ExecutionContext => {
    return {
      switchToHttp: () => ({
        getRequest: () => ({
          headers,
        }),
      }),
    } as ExecutionContext;
  };

  describe("canActivate", () => {
    it("should return true when valid API key is provided", () => {
      const validApiKey = "test-api-key-12345";
      vi.mocked(mockConfigService.get).mockReturnValue(validApiKey);
      const context = createMockExecutionContext({
        "x-api-key": validApiKey,
      });
      const result = guard.canActivate(context);
      expect(result).toBe(true);
      expect(mockConfigService.get).toHaveBeenCalledWith("COORDINATOR_API_KEY");
    });

    it("should throw UnauthorizedException when no API key is provided", () => {
      const context = createMockExecutionContext({});
      expect(() => guard.canActivate(context)).toThrow(UnauthorizedException);
      expect(() => guard.canActivate(context)).toThrow("No API key provided");
    });

    it("should throw UnauthorizedException when API key is invalid", () => {
      const validApiKey = "correct-api-key";
      const invalidApiKey = "wrong-api-key";
      vi.mocked(mockConfigService.get).mockReturnValue(validApiKey);
      const context = createMockExecutionContext({
        "x-api-key": invalidApiKey,
      });
      expect(() => guard.canActivate(context)).toThrow(UnauthorizedException);
      expect(() => guard.canActivate(context)).toThrow("Invalid API key");
    });

    it("should throw UnauthorizedException when COORDINATOR_API_KEY is not configured", () => {
      vi.mocked(mockConfigService.get).mockReturnValue(undefined);
      const context = createMockExecutionContext({
        "x-api-key": "some-key",
      });
      expect(() => guard.canActivate(context)).toThrow(UnauthorizedException);
      expect(() => guard.canActivate(context)).toThrow("API key authentication not configured");
    });

    it("should handle uppercase header name (X-API-Key)", () => {
      const validApiKey = "test-api-key-12345";
      vi.mocked(mockConfigService.get).mockReturnValue(validApiKey);
      const context = createMockExecutionContext({
        "X-API-Key": validApiKey,
      });
      const result = guard.canActivate(context);
      expect(result).toBe(true);
    });

    it("should handle mixed case header name (X-Api-Key)", () => {
      const validApiKey = "test-api-key-12345";
      vi.mocked(mockConfigService.get).mockReturnValue(validApiKey);
      const context = createMockExecutionContext({
        "X-Api-Key": validApiKey,
      });
      const result = guard.canActivate(context);
      expect(result).toBe(true);
    });

    it("should reject empty string API key", () => {
      vi.mocked(mockConfigService.get).mockReturnValue("valid-key");
      const context = createMockExecutionContext({
        "x-api-key": "",
      });
      expect(() => guard.canActivate(context)).toThrow(UnauthorizedException);
      expect(() => guard.canActivate(context)).toThrow("No API key provided");
    });

    // Replaces a flaky test that asserted Date.now() deltas stayed within
    // 10ms: wall-clock timing is non-deterministic at millisecond resolution
    // (GC pauses, CI load) and proved nothing about constant-time comparison,
    // which is delegated to crypto.timingSafeEqual inside the guard. Instead,
    // assert deterministically that near-miss and wrong-length keys are both
    // rejected with the identical error.
    it("should reject near-miss and wrong-length keys identically", () => {
      const validApiKey = "test-api-key-12345";
      vi.mocked(mockConfigService.get).mockReturnValue(validApiKey);
      for (const wrongKey of ["wrong-key-short", "test-api-key-12344"]) {
        const context = createMockExecutionContext({ "x-api-key": wrongKey });
        expect(() => guard.canActivate(context)).toThrow(UnauthorizedException);
        expect(() => guard.canActivate(context)).toThrow("Invalid API key");
      }
    });
  });
});

View File

@@ -0,0 +1,81 @@
import { Injectable, CanActivate, ExecutionContext, UnauthorizedException } from "@nestjs/common";
import { ConfigService } from "@nestjs/config";
import { timingSafeEqual } from "crypto";
/**
 * ApiKeyGuard - Authentication guard for service-to-service communication
 *
 * Validates the X-API-Key header against the COORDINATOR_API_KEY environment variable.
 * Uses constant-time comparison to prevent timing attacks.
 *
 * Usage:
 * @UseGuards(ApiKeyGuard)
 * @Controller('coordinator')
 * export class CoordinatorIntegrationController { ... }
 */
@Injectable()
export class ApiKeyGuard implements CanActivate {
  constructor(private readonly configService: ConfigService) {}

  /**
   * Validate the incoming request's API key.
   *
   * @throws UnauthorizedException when the key is missing/blank, the server
   *   has no COORDINATOR_API_KEY configured, or the key does not match.
   */
  canActivate(context: ExecutionContext): boolean {
    const request = context.switchToHttp().getRequest<{ headers: Record<string, string> }>();
    const providedKey = this.extractApiKeyFromHeader(request);
    if (!providedKey) {
      throw new UnauthorizedException("No API key provided");
    }
    const configuredKey = this.configService.get<string>("COORDINATOR_API_KEY");
    if (!configuredKey) {
      throw new UnauthorizedException("API key authentication not configured");
    }
    if (!this.isValidApiKey(providedKey, configuredKey)) {
      throw new UnauthorizedException("Invalid API key");
    }
    return true;
  }

  /**
   * Extract API key from the X-API-Key header, matching the header name
   * case-insensitively. The previous implementation enumerated three specific
   * casings (with "x-api-key" duplicated) and missed variants such as
   * "X-API-KEY"; this generalization covers them all.
   * Returns undefined for a missing or blank key.
   */
  private extractApiKeyFromHeader(request: {
    headers: Record<string, string>;
  }): string | undefined {
    const entry = Object.entries(request.headers).find(
      ([name]) => name.toLowerCase() === "x-api-key"
    );
    const apiKey = entry?.[1];
    // Return undefined if key is empty string
    if (typeof apiKey === "string" && apiKey.trim() === "") {
      return undefined;
    }
    return apiKey;
  }

  /**
   * Validate API key using constant-time comparison to prevent timing attacks
   */
  private isValidApiKey(providedKey: string, configuredKey: string): boolean {
    try {
      // Convert strings to buffers for constant-time comparison
      const providedBuffer = Buffer.from(providedKey, "utf8");
      const configuredBuffer = Buffer.from(configuredKey, "utf8");
      // Keys must be same length for timingSafeEqual (it throws otherwise)
      if (providedBuffer.length !== configuredBuffer.length) {
        return false;
      }
      return timingSafeEqual(providedBuffer, configuredBuffer);
    } catch {
      // If comparison fails for any reason, reject
      return false;
    }
  }
}

View File

@@ -1,2 +1,3 @@
/**
 * Barrel exports for shared guards (workspace, permission, API key).
 */
export * from "./workspace.guard";
export * from "./permission.guard";
export * from "./api-key.guard";

View File

@@ -44,10 +44,7 @@ describe("PermissionGuard", () => {
vi.clearAllMocks();
});
const createMockExecutionContext = (
user: any,
workspace: any
): ExecutionContext => {
const createMockExecutionContext = (user: any, workspace: any): ExecutionContext => {
const mockRequest = {
user,
workspace,
@@ -67,10 +64,7 @@ describe("PermissionGuard", () => {
const workspaceId = "workspace-456";
it("should allow access when no permission is required", async () => {
const context = createMockExecutionContext(
{ id: userId },
{ id: workspaceId }
);
const context = createMockExecutionContext({ id: userId }, { id: workspaceId });
mockReflector.getAllAndOverride.mockReturnValue(undefined);
@@ -80,10 +74,7 @@ describe("PermissionGuard", () => {
});
it("should allow OWNER to access WORKSPACE_OWNER permission", async () => {
const context = createMockExecutionContext(
{ id: userId },
{ id: workspaceId }
);
const context = createMockExecutionContext({ id: userId }, { id: workspaceId });
mockReflector.getAllAndOverride.mockReturnValue(Permission.WORKSPACE_OWNER);
mockPrismaService.workspaceMember.findUnique.mockResolvedValue({
@@ -99,30 +90,19 @@ describe("PermissionGuard", () => {
});
it("should deny ADMIN access to WORKSPACE_OWNER permission", async () => {
const context = createMockExecutionContext(
{ id: userId },
{ id: workspaceId }
);
const context = createMockExecutionContext({ id: userId }, { id: workspaceId });
mockReflector.getAllAndOverride.mockReturnValue(Permission.WORKSPACE_OWNER);
mockPrismaService.workspaceMember.findUnique.mockResolvedValue({
role: WorkspaceMemberRole.ADMIN,
});
await expect(guard.canActivate(context)).rejects.toThrow(
ForbiddenException
);
await expect(guard.canActivate(context)).rejects.toThrow(ForbiddenException);
});
it("should allow OWNER and ADMIN to access WORKSPACE_ADMIN permission", async () => {
const context1 = createMockExecutionContext(
{ id: userId },
{ id: workspaceId }
);
const context2 = createMockExecutionContext(
{ id: userId },
{ id: workspaceId }
);
const context1 = createMockExecutionContext({ id: userId }, { id: workspaceId });
const context2 = createMockExecutionContext({ id: userId }, { id: workspaceId });
mockReflector.getAllAndOverride.mockReturnValue(Permission.WORKSPACE_ADMIN);
@@ -140,34 +120,20 @@ describe("PermissionGuard", () => {
});
it("should deny MEMBER access to WORKSPACE_ADMIN permission", async () => {
const context = createMockExecutionContext(
{ id: userId },
{ id: workspaceId }
);
const context = createMockExecutionContext({ id: userId }, { id: workspaceId });
mockReflector.getAllAndOverride.mockReturnValue(Permission.WORKSPACE_ADMIN);
mockPrismaService.workspaceMember.findUnique.mockResolvedValue({
role: WorkspaceMemberRole.MEMBER,
});
await expect(guard.canActivate(context)).rejects.toThrow(
ForbiddenException
);
await expect(guard.canActivate(context)).rejects.toThrow(ForbiddenException);
});
it("should allow OWNER, ADMIN, and MEMBER to access WORKSPACE_MEMBER permission", async () => {
const context1 = createMockExecutionContext(
{ id: userId },
{ id: workspaceId }
);
const context2 = createMockExecutionContext(
{ id: userId },
{ id: workspaceId }
);
const context3 = createMockExecutionContext(
{ id: userId },
{ id: workspaceId }
);
const context1 = createMockExecutionContext({ id: userId }, { id: workspaceId });
const context2 = createMockExecutionContext({ id: userId }, { id: workspaceId });
const context3 = createMockExecutionContext({ id: userId }, { id: workspaceId });
mockReflector.getAllAndOverride.mockReturnValue(Permission.WORKSPACE_MEMBER);
@@ -191,26 +157,18 @@ describe("PermissionGuard", () => {
});
it("should deny GUEST access to WORKSPACE_MEMBER permission", async () => {
const context = createMockExecutionContext(
{ id: userId },
{ id: workspaceId }
);
const context = createMockExecutionContext({ id: userId }, { id: workspaceId });
mockReflector.getAllAndOverride.mockReturnValue(Permission.WORKSPACE_MEMBER);
mockPrismaService.workspaceMember.findUnique.mockResolvedValue({
role: WorkspaceMemberRole.GUEST,
});
await expect(guard.canActivate(context)).rejects.toThrow(
ForbiddenException
);
await expect(guard.canActivate(context)).rejects.toThrow(ForbiddenException);
});
it("should allow any role (including GUEST) to access WORKSPACE_ANY permission", async () => {
const context = createMockExecutionContext(
{ id: userId },
{ id: workspaceId }
);
const context = createMockExecutionContext({ id: userId }, { id: workspaceId });
mockReflector.getAllAndOverride.mockReturnValue(Permission.WORKSPACE_ANY);
mockPrismaService.workspaceMember.findUnique.mockResolvedValue({
@@ -227,9 +185,7 @@ describe("PermissionGuard", () => {
mockReflector.getAllAndOverride.mockReturnValue(Permission.WORKSPACE_MEMBER);
await expect(guard.canActivate(context)).rejects.toThrow(
ForbiddenException
);
await expect(guard.canActivate(context)).rejects.toThrow(ForbiddenException);
});
it("should throw ForbiddenException when workspace context is missing", async () => {
@@ -237,42 +193,28 @@ describe("PermissionGuard", () => {
mockReflector.getAllAndOverride.mockReturnValue(Permission.WORKSPACE_MEMBER);
await expect(guard.canActivate(context)).rejects.toThrow(
ForbiddenException
);
await expect(guard.canActivate(context)).rejects.toThrow(ForbiddenException);
});
it("should throw ForbiddenException when user is not a workspace member", async () => {
const context = createMockExecutionContext(
{ id: userId },
{ id: workspaceId }
);
const context = createMockExecutionContext({ id: userId }, { id: workspaceId });
mockReflector.getAllAndOverride.mockReturnValue(Permission.WORKSPACE_MEMBER);
mockPrismaService.workspaceMember.findUnique.mockResolvedValue(null);
await expect(guard.canActivate(context)).rejects.toThrow(
ForbiddenException
);
await expect(guard.canActivate(context)).rejects.toThrow(ForbiddenException);
await expect(guard.canActivate(context)).rejects.toThrow(
"You are not a member of this workspace"
);
});
it("should handle database errors gracefully", async () => {
const context = createMockExecutionContext(
{ id: userId },
{ id: workspaceId }
);
const context = createMockExecutionContext({ id: userId }, { id: workspaceId });
mockReflector.getAllAndOverride.mockReturnValue(Permission.WORKSPACE_MEMBER);
mockPrismaService.workspaceMember.findUnique.mockRejectedValue(
new Error("Database error")
);
mockPrismaService.workspaceMember.findUnique.mockRejectedValue(new Error("Database error"));
await expect(guard.canActivate(context)).rejects.toThrow(
ForbiddenException
);
await expect(guard.canActivate(context)).rejects.toThrow(ForbiddenException);
});
});
});

View File

@@ -58,10 +58,7 @@ describe("WorkspaceGuard", () => {
const workspaceId = "workspace-456";
it("should allow access when user is a workspace member (via header)", async () => {
const context = createMockExecutionContext(
{ id: userId },
{ "x-workspace-id": workspaceId }
);
const context = createMockExecutionContext({ id: userId }, { "x-workspace-id": workspaceId });
mockPrismaService.workspaceMember.findUnique.mockResolvedValue({
workspaceId,
@@ -87,11 +84,7 @@ describe("WorkspaceGuard", () => {
});
it("should allow access when user is a workspace member (via URL param)", async () => {
const context = createMockExecutionContext(
{ id: userId },
{},
{ workspaceId }
);
const context = createMockExecutionContext({ id: userId }, {}, { workspaceId });
mockPrismaService.workspaceMember.findUnique.mockResolvedValue({
workspaceId,
@@ -105,12 +98,7 @@ describe("WorkspaceGuard", () => {
});
it("should allow access when user is a workspace member (via body)", async () => {
const context = createMockExecutionContext(
{ id: userId },
{},
{},
{ workspaceId }
);
const context = createMockExecutionContext({ id: userId }, {}, {}, { workspaceId });
mockPrismaService.workspaceMember.findUnique.mockResolvedValue({
workspaceId,
@@ -154,59 +142,38 @@ describe("WorkspaceGuard", () => {
});
it("should throw ForbiddenException when user is not authenticated", async () => {
const context = createMockExecutionContext(
null,
{ "x-workspace-id": workspaceId }
);
const context = createMockExecutionContext(null, { "x-workspace-id": workspaceId });
await expect(guard.canActivate(context)).rejects.toThrow(
ForbiddenException
);
await expect(guard.canActivate(context)).rejects.toThrow(
"User not authenticated"
);
await expect(guard.canActivate(context)).rejects.toThrow(ForbiddenException);
await expect(guard.canActivate(context)).rejects.toThrow("User not authenticated");
});
it("should throw BadRequestException when workspace ID is missing", async () => {
const context = createMockExecutionContext({ id: userId });
await expect(guard.canActivate(context)).rejects.toThrow(
BadRequestException
);
await expect(guard.canActivate(context)).rejects.toThrow(
"Workspace ID is required"
);
await expect(guard.canActivate(context)).rejects.toThrow(BadRequestException);
await expect(guard.canActivate(context)).rejects.toThrow("Workspace ID is required");
});
it("should throw ForbiddenException when user is not a workspace member", async () => {
const context = createMockExecutionContext(
{ id: userId },
{ "x-workspace-id": workspaceId }
);
const context = createMockExecutionContext({ id: userId }, { "x-workspace-id": workspaceId });
mockPrismaService.workspaceMember.findUnique.mockResolvedValue(null);
await expect(guard.canActivate(context)).rejects.toThrow(
ForbiddenException
);
await expect(guard.canActivate(context)).rejects.toThrow(ForbiddenException);
await expect(guard.canActivate(context)).rejects.toThrow(
"You do not have access to this workspace"
);
});
it("should handle database errors gracefully", async () => {
const context = createMockExecutionContext(
{ id: userId },
{ "x-workspace-id": workspaceId }
);
const context = createMockExecutionContext({ id: userId }, { "x-workspace-id": workspaceId });
mockPrismaService.workspaceMember.findUnique.mockRejectedValue(
new Error("Database connection failed")
);
await expect(guard.canActivate(context)).rejects.toThrow(
ForbiddenException
);
await expect(guard.canActivate(context)).rejects.toThrow(ForbiddenException);
});
});
});

View File

@@ -0,0 +1,2 @@
// Barrel file: re-exports the rate-limiting guard and its Valkey-backed
// storage so consumers can import both from the guards directory root.
export { ThrottlerApiKeyGuard } from "./throttler-api-key.guard";
export { ThrottlerValkeyStorageService } from "./throttler-storage.service";

View File

@@ -0,0 +1,44 @@
import { Injectable, ExecutionContext } from "@nestjs/common";
import { ThrottlerGuard, ThrottlerException } from "@nestjs/throttler";
import { Request } from "express";
/**
 * ThrottlerGuard variant that rate-limits per API key rather than per client IP.
 *
 * The tracking key is derived from the X-API-Key header when one is present,
 * so every API key gets an independent quota. Requests that carry no key fall
 * back to IP-based tracking.
 */
@Injectable()
export class ThrottlerApiKeyGuard extends ThrottlerGuard {
  /**
   * Build the rate-limit tracking key for a request.
   *
   * Prefers the X-API-Key header; otherwise uses the client IP, falling back
   * to the raw socket address and finally the literal "unknown".
   */
  protected getTracker(req: Request): Promise<string> {
    const headerKey = req.headers["x-api-key"] as string | undefined;
    const tracker = headerKey
      ? `apikey:${headerKey}`
      : `ip:${req.ip ?? req.socket.remoteAddress ?? "unknown"}`;
    return Promise.resolve(tracker);
  }

  /**
   * Log the violation for security monitoring, then raise the standard
   * throttling error with a client-friendly message.
   */
  protected async throwThrottlingException(context: ExecutionContext): Promise<void> {
    const request = context.switchToHttp().getRequest<Request>();
    console.warn(
      `Rate limit exceeded for ${await this.getTracker(request)} on ${request.method} ${request.url}`
    );
    throw new ThrottlerException("Rate limit exceeded. Please try again later.");
  }
}

View File

@@ -0,0 +1,179 @@
import { Injectable, Logger, OnModuleDestroy, OnModuleInit } from "@nestjs/common";
import { ThrottlerStorage } from "@nestjs/throttler";
import Redis from "ioredis";
/**
 * Throttler storage record interface
 * Matches @nestjs/throttler's ThrottlerStorageRecord
 */
interface ThrottlerStorageRecord {
  // Number of requests recorded for the key in the current window.
  totalHits: number;
  // Time until the hit counter expires, in milliseconds (mirrors the ttl
  // argument passed to increment() below).
  timeToExpire: number;
  // True once totalHits has exceeded the configured limit.
  isBlocked: boolean;
  // How long the caller should remain blocked (ms); 0 when not blocked.
  timeToBlockExpire: number;
}
/**
* Redis-based storage for rate limiting using Valkey
*
* This service uses Valkey (Redis-compatible) as the storage backend
* for rate limiting. This allows rate limits to work across multiple
* API instances in a distributed environment.
*
* If Redis is unavailable, falls back to in-memory storage.
*/
@Injectable()
export class ThrottlerValkeyStorageService implements ThrottlerStorage, OnModuleInit {
private readonly logger = new Logger(ThrottlerValkeyStorageService.name);
private client: Redis | undefined = undefined;
private readonly THROTTLER_PREFIX = "mosaic:throttler:";
private readonly fallbackStorage = new Map<string, number[]>();
private useRedis = false;
async onModuleInit(): Promise<void> {
const valkeyUrl = process.env.VALKEY_URL ?? "redis://localhost:6379";
try {
this.logger.log(`Connecting to Valkey for rate limiting at ${valkeyUrl}`);
this.client = new Redis(valkeyUrl, {
maxRetriesPerRequest: 3,
retryStrategy: (times: number) => {
const delay = Math.min(times * 50, 2000);
return delay;
},
lazyConnect: true, // Don't connect immediately
});
// Try to connect
await this.client.connect();
await this.client.ping();
this.useRedis = true;
this.logger.log("Valkey connected successfully for rate limiting");
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.warn(`Failed to connect to Valkey for rate limiting: ${errorMessage}`);
this.logger.warn("Falling back to in-memory rate limiting storage");
this.useRedis = false;
this.client = undefined;
}
}
/**
* Increment the number of requests for a given key
*
* @param key - Throttle key (e.g., "apikey:xxx" or "ip:192.168.1.1")
* @param ttl - Time to live in milliseconds
* @param limit - Maximum number of requests allowed
* @param blockDuration - Duration to block in milliseconds (not used in this implementation)
* @param _throttlerName - Name of the throttler (not used in this implementation)
* @returns Promise resolving to the current throttler storage record
*/
async increment(
key: string,
ttl: number,
limit: number,
blockDuration: number,
_throttlerName: string
): Promise<ThrottlerStorageRecord> {
const throttleKey = this.getThrottleKey(key);
let totalHits: number;
if (this.useRedis && this.client) {
try {
const result = await this.client.multi().incr(throttleKey).pexpire(throttleKey, ttl).exec();
if (result?.[0]?.[1]) {
totalHits = result[0][1] as number;
} else {
totalHits = this.incrementMemory(throttleKey, ttl);
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(`Redis increment failed: ${errorMessage}`);
// Fall through to in-memory
totalHits = this.incrementMemory(throttleKey, ttl);
}
} else {
// In-memory fallback
totalHits = this.incrementMemory(throttleKey, ttl);
}
// Return ThrottlerStorageRecord
const isBlocked = totalHits > limit;
return {
totalHits,
timeToExpire: ttl,
isBlocked,
timeToBlockExpire: isBlocked ? blockDuration : 0,
};
}
/**
* Get the current number of requests for a given key
*
* @param key - Throttle key
* @returns Promise resolving to the current number of requests
*/
async get(key: string): Promise<number> {
const throttleKey = this.getThrottleKey(key);
if (this.useRedis && this.client) {
try {
const value = await this.client.get(throttleKey);
return value ? parseInt(value, 10) : 0;
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
this.logger.error(`Redis get failed: ${errorMessage}`);
// Fall through to in-memory
}
}
// In-memory fallback
return this.getMemory(throttleKey);
}
/**
* In-memory increment implementation
*/
private incrementMemory(key: string, ttl: number): number {
const now = Date.now();
const timestamps = this.fallbackStorage.get(key) ?? [];
// Remove expired timestamps
const validTimestamps = timestamps.filter((timestamp) => now - timestamp < ttl);
// Add new timestamp
validTimestamps.push(now);
// Store updated timestamps
this.fallbackStorage.set(key, validTimestamps);
return validTimestamps.length;
}
/**
* In-memory get implementation
*/
private getMemory(key: string): number {
const timestamps = this.fallbackStorage.get(key);
return timestamps ? timestamps.length : 0;
}
/**
* Get throttle key with prefix
*/
private getThrottleKey(key: string): string {
return `${this.THROTTLER_PREFIX}${key}`;
}
/**
* Clean up on module destroy
*/
async onModuleDestroy(): Promise<void> {
if (this.client) {
await this.client.quit();
}
}
}

View File

@@ -1 +1,2 @@
// Barrel file for shared utils: Prisma query helpers and log sanitization.
export * from "./query-builder";
export * from "./log-sanitizer";

View File

@@ -0,0 +1,311 @@
import { describe, it, expect } from "vitest";
import { sanitizeForLogging } from "./log-sanitizer";
// Unit tests for sanitizeForLogging. Each group pins the redaction contract
// for one input shape: free-form strings, plain objects, arrays, Error
// instances, edge cases, and Discord-specific payloads.
describe("sanitizeForLogging", () => {
  // Pattern-based redaction of secrets embedded in free-form strings.
  describe("String sanitization", () => {
    it("should redact API keys", () => {
      const input = "Error with API key: sk_live_1234567890abcdef";
      const result = sanitizeForLogging(input);
      expect(result).toBe("Error with API key: [REDACTED]");
    });
    it("should redact bearer tokens", () => {
      const input = "Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9";
      const result = sanitizeForLogging(input);
      expect(result).toBe("Authorization: Bearer [REDACTED]");
    });
    it("should redact Discord bot tokens", () => {
      const input = "Bot token: MTk4NjIyNDgzNDcxOTI1MjQ4.Cl2FMQ.ZnCjm1XVW7vRze4b7Cq4se7kKWs";
      const result = sanitizeForLogging(input);
      expect(result).toBe("Bot token: [REDACTED]");
    });
    it("should redact passwords in strings", () => {
      const input = 'Connection failed with password="secret123"';
      const result = sanitizeForLogging(input);
      expect(result).toBe('Connection failed with password="[REDACTED]"');
    });
    it("should redact email addresses", () => {
      const input = "User email: user@example.com failed to authenticate";
      const result = sanitizeForLogging(input);
      expect(result).toBe("User email: [REDACTED] failed to authenticate");
    });
    it("should redact database connection strings", () => {
      const input = "postgresql://user:password123@localhost:5432/mydb";
      const result = sanitizeForLogging(input);
      expect(result).toBe("postgresql://user:[REDACTED]@localhost:5432/mydb");
    });
    it("should redact authorization headers", () => {
      const input = "Authorization: Basic dXNlcjpwYXNzd29yZA==";
      const result = sanitizeForLogging(input);
      expect(result).toBe("Authorization: Basic [REDACTED]");
    });
    it("should preserve non-sensitive strings", () => {
      const input = "This is a regular log message without secrets";
      const result = sanitizeForLogging(input);
      expect(result).toBe("This is a regular log message without secrets");
    });
    it("should redact environment variable style secrets", () => {
      const input = "API_KEY=abc123def456 failed";
      const result = sanitizeForLogging(input);
      expect(result).toBe("API_KEY=[REDACTED] failed");
    });
    it("should redact multiple secrets in one string", () => {
      const input = "token=xyz123 and password=secret456";
      const result = sanitizeForLogging(input);
      expect(result).toBe("token=[REDACTED] and password=[REDACTED]");
    });
  });
  // Key-name based redaction inside (possibly nested) plain objects.
  describe("Object sanitization", () => {
    it("should redact secrets in flat objects", () => {
      const input = {
        message: "Error occurred",
        apiKey: "sk_live_1234567890",
        token: "Bearer abc123",
      };
      const result = sanitizeForLogging(input);
      expect(result).toEqual({
        message: "Error occurred",
        apiKey: "[REDACTED]",
        token: "[REDACTED]",
      });
    });
    it("should redact secrets in nested objects", () => {
      const input = {
        error: {
          message: "Auth failed",
          credentials: {
            username: "admin",
            password: "secret123",
          },
        },
      };
      const result = sanitizeForLogging(input);
      expect(result).toEqual({
        error: {
          message: "Auth failed",
          credentials: {
            username: "admin",
            password: "[REDACTED]",
          },
        },
      });
    });
    it("should redact secrets based on key names", () => {
      const input = {
        apiKey: "secret",
        api_key: "secret",
        API_KEY: "secret",
        bearerToken: "token",
        accessToken: "token",
        password: "pass",
        secret: "secret",
        client_secret: "secret",
      };
      const result = sanitizeForLogging(input);
      expect(result).toEqual({
        apiKey: "[REDACTED]",
        api_key: "[REDACTED]",
        API_KEY: "[REDACTED]",
        bearerToken: "[REDACTED]",
        accessToken: "[REDACTED]",
        password: "[REDACTED]",
        secret: "[REDACTED]",
        client_secret: "[REDACTED]",
      });
    });
    it("should preserve non-sensitive object properties", () => {
      const input = {
        message: "Test message",
        statusCode: 500,
        timestamp: new Date("2024-01-01"),
        count: 42,
      };
      const result = sanitizeForLogging(input);
      expect(result).toEqual({
        message: "Test message",
        statusCode: 500,
        timestamp: new Date("2024-01-01"),
        count: 42,
      });
    });
    // Null/undefined values under sensitive keys are passed through, not
    // replaced with "[REDACTED]".
    it("should handle objects with null and undefined values", () => {
      const input = {
        message: "Error",
        token: null,
        apiKey: undefined,
        data: "value",
      };
      const result = sanitizeForLogging(input);
      expect(result).toEqual({
        message: "Error",
        token: null,
        apiKey: undefined,
        data: "value",
      });
    });
  });
  // Element-wise recursion into arrays (including arrays of arrays).
  describe("Array sanitization", () => {
    it("should sanitize strings in arrays", () => {
      const input = ["normal message", "token=abc123", "another message"];
      const result = sanitizeForLogging(input);
      expect(result).toEqual(["normal message", "token=[REDACTED]", "another message"]);
    });
    it("should sanitize objects in arrays", () => {
      const input = [
        { message: "ok" },
        { message: "error", apiKey: "secret123" },
        { message: "info" },
      ];
      const result = sanitizeForLogging(input);
      expect(result).toEqual([
        { message: "ok" },
        { message: "error", apiKey: "[REDACTED]" },
        { message: "info" },
      ]);
    });
    it("should handle nested arrays", () => {
      const input = [["token=abc"], ["password=xyz"]];
      const result = sanitizeForLogging(input);
      expect(result).toEqual([["token=[REDACTED]"], ["password=[REDACTED]"]]);
    });
  });
  // Errors keep name/message/stack (sanitized) plus any custom properties.
  describe("Error object sanitization", () => {
    it("should sanitize Error objects", () => {
      const error = new Error("Auth failed with token abc123");
      const result = sanitizeForLogging(error);
      expect(result.message).toBe("Auth failed with token [REDACTED]");
      expect(result.name).toBe("Error");
    });
    it("should sanitize custom error properties", () => {
      const error = new Error("Request failed");
      (error as any).config = {
        headers: {
          Authorization: "Bearer secret123",
        },
      };
      const result = sanitizeForLogging(error);
      expect(result.config.headers.Authorization).toBe("[REDACTED]");
    });
    it("should handle errors with nested objects", () => {
      const error = new Error("Discord error");
      (error as any).response = {
        status: 401,
        data: {
          message: "Invalid token",
          token: "abc123",
        },
      };
      const result = sanitizeForLogging(error);
      expect(result.response.data.token).toBe("[REDACTED]");
    });
  });
  // Primitives pass through untouched; cycles are cut; large inputs stay fast.
  describe("Edge cases", () => {
    it("should handle null input", () => {
      const result = sanitizeForLogging(null);
      expect(result).toBeNull();
    });
    it("should handle undefined input", () => {
      const result = sanitizeForLogging(undefined);
      expect(result).toBeUndefined();
    });
    it("should handle numbers", () => {
      const result = sanitizeForLogging(42);
      expect(result).toBe(42);
    });
    it("should handle booleans", () => {
      const result = sanitizeForLogging(true);
      expect(result).toBe(true);
    });
    it("should handle empty objects", () => {
      const result = sanitizeForLogging({});
      expect(result).toEqual({});
    });
    it("should handle empty arrays", () => {
      const result = sanitizeForLogging([]);
      expect(result).toEqual([]);
    });
    it("should handle circular references", () => {
      const obj: any = { name: "test" };
      obj.self = obj;
      const result = sanitizeForLogging(obj);
      expect(result.name).toBe("test");
      expect(result.self).toBe("[Circular Reference]");
    });
    it("should handle large objects without performance issues", () => {
      const largeObj: any = {};
      for (let i = 0; i < 1000; i++) {
        largeObj[`key${i}`] = `value${i}`;
      }
      largeObj.password = "secret123";
      const start = Date.now();
      const result = sanitizeForLogging(largeObj);
      const duration = Date.now() - start;
      expect(result.password).toBe("[REDACTED]");
      expect(duration).toBeLessThan(100); // Should complete in under 100ms
    });
  });
  // Realistic payloads from the Discord integration (bot tokens, axios-style
  // error configs).
  describe("Discord-specific cases", () => {
    it("should sanitize Discord bot token format", () => {
      const input = {
        error: "Failed to connect",
        token: "MTk4NjIyNDgzNDcxOTI1MjQ4.Cl2FMQ.ZnCjm1XVW7vRze4b7Cq4se7kKWs",
      };
      const result = sanitizeForLogging(input);
      expect(result.token).toBe("[REDACTED]");
    });
    it("should sanitize Discord error with config", () => {
      const error = {
        message: "Request failed",
        config: {
          headers: {
            Authorization: "Bot MTk4NjIyNDgzNDcxOTI1MjQ4.Cl2FMQ.ZnCjm1XVW7vRze4b7Cq4se7kKWs",
          },
        },
      };
      const result = sanitizeForLogging(error);
      expect(result.config.headers.Authorization).toBe("[REDACTED]");
    });
    it("should sanitize workspace IDs if configured", () => {
      const input = {
        message: "Job dispatched",
        workspaceId: "ws_123456789",
      };
      const result = sanitizeForLogging(input);
      // Workspace IDs are preserved by default (not considered sensitive)
      // Can be redacted if needed in future
      expect(result.workspaceId).toBe("ws_123456789");
    });
  });
});

View File

@@ -0,0 +1,185 @@
/**
* Log Sanitizer Utility
*
* Sanitizes sensitive information from logs to prevent secret exposure.
* This is critical for security when logging errors, especially to external
* services like Discord.
*
* @module log-sanitizer
*/
/**
 * Patterns for detecting sensitive data in strings
 * Order matters - more specific patterns should come first
 */
const SENSITIVE_PATTERNS = [
  // Quoted passwords and secrets (must come before general key-value patterns)
  { pattern: /(password|secret|token|key)\s*=\s*"([^"]+)"/gi, replacement: '$1="[REDACTED]"' },
  { pattern: /(password|secret|token|key)\s*=\s*'([^']+)'/gi, replacement: "$1='[REDACTED]'" },
  // Discord bot tokens (specific format, must come before generic token patterns)
  {
    pattern: /\b[MN][A-Za-z\d]{23,25}\.[A-Za-z\d]{6}\.[A-Za-z\d_-]{27,}\b/g,
    replacement: "[REDACTED]",
  },
  // API Keys and tokens (Stripe-style)
  { pattern: /\b(?:sk|pk)_(?:live|test)_[a-zA-Z0-9]{16,}/gi, replacement: "[REDACTED]" },
  // Bearer tokens
  { pattern: /Bearer\s+[A-Za-z0-9\-._~+/]+=*/gi, replacement: "Bearer [REDACTED]" },
  // JWT tokens
  { pattern: /eyJ[a-zA-Z0-9_-]*\.eyJ[a-zA-Z0-9_-]*\.[a-zA-Z0-9_-]*/g, replacement: "[REDACTED]" },
  // Authorization Basic
  { pattern: /Basic\s+[A-Za-z0-9+/]+=*/gi, replacement: "Basic [REDACTED]" },
  // Email addresses
  // (fixed: the TLD class was [A-Z|a-z], whose stray '|' also matched a
  // literal pipe character)
  { pattern: /\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}\b/g, replacement: "[REDACTED]" },
  // Connection string passwords
  { pattern: /(:\/\/[^:]+:)([^@]+)(@)/g, replacement: "$1[REDACTED]$3" },
  // Generic tokens in text with colon (e.g., "token: abc123")
  {
    pattern: /\b(token|password|secret|key)\s*:\s+([a-zA-Z0-9._-]{6,})/gi,
    replacement: "$1: [REDACTED]",
  },
  // Generic tokens in text without colon (e.g., "token abc123")
  {
    pattern: /\b(token|password|secret|key)\s+([a-zA-Z0-9._-]{6,})/gi,
    replacement: "$1 [REDACTED]",
  },
  // Key-value pairs with = sign (should be last as it's most general)
  {
    pattern:
      /\b(token|password|secret|api[_-]?key|apikey|client[_-]?secret|bearer)\s*=\s*[^\s,;)}\]"']+/gi,
    replacement: "$1=[REDACTED]",
  },
];

/**
 * Sensitive key names that should have their values redacted
 * (matched case-insensitively and by substring in isSensitiveKey)
 */
const SENSITIVE_KEYS = [
  "password",
  "secret",
  "token",
  "apikey",
  "api_key",
  "apiKey",
  "API_KEY",
  "bearertoken",
  "bearerToken",
  "bearer_token",
  "accesstoken",
  "accessToken",
  "access_token",
  "refreshtoken",
  "refreshToken",
  "refresh_token",
  "clientsecret",
  "clientSecret",
  "client_secret",
  "authorization",
  "Authorization",
];

/**
 * Checks if a key name is sensitive (case-insensitive substring match
 * against SENSITIVE_KEYS).
 */
function isSensitiveKey(key: string): boolean {
  const lowerKey = key.toLowerCase();
  return SENSITIVE_KEYS.some((sensitiveKey) => lowerKey.includes(sensitiveKey.toLowerCase()));
}

/**
 * Sanitizes a string by applying every SENSITIVE_PATTERNS entry in order.
 */
function sanitizeString(value: string): string {
  let sanitized = value;
  for (const { pattern, replacement } of SENSITIVE_PATTERNS) {
    sanitized = sanitized.replace(pattern, replacement);
  }
  return sanitized;
}

/**
 * Type guard to check if value is a non-null, non-array object
 */
function isObject(value: unknown): value is Record<string, unknown> {
  return typeof value === "object" && value !== null && !Array.isArray(value);
}

/**
 * Sanitizes data for logging by redacting sensitive information
 *
 * @param data - The data to sanitize (can be string, object, array, etc.)
 * @param seen - Internal set to track circular references
 * @returns Sanitized version of the data with secrets redacted
 *
 * @example
 * ```typescript
 * const error = new Error("Auth failed");
 * error.config = { headers: { Authorization: "Bearer secret123" } };
 * const sanitized = sanitizeForLogging(error);
 * // sanitized.config.headers.Authorization === "[REDACTED]"
 * ```
 */
export function sanitizeForLogging(data: unknown, seen = new WeakSet()): unknown {
  // Handle primitives
  if (data === null || data === undefined) {
    return data;
  }
  if (typeof data === "boolean" || typeof data === "number") {
    return data;
  }
  if (typeof data === "string") {
    return sanitizeString(data);
  }
  // Handle arrays. Arrays must participate in cycle detection too: previously
  // only plain objects were tracked, so a self-referencing array caused
  // unbounded recursion (stack overflow).
  if (Array.isArray(data)) {
    if (seen.has(data)) {
      return "[Circular Reference]";
    }
    seen.add(data);
    return data.map((item) => sanitizeForLogging(item, seen));
  }
  // Handle Date objects (preserve them as-is)
  if (data instanceof Date) {
    return data;
  }
  // Handle objects (including Error objects)
  if (isObject(data)) {
    // Check for circular references
    if (seen.has(data)) {
      return "[Circular Reference]";
    }
    seen.add(data);
    const sanitized: Record<string, unknown> = {};
    // Handle Error objects specially to preserve their non-enumerable
    // name/message/stack properties (for..in below won't see them)
    if (data instanceof Error) {
      sanitized.name = data.name;
      sanitized.message = sanitizeString(data.message);
      if (data.stack) {
        sanitized.stack = sanitizeString(data.stack);
      }
    }
    // Process all enumerable properties
    for (const key in data) {
      if (Object.prototype.hasOwnProperty.call(data, key)) {
        const value = data[key];
        // If the key is sensitive, redact the value (null/undefined pass
        // through so callers can tell "absent" from "redacted")
        if (isSensitiveKey(key)) {
          sanitized[key] = value === null || value === undefined ? value : "[REDACTED]";
        } else {
          // Recursively sanitize nested values
          sanitized[key] = sanitizeForLogging(value, seen);
        }
      }
    }
    return sanitized;
  }
  // Return other types as-is (functions, symbols, etc.)
  return data as unknown;
}

View File

@@ -27,18 +27,14 @@ describe("QueryBuilder", () => {
it("should handle single field", () => {
const result = QueryBuilder.buildSearchFilter("test", ["title"]);
expect(result).toEqual({
OR: [
{ title: { contains: "test", mode: "insensitive" } },
],
OR: [{ title: { contains: "test", mode: "insensitive" } }],
});
});
it("should trim search query", () => {
const result = QueryBuilder.buildSearchFilter(" test ", ["title"]);
expect(result).toEqual({
OR: [
{ title: { contains: "test", mode: "insensitive" } },
],
OR: [{ title: { contains: "test", mode: "insensitive" } }],
});
});
});
@@ -56,26 +52,17 @@ describe("QueryBuilder", () => {
it("should build multi-field sort", () => {
const result = QueryBuilder.buildSortOrder("priority,dueDate", SortOrder.DESC);
expect(result).toEqual([
{ priority: "desc" },
{ dueDate: "desc" },
]);
expect(result).toEqual([{ priority: "desc" }, { dueDate: "desc" }]);
});
it("should handle mixed sorting with custom order per field", () => {
const result = QueryBuilder.buildSortOrder("priority:asc,dueDate:desc");
expect(result).toEqual([
{ priority: "asc" },
{ dueDate: "desc" },
]);
expect(result).toEqual([{ priority: "asc" }, { dueDate: "desc" }]);
});
it("should use default order when not specified per field", () => {
const result = QueryBuilder.buildSortOrder("priority,dueDate", SortOrder.ASC);
expect(result).toEqual([
{ priority: "asc" },
{ dueDate: "asc" },
]);
expect(result).toEqual([{ priority: "asc" }, { dueDate: "asc" }]);
});
});

View File

@@ -0,0 +1,196 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import { Test, TestingModule } from "@nestjs/testing";
import { ConfigService } from "@nestjs/config";
import { RunnerJobStatus } from "@prisma/client";
import { CoordinatorIntegrationController } from "./coordinator-integration.controller";
import { CoordinatorIntegrationService } from "./coordinator-integration.service";
import type { CoordinatorJobResult, CoordinatorHealthStatus } from "./interfaces";
import { CoordinatorJobStatus } from "./dto";
import { ApiKeyGuard } from "../common/guards";
// Unit tests for CoordinatorIntegrationController. The service and config are
// mocked and ApiKeyGuard is overridden to always allow, so each test only
// verifies that a route delegates to CoordinatorIntegrationService with the
// right arguments and passes the service result through unchanged.
describe("CoordinatorIntegrationController", () => {
  let controller: CoordinatorIntegrationController;
  // Payload returned by the service when a job is enqueued.
  const mockJobResult: CoordinatorJobResult = {
    jobId: "job-123",
    status: "PENDING",
    queueName: "mosaic:main",
  };
  // Baseline persisted job row; individual tests spread-override fields
  // (status, progressPercent, error, ...).
  const mockJob = {
    id: "job-123",
    workspaceId: "workspace-123",
    type: "code-task",
    status: RunnerJobStatus.PENDING,
    priority: 10,
    progressPercent: 0,
    agentTaskId: null,
    result: null,
    error: null,
    startedAt: null,
    completedAt: null,
    createdAt: new Date(),
    updatedAt: new Date(),
  };
  const mockHealthStatus: CoordinatorHealthStatus = {
    api: true,
    bullmq: {
      connected: true,
      queues: { main: 5, runner: 2 },
    },
    timestamp: new Date(),
  };
  // One vi.fn() per service method the controller calls.
  const mockService = {
    createJob: vi.fn(),
    updateJobStatus: vi.fn(),
    updateJobProgress: vi.fn(),
    completeJob: vi.fn(),
    failJob: vi.fn(),
    getJobDetails: vi.fn(),
    getIntegrationHealth: vi.fn(),
  };
  const mockConfigService = {
    get: vi.fn().mockReturnValue("test-api-key-12345"),
  };
  beforeEach(async () => {
    vi.clearAllMocks();
    const module: TestingModule = await Test.createTestingModule({
      controllers: [CoordinatorIntegrationController],
      providers: [
        { provide: CoordinatorIntegrationService, useValue: mockService },
        { provide: ConfigService, useValue: mockConfigService },
      ],
    })
      .overrideGuard(ApiKeyGuard)
      .useValue({ canActivate: () => true })
      .compile();
    controller = module.get<CoordinatorIntegrationController>(CoordinatorIntegrationController);
  });
  describe("POST /coordinator/jobs", () => {
    it("should create a job and return job result", async () => {
      const dto = {
        workspaceId: "workspace-123",
        type: "code-task",
        issueNumber: 42,
        repository: "mosaic/stack",
      };
      mockService.createJob.mockResolvedValue(mockJobResult);
      const result = await controller.createJob(dto);
      expect(result).toEqual(mockJobResult);
      expect(mockService.createJob).toHaveBeenCalledWith(dto);
    });
  });
  describe("PATCH /coordinator/jobs/:id/status", () => {
    it("should update job status", async () => {
      const updatedJob = { ...mockJob, status: RunnerJobStatus.RUNNING };
      mockService.updateJobStatus.mockResolvedValue(updatedJob);
      const result = await controller.updateJobStatus("job-123", {
        status: CoordinatorJobStatus.RUNNING,
        agentId: "agent-42",
      });
      expect(result.status).toBe(RunnerJobStatus.RUNNING);
      expect(mockService.updateJobStatus).toHaveBeenCalledWith("job-123", {
        status: CoordinatorJobStatus.RUNNING,
        agentId: "agent-42",
      });
    });
  });
  describe("PATCH /coordinator/jobs/:id/progress", () => {
    it("should update job progress", async () => {
      const updatedJob = { ...mockJob, progressPercent: 50 };
      mockService.updateJobProgress.mockResolvedValue(updatedJob);
      const result = await controller.updateJobProgress("job-123", {
        progressPercent: 50,
        currentStep: "Running tests",
      });
      expect(result.progressPercent).toBe(50);
      expect(mockService.updateJobProgress).toHaveBeenCalledWith("job-123", {
        progressPercent: 50,
        currentStep: "Running tests",
      });
    });
  });
  describe("POST /coordinator/jobs/:id/complete", () => {
    it("should complete a job", async () => {
      const completedJob = {
        ...mockJob,
        status: RunnerJobStatus.COMPLETED,
        progressPercent: 100,
      };
      mockService.completeJob.mockResolvedValue(completedJob);
      const result = await controller.completeJob("job-123", {
        result: { commitSha: "abc123" },
      });
      expect(result.status).toBe(RunnerJobStatus.COMPLETED);
      expect(mockService.completeJob).toHaveBeenCalledWith("job-123", {
        result: { commitSha: "abc123" },
      });
    });
  });
  describe("POST /coordinator/jobs/:id/fail", () => {
    it("should fail a job", async () => {
      const failedJob = {
        ...mockJob,
        status: RunnerJobStatus.FAILED,
        error: "Test failed",
      };
      mockService.failJob.mockResolvedValue(failedJob);
      const result = await controller.failJob("job-123", {
        error: "Test failed",
        gateResults: { lint: true, test: false },
      });
      expect(result.status).toBe(RunnerJobStatus.FAILED);
      expect(result.error).toBe("Test failed");
      expect(mockService.failJob).toHaveBeenCalledWith("job-123", {
        error: "Test failed",
        gateResults: { lint: true, test: false },
      });
    });
  });
  describe("GET /coordinator/jobs/:id", () => {
    it("should return job details", async () => {
      const jobWithDetails = { ...mockJob, steps: [], events: [] };
      mockService.getJobDetails.mockResolvedValue(jobWithDetails);
      const result = await controller.getJobDetails("job-123");
      expect(result).toEqual(jobWithDetails);
      expect(mockService.getJobDetails).toHaveBeenCalledWith("job-123");
    });
  });
  describe("GET /coordinator/health", () => {
    it("should return integration health status", async () => {
      mockService.getIntegrationHealth.mockResolvedValue(mockHealthStatus);
      const result = await controller.getHealth();
      expect(result.api).toBe(true);
      expect(result.bullmq.connected).toBe(true);
      expect(mockService.getIntegrationHealth).toHaveBeenCalled();
    });
  });
});

View File

@@ -0,0 +1,127 @@
import { Controller, Post, Patch, Get, Body, Param, UseGuards } from "@nestjs/common";
import { Throttle } from "@nestjs/throttler";
import { CoordinatorIntegrationService } from "./coordinator-integration.service";
import {
CreateCoordinatorJobDto,
UpdateJobStatusDto,
UpdateJobProgressDto,
CompleteJobDto,
FailJobDto,
} from "./dto";
import type { CoordinatorJobResult, CoordinatorHealthStatus } from "./interfaces";
import { ApiKeyGuard } from "../common/guards";
/**
* CoordinatorIntegrationController - REST API for Python coordinator communication
*
* SECURITY:
* - All endpoints require API key authentication via X-API-Key header
* - Rate limiting: 100 requests per minute per API key (default)
* - Health endpoint: 300 requests per minute (higher for monitoring)
*
* Endpoints:
* - POST /coordinator/jobs - Create a job from coordinator
* - PATCH /coordinator/jobs/:id/status - Update job status
* - PATCH /coordinator/jobs/:id/progress - Update job progress
* - POST /coordinator/jobs/:id/complete - Mark job as complete
* - POST /coordinator/jobs/:id/fail - Mark job as failed
* - GET /coordinator/jobs/:id - Get job details
* - GET /coordinator/health - Integration health check
*/
@Controller("coordinator")
@UseGuards(ApiKeyGuard)
@Throttle({ default: { ttl: 60000, limit: 100 } }) // controller-wide default: 100 req/min
export class CoordinatorIntegrationController {
  constructor(private readonly service: CoordinatorIntegrationService) {}

  /**
   * Create a runner job on behalf of the coordinator.
   *
   * Rate limit: 100 requests per minute per API key.
   */
  @Post("jobs")
  @Throttle({ default: { ttl: 60000, limit: 100 } })
  async createJob(@Body() dto: CreateCoordinatorJobDto): Promise<CoordinatorJobResult> {
    return await this.service.createJob(dto);
  }

  /**
   * Apply a coordinator-reported status change to a job.
   *
   * Rate limit: 100 requests per minute per API key.
   */
  @Patch("jobs/:id/status")
  @Throttle({ default: { ttl: 60000, limit: 100 } })
  async updateJobStatus(
    @Param("id") id: string,
    @Body() dto: UpdateJobStatusDto
  ): Promise<Awaited<ReturnType<typeof this.service.updateJobStatus>>> {
    return await this.service.updateJobStatus(id, dto);
  }

  /**
   * Apply a coordinator-reported progress update to a job.
   *
   * Rate limit: 100 requests per minute per API key.
   */
  @Patch("jobs/:id/progress")
  @Throttle({ default: { ttl: 60000, limit: 100 } })
  async updateJobProgress(
    @Param("id") id: string,
    @Body() dto: UpdateJobProgressDto
  ): Promise<Awaited<ReturnType<typeof this.service.updateJobProgress>>> {
    return await this.service.updateJobProgress(id, dto);
  }

  /**
   * Record a successful completion reported by the coordinator.
   *
   * Rate limit: 100 requests per minute per API key.
   */
  @Post("jobs/:id/complete")
  @Throttle({ default: { ttl: 60000, limit: 100 } })
  async completeJob(
    @Param("id") id: string,
    @Body() dto: CompleteJobDto
  ): Promise<Awaited<ReturnType<typeof this.service.completeJob>>> {
    return await this.service.completeJob(id, dto);
  }

  /**
   * Record a failure reported by the coordinator.
   *
   * Rate limit: 100 requests per minute per API key.
   */
  @Post("jobs/:id/fail")
  @Throttle({ default: { ttl: 60000, limit: 100 } })
  async failJob(
    @Param("id") id: string,
    @Body() dto: FailJobDto
  ): Promise<Awaited<ReturnType<typeof this.service.failJob>>> {
    return await this.service.failJob(id, dto);
  }

  /**
   * Fetch a job together with its events and steps.
   *
   * Rate limit: 100 requests per minute per API key.
   */
  @Get("jobs/:id")
  @Throttle({ default: { ttl: 60000, limit: 100 } })
  async getJobDetails(
    @Param("id") id: string
  ): Promise<Awaited<ReturnType<typeof this.service.getJobDetails>>> {
    return await this.service.getJobDetails(id);
  }

  /**
   * Report integration health (API reachability, BullMQ connectivity).
   *
   * Rate limit: 300 requests per minute — raised so monitoring can poll freely.
   */
  @Get("health")
  @Throttle({ default: { ttl: 60000, limit: 300 } })
  async getHealth(): Promise<CoordinatorHealthStatus> {
    return await this.service.getIntegrationHealth();
  }
}

View File

@@ -0,0 +1,28 @@
import { Module } from "@nestjs/common";
import { ConfigModule } from "@nestjs/config";
import { CoordinatorIntegrationController } from "./coordinator-integration.controller";
import { CoordinatorIntegrationService } from "./coordinator-integration.service";
import { PrismaModule } from "../prisma/prisma.module";
import { BullMqModule } from "../bullmq/bullmq.module";
import { JobEventsModule } from "../job-events/job-events.module";
import { HeraldModule } from "../herald/herald.module";
/**
* CoordinatorIntegrationModule - Bridge between Python coordinator and NestJS API
*
* Provides REST endpoints for the M4.1 coordinator (Python FastAPI) to
* communicate with the M4.2 infrastructure (NestJS).
*
* Key integration points:
* - Job creation from coordinator webhook events
* - Job status updates during processing
* - Job completion and failure handling
* - Event bridging to Herald for Discord notifications
*/
@Module({
  // ConfigModule supplies the API key read by ApiKeyGuard; the remaining
  // imports provide persistence (Prisma), queueing (BullMQ), job event
  // emission (JobEvents) and Discord notification bridging (Herald).
  imports: [ConfigModule, PrismaModule, BullMqModule, JobEventsModule, HeraldModule],
  controllers: [CoordinatorIntegrationController],
  providers: [CoordinatorIntegrationService],
  // Exported so other modules can reuse the coordinator bridge service.
  exports: [CoordinatorIntegrationService],
})
export class CoordinatorIntegrationModule {}

View File

@@ -0,0 +1,284 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import { Test, TestingModule } from "@nestjs/testing";
import { INestApplication, HttpStatus } from "@nestjs/common";
import request from "supertest";
import { CoordinatorIntegrationController } from "./coordinator-integration.controller";
import { CoordinatorIntegrationService } from "./coordinator-integration.service";
import { ThrottlerModule } from "@nestjs/throttler";
import { APP_GUARD } from "@nestjs/core";
import { ConfigService } from "@nestjs/config";
import { ApiKeyGuard } from "../common/guards";
import { ThrottlerApiKeyGuard } from "../common/throttler";
/**
* Rate Limiting Tests for Coordinator Integration Endpoints
*
* These tests verify that rate limiting is properly enforced on coordinator
* endpoints to prevent DoS attacks.
*
* Test Coverage:
* - Rate limit enforcement (429 status)
* - Retry-After header inclusion
* - Per-API-key rate limiting
* - Higher limits for health endpoints
*/
describe("CoordinatorIntegrationController - Rate Limiting", () => {
  let app: INestApplication;

  // The service layer is fully mocked: these tests exercise only the
  // throttler guard in front of the controller, not business logic.
  const mockCoordinatorService = {
    createJob: vi.fn().mockResolvedValue({
      jobId: "coord-job-123",
      status: "PENDING",
    }),
    updateJobStatus: vi.fn().mockResolvedValue({
      jobId: "coord-job-123",
      status: "RUNNING",
    }),
    updateJobProgress: vi.fn().mockResolvedValue({
      jobId: "coord-job-123",
      progress: 50,
    }),
    completeJob: vi.fn().mockResolvedValue({
      jobId: "coord-job-123",
      status: "COMPLETED",
    }),
    failJob: vi.fn().mockResolvedValue({
      jobId: "coord-job-123",
      status: "FAILED",
    }),
    getJobDetails: vi.fn().mockResolvedValue({
      jobId: "coord-job-123",
      status: "RUNNING",
    }),
    getIntegrationHealth: vi.fn().mockResolvedValue({
      status: "healthy",
      timestamp: new Date().toISOString(),
    }),
  };
  const mockConfigService = {
    get: vi.fn((key: string) => {
      const config: Record<string, string | number> = {
        COORDINATOR_API_KEY: "test-coordinator-key",
        RATE_LIMIT_TTL: "1", // 1 second for faster tests
        RATE_LIMIT_COORDINATOR_LIMIT: "100",
        RATE_LIMIT_HEALTH_LIMIT: "300",
      };
      return config[key];
    }),
  };
  beforeEach(async () => {
    const moduleFixture: TestingModule = await Test.createTestingModule({
      imports: [
        ThrottlerModule.forRoot([
          {
            ttl: 1000, // 1 second for testing
            limit: 100, // Default limit
          },
        ]),
      ],
      controllers: [CoordinatorIntegrationController],
      providers: [
        { provide: CoordinatorIntegrationService, useValue: mockCoordinatorService },
        { provide: ConfigService, useValue: mockConfigService },
        {
          provide: APP_GUARD,
          useClass: ThrottlerApiKeyGuard,
        },
      ],
    })
      // Authentication is tested elsewhere; bypass it here so only the
      // throttler guard decides the outcome.
      .overrideGuard(ApiKeyGuard)
      .useValue({ canActivate: () => true })
      .compile();
    app = moduleFixture.createNestApplication();
    await app.init();
    vi.clearAllMocks();
  });
  afterEach(async () => {
    await app.close();
  });
  describe("POST /coordinator/jobs - Rate Limiting", () => {
    it("should allow requests within rate limit", async () => {
      const payload = {
        workspaceId: "workspace-123",
        type: "data-processing",
        data: { input: "test" },
      };
      // Make 3 requests (within limit of 100)
      for (let i = 0; i < 3; i++) {
        const response = await request(app.getHttpServer())
          .post("/coordinator/jobs")
          .set("X-API-Key", "test-coordinator-key")
          .send(payload);
        expect(response.status).toBe(HttpStatus.CREATED);
      }
      expect(mockCoordinatorService.createJob).toHaveBeenCalledTimes(3);
    });
    it("should return 429 when rate limit is exceeded", async () => {
      const payload = {
        workspaceId: "workspace-123",
        type: "data-processing",
        data: { input: "test" },
      };
      // Exhaust rate limit (100 requests)
      for (let i = 0; i < 100; i++) {
        await request(app.getHttpServer())
          .post("/coordinator/jobs")
          .set("X-API-Key", "test-coordinator-key")
          .send(payload);
      }
      // The 101st request should be rate limited
      const response = await request(app.getHttpServer())
        .post("/coordinator/jobs")
        .set("X-API-Key", "test-coordinator-key")
        .send(payload);
      expect(response.status).toBe(HttpStatus.TOO_MANY_REQUESTS);
    });
    it("should include Retry-After header in 429 response", async () => {
      const payload = {
        workspaceId: "workspace-123",
        type: "data-processing",
        data: { input: "test" },
      };
      // Exhaust rate limit (100 requests)
      for (let i = 0; i < 100; i++) {
        await request(app.getHttpServer())
          .post("/coordinator/jobs")
          .set("X-API-Key", "test-coordinator-key")
          .send(payload);
      }
      // Get rate limited response
      const response = await request(app.getHttpServer())
        .post("/coordinator/jobs")
        .set("X-API-Key", "test-coordinator-key")
        .send(payload);
      expect(response.status).toBe(HttpStatus.TOO_MANY_REQUESTS);
      expect(response.headers).toHaveProperty("retry-after");
      expect(parseInt(response.headers["retry-after"])).toBeGreaterThan(0);
    });
  });
  describe("PATCH /coordinator/jobs/:id/status - Rate Limiting", () => {
    it("should allow requests within rate limit", async () => {
      const jobId = "coord-job-123";
      const payload = { status: "RUNNING" };
      // Make 3 requests (within limit of 100)
      for (let i = 0; i < 3; i++) {
        const response = await request(app.getHttpServer())
          .patch(`/coordinator/jobs/${jobId}/status`)
          .set("X-API-Key", "test-coordinator-key")
          .send(payload);
        expect(response.status).toBe(HttpStatus.OK);
      }
      expect(mockCoordinatorService.updateJobStatus).toHaveBeenCalledTimes(3);
    });
    it("should return 429 when rate limit is exceeded", async () => {
      const jobId = "coord-job-123";
      const payload = { status: "RUNNING" };
      // Exhaust rate limit (100 requests)
      for (let i = 0; i < 100; i++) {
        await request(app.getHttpServer())
          .patch(`/coordinator/jobs/${jobId}/status`)
          .set("X-API-Key", "test-coordinator-key")
          .send(payload);
      }
      // The 101st request should be rate limited
      const response = await request(app.getHttpServer())
        .patch(`/coordinator/jobs/${jobId}/status`)
        .set("X-API-Key", "test-coordinator-key")
        .send(payload);
      expect(response.status).toBe(HttpStatus.TOO_MANY_REQUESTS);
    });
  });
  describe("GET /coordinator/health - Rate Limiting", () => {
    it("should have higher rate limit than other endpoints", async () => {
      // Health endpoint should allow 300 requests (higher than default 100)
      // Test with a smaller sample to keep test fast
      for (let i = 0; i < 10; i++) {
        const response = await request(app.getHttpServer())
          .get("/coordinator/health")
          .set("X-API-Key", "test-coordinator-key");
        expect(response.status).toBe(HttpStatus.OK);
      }
      expect(mockCoordinatorService.getIntegrationHealth).toHaveBeenCalledTimes(10);
    });
    it("should return 429 when health endpoint limit is exceeded", async () => {
      // NOTE(review): 300 sequential requests must finish inside the 1s TTL
      // window for the 301st to be throttled — potentially flaky on slow CI.
      // Exhaust health endpoint limit (300 requests)
      for (let i = 0; i < 300; i++) {
        await request(app.getHttpServer())
          .get("/coordinator/health")
          .set("X-API-Key", "test-coordinator-key");
      }
      // The 301st request should be rate limited
      const response = await request(app.getHttpServer())
        .get("/coordinator/health")
        .set("X-API-Key", "test-coordinator-key");
      expect(response.status).toBe(HttpStatus.TOO_MANY_REQUESTS);
    });
  });
  describe("Per-API-Key Rate Limiting", () => {
    it("should enforce rate limits per API key independently", async () => {
      const payload = {
        workspaceId: "workspace-123",
        type: "data-processing",
        data: { input: "test" },
      };
      // Exhaust rate limit for first API key (100 requests)
      for (let i = 0; i < 100; i++) {
        await request(app.getHttpServer())
          .post("/coordinator/jobs")
          .set("X-API-Key", "test-coordinator-key-1")
          .send(payload);
      }
      // First API key should be rate limited
      const response1 = await request(app.getHttpServer())
        .post("/coordinator/jobs")
        .set("X-API-Key", "test-coordinator-key-1")
        .send(payload);
      expect(response1.status).toBe(HttpStatus.TOO_MANY_REQUESTS);
      // Second API key should still be allowed
      const response2 = await request(app.getHttpServer())
        .post("/coordinator/jobs")
        .set("X-API-Key", "test-coordinator-key-2")
        .send(payload);
      expect(response2.status).toBe(HttpStatus.CREATED);
    });
  });
});

View File

@@ -0,0 +1,154 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import { Test, TestingModule } from "@nestjs/testing";
import { UnauthorizedException } from "@nestjs/common";
import { ConfigService } from "@nestjs/config";
import { CoordinatorIntegrationController } from "./coordinator-integration.controller";
import { CoordinatorIntegrationService } from "./coordinator-integration.service";
import { ApiKeyGuard } from "../common/guards/api-key.guard";
/**
* Security tests for CoordinatorIntegrationController
*
* These tests verify that all coordinator endpoints require authentication
* and reject requests without valid API keys.
*/
describe("CoordinatorIntegrationController - Security", () => {
  let guard: ApiKeyGuard;

  const mockService = {
    createJob: vi.fn(),
    updateJobStatus: vi.fn(),
    updateJobProgress: vi.fn(),
    completeJob: vi.fn(),
    failJob: vi.fn(),
    getJobDetails: vi.fn(),
    getIntegrationHealth: vi.fn(),
  };
  const mockConfigService = {
    get: vi.fn().mockReturnValue("test-api-key-12345"),
  };

  // Builds a minimal ExecutionContext whose request carries the given headers.
  const contextWithHeaders = (headers: Record<string, string>) => ({
    switchToHttp: () => ({
      getRequest: () => ({ headers }),
    }),
  });

  beforeEach(async () => {
    vi.clearAllMocks();
    const module: TestingModule = await Test.createTestingModule({
      controllers: [CoordinatorIntegrationController],
      providers: [
        { provide: CoordinatorIntegrationService, useValue: mockService },
        { provide: ConfigService, useValue: mockConfigService },
        ApiKeyGuard,
      ],
    }).compile();
    guard = module.get<ApiKeyGuard>(ApiKeyGuard);
  });

  describe("Authentication Requirements", () => {
    it("should have ApiKeyGuard applied to controller", () => {
      // @UseGuards stores its guard classes in the "__guards__" metadata key.
      const guards = Reflect.getMetadata("__guards__", CoordinatorIntegrationController);
      expect(guards).toBeDefined();
      expect(guards).toContain(ApiKeyGuard);
    });
    // Each endpoint below must reject a request that carries no API key.
    it("POST /coordinator/jobs should require authentication", async () => {
      await expect(guard.canActivate(contextWithHeaders({}) as any)).rejects.toThrow(
        UnauthorizedException
      );
    });
    it("PATCH /coordinator/jobs/:id/status should require authentication", async () => {
      await expect(guard.canActivate(contextWithHeaders({}) as any)).rejects.toThrow(
        UnauthorizedException
      );
    });
    it("PATCH /coordinator/jobs/:id/progress should require authentication", async () => {
      await expect(guard.canActivate(contextWithHeaders({}) as any)).rejects.toThrow(
        UnauthorizedException
      );
    });
    it("POST /coordinator/jobs/:id/complete should require authentication", async () => {
      await expect(guard.canActivate(contextWithHeaders({}) as any)).rejects.toThrow(
        UnauthorizedException
      );
    });
    it("POST /coordinator/jobs/:id/fail should require authentication", async () => {
      await expect(guard.canActivate(contextWithHeaders({}) as any)).rejects.toThrow(
        UnauthorizedException
      );
    });
    it("GET /coordinator/jobs/:id should require authentication", async () => {
      await expect(guard.canActivate(contextWithHeaders({}) as any)).rejects.toThrow(
        UnauthorizedException
      );
    });
    it("GET /coordinator/health should require authentication", async () => {
      await expect(guard.canActivate(contextWithHeaders({}) as any)).rejects.toThrow(
        UnauthorizedException
      );
    });
  });
  describe("Valid Authentication", () => {
    it("should allow requests with valid API key", async () => {
      const ctx = contextWithHeaders({ "x-api-key": "test-api-key-12345" });
      await expect(guard.canActivate(ctx as any)).resolves.toBe(true);
    });
    it("should reject requests with invalid API key", async () => {
      const ctx = contextWithHeaders({ "x-api-key": "wrong-api-key" });
      await expect(guard.canActivate(ctx as any)).rejects.toThrow(UnauthorizedException);
      await expect(guard.canActivate(ctx as any)).rejects.toThrow("Invalid API key");
    });
  });
});

View File

@@ -0,0 +1,392 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import { Test, TestingModule } from "@nestjs/testing";
import { ConflictException } from "@nestjs/common";
import { CoordinatorIntegrationService } from "./coordinator-integration.service";
import { PrismaService } from "../prisma/prisma.service";
import { JobEventsService } from "../job-events/job-events.service";
import { HeraldService } from "../herald/herald.service";
import { BullMqService } from "../bullmq/bullmq.service";
import { RunnerJobStatus } from "@prisma/client";
import { CoordinatorJobStatus, UpdateJobStatusDto } from "./dto";
/**
* Concurrency tests for CoordinatorIntegrationService
* Focus on race conditions during coordinator job status updates
*/
describe("CoordinatorIntegrationService - Concurrency", () => {
  let service: CoordinatorIntegrationService;
  let prisma: PrismaService;
  const mockJobEventsService = {
    emitJobCreated: vi.fn(),
    emitJobStarted: vi.fn(),
    emitJobCompleted: vi.fn(),
    emitJobFailed: vi.fn(),
    emitEvent: vi.fn(),
  };
  const mockHeraldService = {
    broadcastJobEvent: vi.fn(),
  };
  const mockBullMqService = {
    addJob: vi.fn(),
  };
  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        CoordinatorIntegrationService,
        {
          // Prisma is mocked at the client level so each test can script
          // transaction / lock behavior explicitly.
          provide: PrismaService,
          useValue: {
            runnerJob: {
              findUnique: vi.fn(),
              update: vi.fn(),
              updateMany: vi.fn(),
            },
            $transaction: vi.fn(),
            $queryRaw: vi.fn(),
          },
        },
        {
          provide: JobEventsService,
          useValue: mockJobEventsService,
        },
        {
          provide: HeraldService,
          useValue: mockHeraldService,
        },
        {
          provide: BullMqService,
          useValue: mockBullMqService,
        },
      ],
    }).compile();
    service = module.get<CoordinatorIntegrationService>(CoordinatorIntegrationService);
    prisma = module.get<PrismaService>(PrismaService);
    vi.clearAllMocks();
  });
  describe("concurrent status updates from coordinator", () => {
    it("should use SELECT FOR UPDATE to prevent race conditions", async () => {
      const jobId = "job-123";
      const dto: UpdateJobStatusDto = {
        status: CoordinatorJobStatus.RUNNING,
        agentId: "agent-1",
        agentType: "python",
      };
      const mockJob = {
        id: jobId,
        status: RunnerJobStatus.PENDING,
        workspaceId: "workspace-123",
        version: 1,
      };
      const updatedJob = {
        ...mockJob,
        status: RunnerJobStatus.RUNNING,
        startedAt: new Date(),
        version: 2,
      };
      // Mock transaction with SELECT FOR UPDATE
      const mockTxClient = {
        $queryRaw: vi.fn().mockResolvedValue([mockJob]),
        runnerJob: {
          update: vi.fn().mockResolvedValue(updatedJob),
        },
      };
      // Run the transaction callback immediately against the scripted tx client.
      vi.mocked(prisma.$transaction).mockImplementation(async (callback: any) => {
        return callback(mockTxClient);
      });
      const mockEvent = {
        id: "event-1",
        jobId,
        type: "job.started",
        timestamp: new Date(),
      };
      vi.mocked(mockJobEventsService.emitJobStarted).mockResolvedValue(mockEvent as any);
      const result = await service.updateJobStatus(jobId, dto);
      expect(result.status).toBe(RunnerJobStatus.RUNNING);
      // Verify SELECT FOR UPDATE was used
      expect(mockTxClient.$queryRaw).toHaveBeenCalledWith(
        expect.anything() // Raw SQL with FOR UPDATE
      );
    });
    it("should handle concurrent status updates by coordinator and API", async () => {
      const jobId = "job-123";
      // Coordinator tries to mark as RUNNING
      const coordinatorDto: UpdateJobStatusDto = {
        status: CoordinatorJobStatus.RUNNING,
      };
      // Simulate transaction lock timeout (another process holds lock)
      vi.mocked(prisma.$transaction).mockRejectedValue(new Error("could not obtain lock on row"));
      await expect(service.updateJobStatus(jobId, coordinatorDto)).rejects.toThrow();
    });
    it("should serialize concurrent status transitions", async () => {
      const jobId = "job-123";
      const mockJob = {
        id: jobId,
        status: RunnerJobStatus.PENDING,
        workspaceId: "workspace-123",
        version: 1,
      };
      // Simulate transaction that waits for lock, then proceeds
      const mockTxClient = {
        $queryRaw: vi.fn().mockResolvedValue([mockJob]),
        runnerJob: {
          update: vi.fn().mockResolvedValue({
            ...mockJob,
            status: RunnerJobStatus.RUNNING,
            version: 2,
          }),
        },
      };
      vi.mocked(prisma.$transaction).mockImplementation(async (callback: any) => {
        // Simulate delay while waiting for lock
        await new Promise((resolve) => setTimeout(resolve, 100));
        return callback(mockTxClient);
      });
      const dto: UpdateJobStatusDto = {
        status: CoordinatorJobStatus.RUNNING,
      };
      vi.mocked(mockJobEventsService.emitJobStarted).mockResolvedValue({
        id: "event-1",
        jobId,
        type: "job.started",
        timestamp: new Date(),
      } as any);
      const result = await service.updateJobStatus(jobId, dto);
      expect(result.status).toBe(RunnerJobStatus.RUNNING);
      expect(prisma.$transaction).toHaveBeenCalled();
    });
  });
  describe("concurrent completion from coordinator", () => {
    it("should prevent double completion using transaction", async () => {
      const jobId = "job-123";
      const mockJob = {
        id: jobId,
        status: RunnerJobStatus.RUNNING,
        workspaceId: "workspace-123",
        startedAt: new Date(),
        version: 2,
      };
      const completedJob = {
        ...mockJob,
        status: RunnerJobStatus.COMPLETED,
        completedAt: new Date(),
        progressPercent: 100,
        result: { success: true },
        version: 3,
      };
      const mockTxClient = {
        $queryRaw: vi.fn().mockResolvedValue([mockJob]),
        runnerJob: {
          update: vi.fn().mockResolvedValue(completedJob),
        },
      };
      vi.mocked(prisma.$transaction).mockImplementation(async (callback: any) => {
        return callback(mockTxClient);
      });
      vi.mocked(mockJobEventsService.emitJobCompleted).mockResolvedValue({
        id: "event-1",
        jobId,
        type: "job.completed",
        timestamp: new Date(),
      } as any);
      const result = await service.completeJob(jobId, {
        result: { success: true },
        tokensUsed: 1000,
        durationSeconds: 120,
      });
      expect(result.status).toBe(RunnerJobStatus.COMPLETED);
      expect(mockTxClient.$queryRaw).toHaveBeenCalled();
    });
    it("should handle concurrent completion and failure attempts", async () => {
      const jobId = "job-123";
      const mockJob = {
        id: jobId,
        status: RunnerJobStatus.RUNNING,
        workspaceId: "workspace-123",
        startedAt: new Date(),
        version: 2,
      };
      // First transaction (completion) succeeds
      const completedJob = {
        ...mockJob,
        status: RunnerJobStatus.COMPLETED,
        completedAt: new Date(),
        version: 3,
      };
      // Second transaction (failure) sees completed job and should fail
      const mockTxClient1 = {
        $queryRaw: vi.fn().mockResolvedValue([mockJob]),
        runnerJob: {
          update: vi.fn().mockResolvedValue(completedJob),
        },
      };
      const mockTxClient2 = {
        $queryRaw: vi.fn().mockResolvedValue([completedJob]), // Job already completed
        runnerJob: {
          update: vi.fn(),
        },
      };
      // Ordering matters: the first $transaction call gets the RUNNING job,
      // the second observes the already-COMPLETED row.
      vi.mocked(prisma.$transaction)
        .mockImplementationOnce(async (callback: any) => callback(mockTxClient1))
        .mockImplementationOnce(async (callback: any) => callback(mockTxClient2));
      vi.mocked(mockJobEventsService.emitJobCompleted).mockResolvedValue({
        id: "event-1",
        jobId,
        type: "job.completed",
        timestamp: new Date(),
      } as any);
      // First call (completion) succeeds
      const result1 = await service.completeJob(jobId, {
        result: { success: true },
      });
      expect(result1.status).toBe(RunnerJobStatus.COMPLETED);
      // Second call (failure) should be rejected due to invalid status transition
      await expect(
        service.failJob(jobId, {
          error: "Something went wrong",
        })
      ).rejects.toThrow();
    });
  });
  describe("concurrent progress updates from coordinator", () => {
    it("should handle rapid progress updates safely", async () => {
      const jobId = "job-123";
      const progressUpdates = [25, 50, 75];
      for (const progress of progressUpdates) {
        const mockJob = {
          id: jobId,
          status: RunnerJobStatus.RUNNING,
          progressPercent: progress - 25,
          version: progress / 25, // version increases with each update
        };
        const updatedJob = {
          ...mockJob,
          progressPercent: progress,
          version: mockJob.version + 1,
        };
        // updateMany count=1 signals the optimistic-lock write succeeded.
        vi.mocked(prisma.runnerJob.findUnique).mockResolvedValue(mockJob as any);
        vi.mocked(prisma.runnerJob.updateMany).mockResolvedValue({ count: 1 });
        vi.mocked(prisma.runnerJob.findUnique).mockResolvedValueOnce(updatedJob as any);
        const result = await service.updateJobProgress(jobId, {
          progressPercent: progress,
        });
        expect(result.progressPercent).toBe(progress);
      }
      expect(mockJobEventsService.emitEvent).toHaveBeenCalledTimes(3);
    });
    it("should detect version conflicts in progress updates", async () => {
      const jobId = "job-123";
      const mockJob = {
        id: jobId,
        status: RunnerJobStatus.RUNNING,
        progressPercent: 50,
        version: 2,
      };
      vi.mocked(prisma.runnerJob.findUnique).mockResolvedValue(mockJob as any);
      // Simulate version conflict (another update happened)
      vi.mocked(prisma.runnerJob.updateMany).mockResolvedValue({ count: 0 });
      await expect(
        service.updateJobProgress(jobId, {
          progressPercent: 75,
        })
      ).rejects.toThrow(ConflictException);
    });
  });
  describe("transaction isolation", () => {
    it("should use appropriate transaction isolation level", async () => {
      const jobId = "job-123";
      const mockJob = {
        id: jobId,
        status: RunnerJobStatus.PENDING,
        version: 1,
      };
      const mockTxClient = {
        $queryRaw: vi.fn().mockResolvedValue([mockJob]),
        runnerJob: {
          update: vi.fn().mockResolvedValue({
            ...mockJob,
            status: RunnerJobStatus.RUNNING,
            version: 2,
          }),
        },
      };
      vi.mocked(prisma.$transaction).mockImplementation(async (callback: any) => {
        return callback(mockTxClient);
      });
      vi.mocked(mockJobEventsService.emitJobStarted).mockResolvedValue({
        id: "event-1",
        jobId,
        type: "job.started",
        timestamp: new Date(),
      } as any);
      await service.updateJobStatus(jobId, {
        status: CoordinatorJobStatus.RUNNING,
      });
      // Verify transaction was used (isolates the operation)
      expect(prisma.$transaction).toHaveBeenCalled();
    });
  });
});

View File

@@ -0,0 +1,310 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import { Test, TestingModule } from "@nestjs/testing";
import { NotFoundException, BadRequestException } from "@nestjs/common";
import { RunnerJobStatus } from "@prisma/client";
import { CoordinatorIntegrationService } from "./coordinator-integration.service";
import { PrismaService } from "../prisma/prisma.service";
import { JobEventsService } from "../job-events/job-events.service";
import { HeraldService } from "../herald/herald.service";
import { BullMqService } from "../bullmq/bullmq.service";
describe("CoordinatorIntegrationService", () => {
let service: CoordinatorIntegrationService;
let prismaService: PrismaService;
let jobEventsService: JobEventsService;
let heraldService: HeraldService;
let bullMqService: BullMqService;
  // Shared fixtures: a workspace, a freshly-created PENDING job, and a
  // representative job event, reused by the specs below.
  const mockWorkspace = {
    id: "workspace-123",
    name: "Test Workspace",
    slug: "test-workspace",
    settings: {},
    createdAt: new Date(),
    updatedAt: new Date(),
  };
  const mockJob = {
    id: "job-123",
    workspaceId: "workspace-123",
    type: "code-task",
    status: RunnerJobStatus.PENDING,
    priority: 10,
    progressPercent: 0,
    agentTaskId: null,
    result: null,
    error: null,
    startedAt: null,
    completedAt: null,
    createdAt: new Date(),
    updatedAt: new Date(),
  };
  const mockEvent = {
    id: "event-123",
    jobId: "job-123",
    stepId: null,
    type: "job.created",
    timestamp: new Date(),
    actor: "coordinator",
    payload: {},
  };
  // Collaborator mocks injected in place of the real providers.
  const mockPrismaService = {
    workspace: {
      findUnique: vi.fn(),
    },
    runnerJob: {
      create: vi.fn(),
      findUnique: vi.fn(),
      update: vi.fn(),
    },
  };
  const mockJobEventsService = {
    emitEvent: vi.fn(),
    emitJobCreated: vi.fn(),
    emitJobStarted: vi.fn(),
    emitJobCompleted: vi.fn(),
    emitJobFailed: vi.fn(),
  };
  const mockHeraldService = {
    broadcastJobEvent: vi.fn(),
  };
  const mockBullMqService = {
    addJob: vi.fn(),
    healthCheck: vi.fn(),
    getHealthStatus: vi.fn(),
  };
  // Fresh testing module (and cleared mocks) before every spec.
  beforeEach(async () => {
    vi.clearAllMocks();
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        CoordinatorIntegrationService,
        { provide: PrismaService, useValue: mockPrismaService },
        { provide: JobEventsService, useValue: mockJobEventsService },
        { provide: HeraldService, useValue: mockHeraldService },
        { provide: BullMqService, useValue: mockBullMqService },
      ],
    }).compile();
    service = module.get<CoordinatorIntegrationService>(CoordinatorIntegrationService);
    prismaService = module.get<PrismaService>(PrismaService);
    jobEventsService = module.get<JobEventsService>(JobEventsService);
    heraldService = module.get<HeraldService>(HeraldService);
    bullMqService = module.get<BullMqService>(BullMqService);
  });
describe("createJob", () => {
it("should create a job and add it to the queue", async () => {
const dto = {
workspaceId: "workspace-123",
type: "code-task",
issueNumber: 42,
repository: "mosaic/stack",
priority: 10,
metadata: { assignedAgent: "sonnet" },
};
mockPrismaService.workspace.findUnique.mockResolvedValue(mockWorkspace);
mockPrismaService.runnerJob.create.mockResolvedValue(mockJob);
mockJobEventsService.emitJobCreated.mockResolvedValue(mockEvent);
mockBullMqService.addJob.mockResolvedValue({ id: "bullmq-job-123" });
const result = await service.createJob(dto);
expect(result).toHaveProperty("jobId", mockJob.id);
expect(result).toHaveProperty("status", "PENDING");
expect(mockPrismaService.runnerJob.create).toHaveBeenCalled();
expect(mockJobEventsService.emitJobCreated).toHaveBeenCalledWith(
mockJob.id,
expect.any(Object)
);
expect(mockBullMqService.addJob).toHaveBeenCalled();
});
it("should throw NotFoundException if workspace does not exist", async () => {
const dto = {
workspaceId: "non-existent",
type: "code-task",
issueNumber: 42,
repository: "mosaic/stack",
};
mockPrismaService.workspace.findUnique.mockResolvedValue(null);
await expect(service.createJob(dto)).rejects.toThrow(NotFoundException);
});
});
describe("updateJobStatus", () => {
it("should update job status to RUNNING", async () => {
const updatedJob = { ...mockJob, status: RunnerJobStatus.RUNNING, startedAt: new Date() };
mockPrismaService.runnerJob.findUnique.mockResolvedValue(mockJob);
mockPrismaService.runnerJob.update.mockResolvedValue(updatedJob);
mockJobEventsService.emitJobStarted.mockResolvedValue(mockEvent);
mockHeraldService.broadcastJobEvent.mockResolvedValue(undefined);
const result = await service.updateJobStatus("job-123", {
status: "RUNNING" as const,
agentId: "agent-42",
});
expect(result.status).toBe(RunnerJobStatus.RUNNING);
expect(mockJobEventsService.emitJobStarted).toHaveBeenCalled();
});
it("should throw NotFoundException if job does not exist", async () => {
mockPrismaService.runnerJob.findUnique.mockResolvedValue(null);
await expect(
service.updateJobStatus("non-existent", { status: "RUNNING" as const })
).rejects.toThrow(NotFoundException);
});
it("should throw BadRequestException for invalid status transition", async () => {
const completedJob = { ...mockJob, status: RunnerJobStatus.COMPLETED };
mockPrismaService.runnerJob.findUnique.mockResolvedValue(completedJob);
await expect(
service.updateJobStatus("job-123", { status: "RUNNING" as const })
).rejects.toThrow(BadRequestException);
});
});
describe("updateJobProgress", () => {
it("should update job progress percentage", async () => {
const runningJob = { ...mockJob, status: RunnerJobStatus.RUNNING };
const updatedJob = { ...runningJob, progressPercent: 50 };
mockPrismaService.runnerJob.findUnique.mockResolvedValue(runningJob);
mockPrismaService.runnerJob.update.mockResolvedValue(updatedJob);
mockJobEventsService.emitEvent.mockResolvedValue(mockEvent);
const result = await service.updateJobProgress("job-123", {
progressPercent: 50,
currentStep: "Running tests",
});
expect(result.progressPercent).toBe(50);
expect(mockJobEventsService.emitEvent).toHaveBeenCalledWith(
"job-123",
expect.objectContaining({ type: "job.progress" })
);
});
it("should throw BadRequestException if job is not running", async () => {
mockPrismaService.runnerJob.findUnique.mockResolvedValue(mockJob);
await expect(service.updateJobProgress("job-123", { progressPercent: 50 })).rejects.toThrow(
BadRequestException
);
});
});
describe("completeJob", () => {
it("should mark job as completed and broadcast", async () => {
const runningJob = { ...mockJob, status: RunnerJobStatus.RUNNING, startedAt: new Date() };
const completedJob = {
...runningJob,
status: RunnerJobStatus.COMPLETED,
progressPercent: 100,
completedAt: new Date(),
};
mockPrismaService.runnerJob.findUnique.mockResolvedValue(runningJob);
mockPrismaService.runnerJob.update.mockResolvedValue(completedJob);
mockJobEventsService.emitJobCompleted.mockResolvedValue(mockEvent);
mockHeraldService.broadcastJobEvent.mockResolvedValue(undefined);
const result = await service.completeJob("job-123", {
result: { commitSha: "abc123" },
});
expect(result.status).toBe(RunnerJobStatus.COMPLETED);
expect(result.progressPercent).toBe(100);
expect(mockJobEventsService.emitJobCompleted).toHaveBeenCalled();
expect(mockHeraldService.broadcastJobEvent).toHaveBeenCalled();
});
});
describe("failJob", () => {
it("should mark job as failed and broadcast", async () => {
const runningJob = { ...mockJob, status: RunnerJobStatus.RUNNING };
const failedJob = {
...runningJob,
status: RunnerJobStatus.FAILED,
error: "Test failed",
completedAt: new Date(),
};
mockPrismaService.runnerJob.findUnique.mockResolvedValue(runningJob);
mockPrismaService.runnerJob.update.mockResolvedValue(failedJob);
mockJobEventsService.emitJobFailed.mockResolvedValue(mockEvent);
mockHeraldService.broadcastJobEvent.mockResolvedValue(undefined);
const result = await service.failJob("job-123", {
error: "Test failed",
gateResults: { lint: false, test: false },
});
expect(result.status).toBe(RunnerJobStatus.FAILED);
expect(result.error).toBe("Test failed");
expect(mockJobEventsService.emitJobFailed).toHaveBeenCalled();
expect(mockHeraldService.broadcastJobEvent).toHaveBeenCalled();
});
});
describe("getIntegrationHealth", () => {
it("should return health status with all components", async () => {
mockBullMqService.getHealthStatus.mockResolvedValue({
connected: true,
queues: { main: 5, runner: 2 },
});
const result = await service.getIntegrationHealth();
expect(result).toHaveProperty("api", true);
expect(result).toHaveProperty("bullmq");
expect(result.bullmq.connected).toBe(true);
});
it("should handle BullMQ health check failure gracefully", async () => {
mockBullMqService.getHealthStatus.mockRejectedValue(new Error("Connection failed"));
const result = await service.getIntegrationHealth();
expect(result.api).toBe(true);
expect(result.bullmq.connected).toBe(false);
});
});
describe("getJobDetails", () => {
it("should return job with events and steps", async () => {
const jobWithDetails = {
...mockJob,
steps: [],
events: [mockEvent],
};
mockPrismaService.runnerJob.findUnique.mockResolvedValue(jobWithDetails);
const result = await service.getJobDetails("job-123");
expect(result).toHaveProperty("id", "job-123");
expect(result).toHaveProperty("events");
expect(result).toHaveProperty("steps");
});
it("should throw NotFoundException if job does not exist", async () => {
mockPrismaService.runnerJob.findUnique.mockResolvedValue(null);
await expect(service.getJobDetails("non-existent")).rejects.toThrow(NotFoundException);
});
});
});

View File

@@ -0,0 +1,431 @@
import { Injectable, Logger, NotFoundException, BadRequestException } from "@nestjs/common";
import { Prisma, RunnerJobStatus } from "@prisma/client";
import { PrismaService } from "../prisma/prisma.service";
import { JobEventsService } from "../job-events/job-events.service";
import { HeraldService } from "../herald/herald.service";
import { BullMqService } from "../bullmq/bullmq.service";
import { QUEUE_NAMES } from "../bullmq/queues";
import { JOB_PROGRESS } from "../job-events/event-types";
import { ConcurrentUpdateException } from "../common/exceptions/concurrent-update.exception";
import {
CoordinatorJobStatus,
type CreateCoordinatorJobDto,
type UpdateJobStatusDto,
type UpdateJobProgressDto,
type CompleteJobDto,
type FailJobDto,
} from "./dto";
import type { CoordinatorJobResult, CoordinatorHealthStatus } from "./interfaces";
/**
* CoordinatorIntegrationService - Bridge between Python coordinator and NestJS API
*
* Responsibilities:
* - Create jobs from coordinator webhook events
* - Update job status as coordinator processes
* - Handle job completion and failure
* - Broadcast events via Herald
* - Provide integration health status
*/
@Injectable()
export class CoordinatorIntegrationService {
  private readonly logger = new Logger(CoordinatorIntegrationService.name);

  constructor(
    private readonly prisma: PrismaService,
    private readonly jobEvents: JobEventsService,
    private readonly herald: HeraldService,
    private readonly bullMq: BullMqService
  ) {}

  /**
   * Create a job from the coordinator
   *
   * Persists a PENDING RunnerJob, emits `job.created`, then enqueues the
   * payload on the main BullMQ queue (priority defaults to 10).
   *
   * NOTE(review): the DB insert and the queue insert are not atomic — if
   * `bullMq.addJob` throws, a PENDING row is left behind with no queue
   * entry. Confirm a sweeper/reconciliation path covers this case.
   *
   * @throws NotFoundException when `dto.workspaceId` does not exist
   */
  async createJob(dto: CreateCoordinatorJobDto): Promise<CoordinatorJobResult> {
    this.logger.log(`Creating job for issue #${String(dto.issueNumber)} from ${dto.repository}`);
    // Verify workspace exists
    const workspace = await this.prisma.workspace.findUnique({
      where: { id: dto.workspaceId },
      select: { id: true },
    });
    if (!workspace) {
      throw new NotFoundException(`Workspace with ID ${dto.workspaceId} not found`);
    }
    // Create RunnerJob in database
    const job = await this.prisma.runnerJob.create({
      data: {
        workspaceId: dto.workspaceId,
        type: dto.type,
        priority: dto.priority ?? 10,
        status: RunnerJobStatus.PENDING,
        progressPercent: 0,
      },
    });
    // Emit job.created event
    await this.jobEvents.emitJobCreated(job.id, {
      issueNumber: dto.issueNumber,
      repository: dto.repository,
      type: dto.type,
      priority: dto.priority ?? 10,
      metadata: dto.metadata,
      source: "coordinator",
    });
    // Add job to BullMQ queue
    await this.bullMq.addJob(
      QUEUE_NAMES.MAIN,
      dto.type,
      {
        jobId: job.id,
        workspaceId: dto.workspaceId,
        issueNumber: dto.issueNumber,
        repository: dto.repository,
        metadata: dto.metadata,
      },
      { priority: dto.priority ?? 10 }
    );
    this.logger.log(`Job ${job.id} created and queued for issue #${String(dto.issueNumber)}`);
    return {
      jobId: job.id,
      status: job.status,
      queueName: QUEUE_NAMES.MAIN,
    };
  }

  /**
   * Update job status from the coordinator using transaction with SELECT FOR UPDATE
   * This ensures serialized access to job status updates from the coordinator
   *
   * Validates the transition against `isValidStatusTransition`, bumps the
   * optimistic-lock `version`, and stamps `startedAt` on PENDING/QUEUED -> RUNNING.
   *
   * NOTE(review): the Herald broadcast (network I/O) happens while the row
   * lock and transaction are still held — a slow broadcast extends lock
   * hold time; confirm this is intentional.
   *
   * @throws NotFoundException when the job does not exist
   * @throws BadRequestException on an invalid status transition
   */
  async updateJobStatus(
    jobId: string,
    dto: UpdateJobStatusDto
  ): Promise<Awaited<ReturnType<typeof this.prisma.runnerJob.update>>> {
    this.logger.log(`Updating job ${jobId} status to ${dto.status}`);
    return this.prisma.$transaction(async (tx) => {
      // Use SELECT FOR UPDATE to lock the row during this transaction
      // This prevents concurrent updates from coordinator and ensures serialization
      const jobs = await tx.$queryRaw<
        { id: string; status: RunnerJobStatus; workspace_id: string; version: number }[]
      >`
        SELECT id, status, workspace_id, version
        FROM runner_jobs
        WHERE id = ${jobId}::uuid
        FOR UPDATE
      `;
      if (jobs.length === 0) {
        throw new NotFoundException(`RunnerJob with ID ${jobId} not found`);
      }
      // Second check narrows `jobs[0]` to non-undefined for the compiler
      // (noUncheckedIndexedAccess); unreachable given the length guard above.
      const job = jobs[0];
      if (!job) {
        throw new NotFoundException(`RunnerJob with ID ${jobId} not found`);
      }
      // Validate status transition
      if (!this.isValidStatusTransition(job.status, dto.status as RunnerJobStatus)) {
        throw new BadRequestException(
          `Invalid status transition from ${job.status} to ${dto.status}`
        );
      }
      const updateData: Prisma.RunnerJobUpdateInput = {
        status: dto.status as RunnerJobStatus,
        // Version bump keeps optimistic-lock readers (updateJobProgress) consistent.
        version: { increment: 1 },
      };
      // Set startedAt when transitioning to RUNNING
      if (dto.status === CoordinatorJobStatus.RUNNING) {
        updateData.startedAt = new Date();
      }
      const updatedJob = await tx.runnerJob.update({
        where: { id: jobId },
        data: updateData,
      });
      // Emit appropriate event (outside of critical section but inside transaction)
      if (dto.status === CoordinatorJobStatus.RUNNING) {
        const event = await this.jobEvents.emitJobStarted(jobId, {
          agentId: dto.agentId,
          agentType: dto.agentType,
        });
        // Broadcast via Herald
        await this.herald.broadcastJobEvent(jobId, event);
      }
      return updatedJob;
    });
  }

  /**
   * Update job progress from the coordinator with optimistic locking
   *
   * Reads the current row, rejects updates unless the job is RUNNING, then
   * performs a conditional `updateMany` keyed on the read `version`; zero
   * affected rows means a concurrent writer won, and the caller gets a
   * ConcurrentUpdateException to retry.
   *
   * NOTE(review): only `version` is re-checked in the conditional update —
   * a status change between the read and the write is not itself detected,
   * though any such change also bumps `version` in this service; confirm no
   * other writer mutates the row without incrementing `version`.
   *
   * @throws NotFoundException when the job does not exist
   * @throws BadRequestException when the job is not RUNNING
   * @throws ConcurrentUpdateException when the version check fails
   */
  async updateJobProgress(
    jobId: string,
    dto: UpdateJobProgressDto
  ): Promise<Awaited<ReturnType<typeof this.prisma.runnerJob.findUnique>>> {
    this.logger.log(`Updating job ${jobId} progress to ${String(dto.progressPercent)}%`);
    // Read current job state
    const job = await this.prisma.runnerJob.findUnique({
      where: { id: jobId },
      select: { id: true, status: true, version: true },
    });
    if (!job) {
      throw new NotFoundException(`RunnerJob with ID ${jobId} not found`);
    }
    if (job.status !== RunnerJobStatus.RUNNING) {
      throw new BadRequestException(`Cannot update progress for job with status ${job.status}`);
    }
    // Use updateMany with version check for optimistic locking
    const result = await this.prisma.runnerJob.updateMany({
      where: {
        id: jobId,
        version: job.version,
      },
      data: {
        progressPercent: dto.progressPercent,
        version: { increment: 1 },
      },
    });
    if (result.count === 0) {
      throw new ConcurrentUpdateException("RunnerJob", jobId, job.version);
    }
    // Fetch updated job
    const updatedJob = await this.prisma.runnerJob.findUnique({
      where: { id: jobId },
    });
    if (!updatedJob) {
      throw new NotFoundException(`RunnerJob with ID ${jobId} not found after update`);
    }
    // Emit progress event
    await this.jobEvents.emitEvent(jobId, {
      type: JOB_PROGRESS,
      actor: "coordinator",
      payload: {
        progressPercent: dto.progressPercent,
        currentStep: dto.currentStep,
        tokensUsed: dto.tokensUsed,
      },
    });
    return updatedJob;
  }

  /**
   * Mark job as completed from the coordinator using transaction with SELECT FOR UPDATE
   *
   * Forces progress to 100, stamps `completedAt`, stores `dto.result` when
   * present, derives `durationSeconds` from `started_at` when the caller
   * omits it, then emits `job.completed` and broadcasts via Herald.
   *
   * @throws NotFoundException when the job does not exist
   * @throws BadRequestException when the current status cannot transition to COMPLETED
   */
  async completeJob(
    jobId: string,
    dto: CompleteJobDto
  ): Promise<Awaited<ReturnType<typeof this.prisma.runnerJob.update>>> {
    this.logger.log(`Completing job ${jobId}`);
    return this.prisma.$transaction(async (tx) => {
      // Lock the row to prevent concurrent completion/failure
      const jobs = await tx.$queryRaw<
        { id: string; status: RunnerJobStatus; started_at: Date | null; version: number }[]
      >`
        SELECT id, status, started_at, version
        FROM runner_jobs
        WHERE id = ${jobId}::uuid
        FOR UPDATE
      `;
      if (jobs.length === 0) {
        throw new NotFoundException(`RunnerJob with ID ${jobId} not found`);
      }
      // Compiler-narrowing guard (noUncheckedIndexedAccess); see length check above.
      const job = jobs[0];
      if (!job) {
        throw new NotFoundException(`RunnerJob with ID ${jobId} not found`);
      }
      // Validate status transition
      if (!this.isValidStatusTransition(job.status, RunnerJobStatus.COMPLETED)) {
        throw new BadRequestException(`Cannot complete job with status ${job.status}`);
      }
      // Calculate duration if not provided
      let durationSeconds = dto.durationSeconds;
      if (durationSeconds === undefined && job.started_at) {
        durationSeconds = Math.round(
          (new Date().getTime() - new Date(job.started_at).getTime()) / 1000
        );
      }
      const updateData: Prisma.RunnerJobUpdateInput = {
        status: RunnerJobStatus.COMPLETED,
        progressPercent: 100,
        completedAt: new Date(),
        version: { increment: 1 },
      };
      if (dto.result) {
        updateData.result = dto.result as Prisma.InputJsonValue;
      }
      const updatedJob = await tx.runnerJob.update({
        where: { id: jobId },
        data: updateData,
      });
      // Emit completion event
      const event = await this.jobEvents.emitJobCompleted(jobId, {
        result: dto.result,
        tokensUsed: dto.tokensUsed,
        durationSeconds,
      });
      // Broadcast via Herald (still inside the transaction; see updateJobStatus note)
      await this.herald.broadcastJobEvent(jobId, event);
      return updatedJob;
    });
  }

  /**
   * Mark job as failed from the coordinator using transaction with SELECT FOR UPDATE
   *
   * Persists the error message, stamps `completedAt`, emits `job.failed`
   * (carrying gate results and optional continuation prompt), and
   * broadcasts via Herald.
   *
   * @throws NotFoundException when the job does not exist
   * @throws BadRequestException when the current status cannot transition to FAILED
   */
  async failJob(
    jobId: string,
    dto: FailJobDto
  ): Promise<Awaited<ReturnType<typeof this.prisma.runnerJob.update>>> {
    this.logger.log(`Failing job ${jobId}: ${dto.error}`);
    return this.prisma.$transaction(async (tx) => {
      // Lock the row to prevent concurrent completion/failure
      const jobs = await tx.$queryRaw<{ id: string; status: RunnerJobStatus; version: number }[]>`
        SELECT id, status, version
        FROM runner_jobs
        WHERE id = ${jobId}::uuid
        FOR UPDATE
      `;
      if (jobs.length === 0) {
        throw new NotFoundException(`RunnerJob with ID ${jobId} not found`);
      }
      // Compiler-narrowing guard (noUncheckedIndexedAccess); see length check above.
      const job = jobs[0];
      if (!job) {
        throw new NotFoundException(`RunnerJob with ID ${jobId} not found`);
      }
      // Validate status transition
      if (!this.isValidStatusTransition(job.status, RunnerJobStatus.FAILED)) {
        throw new BadRequestException(`Cannot fail job with status ${job.status}`);
      }
      const updatedJob = await tx.runnerJob.update({
        where: { id: jobId },
        data: {
          status: RunnerJobStatus.FAILED,
          error: dto.error,
          completedAt: new Date(),
          version: { increment: 1 },
        },
      });
      // Emit failure event
      const event = await this.jobEvents.emitJobFailed(jobId, {
        error: dto.error,
        gateResults: dto.gateResults,
        failedStep: dto.failedStep,
        continuationPrompt: dto.continuationPrompt,
      });
      // Broadcast via Herald (still inside the transaction; see updateJobStatus note)
      await this.herald.broadcastJobEvent(jobId, event);
      return updatedJob;
    });
  }

  /**
   * Get job details with events and steps
   *
   * Steps are ordered by ordinal, events by timestamp (both ascending).
   *
   * @throws NotFoundException when the job does not exist
   */
  async getJobDetails(
    jobId: string
  ): Promise<Awaited<ReturnType<typeof this.prisma.runnerJob.findUnique>>> {
    const job = await this.prisma.runnerJob.findUnique({
      where: { id: jobId },
      include: {
        steps: {
          orderBy: { ordinal: "asc" },
        },
        events: {
          orderBy: { timestamp: "asc" },
        },
      },
    });
    if (!job) {
      throw new NotFoundException(`RunnerJob with ID ${jobId} not found`);
    }
    return job;
  }

  /**
   * Get integration health status
   *
   * Never throws: a failing BullMQ probe is logged and reported as
   * `{ connected: false, queues: {} }` while `api` stays true.
   */
  async getIntegrationHealth(): Promise<CoordinatorHealthStatus> {
    let bullmqStatus = { connected: false, queues: {} as Record<string, number> };
    try {
      bullmqStatus = await this.bullMq.getHealthStatus();
    } catch (error) {
      this.logger.error("Failed to get BullMQ health status", error);
    }
    return {
      api: true,
      bullmq: bullmqStatus,
      timestamp: new Date(),
    };
  }

  /**
   * Validate status transitions
   *
   * Encodes the job lifecycle state machine:
   * PENDING -> QUEUED | RUNNING | CANCELLED
   * QUEUED  -> RUNNING | CANCELLED
   * RUNNING -> COMPLETED | FAILED | CANCELLED
   * COMPLETED / FAILED / CANCELLED are terminal (no outgoing transitions).
   */
  private isValidStatusTransition(
    currentStatus: RunnerJobStatus,
    newStatus: RunnerJobStatus
  ): boolean {
    // Define valid transitions
    const validTransitions: Record<RunnerJobStatus, RunnerJobStatus[]> = {
      [RunnerJobStatus.PENDING]: [
        RunnerJobStatus.QUEUED,
        RunnerJobStatus.RUNNING,
        RunnerJobStatus.CANCELLED,
      ],
      [RunnerJobStatus.QUEUED]: [RunnerJobStatus.RUNNING, RunnerJobStatus.CANCELLED],
      [RunnerJobStatus.RUNNING]: [
        RunnerJobStatus.COMPLETED,
        RunnerJobStatus.FAILED,
        RunnerJobStatus.CANCELLED,
      ],
      // Terminal states: no transitions out.
      [RunnerJobStatus.COMPLETED]: [],
      [RunnerJobStatus.FAILED]: [],
      [RunnerJobStatus.CANCELLED]: [],
    };
    return validTransitions[currentStatus].includes(newStatus);
  }
}

View File

@@ -0,0 +1,20 @@
import { IsOptional, IsObject, IsNumber, Min } from "class-validator";
/**
* DTO for completing a job from the coordinator
*/
export class CompleteJobDto {
  // Arbitrary job output (e.g. commit SHA); persisted to RunnerJob.result as JSON.
  @IsOptional()
  @IsObject()
  result?: Record<string, unknown>;

  // Total LLM tokens consumed by the job; non-negative.
  @IsOptional()
  @IsNumber()
  @Min(0)
  tokensUsed?: number;

  // Wall-clock duration in seconds; when omitted, the service derives it
  // from the job's startedAt timestamp.
  @IsOptional()
  @IsNumber()
  @Min(0)
  durationSeconds?: number;
}

View File

@@ -0,0 +1,44 @@
import {
IsString,
IsOptional,
IsNumber,
IsObject,
Min,
Max,
IsUUID,
MinLength,
MaxLength,
IsInt,
} from "class-validator";
/**
* DTO for creating a job from the coordinator
*/
export class CreateCoordinatorJobDto {
  // Target workspace; must reference an existing Workspace row.
  @IsUUID("4", { message: "workspaceId must be a valid UUID v4" })
  workspaceId!: string;

  // Job kind; also used as the BullMQ job name.
  @IsString({ message: "type must be a string" })
  @MinLength(1, { message: "type must not be empty" })
  @MaxLength(100, { message: "type must not exceed 100 characters" })
  type!: string; // 'code-task', 'git-status', 'priority-calc'

  // Source issue number (1-based).
  @IsInt({ message: "issueNumber must be an integer" })
  @Min(1, { message: "issueNumber must be at least 1" })
  issueNumber!: number;

  // Repository slug, e.g. "owner/repo".
  @IsString({ message: "repository must be a string" })
  @MinLength(1, { message: "repository must not be empty" })
  @MaxLength(512, { message: "repository must not exceed 512 characters" })
  repository!: string;

  // Queue priority 1-100; the service defaults to 10 when omitted.
  // NOTE(review): validated with @IsNumber (fractions accepted) while
  // issueNumber uses @IsInt — confirm whether priority should be integral.
  @IsOptional()
  @IsNumber({}, { message: "priority must be a number" })
  @Min(1, { message: "priority must be at least 1" })
  @Max(100, { message: "priority must not exceed 100" })
  priority?: number;

  // Free-form passthrough payload forwarded to the queue and job events.
  @IsOptional()
  @IsObject({ message: "metadata must be an object" })
  metadata?: Record<string, unknown>;
}

View File

@@ -0,0 +1,416 @@
import { describe, it, expect } from "vitest";
import { validate } from "class-validator";
import { plainToInstance } from "class-transformer";
import { CreateCoordinatorJobDto } from "./create-coordinator-job.dto";
import { FailJobDto } from "./fail-job.dto";
import { UpdateJobProgressDto } from "./update-job-progress.dto";
import { UpdateJobStatusDto, CoordinatorJobStatus } from "./update-job-status.dto";
import { CompleteJobDto } from "./complete-job.dto";
/**
* Comprehensive validation tests for Coordinator Integration DTOs
*
* These tests verify that input validation prevents:
* - SQL injection attacks
* - XSS attacks
* - Command injection
* - Data corruption
* - Type confusion vulnerabilities
* - Buffer overflow attacks
*/
describe("Coordinator Integration DTOs - Input Validation", () => {
describe("CreateCoordinatorJobDto", () => {
it("should pass validation with valid data", async () => {
const dto = plainToInstance(CreateCoordinatorJobDto, {
workspaceId: "123e4567-e89b-42d3-a456-426614174000",
type: "code-task",
issueNumber: 42,
repository: "owner/repo",
priority: 5,
metadata: { key: "value" },
});
const errors = await validate(dto);
expect(errors).toHaveLength(0);
});
it("should reject missing workspaceId", async () => {
const dto = plainToInstance(CreateCoordinatorJobDto, {
type: "code-task",
issueNumber: 42,
repository: "owner/repo",
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
expect(errors[0].property).toBe("workspaceId");
});
it("should reject invalid UUID format for workspaceId", async () => {
const dto = plainToInstance(CreateCoordinatorJobDto, {
workspaceId: "not-a-uuid",
type: "code-task",
issueNumber: 42,
repository: "owner/repo",
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const workspaceIdError = errors.find((e) => e.property === "workspaceId");
expect(workspaceIdError).toBeDefined();
});
it("should reject empty type string", async () => {
const dto = plainToInstance(CreateCoordinatorJobDto, {
workspaceId: "123e4567-e89b-42d3-a456-426614174000",
type: "",
issueNumber: 42,
repository: "owner/repo",
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const typeError = errors.find((e) => e.property === "type");
expect(typeError).toBeDefined();
});
it("should reject excessively long type string (SQL injection prevention)", async () => {
const dto = plainToInstance(CreateCoordinatorJobDto, {
workspaceId: "123e4567-e89b-42d3-a456-426614174000",
type: "a".repeat(256),
issueNumber: 42,
repository: "owner/repo",
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const typeError = errors.find((e) => e.property === "type");
expect(typeError).toBeDefined();
});
it("should reject negative issue number", async () => {
const dto = plainToInstance(CreateCoordinatorJobDto, {
workspaceId: "123e4567-e89b-42d3-a456-426614174000",
type: "code-task",
issueNumber: -1,
repository: "owner/repo",
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const issueError = errors.find((e) => e.property === "issueNumber");
expect(issueError).toBeDefined();
});
it("should reject empty repository string", async () => {
const dto = plainToInstance(CreateCoordinatorJobDto, {
workspaceId: "123e4567-e89b-42d3-a456-426614174000",
type: "code-task",
issueNumber: 42,
repository: "",
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const repoError = errors.find((e) => e.property === "repository");
expect(repoError).toBeDefined();
});
it("should reject excessively long repository string (buffer overflow prevention)", async () => {
const dto = plainToInstance(CreateCoordinatorJobDto, {
workspaceId: "123e4567-e89b-42d3-a456-426614174000",
type: "code-task",
issueNumber: 42,
repository: "a".repeat(513),
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const repoError = errors.find((e) => e.property === "repository");
expect(repoError).toBeDefined();
});
it("should reject priority below 1", async () => {
const dto = plainToInstance(CreateCoordinatorJobDto, {
workspaceId: "123e4567-e89b-42d3-a456-426614174000",
type: "code-task",
issueNumber: 42,
repository: "owner/repo",
priority: 0,
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const priorityError = errors.find((e) => e.property === "priority");
expect(priorityError).toBeDefined();
});
it("should reject priority above 100", async () => {
const dto = plainToInstance(CreateCoordinatorJobDto, {
workspaceId: "123e4567-e89b-42d3-a456-426614174000",
type: "code-task",
issueNumber: 42,
repository: "owner/repo",
priority: 101,
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const priorityError = errors.find((e) => e.property === "priority");
expect(priorityError).toBeDefined();
});
});
describe("FailJobDto", () => {
it("should pass validation with valid data", async () => {
const dto = plainToInstance(FailJobDto, {
error: "Build failed",
gateResults: { passed: false },
failedStep: "compile",
continuationPrompt: "Fix the syntax error",
});
const errors = await validate(dto);
expect(errors).toHaveLength(0);
});
it("should reject missing error field", async () => {
const dto = plainToInstance(FailJobDto, {});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
expect(errors[0].property).toBe("error");
});
it("should reject empty error string", async () => {
const dto = plainToInstance(FailJobDto, {
error: "",
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const errorField = errors.find((e) => e.property === "error");
expect(errorField).toBeDefined();
});
it("should reject excessively long error string (XSS prevention)", async () => {
const dto = plainToInstance(FailJobDto, {
error: "a".repeat(10001),
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const errorField = errors.find((e) => e.property === "error");
expect(errorField).toBeDefined();
});
it("should reject excessively long failedStep string", async () => {
const dto = plainToInstance(FailJobDto, {
error: "Build failed",
failedStep: "a".repeat(256),
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const stepError = errors.find((e) => e.property === "failedStep");
expect(stepError).toBeDefined();
});
it("should reject excessively long continuationPrompt string", async () => {
const dto = plainToInstance(FailJobDto, {
error: "Build failed",
continuationPrompt: "a".repeat(5001),
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const promptError = errors.find((e) => e.property === "continuationPrompt");
expect(promptError).toBeDefined();
});
});
describe("UpdateJobProgressDto", () => {
it("should pass validation with valid data", async () => {
const dto = plainToInstance(UpdateJobProgressDto, {
progressPercent: 50,
currentStep: "Building",
tokensUsed: 1000,
});
const errors = await validate(dto);
expect(errors).toHaveLength(0);
});
it("should reject negative progress percent", async () => {
const dto = plainToInstance(UpdateJobProgressDto, {
progressPercent: -1,
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const progressError = errors.find((e) => e.property === "progressPercent");
expect(progressError).toBeDefined();
});
it("should reject progress percent above 100", async () => {
const dto = plainToInstance(UpdateJobProgressDto, {
progressPercent: 101,
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const progressError = errors.find((e) => e.property === "progressPercent");
expect(progressError).toBeDefined();
});
it("should reject empty currentStep string", async () => {
const dto = plainToInstance(UpdateJobProgressDto, {
progressPercent: 50,
currentStep: "",
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const stepError = errors.find((e) => e.property === "currentStep");
expect(stepError).toBeDefined();
});
it("should reject excessively long currentStep string", async () => {
const dto = plainToInstance(UpdateJobProgressDto, {
progressPercent: 50,
currentStep: "a".repeat(256),
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const stepError = errors.find((e) => e.property === "currentStep");
expect(stepError).toBeDefined();
});
it("should reject negative tokensUsed", async () => {
const dto = plainToInstance(UpdateJobProgressDto, {
progressPercent: 50,
tokensUsed: -1,
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const tokenError = errors.find((e) => e.property === "tokensUsed");
expect(tokenError).toBeDefined();
});
});
describe("UpdateJobStatusDto", () => {
it("should pass validation with valid data", async () => {
const dto = plainToInstance(UpdateJobStatusDto, {
status: CoordinatorJobStatus.RUNNING,
agentId: "agent-123",
agentType: "coordinator",
});
const errors = await validate(dto);
expect(errors).toHaveLength(0);
});
it("should reject invalid status enum", async () => {
const dto = plainToInstance(UpdateJobStatusDto, {
status: "INVALID_STATUS" as any,
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const statusError = errors.find((e) => e.property === "status");
expect(statusError).toBeDefined();
});
it("should reject empty agentId string", async () => {
const dto = plainToInstance(UpdateJobStatusDto, {
status: CoordinatorJobStatus.RUNNING,
agentId: "",
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const agentIdError = errors.find((e) => e.property === "agentId");
expect(agentIdError).toBeDefined();
});
it("should reject excessively long agentId string", async () => {
const dto = plainToInstance(UpdateJobStatusDto, {
status: CoordinatorJobStatus.RUNNING,
agentId: "a".repeat(256),
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const agentIdError = errors.find((e) => e.property === "agentId");
expect(agentIdError).toBeDefined();
});
it("should reject empty agentType string", async () => {
const dto = plainToInstance(UpdateJobStatusDto, {
status: CoordinatorJobStatus.RUNNING,
agentType: "",
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const agentTypeError = errors.find((e) => e.property === "agentType");
expect(agentTypeError).toBeDefined();
});
it("should reject excessively long agentType string", async () => {
const dto = plainToInstance(UpdateJobStatusDto, {
status: CoordinatorJobStatus.RUNNING,
agentType: "a".repeat(101),
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const agentTypeError = errors.find((e) => e.property === "agentType");
expect(agentTypeError).toBeDefined();
});
});
describe("CompleteJobDto", () => {
it("should pass validation with valid data", async () => {
const dto = plainToInstance(CompleteJobDto, {
result: { success: true },
tokensUsed: 5000,
durationSeconds: 120,
});
const errors = await validate(dto);
expect(errors).toHaveLength(0);
});
it("should reject negative tokensUsed", async () => {
const dto = plainToInstance(CompleteJobDto, {
tokensUsed: -1,
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const tokenError = errors.find((e) => e.property === "tokensUsed");
expect(tokenError).toBeDefined();
});
it("should reject negative durationSeconds", async () => {
const dto = plainToInstance(CompleteJobDto, {
durationSeconds: -1,
});
const errors = await validate(dto);
expect(errors.length).toBeGreaterThan(0);
const durationError = errors.find((e) => e.property === "durationSeconds");
expect(durationError).toBeDefined();
});
it("should pass validation with all fields empty (all optional)", async () => {
const dto = plainToInstance(CompleteJobDto, {});
const errors = await validate(dto);
expect(errors).toHaveLength(0);
});
});
});

View File

@@ -0,0 +1,26 @@
import { IsString, IsOptional, IsObject, MinLength, MaxLength } from "class-validator";
import type { QualityGateResult } from "../interfaces";
/**
* DTO for failing a job from the coordinator
*/
export class FailJobDto {
  // Required human-readable failure description; persisted to RunnerJob.error.
  @IsString({ message: "error must be a string" })
  @MinLength(1, { message: "error must not be empty" })
  @MaxLength(10000, { message: "error must not exceed 10000 characters" })
  error!: string;

  // Per-gate pass/fail flags (lint, typecheck, test, coverage, build).
  @IsOptional()
  @IsObject({ message: "gateResults must be an object" })
  gateResults?: QualityGateResult;

  // Name of the pipeline step that failed, if known.
  @IsOptional()
  @IsString({ message: "failedStep must be a string" })
  @MaxLength(255, { message: "failedStep must not exceed 255 characters" })
  failedStep?: string;

  // Suggested prompt for a follow-up/retry attempt.
  @IsOptional()
  @IsString({ message: "continuationPrompt must be a string" })
  @MaxLength(5000, { message: "continuationPrompt must not exceed 5000 characters" })
  continuationPrompt?: string;
}

View File

@@ -0,0 +1,5 @@
export * from "./create-coordinator-job.dto";
export * from "./update-job-status.dto";
export * from "./update-job-progress.dto";
export * from "./complete-job.dto";
export * from "./fail-job.dto";

View File

@@ -0,0 +1,22 @@
import { IsNumber, IsOptional, IsString, Min, Max, MinLength, MaxLength } from "class-validator";
/**
* DTO for updating job progress from the coordinator
*/
export class UpdateJobProgressDto {
  // Completion percentage, 0-100 inclusive.
  @IsNumber({}, { message: "progressPercent must be a number" })
  @Min(0, { message: "progressPercent must be at least 0" })
  @Max(100, { message: "progressPercent must not exceed 100" })
  progressPercent!: number;

  // Human-readable description of the step currently executing.
  @IsOptional()
  @IsString({ message: "currentStep must be a string" })
  @MinLength(1, { message: "currentStep must not be empty" })
  @MaxLength(255, { message: "currentStep must not exceed 255 characters" })
  currentStep?: string;

  // Cumulative LLM tokens consumed so far; non-negative.
  @IsOptional()
  @IsNumber({}, { message: "tokensUsed must be a number" })
  @Min(0, { message: "tokensUsed must be at least 0" })
  tokensUsed?: number;
}

View File

@@ -0,0 +1,29 @@
import { IsString, IsOptional, IsEnum, MinLength, MaxLength } from "class-validator";
/**
* Valid status values for coordinator status updates
*/
export enum CoordinatorJobStatus {
  // Subset of RunnerJobStatus the coordinator may set directly; terminal
  // states go through the dedicated complete/fail endpoints instead.
  RUNNING = "RUNNING",
  PENDING = "PENDING",
}
/**
 * DTO for updating job status from the coordinator
 */
export class UpdateJobStatusDto {
  // Target status; restricted to the coordinator-settable subset above.
  @IsEnum(CoordinatorJobStatus, { message: "status must be a valid CoordinatorJobStatus" })
  status!: CoordinatorJobStatus;

  // Identifier of the agent picking up the job (1..255 chars when present).
  @IsOptional()
  @IsString({ message: "agentId must be a string" })
  @MinLength(1, { message: "agentId must not be empty" })
  @MaxLength(255, { message: "agentId must not exceed 255 characters" })
  agentId?: string;

  // Agent model/class label (1..100 chars when present).
  @IsOptional()
  @IsString({ message: "agentType must be a string" })
  @MinLength(1, { message: "agentType must not be empty" })
  @MaxLength(100, { message: "agentType must not exceed 100 characters" })
  agentType?: string;
}

View File

@@ -0,0 +1,5 @@
export * from "./coordinator-integration.module";
export * from "./coordinator-integration.service";
export * from "./coordinator-integration.controller";
export * from "./dto";
export * from "./interfaces";

View File

@@ -0,0 +1,41 @@
/**
 * Result of job creation from coordinator
 */
export interface CoordinatorJobResult {
  // Database id of the newly created RunnerJob.
  jobId: string;
  // Initial job status (PENDING at creation time).
  status: string;
  // BullMQ queue the job was enqueued on.
  queueName: string;
  // Optional ETA; not populated by the current service implementation.
  estimatedStartTime?: Date;
}
/**
 * Health status for coordinator integration
 */
export interface CoordinatorHealthStatus {
  // Always true when the API process can answer at all.
  api: boolean;
  bullmq: {
    // Whether the BullMQ/Redis connection probe succeeded.
    connected: boolean;
    // Per-queue job counts keyed by queue name.
    queues: Record<string, number>;
  };
  // Time the health snapshot was taken.
  timestamp: Date;
}
/**
 * Quality gate result from coordinator
 */
export interface QualityGateResult {
  // Each flag: true = gate passed, false = gate failed, absent = not run.
  lint?: boolean;
  typecheck?: boolean;
  test?: boolean;
  coverage?: boolean;
  build?: boolean;
}
/**
 * Agent assignment info from coordinator
 */
export interface AgentAssignment {
  agentType: string; // 'sonnet', 'opus', 'haiku', 'glm'
  agentId: string;
  // Estimated context-window tokens required for the assignment.
  estimatedContext: number;
}

View File

@@ -0,0 +1 @@
export * from "./coordinator-job.interface";

80
apps/api/src/cors.spec.ts Normal file
View File

@@ -0,0 +1,80 @@
import { describe, it, expect } from "vitest";
/**
* CORS Configuration Tests
*
* These tests verify that CORS is configured correctly for cookie-based authentication.
*
* CRITICAL REQUIREMENTS:
* - credentials: true (allows cookies to be sent)
* - origin: must be specific origins, NOT wildcard (security requirement with credentials)
* - Access-Control-Allow-Credentials: true header
* - Access-Control-Allow-Origin: specific origin (not *)
*/
describe("CORS Configuration", () => {
  describe("Configuration requirements", () => {
    it("should document required CORS settings for cookie-based auth", () => {
      // This test documents the requirements
      const requiredSettings = {
        origin: ["http://localhost:3000", "https://app.mosaicstack.dev"],
        credentials: true,
        allowedHeaders: ["Content-Type", "Authorization", "Cookie"],
        exposedHeaders: ["Set-Cookie"],
        methods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
      };
      expect(requiredSettings.credentials).toBe(true);
      expect(requiredSettings.origin).not.toContain("*");
      expect(requiredSettings.allowedHeaders).toContain("Cookie");
    });
    it("should NOT use wildcard origin with credentials (security violation)", () => {
      // Wildcard origin with credentials is a security violation
      // This test ensures we never use that combination
      const validConfig1 = { origin: "*", credentials: false };
      const validConfig2 = { origin: "http://localhost:3000", credentials: true };
      const invalidConfig = { origin: "*", credentials: true };
      // Valid configs
      expect(validConfig1.origin === "*" && !validConfig1.credentials).toBe(true);
      expect(validConfig2.origin !== "*" && validConfig2.credentials).toBe(true);
      // Invalid config check - this combination should NOT be allowed
      const isInvalidCombination = invalidConfig.origin === "*" && invalidConfig.credentials;
      expect(isInvalidCombination).toBe(true); // This IS an invalid combination
      // We will prevent this in our CORS config
    });
  });
  describe("Origin validation", () => {
    it("should define allowed origins list", () => {
      // Resolve the web origin the same way the app does so this test passes
      // regardless of the NEXT_PUBLIC_APP_URL environment. (Previously it
      // asserted toContain("http://localhost:3000") directly, which fails
      // whenever NEXT_PUBLIC_APP_URL is set to any other origin.)
      const webOrigin = process.env.NEXT_PUBLIC_APP_URL ?? "http://localhost:3000";
      const allowedOrigins = [
        webOrigin,
        "http://localhost:3001", // API origin (dev)
        "https://app.mosaicstack.dev", // Production web
        "https://api.mosaicstack.dev", // Production API
      ];
      expect(allowedOrigins).toHaveLength(4);
      expect(allowedOrigins).toContain(webOrigin);
      expect(allowedOrigins).toContain("https://app.mosaicstack.dev");
    });
    it("should match exact origins, not partial matches", () => {
      const origin = "http://localhost:3000";
      const maliciousOrigin = "http://localhost:3000.evil.com";
      expect(origin).toBe("http://localhost:3000");
      expect(maliciousOrigin).not.toBe(origin);
    });
    it("should support dynamic origin from environment variable", () => {
      const defaultOrigin = "http://localhost:3000";
      const envOrigin = process.env.NEXT_PUBLIC_APP_URL ?? defaultOrigin;
      expect(envOrigin).toBeDefined();
      expect(typeof envOrigin).toBe("string");
    });
  });
});

View File

@@ -83,8 +83,20 @@ describe("CronService", () => {
it("should return all schedules for a workspace", async () => {
const workspaceId = "ws-123";
const expectedSchedules = [
{ id: "cron-1", workspaceId, expression: "0 9 * * *", command: "morning briefing", enabled: true },
{ id: "cron-2", workspaceId, expression: "0 17 * * *", command: "evening summary", enabled: true },
{
id: "cron-1",
workspaceId,
expression: "0 9 * * *",
command: "morning briefing",
enabled: true,
},
{
id: "cron-2",
workspaceId,
expression: "0 17 * * *",
command: "evening summary",
enabled: true,
},
];
mockPrisma.cronSchedule.findMany.mockResolvedValue(expectedSchedules);

View File

@@ -103,18 +103,10 @@ describe("DomainsController", () => {
mockDomainsService.create.mockResolvedValue(mockDomain);
const result = await controller.create(
createDto,
mockWorkspaceId,
mockUser
);
const result = await controller.create(createDto, mockWorkspaceId, mockUser);
expect(result).toEqual(mockDomain);
expect(service.create).toHaveBeenCalledWith(
mockWorkspaceId,
mockUserId,
createDto
);
expect(service.create).toHaveBeenCalledWith(mockWorkspaceId, mockUserId, createDto);
});
});
@@ -170,10 +162,7 @@ describe("DomainsController", () => {
const result = await controller.findOne(mockDomainId, mockWorkspaceId);
expect(result).toEqual(mockDomain);
expect(service.findOne).toHaveBeenCalledWith(
mockDomainId,
mockWorkspaceId
);
expect(service.findOne).toHaveBeenCalledWith(mockDomainId, mockWorkspaceId);
});
});
@@ -187,12 +176,7 @@ describe("DomainsController", () => {
const updatedDomain = { ...mockDomain, ...updateDto };
mockDomainsService.update.mockResolvedValue(updatedDomain);
const result = await controller.update(
mockDomainId,
updateDto,
mockWorkspaceId,
mockUser
);
const result = await controller.update(mockDomainId, updateDto, mockWorkspaceId, mockUser);
expect(result).toEqual(updatedDomain);
expect(service.update).toHaveBeenCalledWith(
@@ -210,11 +194,7 @@ describe("DomainsController", () => {
await controller.remove(mockDomainId, mockWorkspaceId, mockUser);
expect(service.remove).toHaveBeenCalledWith(
mockDomainId,
mockWorkspaceId,
mockUserId
);
expect(service.remove).toHaveBeenCalledWith(mockDomainId, mockWorkspaceId, mockUserId);
});
});
});

View File

@@ -1,9 +1,13 @@
import { Injectable, NotFoundException } from "@nestjs/common";
import { Prisma } from "@prisma/client";
import { Prisma, Domain } from "@prisma/client";
import { PrismaService } from "../prisma/prisma.service";
import { ActivityService } from "../activity/activity.service";
import type { CreateDomainDto, UpdateDomainDto, QueryDomainsDto } from "./dto";
type DomainWithCount = Domain & {
_count: { tasks: number; events: number; projects: number; ideas: number };
};
/**
* Service for managing domains
*/
@@ -17,7 +21,11 @@ export class DomainsService {
/**
* Create a new domain
*/
async create(workspaceId: string, userId: string, createDomainDto: CreateDomainDto) {
async create(
workspaceId: string,
userId: string,
createDomainDto: CreateDomainDto
): Promise<DomainWithCount> {
const domain = await this.prisma.domain.create({
data: {
name: createDomainDto.name,
@@ -49,7 +57,15 @@ export class DomainsService {
/**
* Get paginated domains with filters
*/
async findAll(query: QueryDomainsDto) {
async findAll(query: QueryDomainsDto): Promise<{
data: DomainWithCount[];
meta: {
total: number;
page: number;
limit: number;
totalPages: number;
};
}> {
const page = query.page ?? 1;
const limit = query.limit ?? 50;
const skip = (page - 1) * limit;
@@ -101,7 +117,7 @@ export class DomainsService {
/**
* Get a single domain by ID
*/
async findOne(id: string, workspaceId: string) {
async findOne(id: string, workspaceId: string): Promise<DomainWithCount> {
const domain = await this.prisma.domain.findUnique({
where: {
id,
@@ -124,7 +140,12 @@ export class DomainsService {
/**
* Update a domain
*/
async update(id: string, workspaceId: string, userId: string, updateDomainDto: UpdateDomainDto) {
async update(
id: string,
workspaceId: string,
userId: string,
updateDomainDto: UpdateDomainDto
): Promise<DomainWithCount> {
// Verify domain exists
const existingDomain = await this.prisma.domain.findUnique({
where: { id, workspaceId },
@@ -170,7 +191,7 @@ export class DomainsService {
/**
* Delete a domain
*/
async remove(id: string, workspaceId: string, userId: string) {
async remove(id: string, workspaceId: string, userId: string): Promise<void> {
// Verify domain exists
const domain = await this.prisma.domain.findUnique({
where: { id, workspaceId },

View File

@@ -63,11 +63,7 @@ describe("EventsController", () => {
const result = await controller.create(createDto, mockWorkspaceId, mockUser);
expect(result).toEqual(mockEvent);
expect(service.create).toHaveBeenCalledWith(
mockWorkspaceId,
mockUserId,
createDto
);
expect(service.create).toHaveBeenCalledWith(mockWorkspaceId, mockUserId, createDto);
});
it("should pass undefined workspaceId to service (validation handled by guards in production)", async () => {
@@ -153,7 +149,12 @@ describe("EventsController", () => {
await controller.update(mockEventId, updateDto, undefined as any, mockUser);
expect(mockEventsService.update).toHaveBeenCalledWith(mockEventId, undefined, mockUserId, updateDto);
expect(mockEventsService.update).toHaveBeenCalledWith(
mockEventId,
undefined,
mockUserId,
updateDto
);
});
});
@@ -163,11 +164,7 @@ describe("EventsController", () => {
await controller.remove(mockEventId, mockWorkspaceId, mockUser);
expect(service.remove).toHaveBeenCalledWith(
mockEventId,
mockWorkspaceId,
mockUserId
);
expect(service.remove).toHaveBeenCalledWith(mockEventId, mockWorkspaceId, mockUserId);
});
it("should pass undefined workspaceId to service (validation handled by guards in production)", async () => {

View File

@@ -1,9 +1,14 @@
import { Injectable, NotFoundException } from "@nestjs/common";
import { Prisma } from "@prisma/client";
import { Prisma, Event } from "@prisma/client";
import { PrismaService } from "../prisma/prisma.service";
import { ActivityService } from "../activity/activity.service";
import type { CreateEventDto, UpdateEventDto, QueryEventsDto } from "./dto";
type EventWithRelations = Event & {
creator: { id: string; name: string; email: string };
project: { id: string; name: string; color: string | null } | null;
};
/**
* Service for managing events
*/
@@ -17,7 +22,11 @@ export class EventsService {
/**
* Create a new event
*/
async create(workspaceId: string, userId: string, createEventDto: CreateEventDto) {
async create(
workspaceId: string,
userId: string,
createEventDto: CreateEventDto
): Promise<EventWithRelations> {
const projectConnection = createEventDto.projectId
? { connect: { id: createEventDto.projectId } }
: undefined;
@@ -60,7 +69,15 @@ export class EventsService {
/**
* Get paginated events with filters
*/
async findAll(query: QueryEventsDto) {
async findAll(query: QueryEventsDto): Promise<{
data: EventWithRelations[];
meta: {
total: number;
page: number;
limit: number;
totalPages: number;
};
}> {
const page = query.page ?? 1;
const limit = query.limit ?? 50;
const skip = (page - 1) * limit;
@@ -125,7 +142,7 @@ export class EventsService {
/**
* Get a single event by ID
*/
async findOne(id: string, workspaceId: string) {
async findOne(id: string, workspaceId: string): Promise<EventWithRelations> {
const event = await this.prisma.event.findUnique({
where: {
id,
@@ -151,7 +168,12 @@ export class EventsService {
/**
* Update an event
*/
async update(id: string, workspaceId: string, userId: string, updateEventDto: UpdateEventDto) {
async update(
id: string,
workspaceId: string,
userId: string,
updateEventDto: UpdateEventDto
): Promise<EventWithRelations> {
// Verify event exists
const existingEvent = await this.prisma.event.findUnique({
where: { id, workspaceId },
@@ -208,7 +230,7 @@ export class EventsService {
/**
* Delete an event
*/
async remove(id: string, workspaceId: string, userId: string) {
async remove(id: string, workspaceId: string, userId: string): Promise<void> {
// Verify event exists
const event = await this.prisma.event.findUnique({
where: { id, workspaceId },

View File

@@ -0,0 +1,126 @@
/**
* Federation Audit Service
*
* Logs security-sensitive operations for compliance and monitoring.
* Uses application logger since ActivityLog requires workspace context.
*/
import { Injectable, Logger } from "@nestjs/common";
@Injectable()
export class FederationAuditService {
  private readonly logger = new Logger(FederationAuditService.name);

  /**
   * ISO-8601 timestamp stamped on every audit entry.
   */
  private stamp(): string {
    return new Date().toISOString();
  }

  /**
   * Record regeneration of the instance keypair (system-level operation).
   * Emitted at warn level to stand out in the security audit trail.
   */
  logKeypairRegeneration(userId: string, instanceId: string): void {
    this.logger.warn({
      event: "FEDERATION_KEYPAIR_REGENERATED",
      userId,
      instanceId,
      timestamp: this.stamp(),
      securityEvent: true,
    });
  }

  /**
   * Record an instance configuration update (system-level operation),
   * including the raw update payload for the audit trail.
   */
  logInstanceConfigurationUpdate(
    userId: string,
    instanceId: string,
    updates: Record<string, unknown>
  ): void {
    this.logger.log({
      event: "FEDERATION_INSTANCE_CONFIG_UPDATED",
      userId,
      instanceId,
      updates,
      timestamp: this.stamp(),
      securityEvent: true,
    });
  }

  /**
   * Record the start of a federated authentication flow.
   * Informational only — not flagged as a security event.
   */
  logFederatedAuthInitiation(userId: string, remoteInstanceId: string): void {
    this.logger.log({
      event: "FEDERATION_AUTH_INITIATED",
      userId,
      remoteInstanceId,
      timestamp: this.stamp(),
    });
  }

  /**
   * Record that a federated identity was linked to a local user.
   */
  logFederatedIdentityLinked(userId: string, remoteInstanceId: string): void {
    this.logger.log({
      event: "FEDERATION_IDENTITY_LINKED",
      userId,
      remoteInstanceId,
      timestamp: this.stamp(),
      securityEvent: true,
    });
  }

  /**
   * Record revocation of a federated identity (warn level).
   */
  logFederatedIdentityRevoked(userId: string, remoteInstanceId: string): void {
    this.logger.warn({
      event: "FEDERATION_IDENTITY_REVOKED",
      userId,
      remoteInstanceId,
      timestamp: this.stamp(),
      securityEvent: true,
    });
  }

  /**
   * Record an identity verification attempt; failures are logged at warn
   * level so they surface in monitoring.
   */
  logIdentityVerification(userId: string, remoteInstanceId: string, success: boolean): void {
    const entry = {
      event: "FEDERATION_IDENTITY_VERIFIED",
      userId,
      remoteInstanceId,
      success,
      timestamp: this.stamp(),
      securityEvent: true,
    };
    if (success) {
      this.logger.log(entry);
    } else {
      this.logger.warn(entry);
    }
  }

  /**
   * Record creation of a local-to-remote identity mapping.
   */
  logIdentityLinking(localUserId: string, remoteInstanceId: string, remoteUserId: string): void {
    this.logger.log({
      event: "FEDERATION_IDENTITY_LINKED",
      localUserId,
      remoteUserId,
      remoteInstanceId,
      timestamp: this.stamp(),
      securityEvent: true,
    });
  }

  /**
   * Record removal of a local-to-remote identity mapping (warn level).
   */
  logIdentityRevocation(localUserId: string, remoteInstanceId: string): void {
    this.logger.warn({
      event: "FEDERATION_IDENTITY_REVOKED",
      localUserId,
      remoteInstanceId,
      timestamp: this.stamp(),
      securityEvent: true,
    });
  }
}

View File

@@ -0,0 +1,236 @@
/**
* Command Controller Tests
*/
import { describe, it, expect, beforeEach, vi } from "vitest";
import { Test, TestingModule } from "@nestjs/testing";
import { CommandController } from "./command.controller";
import { CommandService } from "./command.service";
import { AuthGuard } from "../auth/guards/auth.guard";
import { FederationMessageType, FederationMessageStatus } from "@prisma/client";
import type { AuthenticatedRequest } from "../common/types/user.types";
import type { CommandMessage, CommandResponse } from "./types/message.types";
// Covers all four CommandController endpoints: the three authenticated routes
// (which require req.user.workspaceId and throw otherwise) and the public
// incoming-command route (no guard; signature checking lives in the service).
describe("CommandController", () => {
  let controller: CommandController;
  let commandService: CommandService;
  const mockWorkspaceId = "workspace-123";
  const mockUserId = "user-123";
  // Fresh testing module per test: CommandService is fully stubbed and
  // AuthGuard is overridden to always allow, since auth is not under test.
  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      controllers: [CommandController],
      providers: [
        {
          provide: CommandService,
          useValue: {
            sendCommand: vi.fn(),
            handleIncomingCommand: vi.fn(),
            getCommandMessages: vi.fn(),
            getCommandMessage: vi.fn(),
          },
        },
      ],
    })
      .overrideGuard(AuthGuard)
      .useValue({ canActivate: () => true })
      .compile();
    controller = module.get<CommandController>(CommandController);
    commandService = module.get<CommandService>(CommandService);
  });
  describe("sendCommand", () => {
    it("should send a command", async () => {
      const req = {
        user: { id: mockUserId, workspaceId: mockWorkspaceId },
      } as AuthenticatedRequest;
      const dto = {
        connectionId: "conn-123",
        commandType: "spawn_agent",
        payload: { agentType: "task_executor" },
      };
      const mockResult = {
        id: "msg-123",
        workspaceId: mockWorkspaceId,
        connectionId: "conn-123",
        messageType: FederationMessageType.COMMAND,
        messageId: "cmd-123",
        commandType: "spawn_agent",
        payload: { agentType: "task_executor" },
        status: FederationMessageStatus.PENDING,
        createdAt: new Date(),
        updatedAt: new Date(),
      };
      vi.spyOn(commandService, "sendCommand").mockResolvedValue(mockResult as never);
      const result = await controller.sendCommand(req, dto);
      expect(result).toEqual(mockResult);
      // The controller unpacks the DTO into positional service arguments.
      expect(commandService.sendCommand).toHaveBeenCalledWith(
        mockWorkspaceId,
        "conn-123",
        "spawn_agent",
        { agentType: "task_executor" }
      );
    });
    it("should throw error if workspace ID not found", async () => {
      // No workspaceId on req.user — the controller must reject before
      // touching the service.
      const req = {
        user: { id: mockUserId },
      } as AuthenticatedRequest;
      const dto = {
        connectionId: "conn-123",
        commandType: "test",
        payload: {},
      };
      await expect(controller.sendCommand(req, dto)).rejects.toThrow(
        "Workspace ID not found in request"
      );
    });
  });
  describe("handleIncomingCommand", () => {
    it("should handle an incoming command", async () => {
      const dto: CommandMessage = {
        messageId: "cmd-123",
        instanceId: "remote-instance",
        commandType: "spawn_agent",
        payload: { agentType: "task_executor" },
        timestamp: Date.now(),
        signature: "signature-123",
      };
      const mockResponse: CommandResponse = {
        messageId: "resp-123",
        correlationId: "cmd-123",
        instanceId: "local-instance",
        success: true,
        data: { result: "success" },
        timestamp: Date.now(),
        signature: "response-signature",
      };
      vi.spyOn(commandService, "handleIncomingCommand").mockResolvedValue(mockResponse);
      const result = await controller.handleIncomingCommand(dto);
      expect(result).toEqual(mockResponse);
      // Public endpoint: the DTO is forwarded to the service unchanged.
      expect(commandService.handleIncomingCommand).toHaveBeenCalledWith(dto);
    });
  });
  describe("getCommands", () => {
    it("should return all commands for workspace", async () => {
      const req = {
        user: { id: mockUserId, workspaceId: mockWorkspaceId },
      } as AuthenticatedRequest;
      const mockCommands = [
        {
          id: "msg-1",
          workspaceId: mockWorkspaceId,
          connectionId: "conn-123",
          messageType: FederationMessageType.COMMAND,
          messageId: "cmd-1",
          commandType: "test",
          payload: {},
          status: FederationMessageStatus.DELIVERED,
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      ];
      vi.spyOn(commandService, "getCommandMessages").mockResolvedValue(mockCommands as never);
      const result = await controller.getCommands(req);
      expect(result).toEqual(mockCommands);
      // No status query param → the filter argument is undefined.
      expect(commandService.getCommandMessages).toHaveBeenCalledWith(mockWorkspaceId, undefined);
    });
    it("should filter commands by status", async () => {
      const req = {
        user: { id: mockUserId, workspaceId: mockWorkspaceId },
      } as AuthenticatedRequest;
      const mockCommands = [
        {
          id: "msg-1",
          workspaceId: mockWorkspaceId,
          connectionId: "conn-123",
          messageType: FederationMessageType.COMMAND,
          messageId: "cmd-1",
          commandType: "test",
          payload: {},
          status: FederationMessageStatus.PENDING,
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      ];
      vi.spyOn(commandService, "getCommandMessages").mockResolvedValue(mockCommands as never);
      await controller.getCommands(req, FederationMessageStatus.PENDING);
      expect(commandService.getCommandMessages).toHaveBeenCalledWith(
        mockWorkspaceId,
        FederationMessageStatus.PENDING
      );
    });
    it("should throw error if workspace ID not found", async () => {
      const req = {
        user: { id: mockUserId },
      } as AuthenticatedRequest;
      await expect(controller.getCommands(req)).rejects.toThrow(
        "Workspace ID not found in request"
      );
    });
  });
  describe("getCommand", () => {
    it("should return a single command", async () => {
      const req = {
        user: { id: mockUserId, workspaceId: mockWorkspaceId },
      } as AuthenticatedRequest;
      const mockCommand = {
        id: "msg-1",
        workspaceId: mockWorkspaceId,
        connectionId: "conn-123",
        messageType: FederationMessageType.COMMAND,
        messageId: "cmd-1",
        commandType: "test",
        payload: { key: "value" },
        status: FederationMessageStatus.DELIVERED,
        createdAt: new Date(),
        updatedAt: new Date(),
      };
      vi.spyOn(commandService, "getCommandMessage").mockResolvedValue(mockCommand as never);
      const result = await controller.getCommand(req, "msg-1");
      expect(result).toEqual(mockCommand);
      expect(commandService.getCommandMessage).toHaveBeenCalledWith(mockWorkspaceId, "msg-1");
    });
    it("should throw error if workspace ID not found", async () => {
      const req = {
        user: { id: mockUserId },
      } as AuthenticatedRequest;
      await expect(controller.getCommand(req, "msg-1")).rejects.toThrow(
        "Workspace ID not found in request"
      );
    });
  });
});

View File

@@ -0,0 +1,91 @@
/**
* Command Controller
*
* API endpoints for federated command messages.
*/
import { Controller, Post, Get, Body, Param, Query, UseGuards, Req, Logger } from "@nestjs/common";
import { CommandService } from "./command.service";
import { AuthGuard } from "../auth/guards/auth.guard";
import { SendCommandDto, IncomingCommandDto } from "./dto/command.dto";
import type { AuthenticatedRequest } from "../common/types/user.types";
import type { CommandMessageDetails, CommandResponse } from "./types/message.types";
import type { FederationMessageStatus } from "@prisma/client";
@Controller("api/v1/federation")
export class CommandController {
  private readonly logger = new Logger(CommandController.name);

  constructor(private readonly commandService: CommandService) {}

  /**
   * Resolve the authenticated caller's workspace ID or fail.
   *
   * Centralizes the guard that was previously copy-pasted into each
   * authenticated handler.
   *
   * NOTE(review): throwing a plain Error surfaces as HTTP 500; a
   * ForbiddenException/UnauthorizedException would map to a 4xx. Kept as
   * Error to preserve existing behavior — tests match this exact message.
   *
   * @throws Error when req.user.workspaceId is absent.
   */
  private requireWorkspaceId(req: AuthenticatedRequest): string {
    const workspaceId = req.user?.workspaceId;
    if (!workspaceId) {
      throw new Error("Workspace ID not found in request");
    }
    return workspaceId;
  }

  /**
   * Send a command to a remote instance
   * Requires authentication
   */
  @Post("command")
  @UseGuards(AuthGuard)
  async sendCommand(
    @Req() req: AuthenticatedRequest,
    @Body() dto: SendCommandDto
  ): Promise<CommandMessageDetails> {
    const workspaceId = this.requireWorkspaceId(req);
    this.logger.log(
      `User ${req.user.id} sending command to connection ${dto.connectionId} in workspace ${workspaceId}`
    );
    return this.commandService.sendCommand(
      workspaceId,
      dto.connectionId,
      dto.commandType,
      dto.payload
    );
  }

  /**
   * Handle incoming command from remote instance
   * Public endpoint - no authentication required (signature-based verification)
   */
  @Post("incoming/command")
  async handleIncomingCommand(@Body() dto: IncomingCommandDto): Promise<CommandResponse> {
    this.logger.log(`Received command from ${dto.instanceId}: ${dto.messageId}`);
    return this.commandService.handleIncomingCommand(dto);
  }

  /**
   * Get all command messages for the workspace
   * Requires authentication
   *
   * @param status - optional filter; omitted → all statuses
   */
  @Get("commands")
  @UseGuards(AuthGuard)
  async getCommands(
    @Req() req: AuthenticatedRequest,
    @Query("status") status?: FederationMessageStatus
  ): Promise<CommandMessageDetails[]> {
    return this.commandService.getCommandMessages(this.requireWorkspaceId(req), status);
  }

  /**
   * Get a single command message
   * Requires authentication
   */
  @Get("commands/:id")
  @UseGuards(AuthGuard)
  async getCommand(
    @Req() req: AuthenticatedRequest,
    @Param("id") messageId: string
  ): Promise<CommandMessageDetails> {
    return this.commandService.getCommandMessage(this.requireWorkspaceId(req), messageId);
  }
}

View File

@@ -0,0 +1,574 @@
/**
* Command Service Tests
*/
import { describe, it, expect, beforeEach, vi } from "vitest";
import { Test, TestingModule } from "@nestjs/testing";
import { HttpService } from "@nestjs/axios";
import { CommandService } from "./command.service";
import { PrismaService } from "../prisma/prisma.service";
import { FederationService } from "./federation.service";
import { SignatureService } from "./signature.service";
import {
FederationConnectionStatus,
FederationMessageType,
FederationMessageStatus,
} from "@prisma/client";
import { of } from "rxjs";
import type { CommandMessage, CommandResponse } from "./types/message.types";
describe("CommandService", () => {
let service: CommandService;
let prisma: PrismaService;
let federationService: FederationService;
let signatureService: SignatureService;
let httpService: HttpService;
const mockWorkspaceId = "workspace-123";
const mockConnectionId = "connection-123";
const mockInstanceId = "instance-456";
const mockRemoteUrl = "https://remote.example.com";
beforeEach(async () => {
const module: TestingModule = await Test.createTestingModule({
providers: [
CommandService,
{
provide: PrismaService,
useValue: {
federationConnection: {
findUnique: vi.fn(),
findFirst: vi.fn(),
},
federationMessage: {
create: vi.fn(),
update: vi.fn(),
findMany: vi.fn(),
findUnique: vi.fn(),
findFirst: vi.fn(),
},
},
},
{
provide: FederationService,
useValue: {
getInstanceIdentity: vi.fn(),
},
},
{
provide: SignatureService,
useValue: {
signMessage: vi.fn(),
verifyMessage: vi.fn(),
validateTimestamp: vi.fn(),
},
},
{
provide: HttpService,
useValue: {
post: vi.fn(),
},
},
],
}).compile();
service = module.get<CommandService>(CommandService);
prisma = module.get<PrismaService>(PrismaService);
federationService = module.get<FederationService>(FederationService);
signatureService = module.get<SignatureService>(SignatureService);
httpService = module.get<HttpService>(HttpService);
});
describe("sendCommand", () => {
it("should send a command to a remote instance", async () => {
const commandType = "spawn_agent";
const payload = { agentType: "task_executor" };
const mockConnection = {
id: mockConnectionId,
workspaceId: mockWorkspaceId,
status: FederationConnectionStatus.ACTIVE,
remoteUrl: mockRemoteUrl,
remoteInstanceId: mockInstanceId,
};
const mockIdentity = {
instanceId: "local-instance",
displayName: "Local Instance",
};
const mockMessage = {
id: "msg-123",
workspaceId: mockWorkspaceId,
connectionId: mockConnectionId,
messageType: FederationMessageType.COMMAND,
messageId: expect.any(String),
correlationId: null,
query: null,
commandType,
payload,
response: {},
status: FederationMessageStatus.PENDING,
error: null,
signature: "signature-123",
createdAt: new Date(),
updatedAt: new Date(),
deliveredAt: null,
};
vi.spyOn(prisma.federationConnection, "findUnique").mockResolvedValue(
mockConnection as never
);
vi.spyOn(federationService, "getInstanceIdentity").mockResolvedValue(mockIdentity as never);
vi.spyOn(signatureService, "signMessage").mockResolvedValue("signature-123");
vi.spyOn(prisma.federationMessage, "create").mockResolvedValue(mockMessage as never);
vi.spyOn(httpService, "post").mockReturnValue(of({} as never));
const result = await service.sendCommand(
mockWorkspaceId,
mockConnectionId,
commandType,
payload
);
expect(result).toMatchObject({
workspaceId: mockWorkspaceId,
connectionId: mockConnectionId,
messageType: FederationMessageType.COMMAND,
commandType,
status: FederationMessageStatus.PENDING,
});
expect(httpService.post).toHaveBeenCalledWith(
`${mockRemoteUrl}/api/v1/federation/incoming/command`,
expect.objectContaining({
messageId: expect.any(String),
instanceId: "local-instance",
commandType,
payload,
timestamp: expect.any(Number),
signature: "signature-123",
})
);
});
it("should throw error if connection not found", async () => {
vi.spyOn(prisma.federationConnection, "findUnique").mockResolvedValue(null);
await expect(
service.sendCommand(mockWorkspaceId, mockConnectionId, "test", {})
).rejects.toThrow("Connection not found");
});
it("should throw error if connection is not active", async () => {
const mockConnection = {
id: mockConnectionId,
workspaceId: mockWorkspaceId,
status: FederationConnectionStatus.SUSPENDED,
};
vi.spyOn(prisma.federationConnection, "findUnique").mockResolvedValue(
mockConnection as never
);
await expect(
service.sendCommand(mockWorkspaceId, mockConnectionId, "test", {})
).rejects.toThrow("Connection is not active");
});
it("should mark command as failed if sending fails", async () => {
const mockConnection = {
id: mockConnectionId,
workspaceId: mockWorkspaceId,
status: FederationConnectionStatus.ACTIVE,
remoteUrl: mockRemoteUrl,
};
const mockIdentity = {
instanceId: "local-instance",
displayName: "Local Instance",
};
const mockMessage = {
id: "msg-123",
workspaceId: mockWorkspaceId,
connectionId: mockConnectionId,
messageType: FederationMessageType.COMMAND,
messageId: "test-msg-id",
correlationId: null,
query: null,
commandType: "test",
payload: {},
response: {},
status: FederationMessageStatus.PENDING,
error: null,
signature: "signature-123",
createdAt: new Date(),
updatedAt: new Date(),
deliveredAt: null,
};
vi.spyOn(prisma.federationConnection, "findUnique").mockResolvedValue(
mockConnection as never
);
vi.spyOn(federationService, "getInstanceIdentity").mockResolvedValue(mockIdentity as never);
vi.spyOn(signatureService, "signMessage").mockResolvedValue("signature-123");
vi.spyOn(prisma.federationMessage, "create").mockResolvedValue(mockMessage as never);
vi.spyOn(httpService, "post").mockReturnValue(
new (class {
subscribe(handlers: { error: (err: Error) => void }) {
handlers.error(new Error("Network error"));
}
})() as never
);
vi.spyOn(prisma.federationMessage, "update").mockResolvedValue(mockMessage as never);
await expect(
service.sendCommand(mockWorkspaceId, mockConnectionId, "test", {})
).rejects.toThrow("Failed to send command");
expect(prisma.federationMessage.update).toHaveBeenCalledWith({
where: { id: "msg-123" },
data: {
status: FederationMessageStatus.FAILED,
error: "Network error",
},
});
});
});
describe("handleIncomingCommand", () => {
it("should process a valid incoming command", async () => {
const commandMessage: CommandMessage = {
messageId: "cmd-123",
instanceId: mockInstanceId,
commandType: "spawn_agent",
payload: { agentType: "task_executor" },
timestamp: Date.now(),
signature: "signature-123",
};
const mockConnection = {
id: mockConnectionId,
remoteInstanceId: mockInstanceId,
status: FederationConnectionStatus.ACTIVE,
};
const mockIdentity = {
instanceId: "local-instance",
displayName: "Local Instance",
};
vi.spyOn(signatureService, "validateTimestamp").mockReturnValue(true);
vi.spyOn(prisma.federationConnection, "findFirst").mockResolvedValue(mockConnection as never);
vi.spyOn(signatureService, "verifyMessage").mockResolvedValue({
valid: true,
error: null,
} as never);
vi.spyOn(federationService, "getInstanceIdentity").mockResolvedValue(mockIdentity as never);
vi.spyOn(signatureService, "signMessage").mockResolvedValue("response-signature");
const response = await service.handleIncomingCommand(commandMessage);
expect(response).toMatchObject({
correlationId: "cmd-123",
instanceId: "local-instance",
success: true,
});
expect(signatureService.validateTimestamp).toHaveBeenCalledWith(commandMessage.timestamp);
expect(signatureService.verifyMessage).toHaveBeenCalledWith(
expect.objectContaining({
messageId: "cmd-123",
instanceId: mockInstanceId,
commandType: "spawn_agent",
}),
"signature-123",
mockInstanceId
);
});
it("should reject command with invalid timestamp", async () => {
const commandMessage: CommandMessage = {
messageId: "cmd-123",
instanceId: mockInstanceId,
commandType: "test",
payload: {},
timestamp: Date.now() - 1000000,
signature: "signature-123",
};
vi.spyOn(signatureService, "validateTimestamp").mockReturnValue(false);
await expect(service.handleIncomingCommand(commandMessage)).rejects.toThrow(
"Command timestamp is outside acceptable range"
);
});
it("should reject command if no connection found", async () => {
const commandMessage: CommandMessage = {
messageId: "cmd-123",
instanceId: mockInstanceId,
commandType: "test",
payload: {},
timestamp: Date.now(),
signature: "signature-123",
};
vi.spyOn(signatureService, "validateTimestamp").mockReturnValue(true);
vi.spyOn(prisma.federationConnection, "findFirst").mockResolvedValue(null);
await expect(service.handleIncomingCommand(commandMessage)).rejects.toThrow(
"No connection found for remote instance"
);
});
it("should reject command with invalid signature", async () => {
const commandMessage: CommandMessage = {
messageId: "cmd-123",
instanceId: mockInstanceId,
commandType: "test",
payload: {},
timestamp: Date.now(),
signature: "invalid-signature",
};
const mockConnection = {
id: mockConnectionId,
remoteInstanceId: mockInstanceId,
status: FederationConnectionStatus.ACTIVE,
};
vi.spyOn(signatureService, "validateTimestamp").mockReturnValue(true);
vi.spyOn(prisma.federationConnection, "findFirst").mockResolvedValue(mockConnection as never);
vi.spyOn(signatureService, "verifyMessage").mockResolvedValue({
valid: false,
error: "Invalid signature",
} as never);
await expect(service.handleIncomingCommand(commandMessage)).rejects.toThrow(
"Invalid signature"
);
});
});
  describe("processCommandResponse", () => {
    // A successful response marks the original COMMAND row DELIVERED and
    // stores the remote's data payload on it.
    it("should process a successful command response", async () => {
      const response: CommandResponse = {
        messageId: "resp-123",
        correlationId: "cmd-123",
        instanceId: mockInstanceId,
        success: true,
        data: { result: "success" },
        timestamp: Date.now(),
        signature: "signature-123",
      };
      // Full FederationMessage row as Prisma would return it.
      const mockMessage = {
        id: "msg-123",
        workspaceId: mockWorkspaceId,
        connectionId: mockConnectionId,
        messageType: FederationMessageType.COMMAND,
        messageId: "cmd-123",
        correlationId: null,
        query: null,
        commandType: "test",
        payload: {},
        response: {},
        status: FederationMessageStatus.PENDING,
        error: null,
        signature: "signature-123",
        createdAt: new Date(),
        updatedAt: new Date(),
        deliveredAt: null,
      };
      vi.spyOn(signatureService, "validateTimestamp").mockReturnValue(true);
      vi.spyOn(prisma.federationMessage, "findFirst").mockResolvedValue(mockMessage as never);
      vi.spyOn(signatureService, "verifyMessage").mockResolvedValue({
        valid: true,
        error: null,
      } as never);
      vi.spyOn(prisma.federationMessage, "update").mockResolvedValue(mockMessage as never);
      await service.processCommandResponse(response);
      expect(prisma.federationMessage.update).toHaveBeenCalledWith({
        where: { id: "msg-123" },
        data: {
          status: FederationMessageStatus.DELIVERED,
          deliveredAt: expect.any(Date),
          response: { result: "success" },
        },
      });
    });
    // A failed response marks the row FAILED and records the remote error.
    it("should handle failed command response", async () => {
      const response: CommandResponse = {
        messageId: "resp-123",
        correlationId: "cmd-123",
        instanceId: mockInstanceId,
        success: false,
        error: "Command execution failed",
        timestamp: Date.now(),
        signature: "signature-123",
      };
      // Partial row — only the fields the update path reads.
      const mockMessage = {
        id: "msg-123",
        messageType: FederationMessageType.COMMAND,
        messageId: "cmd-123",
      };
      vi.spyOn(signatureService, "validateTimestamp").mockReturnValue(true);
      vi.spyOn(prisma.federationMessage, "findFirst").mockResolvedValue(mockMessage as never);
      vi.spyOn(signatureService, "verifyMessage").mockResolvedValue({
        valid: true,
        error: null,
      } as never);
      vi.spyOn(prisma.federationMessage, "update").mockResolvedValue(mockMessage as never);
      await service.processCommandResponse(response);
      expect(prisma.federationMessage.update).toHaveBeenCalledWith({
        where: { id: "msg-123" },
        data: {
          status: FederationMessageStatus.FAILED,
          deliveredAt: expect.any(Date),
          error: "Command execution failed",
        },
      });
    });
    // Timestamp validation runs before any lookup, same as for commands.
    it("should reject response with invalid timestamp", async () => {
      const response: CommandResponse = {
        messageId: "resp-123",
        correlationId: "cmd-123",
        instanceId: mockInstanceId,
        success: true,
        timestamp: Date.now() - 1000000,
        signature: "signature-123",
      };
      vi.spyOn(signatureService, "validateTimestamp").mockReturnValue(false);
      await expect(service.processCommandResponse(response)).rejects.toThrow(
        "Response timestamp is outside acceptable range"
      );
    });
  });
  describe("getCommandMessages", () => {
    it("should return all command messages for a workspace", async () => {
      const mockMessages = [
        {
          id: "msg-1",
          workspaceId: mockWorkspaceId,
          connectionId: mockConnectionId,
          messageType: FederationMessageType.COMMAND,
          messageId: "cmd-1",
          correlationId: null,
          query: null,
          commandType: "test",
          payload: {},
          response: {},
          status: FederationMessageStatus.DELIVERED,
          error: null,
          signature: "sig-1",
          createdAt: new Date(),
          updatedAt: new Date(),
          deliveredAt: new Date(),
        },
      ];
      vi.spyOn(prisma.federationMessage, "findMany").mockResolvedValue(mockMessages as never);
      const result = await service.getCommandMessages(mockWorkspaceId);
      expect(result).toHaveLength(1);
      expect(result[0]).toMatchObject({
        workspaceId: mockWorkspaceId,
        messageType: FederationMessageType.COMMAND,
        commandType: "test",
      });
    });
    // The optional status argument is forwarded into the Prisma where-clause.
    it("should filter command messages by status", async () => {
      const mockMessages = [
        {
          id: "msg-1",
          workspaceId: mockWorkspaceId,
          connectionId: mockConnectionId,
          messageType: FederationMessageType.COMMAND,
          messageId: "cmd-1",
          correlationId: null,
          query: null,
          commandType: "test",
          payload: {},
          response: {},
          status: FederationMessageStatus.PENDING,
          error: null,
          signature: "sig-1",
          createdAt: new Date(),
          updatedAt: new Date(),
          deliveredAt: null,
        },
      ];
      vi.spyOn(prisma.federationMessage, "findMany").mockResolvedValue(mockMessages as never);
      await service.getCommandMessages(mockWorkspaceId, FederationMessageStatus.PENDING);
      expect(prisma.federationMessage.findMany).toHaveBeenCalledWith({
        where: {
          workspaceId: mockWorkspaceId,
          messageType: FederationMessageType.COMMAND,
          status: FederationMessageStatus.PENDING,
        },
        orderBy: { createdAt: "desc" },
      });
    });
  });
  describe("getCommandMessage", () => {
    it("should return a single command message", async () => {
      const mockMessage = {
        id: "msg-1",
        workspaceId: mockWorkspaceId,
        connectionId: mockConnectionId,
        messageType: FederationMessageType.COMMAND,
        messageId: "cmd-1",
        correlationId: null,
        query: null,
        commandType: "test",
        payload: { key: "value" },
        response: {},
        status: FederationMessageStatus.DELIVERED,
        error: null,
        signature: "sig-1",
        createdAt: new Date(),
        updatedAt: new Date(),
        deliveredAt: new Date(),
      };
      vi.spyOn(prisma.federationMessage, "findUnique").mockResolvedValue(mockMessage as never);
      const result = await service.getCommandMessage(mockWorkspaceId, "msg-1");
      expect(result).toMatchObject({
        id: "msg-1",
        workspaceId: mockWorkspaceId,
        commandType: "test",
        payload: { key: "value" },
      });
    });
    it("should throw error if command message not found", async () => {
      vi.spyOn(prisma.federationMessage, "findUnique").mockResolvedValue(null);
      await expect(service.getCommandMessage(mockWorkspaceId, "invalid-id")).rejects.toThrow(
        "Command message not found"
      );
    });
  });
});

View File

@@ -0,0 +1,386 @@
/**
* Command Service
*
* Handles federated command messages.
*/
import { Injectable, Logger } from "@nestjs/common";
import { ModuleRef } from "@nestjs/core";
import { HttpService } from "@nestjs/axios";
import { randomUUID } from "crypto";
import { firstValueFrom } from "rxjs";
import { PrismaService } from "../prisma/prisma.service";
import { FederationService } from "./federation.service";
import { SignatureService } from "./signature.service";
import {
FederationConnectionStatus,
FederationMessageType,
FederationMessageStatus,
} from "@prisma/client";
import type { CommandMessage, CommandResponse, CommandMessageDetails } from "./types/message.types";
@Injectable()
export class CommandService {
  private readonly logger = new Logger(CommandService.name);

  constructor(
    private readonly prisma: PrismaService,
    private readonly federationService: FederationService,
    private readonly signatureService: SignatureService,
    private readonly httpService: HttpService,
    private readonly moduleRef: ModuleRef
  ) {}

  /**
   * Send a command to a remote instance.
   *
   * Validates that the connection exists in the workspace and is ACTIVE,
   * signs the command envelope, persists it as a PENDING FederationMessage,
   * then POSTs it to the remote instance. On delivery failure the stored
   * row is marked FAILED before rethrowing.
   *
   * @param workspaceId - Workspace that owns the connection
   * @param connectionId - Target federation connection id
   * @param commandType - Command discriminator (e.g. "agent.spawn")
   * @param payload - JSON-serializable command payload
   * @returns Details of the persisted command message
   * @throws Error when the connection is missing/inactive or delivery fails
   */
  async sendCommand(
    workspaceId: string,
    connectionId: string,
    commandType: string,
    payload: Record<string, unknown>
  ): Promise<CommandMessageDetails> {
    // Workspace-scoped lookup doubles as an authorization check.
    const connection = await this.prisma.federationConnection.findUnique({
      where: { id: connectionId, workspaceId },
    });
    if (!connection) {
      throw new Error("Connection not found");
    }
    if (connection.status !== FederationConnectionStatus.ACTIVE) {
      throw new Error("Connection is not active");
    }
    // Local identity supplies our instanceId for the message envelope.
    const identity = await this.federationService.getInstanceIdentity();
    const messageId = randomUUID();
    const timestamp = Date.now();
    // The signature covers every envelope field except the signature itself.
    const commandPayload: Record<string, unknown> = {
      messageId,
      instanceId: identity.instanceId,
      commandType,
      payload,
      timestamp,
    };
    const signature = await this.signatureService.signMessage(commandPayload);
    const signedCommand = {
      messageId,
      instanceId: identity.instanceId,
      commandType,
      payload,
      timestamp,
      signature,
    } as CommandMessage;
    // Persist before sending so a delivery failure can be recorded on the row.
    const message = await this.prisma.federationMessage.create({
      data: {
        workspaceId,
        connectionId,
        messageType: FederationMessageType.COMMAND,
        messageId,
        commandType,
        payload: payload as never,
        status: FederationMessageStatus.PENDING,
        signature,
      },
    });
    try {
      const remoteUrl = `${connection.remoteUrl}/api/v1/federation/incoming/command`;
      await firstValueFrom(this.httpService.post(remoteUrl, signedCommand));
      this.logger.log(`Command sent to ${connection.remoteUrl}: ${messageId}`);
    } catch (error) {
      this.logger.error(`Failed to send command to ${connection.remoteUrl}`, error);
      // Record the failure so the row reflects reality before we rethrow.
      await this.prisma.federationMessage.update({
        where: { id: message.id },
        data: {
          status: FederationMessageStatus.FAILED,
          error: error instanceof Error ? error.message : "Unknown error",
        },
      });
      throw new Error("Failed to send command");
    }
    return this.mapToCommandMessageDetails(message);
  }

  /**
   * Handle an incoming command from a remote instance.
   *
   * Pipeline: timestamp check (replay protection) → active-connection
   * lookup → signature verification → dispatch. Processing errors do not
   * throw; they are folded into a signed failure response so the remote
   * side always receives a reply it can correlate.
   *
   * @param commandMessage - Signed command envelope from the remote instance
   * @returns A signed {@link CommandResponse} correlated to the command
   * @throws Error on invalid timestamp, unknown sender, or bad signature
   */
  async handleIncomingCommand(commandMessage: CommandMessage): Promise<CommandResponse> {
    this.logger.log(
      `Received command from ${commandMessage.instanceId}: ${commandMessage.messageId}`
    );
    // Reject stale or future-dated commands before touching the database.
    if (!this.signatureService.validateTimestamp(commandMessage.timestamp)) {
      throw new Error("Command timestamp is outside acceptable range");
    }
    // The where-clause already restricts the lookup to ACTIVE connections,
    // so a hit is guaranteed active (a redundant status re-check was removed).
    const connection = await this.prisma.federationConnection.findFirst({
      where: {
        remoteInstanceId: commandMessage.instanceId,
        status: FederationConnectionStatus.ACTIVE,
      },
    });
    if (!connection) {
      throw new Error("No connection found for remote instance");
    }
    // Verify the signature over everything except the signature field itself.
    const { signature, ...messageToVerify } = commandMessage;
    const verificationResult = await this.signatureService.verifyMessage(
      messageToVerify,
      signature,
      commandMessage.instanceId
    );
    if (!verificationResult.valid) {
      throw new Error(verificationResult.error ?? "Invalid signature");
    }
    // Dispatch the command; failures become a failed response, not a throw.
    let responseData: unknown;
    let success = true;
    let errorMessage: string | undefined;
    try {
      if (commandMessage.commandType.startsWith("agent.")) {
        // Dynamic import + ModuleRef lookup avoids a circular dependency
        // between CommandService and FederationAgentService.
        const { FederationAgentService } = await import("./federation-agent.service");
        const federationAgentService = this.moduleRef.get(FederationAgentService, {
          strict: false,
        });
        const agentResponse = await federationAgentService.handleAgentCommand(
          commandMessage.instanceId,
          commandMessage.commandType,
          commandMessage.payload
        );
        success = agentResponse.success;
        responseData = agentResponse.data;
        errorMessage = agentResponse.error;
      } else {
        // Other command types can be added here
        responseData = { message: "Command received and processed" };
      }
    } catch (error) {
      success = false;
      errorMessage = error instanceof Error ? error.message : "Command processing failed";
      this.logger.error(`Command processing failed: ${errorMessage}`);
    }
    // Build and sign the response envelope.
    const identity = await this.federationService.getInstanceIdentity();
    const responseMessageId = randomUUID();
    const responseTimestamp = Date.now();
    // NOTE(review): data/error are inserted AFTER timestamp here but BEFORE
    // timestamp in the response object below. If signMessage serializes by
    // insertion order rather than canonicalizing keys, remote verification
    // of responses with data/error would fail — confirm against
    // SignatureService's canonicalization.
    const responsePayload: Record<string, unknown> = {
      messageId: responseMessageId,
      correlationId: commandMessage.messageId,
      instanceId: identity.instanceId,
      success,
      timestamp: responseTimestamp,
    };
    if (responseData !== undefined) {
      responsePayload.data = responseData;
    }
    if (errorMessage !== undefined) {
      responsePayload.error = errorMessage;
    }
    const responseSignature = await this.signatureService.signMessage(responsePayload);
    const response = {
      messageId: responseMessageId,
      correlationId: commandMessage.messageId,
      instanceId: identity.instanceId,
      success,
      ...(responseData !== undefined ? { data: responseData } : {}),
      ...(errorMessage !== undefined ? { error: errorMessage } : {}),
      timestamp: responseTimestamp,
      signature: responseSignature,
    } as CommandResponse;
    return response;
  }

  /**
   * List all COMMAND messages for a workspace, newest first.
   *
   * @param workspaceId - Workspace to scope the query to
   * @param status - Optional status filter
   */
  async getCommandMessages(
    workspaceId: string,
    status?: FederationMessageStatus
  ): Promise<CommandMessageDetails[]> {
    const where: Record<string, unknown> = {
      workspaceId,
      messageType: FederationMessageType.COMMAND,
    };
    if (status) {
      where.status = status;
    }
    const messages = await this.prisma.federationMessage.findMany({
      where,
      orderBy: { createdAt: "desc" },
    });
    return messages.map((msg) => this.mapToCommandMessageDetails(msg));
  }

  /**
   * Fetch a single command message by its database id, workspace-scoped.
   *
   * @throws Error when no matching message exists in the workspace
   */
  async getCommandMessage(workspaceId: string, messageId: string): Promise<CommandMessageDetails> {
    const message = await this.prisma.federationMessage.findUnique({
      where: { id: messageId, workspaceId },
    });
    if (!message) {
      throw new Error("Command message not found");
    }
    return this.mapToCommandMessageDetails(message);
  }

  /**
   * Process a command response from a remote instance.
   *
   * Validates the timestamp, locates the original COMMAND row by
   * correlationId, verifies the signature, then marks the row DELIVERED
   * (success) or FAILED, attaching the response data / error.
   *
   * @throws Error on invalid timestamp, missing original, or bad signature
   */
  async processCommandResponse(response: CommandResponse): Promise<void> {
    this.logger.log(`Received response for command: ${response.correlationId}`);
    if (!this.signatureService.validateTimestamp(response.timestamp)) {
      throw new Error("Response timestamp is outside acceptable range");
    }
    // Correlate back to the command we originally sent.
    const message = await this.prisma.federationMessage.findFirst({
      where: {
        messageId: response.correlationId,
        messageType: FederationMessageType.COMMAND,
      },
    });
    if (!message) {
      throw new Error("Original command message not found");
    }
    // Verify the signature over everything except the signature field itself.
    const { signature, ...responseToVerify } = response;
    const verificationResult = await this.signatureService.verifyMessage(
      responseToVerify,
      signature,
      response.instanceId
    );
    if (!verificationResult.valid) {
      throw new Error(verificationResult.error ?? "Invalid signature");
    }
    // deliveredAt is set in both outcomes: it records when the reply arrived.
    const updateData: Record<string, unknown> = {
      status: response.success ? FederationMessageStatus.DELIVERED : FederationMessageStatus.FAILED,
      deliveredAt: new Date(),
    };
    if (response.data !== undefined) {
      updateData.response = response.data;
    }
    if (response.error !== undefined) {
      updateData.error = response.error;
    }
    await this.prisma.federationMessage.update({
      where: { id: message.id },
      data: updateData,
    });
    this.logger.log(`Command response processed: ${response.correlationId}`);
  }

  /**
   * Map a Prisma FederationMessage row to the API-facing
   * CommandMessageDetails shape, omitting null optional fields.
   */
  private mapToCommandMessageDetails(message: {
    id: string;
    workspaceId: string;
    connectionId: string;
    messageType: FederationMessageType;
    messageId: string;
    correlationId: string | null;
    query: string | null;
    commandType: string | null;
    payload: unknown;
    response: unknown;
    status: FederationMessageStatus;
    error: string | null;
    createdAt: Date;
    updatedAt: Date;
    deliveredAt: Date | null;
  }): CommandMessageDetails {
    const details: CommandMessageDetails = {
      id: message.id,
      workspaceId: message.workspaceId,
      connectionId: message.connectionId,
      messageType: message.messageType,
      messageId: message.messageId,
      response: message.response,
      status: message.status,
      createdAt: message.createdAt,
      updatedAt: message.updatedAt,
    };
    // Optional fields are added only when present so serialized output
    // stays free of explicit nulls.
    if (message.correlationId !== null) {
      details.correlationId = message.correlationId;
    }
    if (message.commandType !== null) {
      details.commandType = message.commandType;
    }
    if (message.payload !== null && typeof message.payload === "object") {
      details.payload = message.payload as Record<string, unknown>;
    }
    if (message.error !== null) {
      details.error = message.error;
    }
    if (message.deliveredAt !== null) {
      details.deliveredAt = message.deliveredAt;
    }
    return details;
  }
}

View File

@@ -0,0 +1,422 @@
/**
* Connection Service Tests
*
* Tests for federation connection management.
*/
import { describe, it, expect, beforeEach, vi } from "vitest";
import { Test, TestingModule } from "@nestjs/testing";
import { HttpService } from "@nestjs/axios";
import { ConnectionService } from "./connection.service";
import { FederationService } from "./federation.service";
import { SignatureService } from "./signature.service";
import { PrismaService } from "../prisma/prisma.service";
import { FederationConnectionStatus } from "@prisma/client";
import { FederationConnection } from "@prisma/client";
import { of, throwError } from "rxjs";
import type { AxiosResponse } from "axios";
describe("ConnectionService", () => {
  let service: ConnectionService;
  let prismaService: PrismaService;
  let federationService: FederationService;
  let signatureService: SignatureService;
  let httpService: HttpService;
  // Shared fixtures: workspace/remote identifiers plus full local and remote
  // instance identities (the local one carries the private signing key).
  const mockWorkspaceId = "workspace-123";
  const mockRemoteUrl = "https://remote.example.com";
  const mockInstanceIdentity = {
    id: "local-id",
    instanceId: "local-instance-123",
    name: "Local Instance",
    url: "https://local.example.com",
    publicKey: "-----BEGIN PUBLIC KEY-----\nLOCAL\n-----END PUBLIC KEY-----",
    privateKey: "-----BEGIN PRIVATE KEY-----\nLOCAL\n-----END PRIVATE KEY-----",
    capabilities: {
      supportsQuery: true,
      supportsCommand: true,
      protocolVersion: "1.0",
    },
    metadata: {},
    createdAt: new Date(),
    updatedAt: new Date(),
  };
  const mockRemoteIdentity = {
    id: "remote-id",
    instanceId: "remote-instance-456",
    name: "Remote Instance",
    url: mockRemoteUrl,
    publicKey: "-----BEGIN PUBLIC KEY-----\nREMOTE\n-----END PUBLIC KEY-----",
    capabilities: {
      supportsQuery: true,
      protocolVersion: "1.0",
    },
    metadata: {},
    createdAt: new Date(),
    updatedAt: new Date(),
  };
  // Baseline connection row as Prisma would return it: PENDING, never connected.
  const mockConnection: FederationConnection = {
    id: "conn-123",
    workspaceId: mockWorkspaceId,
    remoteInstanceId: mockRemoteIdentity.instanceId,
    remoteUrl: mockRemoteUrl,
    remotePublicKey: mockRemoteIdentity.publicKey,
    remoteCapabilities: mockRemoteIdentity.capabilities,
    status: FederationConnectionStatus.PENDING,
    metadata: {},
    createdAt: new Date(),
    updatedAt: new Date(),
    connectedAt: null,
    disconnectedAt: null,
  };
  beforeEach(async () => {
    // Only ConnectionService is real; every collaborator is a stub so each
    // test controls Prisma/HTTP/signature behavior via vi.spyOn.
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        ConnectionService,
        {
          provide: PrismaService,
          useValue: {
            federationConnection: {
              create: vi.fn(),
              findFirst: vi.fn(),
              findUnique: vi.fn(),
              findMany: vi.fn(),
              update: vi.fn(),
            },
          },
        },
        {
          provide: FederationService,
          useValue: {
            getInstanceIdentity: vi.fn().mockResolvedValue(mockInstanceIdentity),
            getPublicIdentity: vi.fn().mockResolvedValue(mockInstanceIdentity),
          },
        },
        {
          provide: SignatureService,
          useValue: {
            signMessage: vi.fn().mockResolvedValue("mock-signature"),
            verifyConnectionRequest: vi.fn().mockReturnValue({ valid: true }),
          },
        },
        {
          provide: HttpService,
          useValue: {
            get: vi.fn(),
            post: vi.fn(),
          },
        },
      ],
    }).compile();
    service = module.get<ConnectionService>(ConnectionService);
    prismaService = module.get<PrismaService>(PrismaService);
    federationService = module.get<FederationService>(FederationService);
    signatureService = module.get<SignatureService>(SignatureService);
    httpService = module.get<HttpService>(HttpService);
  });
  it("should be defined", () => {
    expect(service).toBeDefined();
  });
  describe("initiateConnection", () => {
    // Happy path: remote identity fetched, PENDING row created.
    it("should create a pending connection", async () => {
      const mockAxiosResponse: AxiosResponse = {
        data: mockRemoteIdentity,
        status: 200,
        statusText: "OK",
        headers: {},
        config: {} as never,
      };
      vi.spyOn(httpService, "get").mockReturnValue(of(mockAxiosResponse));
      vi.spyOn(httpService, "post").mockReturnValue(
        of({ data: { accepted: true } } as AxiosResponse)
      );
      vi.spyOn(prismaService.federationConnection, "create").mockResolvedValue(mockConnection);
      const result = await service.initiateConnection(mockWorkspaceId, mockRemoteUrl);
      expect(result).toBeDefined();
      expect(result.status).toBe(FederationConnectionStatus.PENDING);
      expect(result.remoteUrl).toBe(mockRemoteUrl);
      expect(prismaService.federationConnection.create).toHaveBeenCalledWith(
        expect.objectContaining({
          data: expect.objectContaining({
            workspaceId: mockWorkspaceId,
            remoteUrl: mockRemoteUrl,
            status: FederationConnectionStatus.PENDING,
          }),
        })
      );
    });
    // The service must hit the remote's public identity endpoint first.
    it("should fetch remote instance identity", async () => {
      const mockAxiosResponse: AxiosResponse = {
        data: mockRemoteIdentity,
        status: 200,
        statusText: "OK",
        headers: {},
        config: {} as never,
      };
      vi.spyOn(httpService, "get").mockReturnValue(of(mockAxiosResponse));
      vi.spyOn(httpService, "post").mockReturnValue(
        of({ data: { accepted: true } } as AxiosResponse)
      );
      vi.spyOn(prismaService.federationConnection, "create").mockResolvedValue(mockConnection);
      await service.initiateConnection(mockWorkspaceId, mockRemoteUrl);
      expect(httpService.get).toHaveBeenCalledWith(`${mockRemoteUrl}/api/v1/federation/instance`);
    });
    // Identity fetch failure aborts the whole initiation.
    it("should throw error if remote instance not reachable", async () => {
      vi.spyOn(httpService, "get").mockReturnValue(throwError(() => new Error("Network error")));
      await expect(service.initiateConnection(mockWorkspaceId, mockRemoteUrl)).rejects.toThrow();
    });
    // The outgoing connection request must carry our identity and signature.
    it("should send signed connection request", async () => {
      const mockAxiosResponse: AxiosResponse = {
        data: mockRemoteIdentity,
        status: 200,
        statusText: "OK",
        headers: {},
        config: {} as never,
      };
      const postSpy = vi
        .spyOn(httpService, "post")
        .mockReturnValue(of({ data: { accepted: true } } as AxiosResponse));
      vi.spyOn(httpService, "get").mockReturnValue(of(mockAxiosResponse));
      vi.spyOn(prismaService.federationConnection, "create").mockResolvedValue(mockConnection);
      await service.initiateConnection(mockWorkspaceId, mockRemoteUrl);
      expect(postSpy).toHaveBeenCalledWith(
        `${mockRemoteUrl}/api/v1/federation/incoming/connect`,
        expect.objectContaining({
          instanceId: mockInstanceIdentity.instanceId,
          instanceUrl: mockInstanceIdentity.url,
          publicKey: mockInstanceIdentity.publicKey,
          signature: "mock-signature",
        })
      );
    });
  });
describe("acceptConnection", () => {
it("should update connection status to ACTIVE", async () => {
vi.spyOn(prismaService.federationConnection, "findFirst").mockResolvedValue(mockConnection);
vi.spyOn(prismaService.federationConnection, "update").mockReturnValue({
...mockConnection,
status: FederationConnectionStatus.ACTIVE,
connectedAt: new Date(),
});
const result = await service.acceptConnection(mockWorkspaceId, mockConnection.id);
expect(result.status).toBe(FederationConnectionStatus.ACTIVE);
expect(result.connectedAt).toBeDefined();
expect(prismaService.federationConnection.update).toHaveBeenCalledWith(
expect.objectContaining({
where: expect.objectContaining({
id: mockConnection.id,
}),
data: expect.objectContaining({
status: FederationConnectionStatus.ACTIVE,
connectedAt: expect.any(Date),
}),
})
);
});
it("should throw error if connection not found", async () => {
vi.spyOn(prismaService.federationConnection, "findFirst").mockResolvedValue(null);
await expect(service.acceptConnection(mockWorkspaceId, "non-existent-id")).rejects.toThrow(
"Connection not found"
);
});
it("should enforce workspace isolation", async () => {
vi.spyOn(prismaService.federationConnection, "findFirst").mockResolvedValue(null);
await expect(
service.acceptConnection("different-workspace", mockConnection.id)
).rejects.toThrow("Connection not found");
});
});
describe("rejectConnection", () => {
it("should update connection status to DISCONNECTED", async () => {
vi.spyOn(prismaService.federationConnection, "findFirst").mockResolvedValue(mockConnection);
vi.spyOn(prismaService.federationConnection, "update").mockReturnValue({
...mockConnection,
status: FederationConnectionStatus.DISCONNECTED,
metadata: { rejectionReason: "Not approved" },
});
const result = await service.rejectConnection(
mockWorkspaceId,
mockConnection.id,
"Not approved"
);
expect(result.status).toBe(FederationConnectionStatus.DISCONNECTED);
expect(result.metadata).toHaveProperty("rejectionReason", "Not approved");
});
it("should throw error if connection not found", async () => {
vi.spyOn(prismaService.federationConnection, "findFirst").mockResolvedValue(null);
await expect(
service.rejectConnection(mockWorkspaceId, "non-existent-id", "Reason")
).rejects.toThrow("Connection not found");
});
});
describe("disconnect", () => {
const activeConnection: FederationConnection = {
...mockConnection,
status: FederationConnectionStatus.ACTIVE,
connectedAt: new Date(),
};
it("should disconnect active connection", async () => {
vi.spyOn(prismaService.federationConnection, "findFirst").mockResolvedValue(activeConnection);
vi.spyOn(prismaService.federationConnection, "update").mockReturnValue({
...activeConnection,
status: FederationConnectionStatus.DISCONNECTED,
disconnectedAt: new Date(),
});
const result = await service.disconnect(
mockWorkspaceId,
mockConnection.id,
"Manual disconnect"
);
expect(result.status).toBe(FederationConnectionStatus.DISCONNECTED);
expect(result.disconnectedAt).toBeDefined();
});
it("should store disconnection reason in metadata", async () => {
vi.spyOn(prismaService.federationConnection, "findFirst").mockResolvedValue(activeConnection);
vi.spyOn(prismaService.federationConnection, "update").mockReturnValue({
...activeConnection,
status: FederationConnectionStatus.DISCONNECTED,
disconnectedAt: new Date(),
metadata: { disconnectReason: "Test reason" },
});
const result = await service.disconnect(mockWorkspaceId, mockConnection.id, "Test reason");
expect(result.metadata).toHaveProperty("disconnectReason", "Test reason");
});
});
describe("getConnections", () => {
it("should list all connections for workspace", async () => {
const connections = [mockConnection];
vi.spyOn(prismaService.federationConnection, "findMany").mockResolvedValue(connections);
const result = await service.getConnections(mockWorkspaceId);
expect(result).toEqual(connections);
expect(prismaService.federationConnection.findMany).toHaveBeenCalledWith(
expect.objectContaining({
where: { workspaceId: mockWorkspaceId },
})
);
});
it("should filter by status if provided", async () => {
const connections = [mockConnection];
vi.spyOn(prismaService.federationConnection, "findMany").mockResolvedValue(connections);
await service.getConnections(mockWorkspaceId, FederationConnectionStatus.ACTIVE);
expect(prismaService.federationConnection.findMany).toHaveBeenCalledWith(
expect.objectContaining({
where: {
workspaceId: mockWorkspaceId,
status: FederationConnectionStatus.ACTIVE,
},
})
);
});
});
  describe("getConnection", () => {
    it("should return connection details", async () => {
      vi.spyOn(prismaService.federationConnection, "findFirst").mockResolvedValue(mockConnection);
      const result = await service.getConnection(mockWorkspaceId, mockConnection.id);
      expect(result).toEqual(mockConnection);
    });
    it("should throw error if connection not found", async () => {
      vi.spyOn(prismaService.federationConnection, "findFirst").mockResolvedValue(null);
      await expect(service.getConnection(mockWorkspaceId, "non-existent-id")).rejects.toThrow(
        "Connection not found"
      );
    });
    // Cross-workspace lookups must be indistinguishable from "not found".
    it("should enforce workspace isolation", async () => {
      vi.spyOn(prismaService.federationConnection, "findFirst").mockResolvedValue(null);
      await expect(service.getConnection("different-workspace", mockConnection.id)).rejects.toThrow(
        "Connection not found"
      );
    });
  });
  describe("handleIncomingConnectionRequest", () => {
    // A signed connection request as a remote peer would send it.
    const mockRequest = {
      instanceId: mockRemoteIdentity.instanceId,
      instanceUrl: mockRemoteIdentity.url,
      publicKey: mockRemoteIdentity.publicKey,
      capabilities: mockRemoteIdentity.capabilities,
      timestamp: Date.now(),
      signature: "valid-signature",
    };
    it("should validate connection request signature", async () => {
      const verifySpy = vi.spyOn(signatureService, "verifyConnectionRequest");
      vi.spyOn(prismaService.federationConnection, "create").mockResolvedValue(mockConnection);
      await service.handleIncomingConnectionRequest(mockWorkspaceId, mockRequest);
      // The whole request (including the signature field) is handed to the verifier.
      expect(verifySpy).toHaveBeenCalledWith(mockRequest);
    });
    it("should create pending connection for valid request", async () => {
      vi.spyOn(signatureService, "verifyConnectionRequest").mockReturnValue({ valid: true });
      vi.spyOn(prismaService.federationConnection, "create").mockResolvedValue(mockConnection);
      const result = await service.handleIncomingConnectionRequest(mockWorkspaceId, mockRequest);
      expect(result.status).toBe(FederationConnectionStatus.PENDING);
      expect(prismaService.federationConnection.create).toHaveBeenCalled();
    });
    it("should reject request with invalid signature", async () => {
      vi.spyOn(signatureService, "verifyConnectionRequest").mockReturnValue({
        valid: false,
        error: "Invalid signature",
      });
      await expect(
        service.handleIncomingConnectionRequest(mockWorkspaceId, mockRequest)
      ).rejects.toThrow("Invalid connection request signature");
    });
  });
});

View File

@@ -0,0 +1,338 @@
/**
* Connection Service
*
* Manages federation connections between instances.
*/
import {
Injectable,
Logger,
NotFoundException,
UnauthorizedException,
ServiceUnavailableException,
} from "@nestjs/common";
import { HttpService } from "@nestjs/axios";
import { FederationConnectionStatus, Prisma } from "@prisma/client";
import { PrismaService } from "../prisma/prisma.service";
import { FederationService } from "./federation.service";
import { SignatureService } from "./signature.service";
import { firstValueFrom } from "rxjs";
import type { ConnectionRequest, ConnectionDetails } from "./types/connection.types";
import type { PublicInstanceIdentity } from "./types/instance.types";
@Injectable()
export class ConnectionService {
private readonly logger = new Logger(ConnectionService.name);
constructor(
private readonly prisma: PrismaService,
private readonly federationService: FederationService,
private readonly signatureService: SignatureService,
private readonly httpService: HttpService
) {}
/**
* Initiate a connection to a remote instance
*/
async initiateConnection(workspaceId: string, remoteUrl: string): Promise<ConnectionDetails> {
this.logger.log(`Initiating connection to ${remoteUrl} for workspace ${workspaceId}`);
// Fetch remote instance identity
const remoteIdentity = await this.fetchRemoteIdentity(remoteUrl);
// Get our instance identity
const localIdentity = await this.federationService.getInstanceIdentity();
// Create connection record with PENDING status
const connection = await this.prisma.federationConnection.create({
data: {
workspaceId,
remoteInstanceId: remoteIdentity.instanceId,
remoteUrl: this.normalizeUrl(remoteUrl),
remotePublicKey: remoteIdentity.publicKey,
remoteCapabilities: remoteIdentity.capabilities as Prisma.JsonObject,
status: FederationConnectionStatus.PENDING,
metadata: {},
},
});
// Create signed connection request
const request: Omit<ConnectionRequest, "signature"> = {
instanceId: localIdentity.instanceId,
instanceUrl: localIdentity.url,
publicKey: localIdentity.publicKey,
capabilities: localIdentity.capabilities,
timestamp: Date.now(),
};
const signature = await this.signatureService.signMessage(request);
const signedRequest: ConnectionRequest = { ...request, signature };
// Send connection request to remote instance (fire-and-forget for now)
try {
await firstValueFrom(
this.httpService.post(`${remoteUrl}/api/v1/federation/incoming/connect`, signedRequest)
);
this.logger.log(`Connection request sent to ${remoteUrl}`);
} catch (error) {
this.logger.error(`Failed to send connection request to ${remoteUrl}`, error);
// Connection is still created in PENDING state, can be retried
}
return this.mapToConnectionDetails(connection);
}
/**
* Accept a pending connection
*/
async acceptConnection(
workspaceId: string,
connectionId: string,
metadata?: Record<string, unknown>
): Promise<ConnectionDetails> {
this.logger.log(`Accepting connection ${connectionId} for workspace ${workspaceId}`);
// Verify connection exists and belongs to workspace
const connection = await this.prisma.federationConnection.findFirst({
where: {
id: connectionId,
workspaceId,
},
});
if (!connection) {
throw new NotFoundException("Connection not found");
}
// Update status to ACTIVE
const updated = await this.prisma.federationConnection.update({
where: {
id: connectionId,
},
data: {
status: FederationConnectionStatus.ACTIVE,
connectedAt: new Date(),
metadata: (metadata ?? connection.metadata) as Prisma.JsonObject,
},
});
this.logger.log(`Connection ${connectionId} activated`);
return this.mapToConnectionDetails(updated);
}
/**
 * Reject a pending connection.
 *
 * Marks the connection DISCONNECTED and records the rejection reason in
 * the connection metadata.
 *
 * @param workspaceId - Workspace that owns the connection
 * @param connectionId - Connection to reject
 * @param reason - Human-readable rejection reason (persisted to metadata)
 * @returns Updated connection details
 * @throws NotFoundException if no matching connection exists in the workspace
 */
async rejectConnection(
  workspaceId: string,
  connectionId: string,
  reason: string
): Promise<ConnectionDetails> {
  this.logger.log(`Rejecting connection ${connectionId}: ${reason}`);
  // Verify connection exists and belongs to workspace
  const connection = await this.prisma.federationConnection.findFirst({
    where: {
      id: connectionId,
      workspaceId,
    },
  });
  if (!connection) {
    throw new NotFoundException("Connection not found");
  }
  // Update status to DISCONNECTED with rejection reason.
  // Fix: also stamp disconnectedAt, consistent with disconnect() — a
  // rejection is a terminal transition and previously left the terminal
  // timestamp null.
  const updated = await this.prisma.federationConnection.update({
    where: {
      id: connectionId,
    },
    data: {
      status: FederationConnectionStatus.DISCONNECTED,
      disconnectedAt: new Date(),
      metadata: {
        ...(connection.metadata as Record<string, unknown>),
        rejectionReason: reason,
      } as Prisma.JsonObject,
    },
  });
  return this.mapToConnectionDetails(updated);
}
/**
* Disconnect an active connection
*/
async disconnect(
workspaceId: string,
connectionId: string,
reason?: string
): Promise<ConnectionDetails> {
this.logger.log(`Disconnecting connection ${connectionId}`);
// Verify connection exists and belongs to workspace
const connection = await this.prisma.federationConnection.findFirst({
where: {
id: connectionId,
workspaceId,
},
});
if (!connection) {
throw new NotFoundException("Connection not found");
}
// Update status to DISCONNECTED
const updated = await this.prisma.federationConnection.update({
where: {
id: connectionId,
},
data: {
status: FederationConnectionStatus.DISCONNECTED,
disconnectedAt: new Date(),
metadata: {
...(connection.metadata as Record<string, unknown>),
...(reason ? { disconnectReason: reason } : {}),
} as Prisma.JsonObject,
},
});
return this.mapToConnectionDetails(updated);
}
/**
* Get all connections for a workspace
*/
async getConnections(
workspaceId: string,
status?: FederationConnectionStatus
): Promise<ConnectionDetails[]> {
const connections = await this.prisma.federationConnection.findMany({
where: {
workspaceId,
...(status ? { status } : {}),
},
orderBy: {
createdAt: "desc",
},
});
return connections.map((conn) => this.mapToConnectionDetails(conn));
}
/**
 * Get a single connection.
 *
 * @param workspaceId - Workspace that owns the connection
 * @param connectionId - Connection to fetch
 * @returns Connection details
 * @throws NotFoundException if no matching connection exists in the workspace
 */
async getConnection(workspaceId: string, connectionId: string): Promise<ConnectionDetails> {
  const found = await this.prisma.federationConnection.findFirst({
    where: { id: connectionId, workspaceId },
  });
  if (!found) {
    throw new NotFoundException("Connection not found");
  }
  return this.mapToConnectionDetails(found);
}
/**
 * Handle incoming connection request from remote instance.
 *
 * Verifies the request signature, rejects stale requests, and records a
 * PENDING connection for the workspace.
 *
 * @param workspaceId - Local workspace the request targets
 * @param request - Signed connection request from the remote instance
 * @returns Details of the newly created PENDING connection
 * @throws UnauthorizedException if the signature is invalid or the
 *   timestamp is outside the acceptance window
 */
async handleIncomingConnectionRequest(
  workspaceId: string,
  request: ConnectionRequest
): Promise<ConnectionDetails> {
  this.logger.log(`Received connection request from ${request.instanceId}`);
  // Verify signature
  const validation = this.signatureService.verifyConnectionRequest(request);
  if (!validation.valid) {
    const errorMsg: string = validation.error ?? "Unknown error";
    this.logger.warn(`Invalid connection request from ${request.instanceId}: ${errorMsg}`);
    throw new UnauthorizedException("Invalid connection request signature");
  }
  // Fix: the timestamp is signed (see initiateConnection, which sets it via
  // Date.now()) but was never checked, so a captured, validly-signed request
  // could be replayed indefinitely. Reject requests outside a 5-minute
  // window (clock skew tolerated in both directions).
  const REQUEST_MAX_AGE_MS = 5 * 60 * 1000;
  if (Math.abs(Date.now() - request.timestamp) > REQUEST_MAX_AGE_MS) {
    this.logger.warn(`Stale connection request from ${request.instanceId}`);
    throw new UnauthorizedException("Connection request timestamp out of range");
  }
  // NOTE(review): no duplicate check — repeated requests from the same
  // remoteInstanceId create multiple PENDING rows; confirm if intended.
  // Create pending connection
  const connection = await this.prisma.federationConnection.create({
    data: {
      workspaceId,
      remoteInstanceId: request.instanceId,
      remoteUrl: this.normalizeUrl(request.instanceUrl),
      remotePublicKey: request.publicKey,
      remoteCapabilities: request.capabilities as Prisma.JsonObject,
      status: FederationConnectionStatus.PENDING,
      metadata: {
        requestTimestamp: request.timestamp,
      } as Prisma.JsonObject,
    },
  });
  this.logger.log(`Created pending connection ${connection.id} from ${request.instanceId}`);
  return this.mapToConnectionDetails(connection);
}
/**
 * Fetch remote instance identity via HTTP.
 *
 * @param remoteUrl - Base URL of the remote instance (trailing slash tolerated)
 * @returns The remote instance's public identity document
 * @throws ServiceUnavailableException when the remote instance cannot be reached
 */
private async fetchRemoteIdentity(remoteUrl: string): Promise<PublicInstanceIdentity> {
  // Normalization cannot throw, so it may safely live outside the try block.
  const normalizedUrl = this.normalizeUrl(remoteUrl);
  try {
    const { data } = await firstValueFrom(
      this.httpService.get<PublicInstanceIdentity>(`${normalizedUrl}/api/v1/federation/instance`)
    );
    return data;
  } catch (error: unknown) {
    this.logger.error(`Failed to fetch remote identity from ${remoteUrl}`, error);
    const errorMessage = error instanceof Error ? error.message : "Unknown error";
    throw new ServiceUnavailableException(
      `Could not connect to remote instance: ${remoteUrl}: ${errorMessage}`
    );
  }
}
/**
 * Normalize URL by removing trailing slashes.
 *
 * Fix: the previous pattern (/\/$/) stripped only a single trailing slash,
 * so "https://host//" normalized to "https://host/" — two spellings of the
 * same instance URL could then be stored as distinct values. Strip the
 * whole trailing run instead.
 *
 * @param url - URL to normalize
 * @returns URL without any trailing slashes
 */
private normalizeUrl(url: string): string {
  return url.replace(/\/+$/, "");
}
/**
 * Map Prisma FederationConnection to ConnectionDetails type.
 *
 * Pure field-by-field mapping; only the two JSON columns need casting from
 * Prisma's `unknown` to the plain-object shape the API type declares.
 */
private mapToConnectionDetails(connection: {
  id: string;
  workspaceId: string;
  remoteInstanceId: string;
  remoteUrl: string;
  remotePublicKey: string;
  remoteCapabilities: unknown;
  status: FederationConnectionStatus;
  metadata: unknown;
  createdAt: Date;
  updatedAt: Date;
  connectedAt: Date | null;
  disconnectedAt: Date | null;
}): ConnectionDetails {
  // Split off the JSON columns; everything else carries over unchanged.
  const { remoteCapabilities, metadata, ...rest } = connection;
  return {
    ...rest,
    remoteCapabilities: remoteCapabilities as Record<string, unknown>,
    metadata: metadata as Record<string, unknown>,
  };
}
}

View File

@@ -0,0 +1,162 @@
/**
* Crypto Service Tests
*/
import { describe, it, expect, beforeEach } from "vitest";
import { Test, TestingModule } from "@nestjs/testing";
import { ConfigService } from "@nestjs/config";
import { CryptoService } from "./crypto.service";
describe("CryptoService", () => {
  let service: CryptoService;
  // Valid 32-byte hex key for testing (64 hex chars, as the service requires)
  const testEncryptionKey = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
  beforeEach(async () => {
    // Build a minimal Nest testing module; ConfigService is stubbed so that
    // only ENCRYPTION_KEY resolves and every other key reads as undefined.
    const module: TestingModule = await Test.createTestingModule({
      providers: [
        CryptoService,
        {
          provide: ConfigService,
          useValue: {
            get: (key: string) => {
              if (key === "ENCRYPTION_KEY") return testEncryptionKey;
              return undefined;
            },
          },
        },
      ],
    }).compile();
    service = module.get<CryptoService>(CryptoService);
  });
  // Constructor-time key validation
  describe("initialization", () => {
    it("should throw error if ENCRYPTION_KEY is missing", () => {
      expect(() => {
        // Direct construction with a stub ConfigService returning nothing
        new CryptoService({
          get: () => undefined,
        } as ConfigService);
      }).toThrow("ENCRYPTION_KEY environment variable is required");
    });
    it("should throw error if ENCRYPTION_KEY is invalid length", () => {
      expect(() => {
        new CryptoService({
          get: () => "invalid",
        } as ConfigService);
      }).toThrow("ENCRYPTION_KEY must be 64 hexadecimal characters");
    });
    it("should initialize successfully with valid key", () => {
      expect(service).toBeDefined();
    });
  });
  describe("encrypt", () => {
    it("should encrypt plaintext data", () => {
      // Arrange
      const plaintext = "sensitive data";
      // Act
      const encrypted = service.encrypt(plaintext);
      // Assert
      expect(encrypted).toBeDefined();
      expect(encrypted).not.toEqual(plaintext);
      expect(encrypted.split(":")).toHaveLength(3); // iv:authTag:encrypted
    });
    it("should produce different ciphertext for same plaintext", () => {
      // Arrange
      const plaintext = "sensitive data";
      // Act
      const encrypted1 = service.encrypt(plaintext);
      const encrypted2 = service.encrypt(plaintext);
      // Assert — each call draws a fresh random IV, so outputs must differ
      expect(encrypted1).not.toEqual(encrypted2); // Different IVs
    });
    it("should encrypt long data (RSA private key)", () => {
      // Arrange — PEM-shaped payload well beyond a single cipher block
      const longData =
        "-----BEGIN PRIVATE KEY-----\n" + "a".repeat(1000) + "\n-----END PRIVATE KEY-----";
      // Act
      const encrypted = service.encrypt(longData);
      // Assert
      expect(encrypted).toBeDefined();
      expect(encrypted.length).toBeGreaterThan(0);
    });
  });
  describe("decrypt", () => {
    it("should decrypt encrypted data", () => {
      // Arrange
      const plaintext = "sensitive data";
      const encrypted = service.encrypt(plaintext);
      // Act
      const decrypted = service.decrypt(encrypted);
      // Assert
      expect(decrypted).toEqual(plaintext);
    });
    it("should decrypt long data", () => {
      // Arrange
      const longData =
        "-----BEGIN PRIVATE KEY-----\n" + "a".repeat(1000) + "\n-----END PRIVATE KEY-----";
      const encrypted = service.encrypt(longData);
      // Act
      const decrypted = service.decrypt(encrypted);
      // Assert
      expect(decrypted).toEqual(longData);
    });
    it("should throw error for invalid encrypted data format", () => {
      // Arrange — only two colon-separated parts instead of three
      const invalidData = "invalid:format";
      // Act & Assert
      expect(() => service.decrypt(invalidData)).toThrow("Failed to decrypt data");
    });
    it("should throw error for corrupted data", () => {
      // Arrange
      const plaintext = "sensitive data";
      const encrypted = service.encrypt(plaintext);
      // Corrupts the first hex digit (inside the IV); GCM auth then fails
      const corrupted = encrypted.replace(/[0-9a-f]/, "x"); // Corrupt one character
      // Act & Assert
      expect(() => service.decrypt(corrupted)).toThrow("Failed to decrypt data");
    });
  });
  describe("encrypt/decrypt round-trip", () => {
    it("should maintain data integrity through encrypt-decrypt cycle", () => {
      // Arrange — a spread of payload shapes: short, special chars, PEM, JSON
      const testCases = [
        "short",
        "medium length string with special chars !@#$%",
        "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC\n-----END PRIVATE KEY-----",
        JSON.stringify({ complex: "object", with: ["arrays", 123] }),
      ];
      testCases.forEach((plaintext) => {
        // Act
        const encrypted = service.encrypt(plaintext);
        const decrypted = service.decrypt(encrypted);
        // Assert
        expect(decrypted).toEqual(plaintext);
      });
    });
  });
});

View File

@@ -0,0 +1,97 @@
/**
* Crypto Service
*
* Handles encryption/decryption for sensitive data.
*/
import { Injectable, Logger } from "@nestjs/common";
import { ConfigService } from "@nestjs/config";
import { createCipheriv, createDecipheriv, randomBytes } from "crypto";
@Injectable()
export class CryptoService {
  private readonly logger = new Logger(CryptoService.name);
  // AES-256 in GCM mode: authenticated encryption with a 12-byte IV.
  private readonly algorithm = "aes-256-gcm";
  private readonly encryptionKey: Buffer;

  /**
   * Loads and validates the encryption key from configuration.
   * Fails fast at construction when the key is absent or malformed.
   */
  constructor(private readonly config: ConfigService) {
    const keyHex = this.config.get<string>("ENCRYPTION_KEY");
    if (!keyHex) {
      throw new Error("ENCRYPTION_KEY environment variable is required for private key encryption");
    }
    // AES-256 needs exactly 32 key bytes, i.e. 64 hex characters.
    if (!/^[0-9a-fA-F]{64}$/.test(keyHex)) {
      throw new Error("ENCRYPTION_KEY must be 64 hexadecimal characters (32 bytes)");
    }
    this.encryptionKey = Buffer.from(keyHex, "hex");
    this.logger.log("Crypto service initialized with AES-256-GCM encryption");
  }

  /**
   * Encrypt sensitive data (e.g., private keys).
   *
   * @param plaintext - UTF-8 string to encrypt
   * @returns Hex-encoded string with format: iv:authTag:encrypted
   * @throws Error("Failed to encrypt data") on any underlying cipher failure
   */
  encrypt(plaintext: string): string {
    try {
      // Fresh random IV per call (12 bytes, GCM-recommended size)
      const iv = randomBytes(12);
      const cipher = createCipheriv(this.algorithm, this.encryptionKey, iv);
      const ciphertext = Buffer.concat([cipher.update(plaintext, "utf8"), cipher.final()]);
      // Auth tag is only available after final(); it authenticates the ciphertext.
      const authTag = cipher.getAuthTag();
      return [iv, authTag, ciphertext].map((part) => part.toString("hex")).join(":");
    } catch (error) {
      this.logger.error("Encryption failed", error);
      throw new Error("Failed to encrypt data");
    }
  }

  /**
   * Decrypt sensitive data.
   *
   * @param encrypted - Hex-encoded iv:authTag:encrypted string from encrypt()
   * @returns The original UTF-8 plaintext
   * @throws Error("Failed to decrypt data") on malformed input, wrong key,
   *   or authentication-tag mismatch (tampered ciphertext)
   */
  decrypt(encrypted: string): string {
    try {
      // Expect exactly three non-empty, colon-separated hex segments.
      const [ivHex, authTagHex, payloadHex, ...extra] = encrypted.split(":");
      if (extra.length > 0 || !ivHex || !authTagHex || !payloadHex) {
        throw new Error("Invalid encrypted data format");
      }
      const decipher = createDecipheriv(
        this.algorithm,
        this.encryptionKey,
        Buffer.from(ivHex, "hex")
      );
      decipher.setAuthTag(Buffer.from(authTagHex, "hex"));
      // final() verifies the auth tag and throws on mismatch.
      return Buffer.concat([
        decipher.update(Buffer.from(payloadHex, "hex")),
        decipher.final(),
      ]).toString("utf8");
    } catch (error) {
      this.logger.error("Decryption failed", error);
      throw new Error("Failed to decrypt data");
    }
  }
}

View File

@@ -0,0 +1,54 @@
/**
* Command DTOs
*
* Data Transfer Objects for command message operations.
*/
import { IsString, IsObject, IsNotEmpty, IsNumber } from "class-validator";
import type { CommandMessage } from "../types/message.types";
/**
 * DTO for sending a command to a remote instance
 */
export class SendCommandDto {
  // Local federation connection the command is routed through
  @IsString()
  @IsNotEmpty()
  connectionId!: string;

  // Application-defined command identifier; semantics live in the handler
  @IsString()
  @IsNotEmpty()
  commandType!: string;

  // Arbitrary command arguments, forwarded as-is to the remote instance
  @IsObject()
  @IsNotEmpty()
  payload!: Record<string, unknown>;
}
/**
 * DTO for incoming command request from remote instance
 *
 * Validates the on-the-wire shape of CommandMessage before processing.
 */
export class IncomingCommandDto implements CommandMessage {
  // Unique identifier of this message (used for correlation/dedup)
  @IsString()
  @IsNotEmpty()
  messageId!: string;

  // Identifier of the sending federation instance
  @IsString()
  @IsNotEmpty()
  instanceId!: string;

  // Application-defined command identifier
  @IsString()
  @IsNotEmpty()
  commandType!: string;

  // Command arguments supplied by the remote instance
  @IsObject()
  @IsNotEmpty()
  payload!: Record<string, unknown>;

  // Message creation time — presumably Unix epoch ms (Date.now()); confirm
  // against the sending side
  @IsNumber()
  @IsNotEmpty()
  timestamp!: number;

  // Signature over the message fields, verified before the command runs
  @IsString()
  @IsNotEmpty()
  signature!: string;
}

View File

@@ -0,0 +1,64 @@
/**
* Connection DTOs
*
* Data Transfer Objects for federation connection API.
*/
import { IsString, IsUrl, IsOptional, IsObject, IsNumber } from "class-validator";
/**
 * DTO for initiating a connection
 */
export class InitiateConnectionDto {
  // Base URL of the remote federation instance to connect to
  @IsUrl()
  remoteUrl!: string;
}
/**
 * DTO for accepting a connection
 */
export class AcceptConnectionDto {
  // Optional metadata that replaces the connection's stored metadata
  @IsOptional()
  @IsObject()
  metadata?: Record<string, unknown>;
}
/**
 * DTO for rejecting a connection
 */
export class RejectConnectionDto {
  // Human-readable rejection reason (persisted to connection metadata).
  // NOTE(review): no @IsNotEmpty, so an empty string passes validation —
  // confirm whether that is intended.
  @IsString()
  reason!: string;
}
/**
 * DTO for disconnecting a connection
 */
export class DisconnectConnectionDto {
  // Optional human-readable reason (persisted to connection metadata)
  @IsOptional()
  @IsString()
  reason?: string;
}
/**
 * DTO for incoming connection request (from remote instance)
 */
export class IncomingConnectionRequestDto {
  // Unique identifier of the requesting instance
  @IsString()
  instanceId!: string;

  // Publicly reachable URL of the requesting instance
  @IsUrl()
  instanceUrl!: string;

  // Public key used to verify this request's signature
  @IsString()
  publicKey!: string;

  // Capability flags advertised by the remote instance
  @IsObject()
  capabilities!: Record<string, unknown>;

  // Request creation time in Unix epoch milliseconds (set via Date.now()
  // on the initiating side)
  @IsNumber()
  timestamp!: number;

  // Signature over the request fields
  @IsString()
  signature!: string;
}

View File

@@ -0,0 +1,109 @@
/**
* Event DTOs
*
* Data Transfer Objects for event subscription and publishing.
*/
import { IsString, IsNotEmpty, IsOptional, IsObject, IsNumber, IsBoolean } from "class-validator";
/**
 * DTO for subscribing to an event type
 */
export class SubscribeToEventDto {
  // Federation connection the subscription is bound to
  @IsString()
  @IsNotEmpty()
  connectionId!: string;

  // Event type to subscribe to (e.g. "task.created")
  @IsString()
  @IsNotEmpty()
  eventType!: string;

  // Optional metadata stored alongside the subscription
  @IsOptional()
  @IsObject()
  metadata?: Record<string, unknown>;
}
/**
 * DTO for unsubscribing from an event type
 */
export class UnsubscribeFromEventDto {
  // Federation connection the subscription is bound to
  @IsString()
  @IsNotEmpty()
  connectionId!: string;

  // Event type to unsubscribe from
  @IsString()
  @IsNotEmpty()
  eventType!: string;
}
/**
 * DTO for publishing an event
 */
export class PublishEventDto {
  // Event type to publish (delivered to matching subscriptions)
  @IsString()
  @IsNotEmpty()
  eventType!: string;

  // Event payload, forwarded as-is to subscribers
  @IsObject()
  @IsNotEmpty()
  payload!: Record<string, unknown>;
}
/**
* DTO for incoming event request
*/
export class IncomingEventDto {
@IsString()
@IsNotEmpty()
messageId!: string;
@IsString()
@IsNotEmpty()
instanceId!: string;
@IsString()
@IsNotEmpty()
eventType!: string;
@IsObject()
@IsNotEmpty()
payload!: Record<string, unknown>;
@IsNotEmpty()
timestamp!: number;
@IsString()
@IsNotEmpty()
signature!: string;
}
/**
* DTO for incoming event acknowledgment
*/
export class IncomingEventAckDto {
@IsString()
@IsNotEmpty()
messageId!: string;
@IsString()
@IsNotEmpty()
correlationId!: string;
@IsString()
@IsNotEmpty()
instanceId!: string;
@IsNotEmpty()
received!: boolean;
@IsOptional()
@IsString()
error?: string;
@IsNotEmpty()
timestamp!: number;
@IsString()
@IsNotEmpty()
signature!: string;
}

View File

@@ -0,0 +1,51 @@
/**
* Federated Authentication DTOs
*
* Data transfer objects for federated OIDC authentication endpoints.
*/
import { IsString, IsEmail, IsOptional, IsObject } from "class-validator";
/**
 * DTO for initiating federated authentication
 */
export class InitiateFederatedAuthDto {
  // Federation instance to authenticate against
  @IsString()
  remoteInstanceId!: string;

  // Optional URL to send the user to after authentication completes
  @IsOptional()
  @IsString()
  redirectUrl?: string;
}
/**
 * DTO for linking federated identity
 */
export class LinkFederatedIdentityDto {
  // Federation instance the remote identity belongs to
  @IsString()
  remoteInstanceId!: string;

  // User identifier on the remote instance
  @IsString()
  remoteUserId!: string;

  // OIDC subject claim identifying the user
  @IsString()
  oidcSubject!: string;

  // Email address associated with the federated identity
  @IsEmail()
  email!: string;

  // Optional metadata stored with the identity link
  @IsOptional()
  @IsObject()
  metadata?: Record<string, unknown>;
}
/**
 * DTO for validating federated token
 */
export class ValidateFederatedTokenDto {
  // Token to validate — presumably an OIDC/JWT issued by the remote
  // instance; confirm against the validation service
  @IsString()
  token!: string;

  // Instance that issued the token
  @IsString()
  instanceId!: string;
}

View File

@@ -0,0 +1,98 @@
/**
* Identity Linking DTOs
*
* Data transfer objects for identity linking API endpoints.
*/
import { IsString, IsEmail, IsOptional, IsObject, IsArray, IsNumber } from "class-validator";
/**
 * DTO for verifying identity from remote instance
 */
export class VerifyIdentityDto {
  // User identifier on this (local) instance
  @IsString()
  localUserId!: string;

  // User identifier on the remote instance
  @IsString()
  remoteUserId!: string;

  // Federation instance the remote identity belongs to
  @IsString()
  remoteInstanceId!: string;

  // OIDC token presented as proof of the remote identity
  @IsString()
  oidcToken!: string;

  // Request creation time — presumably Unix epoch ms; confirm with sender
  @IsNumber()
  timestamp!: number;

  // Signature over the verification request fields
  @IsString()
  signature!: string;
}
/**
 * DTO for resolving remote user to local user
 */
export class ResolveIdentityDto {
  // Federation instance the remote user belongs to
  @IsString()
  remoteInstanceId!: string;

  // User identifier on the remote instance to resolve locally
  @IsString()
  remoteUserId!: string;
}
/**
 * DTO for reverse resolving local user to remote identity
 */
export class ReverseResolveIdentityDto {
  // Local user whose remote identity is requested
  @IsString()
  localUserId!: string;

  // Federation instance to resolve the identity on
  @IsString()
  remoteInstanceId!: string;
}
/**
 * DTO for bulk identity resolution
 */
export class BulkResolveIdentityDto {
  // Federation instance all of the remote users belong to
  @IsString()
  remoteInstanceId!: string;

  // Remote user identifiers to resolve in one call
  @IsArray()
  @IsString({ each: true })
  remoteUserIds!: string[];
}
/**
 * DTO for creating identity mapping
 */
export class CreateIdentityMappingDto {
  // Federation instance the remote identity belongs to
  @IsString()
  remoteInstanceId!: string;

  // User identifier on the remote instance
  @IsString()
  remoteUserId!: string;

  // OIDC subject claim identifying the user
  @IsString()
  oidcSubject!: string;

  // Email address associated with the mapped identity
  @IsEmail()
  email!: string;

  // Optional metadata stored with the mapping
  @IsOptional()
  @IsObject()
  metadata?: Record<string, unknown>;

  // Optional OIDC token — presumably used to verify the mapping at
  // creation time; confirm against the consuming service
  @IsOptional()
  @IsString()
  oidcToken?: string;
}
/**
 * DTO for updating identity mapping
 *
 * Only the metadata is updatable; identity fields are immutable here.
 */
export class UpdateIdentityMappingDto {
  // Replacement metadata for the mapping
  @IsOptional()
  @IsObject()
  metadata?: Record<string, unknown>;
}

View File

@@ -0,0 +1,46 @@
/**
* Instance Configuration DTOs
*
* Data Transfer Objects for instance configuration API.
*/
import { IsString, IsBoolean, IsOptional, IsObject, ValidateNested } from "class-validator";
import { Type } from "class-transformer";
/**
 * DTO for federation capabilities
 *
 * Feature flags an instance advertises to its federation peers.
 */
export class FederationCapabilitiesDto {
  // Whether the instance accepts federated queries
  @IsBoolean()
  supportsQuery!: boolean;

  // Whether the instance accepts federated commands
  @IsBoolean()
  supportsCommand!: boolean;

  // Whether the instance accepts federated events
  @IsBoolean()
  supportsEvent!: boolean;

  // Whether the instance allows remote agent spawning
  @IsBoolean()
  supportsAgentSpawn!: boolean;

  // Federation protocol version string
  @IsString()
  protocolVersion!: string;
}
/**
 * DTO for updating instance configuration
 *
 * All fields optional; only supplied fields are updated.
 */
export class UpdateInstanceDto {
  // Display name of this instance
  @IsOptional()
  @IsString()
  name?: string;

  // Advertised capabilities; nested object validated as FederationCapabilitiesDto
  @IsOptional()
  @ValidateNested()
  @Type(() => FederationCapabilitiesDto)
  capabilities?: FederationCapabilitiesDto;

  // Free-form instance metadata
  @IsOptional()
  @IsObject()
  metadata?: Record<string, unknown>;
}

View File

@@ -0,0 +1,53 @@
/**
* Query DTOs
*
* Data Transfer Objects for query message operations.
*/
import { IsString, IsOptional, IsObject, IsNotEmpty, IsNumber } from "class-validator";
import type { QueryMessage } from "../types/message.types";
/**
 * DTO for sending a query to a remote instance
 */
export class SendQueryDto {
  // Federation connection the query is routed through
  @IsString()
  @IsNotEmpty()
  connectionId!: string;

  // Query string; semantics are defined by the remote query handler
  @IsString()
  @IsNotEmpty()
  query!: string;

  // Optional context forwarded with the query
  @IsOptional()
  @IsObject()
  context?: Record<string, unknown>;
}
/**
* DTO for incoming query request from remote instance
*/
export class IncomingQueryDto implements QueryMessage {
@IsString()
@IsNotEmpty()
messageId!: string;
@IsString()
@IsNotEmpty()
instanceId!: string;
@IsString()
@IsNotEmpty()
query!: string;
@IsOptional()
@IsObject()
context?: Record<string, unknown>;
@IsNotEmpty()
timestamp!: number;
@IsString()
@IsNotEmpty()
signature!: string;
}

View File

@@ -0,0 +1,393 @@
/**
* EventController Tests
*
* Tests for event subscription and publishing endpoints.
*/
import { describe, it, expect, beforeEach, vi } from "vitest";
import { Test, TestingModule } from "@nestjs/testing";
import { EventController } from "./event.controller";
import { EventService } from "./event.service";
import { AuthGuard } from "../auth/guards/auth.guard";
import { FederationMessageType, FederationMessageStatus } from "@prisma/client";
import type { AuthenticatedRequest } from "../common/types/user.types";
import type { EventMessage, EventAck } from "./types/message.types";
describe("EventController", () => {
  let controller: EventController;
  // NOTE(review): eventService is resolved below but never referenced by
  // the assertions — mockEventService is asserted on directly instead.
  let eventService: EventService;
  // Every EventService method the controller delegates to is stubbed here;
  // individual tests program the return values they need.
  const mockEventService = {
    subscribeToEventType: vi.fn(),
    unsubscribeFromEventType: vi.fn(),
    publishEvent: vi.fn(),
    getEventSubscriptions: vi.fn(),
    getEventMessages: vi.fn(),
    getEventMessage: vi.fn(),
    handleIncomingEvent: vi.fn(),
    processEventAck: vi.fn(),
  };
  // Shared fixture identifiers reused across tests
  const mockWorkspaceId = "workspace-123";
  const mockUserId = "user-123";
  const mockConnectionId = "connection-123";
  const mockEventType = "task.created";
  beforeEach(async () => {
    vi.clearAllMocks();
    // AuthGuard is overridden to always allow, so endpoints can be invoked
    // without real authentication.
    const module: TestingModule = await Test.createTestingModule({
      controllers: [EventController],
      providers: [
        {
          provide: EventService,
          useValue: mockEventService,
        },
      ],
    })
      .overrideGuard(AuthGuard)
      .useValue({ canActivate: () => true })
      .compile();
    controller = module.get<EventController>(EventController);
    eventService = module.get<EventService>(EventService);
  });
  describe("subscribeToEvent", () => {
    it("should subscribe to an event type", async () => {
      const req = {
        user: {
          id: mockUserId,
          workspaceId: mockWorkspaceId,
        },
      } as AuthenticatedRequest;
      const dto = {
        connectionId: mockConnectionId,
        eventType: mockEventType,
        metadata: { key: "value" },
      };
      const mockSubscription = {
        id: "sub-123",
        workspaceId: mockWorkspaceId,
        connectionId: mockConnectionId,
        eventType: mockEventType,
        metadata: { key: "value" },
        isActive: true,
        createdAt: new Date(),
        updatedAt: new Date(),
      };
      mockEventService.subscribeToEventType.mockResolvedValue(mockSubscription);
      const result = await controller.subscribeToEvent(req, dto);
      expect(result).toEqual(mockSubscription);
      // Controller must unpack the DTO into positional service arguments
      expect(mockEventService.subscribeToEventType).toHaveBeenCalledWith(
        mockWorkspaceId,
        mockConnectionId,
        mockEventType,
        { key: "value" }
      );
    });
    it("should throw error if workspace not found", async () => {
      // Request whose user carries no workspaceId — controller must reject
      const req = {
        user: {
          id: mockUserId,
        },
      } as AuthenticatedRequest;
      const dto = {
        connectionId: mockConnectionId,
        eventType: mockEventType,
      };
      await expect(controller.subscribeToEvent(req, dto)).rejects.toThrow(
        "Workspace ID not found in request"
      );
    });
  });
  describe("unsubscribeFromEvent", () => {
    it("should unsubscribe from an event type", async () => {
      const req = {
        user: {
          id: mockUserId,
          workspaceId: mockWorkspaceId,
        },
      } as AuthenticatedRequest;
      const dto = {
        connectionId: mockConnectionId,
        eventType: mockEventType,
      };
      mockEventService.unsubscribeFromEventType.mockResolvedValue(undefined);
      await controller.unsubscribeFromEvent(req, dto);
      expect(mockEventService.unsubscribeFromEventType).toHaveBeenCalledWith(
        mockWorkspaceId,
        mockConnectionId,
        mockEventType
      );
    });
  });
  describe("publishEvent", () => {
    it("should publish an event", async () => {
      const req = {
        user: {
          id: mockUserId,
          workspaceId: mockWorkspaceId,
        },
      } as AuthenticatedRequest;
      const dto = {
        eventType: mockEventType,
        payload: { data: "test" },
      };
      // publishEvent fans out to all subscribed connections, so the service
      // returns an array of created messages.
      const mockMessages = [
        {
          id: "msg-123",
          workspaceId: mockWorkspaceId,
          connectionId: mockConnectionId,
          messageType: FederationMessageType.EVENT,
          messageId: "msg-id-123",
          eventType: mockEventType,
          payload: { data: "test" },
          status: FederationMessageStatus.DELIVERED,
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      ];
      mockEventService.publishEvent.mockResolvedValue(mockMessages);
      const result = await controller.publishEvent(req, dto);
      expect(result).toEqual(mockMessages);
      expect(mockEventService.publishEvent).toHaveBeenCalledWith(mockWorkspaceId, mockEventType, {
        data: "test",
      });
    });
  });
  describe("getSubscriptions", () => {
    it("should return all subscriptions for workspace", async () => {
      const req = {
        user: {
          id: mockUserId,
          workspaceId: mockWorkspaceId,
        },
      } as AuthenticatedRequest;
      const mockSubscriptions = [
        {
          id: "sub-1",
          workspaceId: mockWorkspaceId,
          connectionId: mockConnectionId,
          eventType: "task.created",
          metadata: {},
          isActive: true,
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      ];
      mockEventService.getEventSubscriptions.mockResolvedValue(mockSubscriptions);
      const result = await controller.getSubscriptions(req);
      expect(result).toEqual(mockSubscriptions);
      // Without a connectionId query param the filter must be undefined
      expect(mockEventService.getEventSubscriptions).toHaveBeenCalledWith(
        mockWorkspaceId,
        undefined
      );
    });
    it("should filter by connectionId when provided", async () => {
      const req = {
        user: {
          id: mockUserId,
          workspaceId: mockWorkspaceId,
        },
      } as AuthenticatedRequest;
      const mockSubscriptions = [
        {
          id: "sub-1",
          workspaceId: mockWorkspaceId,
          connectionId: mockConnectionId,
          eventType: "task.created",
          metadata: {},
          isActive: true,
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      ];
      mockEventService.getEventSubscriptions.mockResolvedValue(mockSubscriptions);
      const result = await controller.getSubscriptions(req, mockConnectionId);
      expect(result).toEqual(mockSubscriptions);
      expect(mockEventService.getEventSubscriptions).toHaveBeenCalledWith(
        mockWorkspaceId,
        mockConnectionId
      );
    });
  });
  describe("getEventMessages", () => {
    it("should return all event messages for workspace", async () => {
      const req = {
        user: {
          id: mockUserId,
          workspaceId: mockWorkspaceId,
        },
      } as AuthenticatedRequest;
      const mockMessages = [
        {
          id: "msg-1",
          workspaceId: mockWorkspaceId,
          connectionId: mockConnectionId,
          messageType: FederationMessageType.EVENT,
          messageId: "msg-id-1",
          eventType: "task.created",
          payload: { data: "test1" },
          status: FederationMessageStatus.DELIVERED,
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      ];
      mockEventService.getEventMessages.mockResolvedValue(mockMessages);
      const result = await controller.getEventMessages(req);
      expect(result).toEqual(mockMessages);
      expect(mockEventService.getEventMessages).toHaveBeenCalledWith(mockWorkspaceId, undefined);
    });
    it("should filter by status when provided", async () => {
      const req = {
        user: {
          id: mockUserId,
          workspaceId: mockWorkspaceId,
        },
      } as AuthenticatedRequest;
      const mockMessages = [
        {
          id: "msg-1",
          workspaceId: mockWorkspaceId,
          connectionId: mockConnectionId,
          messageType: FederationMessageType.EVENT,
          messageId: "msg-id-1",
          eventType: "task.created",
          payload: { data: "test1" },
          status: FederationMessageStatus.PENDING,
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      ];
      mockEventService.getEventMessages.mockResolvedValue(mockMessages);
      const result = await controller.getEventMessages(req, FederationMessageStatus.PENDING);
      expect(result).toEqual(mockMessages);
      expect(mockEventService.getEventMessages).toHaveBeenCalledWith(
        mockWorkspaceId,
        FederationMessageStatus.PENDING
      );
    });
  });
  describe("getEventMessage", () => {
    it("should return a single event message", async () => {
      const req = {
        user: {
          id: mockUserId,
          workspaceId: mockWorkspaceId,
        },
      } as AuthenticatedRequest;
      const messageId = "msg-123";
      const mockMessage = {
        id: messageId,
        workspaceId: mockWorkspaceId,
        connectionId: mockConnectionId,
        messageType: FederationMessageType.EVENT,
        messageId: "msg-id-123",
        eventType: "task.created",
        payload: { data: "test" },
        status: FederationMessageStatus.DELIVERED,
        createdAt: new Date(),
        updatedAt: new Date(),
      };
      mockEventService.getEventMessage.mockResolvedValue(mockMessage);
      const result = await controller.getEventMessage(req, messageId);
      expect(result).toEqual(mockMessage);
      expect(mockEventService.getEventMessage).toHaveBeenCalledWith(mockWorkspaceId, messageId);
    });
  });
  describe("handleIncomingEvent", () => {
    it("should handle incoming event and return acknowledgment", async () => {
      // Federation-facing endpoint: takes a signed EventMessage, returns a
      // signed EventAck whose correlationId references the event.
      const eventMessage: EventMessage = {
        messageId: "msg-123",
        instanceId: "remote-instance-123",
        eventType: "task.created",
        payload: { data: "test" },
        timestamp: Date.now(),
        signature: "signature-123",
      };
      const mockAck: EventAck = {
        messageId: "ack-123",
        correlationId: eventMessage.messageId,
        instanceId: "local-instance-123",
        received: true,
        timestamp: Date.now(),
        signature: "ack-signature-123",
      };
      mockEventService.handleIncomingEvent.mockResolvedValue(mockAck);
      const result = await controller.handleIncomingEvent(eventMessage);
      expect(result).toEqual(mockAck);
      expect(mockEventService.handleIncomingEvent).toHaveBeenCalledWith(eventMessage);
    });
  });
  describe("handleIncomingEventAck", () => {
    it("should process event acknowledgment", async () => {
      const ack: EventAck = {
        messageId: "ack-123",
        correlationId: "msg-123",
        instanceId: "remote-instance-123",
        received: true,
        timestamp: Date.now(),
        signature: "ack-signature-123",
      };
      mockEventService.processEventAck.mockResolvedValue(undefined);
      const result = await controller.handleIncomingEventAck(ack);
      // Controller returns a fixed status object rather than the ack itself
      expect(result).toEqual({ status: "acknowledged" });
      expect(mockEventService.processEventAck).toHaveBeenCalledWith(ack);
    });
  });
});

Some files were not shown because too many files have changed in this diff Show More