Merge branch 'develop' into work/m4-llm
Some checks failed
ci/woodpecker/push/woodpecker Pipeline failed
ci/woodpecker/pr/woodpecker Pipeline failed

This commit is contained in:
2026-02-04 02:28:50 +00:00
16 changed files with 1151 additions and 1204 deletions

View File

@@ -1,339 +0,0 @@
# Woodpecker CI - Enhanced Quality Gates + Auto-Merge
# Features:
#   - Strict quality gates (all checks must pass)
#   - Security scanning (SAST, dependency audit, secrets)
#   - Test coverage enforcement (≥85%)
#   - Automated PR merging when all checks pass

when:
  - event: [push, pull_request, manual]

# Reusable YAML anchors: base Node image plus shared command snippets,
# referenced by the steps below via *aliases.
variables:
  - &node_image "node:20-alpine"
  - &install_deps |
      corepack enable
      pnpm install --frozen-lockfile
  - &use_deps |
      corepack enable
  - &kaniko_setup |
      mkdir -p /kaniko/.docker
      echo "{\"auths\":{\"git.mosaicstack.dev\":{\"username\":\"$GITEA_USER\",\"password\":\"$GITEA_TOKEN\"}}}" > /kaniko/.docker/config.json

steps:
  # ======================
  # PHASE 1: Setup
  # ======================
  install:
    image: *node_image
    commands:
      - *install_deps

  prisma-generate:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
    commands:
      - *use_deps
      - pnpm --filter "@mosaic/api" prisma:generate
    depends_on:
      - install

  # ======================
  # PHASE 2: Security Review
  # ======================
  security-audit-deps:
    image: *node_image
    commands:
      - *use_deps
      - echo "=== Dependency Security Audit ==="
      - pnpm audit --audit-level=high
    depends_on:
      - install
    failure: fail # STRICT: Block on security vulnerabilities

  security-scan-secrets:
    image: alpine/git:latest
    commands:
      - echo "=== Secret Scanning ==="
      - apk add --no-cache bash
      - |
        # Check for common secrets patterns
        echo "Scanning for hardcoded secrets..."
        if git grep -E "(password|secret|api_key|private_key)\s*=\s*['\"]" -- '*.ts' '*.tsx' '*.js' '*.jsx' ':!*test*' ':!*spec*'; then
          echo "❌ Found hardcoded secrets!"
          exit 1
        fi
      - echo "✅ No hardcoded secrets detected"
    depends_on:
      - install
    when:
      - event: pull_request
    failure: fail # STRICT: Block on secret detection

  security-scan-sast:
    image: returntocorp/semgrep
    commands:
      - echo "=== SAST Security Scanning ==="
      - |
        semgrep scan \
          --config=auto \
          --error \
          --exclude='node_modules' \
          --exclude='dist' \
          --exclude='*.test.ts' \
          --exclude='*.spec.ts' \
          --metrics=off \
          --quiet \
          || true # TODO: Make strict after baseline cleanup
      - echo "✅ SAST scan complete"
    depends_on:
      - install
    when:
      - event: pull_request
    failure: ignore # TODO: Change to 'fail' after fixing baseline issues

  # ======================
  # PHASE 3: Code Review
  # ======================
  lint:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
    commands:
      - *use_deps
      - echo "=== Lint Check ==="
      - pnpm lint
    depends_on:
      - install
    when:
      - evaluate: 'CI_PIPELINE_EVENT != "pull_request" || CI_COMMIT_BRANCH != "main"'
    failure: fail # STRICT: Block on lint errors

  typecheck:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
    commands:
      - *use_deps
      - echo "=== TypeScript Type Check ==="
      - pnpm typecheck
    depends_on:
      - prisma-generate
    failure: fail # STRICT: Block on type errors

  # ======================
  # PHASE 4: QA
  # ======================
  test-unit:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
    commands:
      - *use_deps
      - echo "=== Unit Tests ==="
      - pnpm test
    depends_on:
      - prisma-generate
    failure: fail # STRICT: Block on test failures

  test-coverage:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
    commands:
      - *use_deps
      - echo "=== Test Coverage Check ==="
      - |
        pnpm test:coverage --reporter=json --reporter=text > coverage-output.txt 2>&1 || true
        cat coverage-output.txt
        # TODO: Parse coverage report and enforce ≥85% threshold
        echo "⚠️ Coverage enforcement not yet implemented"
    depends_on:
      - prisma-generate
    when:
      - event: pull_request
    failure: ignore # TODO: Change to 'fail' after implementing coverage parser

  # ======================
  # PHASE 5: Build Verification
  # ======================
  build:
    image: *node_image
    environment:
      SKIP_ENV_VALIDATION: "true"
      NODE_ENV: "production"
    commands:
      - *use_deps
      - echo "=== Production Build ==="
      - pnpm build
    depends_on:
      - typecheck
      - security-audit-deps
      - prisma-generate
    failure: fail # STRICT: Block on build failures

  # ======================
  # PHASE 6: Auto-Merge (PR only)
  # ======================
  pr-auto-merge:
    image: alpine:latest
    secrets:
      - gitea_token
    commands:
      - echo "=== PR Auto-Merge Check ==="
      - apk add --no-cache curl jq
      - |
        # Only run for PRs targeting develop
        if [ "$CI_PIPELINE_EVENT" != "pull_request" ]; then
          echo "⏭️ Skipping: Not a pull request"
          exit 0
        fi

        # Extract PR number from CI environment
        PR_NUMBER=$(echo "$CI_COMMIT_REF" | grep -oP 'pull/\K\d+' || echo "")
        if [ -z "$PR_NUMBER" ]; then
          echo "⏭️ Skipping: Cannot determine PR number"
          exit 0
        fi

        echo "📋 Checking PR #$PR_NUMBER for auto-merge eligibility..."

        # Get PR details
        PR_DATA=$(curl -s -H "Authorization: token $GITEA_TOKEN" \
          "https://git.mosaicstack.dev/api/v1/repos/mosaic/stack/pulls/$PR_NUMBER")

        # Check if PR is mergeable
        IS_MERGEABLE=$(echo "$PR_DATA" | jq -r '.mergeable // false')
        BASE_BRANCH=$(echo "$PR_DATA" | jq -r '.base.ref // ""')
        PR_STATE=$(echo "$PR_DATA" | jq -r '.state // ""')

        if [ "$PR_STATE" != "open" ]; then
          echo "⏭️ Skipping: PR is not open (state: $PR_STATE)"
          exit 0
        fi
        if [ "$BASE_BRANCH" != "develop" ]; then
          echo "⏭️ Skipping: PR does not target develop (targets: $BASE_BRANCH)"
          exit 0
        fi
        if [ "$IS_MERGEABLE" != "true" ]; then
          echo "❌ PR is not mergeable (conflicts or other issues)"
          exit 0
        fi

        # All checks passed - merge the PR
        echo "✅ All quality gates passed - attempting auto-merge..."
        MERGE_RESULT=$(curl -s -X POST \
          -H "Authorization: token $GITEA_TOKEN" \
          -H "Content-Type: application/json" \
          -d '{"Do":"merge","MergeMessageField":"","MergeTitleField":"","delete_branch_after_merge":true,"force_merge":false,"merge_when_checks_succeed":false}' \
          "https://git.mosaicstack.dev/api/v1/repos/mosaic/stack/pulls/$PR_NUMBER/merge")
        if echo "$MERGE_RESULT" | jq -e '.merged' > /dev/null 2>&1; then
          echo "🎉 PR #$PR_NUMBER successfully merged to develop!"
        else
          ERROR_MSG=$(echo "$MERGE_RESULT" | jq -r '.message // "Unknown error"')
          echo "❌ Failed to merge PR: $ERROR_MSG"
          exit 1
        fi
    depends_on:
      - build
      - test-unit
      - lint
      - typecheck
      - security-audit-deps
    when:
      - event: pull_request
        evaluate: 'CI_COMMIT_TARGET_BRANCH == "develop"'
    failure: ignore # Don't fail pipeline if auto-merge fails

  # ======================
  # PHASE 7: Docker Build & Push (develop/main only)
  # ======================
  docker-build-api:
    image: gcr.io/kaniko-project/executor:debug
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
      CI_COMMIT_SHA: ${CI_COMMIT_SHA}
    commands:
      - *kaniko_setup
      - |
        DESTINATIONS="--destination git.mosaicstack.dev/mosaic/api:${CI_COMMIT_SHA:0:8}"
        if [ "$CI_COMMIT_BRANCH" = "main" ]; then
          DESTINATIONS="$DESTINATIONS --destination git.mosaicstack.dev/mosaic/api:latest"
        elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
          DESTINATIONS="$DESTINATIONS --destination git.mosaicstack.dev/mosaic/api:dev"
        fi
        if [ -n "$CI_COMMIT_TAG" ]; then
          DESTINATIONS="$DESTINATIONS --destination git.mosaicstack.dev/mosaic/api:$CI_COMMIT_TAG"
        fi
        /kaniko/executor --context . --dockerfile apps/api/Dockerfile $DESTINATIONS
    when:
      - branch: [main, develop]
        event: [push, manual, tag]
    depends_on:
      - build

  docker-build-web:
    image: gcr.io/kaniko-project/executor:debug
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
      CI_COMMIT_SHA: ${CI_COMMIT_SHA}
    commands:
      - *kaniko_setup
      - |
        DESTINATIONS="--destination git.mosaicstack.dev/mosaic/web:${CI_COMMIT_SHA:0:8}"
        if [ "$CI_COMMIT_BRANCH" = "main" ]; then
          DESTINATIONS="$DESTINATIONS --destination git.mosaicstack.dev/mosaic/web:latest"
        elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
          DESTINATIONS="$DESTINATIONS --destination git.mosaicstack.dev/mosaic/web:dev"
        fi
        if [ -n "$CI_COMMIT_TAG" ]; then
          DESTINATIONS="$DESTINATIONS --destination git.mosaicstack.dev/mosaic/web:$CI_COMMIT_TAG"
        fi
        /kaniko/executor --context . --dockerfile apps/web/Dockerfile --build-arg NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev $DESTINATIONS
    when:
      - branch: [main, develop]
        event: [push, manual, tag]
    depends_on:
      - build

  docker-build-postgres:
    image: gcr.io/kaniko-project/executor:debug
    environment:
      GITEA_USER:
        from_secret: gitea_username
      GITEA_TOKEN:
        from_secret: gitea_token
      CI_COMMIT_BRANCH: ${CI_COMMIT_BRANCH}
      CI_COMMIT_TAG: ${CI_COMMIT_TAG}
      CI_COMMIT_SHA: ${CI_COMMIT_SHA}
    commands:
      - *kaniko_setup
      - |
        DESTINATIONS="--destination git.mosaicstack.dev/mosaic/postgres:${CI_COMMIT_SHA:0:8}"
        if [ "$CI_COMMIT_BRANCH" = "main" ]; then
          DESTINATIONS="$DESTINATIONS --destination git.mosaicstack.dev/mosaic/postgres:latest"
        elif [ "$CI_COMMIT_BRANCH" = "develop" ]; then
          DESTINATIONS="$DESTINATIONS --destination git.mosaicstack.dev/mosaic/postgres:dev"
        fi
        if [ -n "$CI_COMMIT_TAG" ]; then
          DESTINATIONS="$DESTINATIONS --destination git.mosaicstack.dev/mosaic/postgres:$CI_COMMIT_TAG"
        fi
        /kaniko/executor --context docker/postgres --dockerfile docker/postgres/Dockerfile $DESTINATIONS
    when:
      - branch: [main, develop]
        event: [push, manual, tag]
    depends_on:
      - build

View File

@@ -34,7 +34,7 @@ steps:
SKIP_ENV_VALIDATION: "true"
commands:
- *use_deps
- pnpm lint || true # Non-blocking while fixing legacy code
- pnpm lint
depends_on:
- install
when:
@@ -66,7 +66,7 @@ steps:
SKIP_ENV_VALIDATION: "true"
commands:
- *use_deps
- pnpm test || true # Non-blocking while fixing legacy tests
- pnpm test
depends_on:
- prisma-generate

View File

@@ -142,4 +142,69 @@ export class FederationAuditService {
securityEvent: true,
});
}
/**
 * Record an incoming federation connection attempt.
 * Emitted for every inbound connection request (security monitoring).
 */
logIncomingConnectionAttempt(data: {
  workspaceId: string;
  remoteInstanceId: string;
  remoteUrl: string;
  timestamp: number;
}): void {
  const { workspaceId, remoteInstanceId, remoteUrl, timestamp } = data;
  this.logger.log({
    event: "FEDERATION_INCOMING_CONNECTION_ATTEMPT",
    workspaceId,
    remoteInstanceId,
    remoteUrl,
    // Timestamp claimed by the remote request vs. the time we logged it.
    requestTimestamp: new Date(timestamp).toISOString(),
    timestamp: new Date().toISOString(),
    securityEvent: true,
  });
}
/**
 * Record that an incoming federation connection was successfully created.
 */
logIncomingConnectionCreated(data: {
  workspaceId: string;
  connectionId: string;
  remoteInstanceId: string;
  remoteUrl: string;
}): void {
  const { workspaceId, connectionId, remoteInstanceId, remoteUrl } = data;
  this.logger.log({
    event: "FEDERATION_INCOMING_CONNECTION_CREATED",
    workspaceId,
    connectionId,
    remoteInstanceId,
    remoteUrl,
    timestamp: new Date().toISOString(),
    securityEvent: true,
  });
}
/**
 * Record a rejected incoming federation connection (security event).
 * Logged at warn level so rejections stand out in monitoring.
 */
logIncomingConnectionRejected(data: {
  workspaceId: string;
  remoteInstanceId: string;
  remoteUrl?: string;
  reason: string;
  error?: string;
}): void {
  const { workspaceId, remoteInstanceId, remoteUrl, reason, error } = data;
  this.logger.warn({
    event: "FEDERATION_INCOMING_CONNECTION_REJECTED",
    workspaceId,
    remoteInstanceId,
    remoteUrl,
    reason,
    error,
    timestamp: new Date().toISOString(),
    securityEvent: true,
  });
}
}

View File

@@ -10,6 +10,7 @@ import { HttpService } from "@nestjs/axios";
import { ConnectionService } from "./connection.service";
import { FederationService } from "./federation.service";
import { SignatureService } from "./signature.service";
import { FederationAuditService } from "./audit.service";
import { PrismaService } from "../prisma/prisma.service";
import { FederationConnectionStatus } from "@prisma/client";
import { FederationConnection } from "@prisma/client";
@@ -22,6 +23,7 @@ describe("ConnectionService", () => {
let federationService: FederationService;
let signatureService: SignatureService;
let httpService: HttpService;
let auditService: FederationAuditService;
const mockWorkspaceId = "workspace-123";
const mockRemoteUrl = "https://remote.example.com";
@@ -85,6 +87,7 @@ describe("ConnectionService", () => {
findUnique: vi.fn(),
findMany: vi.fn(),
update: vi.fn(),
delete: vi.fn(),
},
},
},
@@ -109,6 +112,14 @@ describe("ConnectionService", () => {
post: vi.fn(),
},
},
{
provide: FederationAuditService,
useValue: {
logIncomingConnectionAttempt: vi.fn(),
logIncomingConnectionCreated: vi.fn(),
logIncomingConnectionRejected: vi.fn(),
},
},
],
}).compile();
@@ -117,6 +128,7 @@ describe("ConnectionService", () => {
federationService = module.get<FederationService>(FederationService);
signatureService = module.get<SignatureService>(SignatureService);
httpService = module.get<HttpService>(HttpService);
auditService = module.get<FederationAuditService>(FederationAuditService);
});
it("should be defined", () => {
@@ -208,6 +220,36 @@ describe("ConnectionService", () => {
})
);
});
it("should delete connection and throw error if request fails", async () => {
const mockAxiosResponse: AxiosResponse = {
data: mockRemoteIdentity,
status: 200,
statusText: "OK",
headers: {},
config: {} as never,
};
vi.spyOn(httpService, "get").mockReturnValue(of(mockAxiosResponse));
vi.spyOn(httpService, "post").mockReturnValue(
throwError(() => new Error("Connection refused"))
);
const createSpy = vi
.spyOn(prismaService.federationConnection, "create")
.mockResolvedValue(mockConnection);
const deleteSpy = vi
.spyOn(prismaService.federationConnection, "delete")
.mockResolvedValue(mockConnection);
await expect(service.initiateConnection(mockWorkspaceId, mockRemoteUrl)).rejects.toThrow(
"Failed to initiate connection"
);
expect(createSpy).toHaveBeenCalled();
expect(deleteSpy).toHaveBeenCalledWith({
where: { id: mockConnection.id },
});
});
});
describe("acceptConnection", () => {
@@ -418,5 +460,55 @@ describe("ConnectionService", () => {
service.handleIncomingConnectionRequest(mockWorkspaceId, mockRequest)
).rejects.toThrow("Invalid connection request signature");
});
it("should log incoming connection attempt", async () => {
vi.spyOn(signatureService, "verifyConnectionRequest").mockReturnValue({ valid: true });
vi.spyOn(prismaService.federationConnection, "create").mockResolvedValue(mockConnection);
const auditSpy = vi.spyOn(auditService, "logIncomingConnectionAttempt");
await service.handleIncomingConnectionRequest(mockWorkspaceId, mockRequest);
expect(auditSpy).toHaveBeenCalledWith({
workspaceId: mockWorkspaceId,
remoteInstanceId: mockRequest.instanceId,
remoteUrl: mockRequest.instanceUrl,
timestamp: mockRequest.timestamp,
});
});
it("should log connection created on success", async () => {
vi.spyOn(signatureService, "verifyConnectionRequest").mockReturnValue({ valid: true });
vi.spyOn(prismaService.federationConnection, "create").mockResolvedValue(mockConnection);
const auditSpy = vi.spyOn(auditService, "logIncomingConnectionCreated");
await service.handleIncomingConnectionRequest(mockWorkspaceId, mockRequest);
expect(auditSpy).toHaveBeenCalledWith({
workspaceId: mockWorkspaceId,
connectionId: mockConnection.id,
remoteInstanceId: mockRequest.instanceId,
remoteUrl: mockRequest.instanceUrl,
});
});
it("should log connection rejected on invalid signature", async () => {
vi.spyOn(signatureService, "verifyConnectionRequest").mockReturnValue({
valid: false,
error: "Invalid signature",
});
const auditSpy = vi.spyOn(auditService, "logIncomingConnectionRejected");
await expect(
service.handleIncomingConnectionRequest(mockWorkspaceId, mockRequest)
).rejects.toThrow();
expect(auditSpy).toHaveBeenCalledWith({
workspaceId: mockWorkspaceId,
remoteInstanceId: mockRequest.instanceId,
remoteUrl: mockRequest.instanceUrl,
reason: "Invalid signature",
error: "Invalid signature",
});
});
});
});

View File

@@ -10,12 +10,14 @@ import {
NotFoundException,
UnauthorizedException,
ServiceUnavailableException,
BadRequestException,
} from "@nestjs/common";
import { HttpService } from "@nestjs/axios";
import { FederationConnectionStatus, Prisma } from "@prisma/client";
import { PrismaService } from "../prisma/prisma.service";
import { FederationService } from "./federation.service";
import { SignatureService } from "./signature.service";
import { FederationAuditService } from "./audit.service";
import { firstValueFrom } from "rxjs";
import type { ConnectionRequest, ConnectionDetails } from "./types/connection.types";
import type { PublicInstanceIdentity } from "./types/instance.types";
@@ -28,7 +30,8 @@ export class ConnectionService {
private readonly prisma: PrismaService,
private readonly federationService: FederationService,
private readonly signatureService: SignatureService,
private readonly httpService: HttpService
private readonly httpService: HttpService,
private readonly auditService: FederationAuditService
) {}
/**
@@ -68,7 +71,7 @@ export class ConnectionService {
const signature = await this.signatureService.signMessage(request);
const signedRequest: ConnectionRequest = { ...request, signature };
// Send connection request to remote instance (fire-and-forget for now)
// Send connection request to remote instance
try {
await firstValueFrom(
this.httpService.post(`${remoteUrl}/api/v1/federation/incoming/connect`, signedRequest)
@@ -76,7 +79,16 @@ export class ConnectionService {
this.logger.log(`Connection request sent to ${remoteUrl}`);
} catch (error) {
this.logger.error(`Failed to send connection request to ${remoteUrl}`, error);
// Connection is still created in PENDING state, can be retried
// Delete the failed connection to prevent zombie connections in PENDING state
await this.prisma.federationConnection.delete({
where: { id: connection.id },
});
const errorMessage = error instanceof Error ? error.message : "Unknown error";
throw new BadRequestException(
`Failed to initiate connection to ${remoteUrl}: ${errorMessage}`
);
}
return this.mapToConnectionDetails(connection);
@@ -265,12 +277,30 @@ export class ConnectionService {
): Promise<ConnectionDetails> {
this.logger.log(`Received connection request from ${request.instanceId}`);
// Audit log: Incoming connection attempt
this.auditService.logIncomingConnectionAttempt({
workspaceId,
remoteInstanceId: request.instanceId,
remoteUrl: request.instanceUrl,
timestamp: request.timestamp,
});
// Verify signature
const validation = this.signatureService.verifyConnectionRequest(request);
if (!validation.valid) {
const errorMsg: string = validation.error ?? "Unknown error";
this.logger.warn(`Invalid connection request from ${request.instanceId}: ${errorMsg}`);
// Audit log: Connection rejected
this.auditService.logIncomingConnectionRejected({
workspaceId,
remoteInstanceId: request.instanceId,
remoteUrl: request.instanceUrl,
reason: "Invalid signature",
error: errorMsg,
});
throw new UnauthorizedException("Invalid connection request signature");
}
@@ -291,6 +321,14 @@ export class ConnectionService {
this.logger.log(`Created pending connection ${connection.id} from ${request.instanceId}`);
// Audit log: Connection created
this.auditService.logIncomingConnectionCreated({
workspaceId,
connectionId: connection.id,
remoteInstanceId: request.instanceId,
remoteUrl: request.instanceUrl,
});
return this.mapToConnectionDetails(connection);
}

View File

@@ -7,10 +7,65 @@ import {
IsOptional,
ArrayNotEmpty,
IsIn,
Validate,
ValidatorConstraint,
ValidatorConstraintInterface,
ValidationArguments,
} from "class-validator";
import { Type } from "class-transformer";
import { AgentType } from "../../../spawner/types/agent-spawner.types";
import { GateProfileType } from "../../../coordinator/types/gate-config.types";
import { validateBranchName, validateRepositoryUrl } from "../../../git/git-validation.util";
/**
 * Custom class-validator constraint for git branch names.
 * Delegates to `validateBranchName`, which uses whitelist-based
 * validation to prevent command injection.
 */
@ValidatorConstraint({ name: "isValidBranchName", async: false })
export class IsValidBranchName implements ValidatorConstraintInterface {
  validate(branchName: string, _args: ValidationArguments): boolean {
    let accepted = true;
    try {
      validateBranchName(branchName);
    } catch {
      accepted = false;
    }
    return accepted;
  }

  defaultMessage(args: ValidationArguments): string {
    // Re-run the validator to surface its specific error message.
    try {
      validateBranchName(args.value as string);
    } catch (error) {
      if (error instanceof Error) {
        return error.message;
      }
    }
    return "Branch name is invalid";
  }
}
/**
 * Custom class-validator constraint for git repository URLs.
 * Delegates to `validateRepositoryUrl`, which rejects dangerous
 * protocols to prevent SSRF and command injection.
 */
@ValidatorConstraint({ name: "isValidRepositoryUrl", async: false })
export class IsValidRepositoryUrl implements ValidatorConstraintInterface {
  validate(repositoryUrl: string, _args: ValidationArguments): boolean {
    let accepted = true;
    try {
      validateRepositoryUrl(repositoryUrl);
    } catch {
      accepted = false;
    }
    return accepted;
  }

  defaultMessage(args: ValidationArguments): string {
    // Re-run the validator to surface its specific error message.
    try {
      validateRepositoryUrl(args.value as string);
    } catch (error) {
      if (error instanceof Error) {
        return error.message;
      }
    }
    return "Repository URL is invalid";
  }
}
/**
* Context DTO for agent spawn request
@@ -18,10 +73,12 @@ import { GateProfileType } from "../../../coordinator/types/gate-config.types";
export class AgentContextDto {
@IsString()
@IsNotEmpty()
@Validate(IsValidRepositoryUrl)
repository!: string;
@IsString()
@IsNotEmpty()
@Validate(IsValidBranchName)
branch!: string;
@IsArray()

View File

@@ -0,0 +1,238 @@
/**
* Git Validation Utility Tests
*
* Tests for command injection prevention in git operations
*/
import { describe, it, expect } from "vitest";
import { BadRequestException } from "@nestjs/common";
import {
validateBranchName,
validateRepositoryUrl,
validateSpawnContext,
} from "./git-validation.util";
// Regression suite for git-validation.util: branch-name and repository-URL
// validation, covering accepted formats plus command-injection / SSRF rejects.
describe("validateBranchName", () => {
  describe("Valid branch names", () => {
    it("should accept standard branch names", () => {
      expect(() => validateBranchName("main")).not.toThrow();
      expect(() => validateBranchName("develop")).not.toThrow();
      expect(() => validateBranchName("master")).not.toThrow();
    });

    it("should accept feature branch names with slashes", () => {
      expect(() => validateBranchName("feature/add-login")).not.toThrow();
      expect(() => validateBranchName("fix/bug-123")).not.toThrow();
      expect(() => validateBranchName("hotfix/security-patch")).not.toThrow();
    });

    it("should accept branch names with hyphens and underscores", () => {
      expect(() => validateBranchName("feature-branch")).not.toThrow();
      expect(() => validateBranchName("feature_branch")).not.toThrow();
      expect(() => validateBranchName("feature-branch_v2")).not.toThrow();
    });

    it("should accept branch names with dots", () => {
      expect(() => validateBranchName("release/1.0.0")).not.toThrow();
      expect(() => validateBranchName("v2.5.1")).not.toThrow();
    });

    it("should accept branch names with numbers", () => {
      expect(() => validateBranchName("feature-123")).not.toThrow();
      expect(() => validateBranchName("123-bugfix")).not.toThrow();
    });
  });

  describe("Invalid branch names (Command Injection)", () => {
    it("should reject empty or whitespace-only names", () => {
      expect(() => validateBranchName("")).toThrow(BadRequestException);
      expect(() => validateBranchName(" ")).toThrow(BadRequestException);
      expect(() => validateBranchName("\t")).toThrow(BadRequestException);
    });

    it("should reject names starting with hyphen (option injection)", () => {
      expect(() => validateBranchName("--config")).toThrow(BadRequestException);
      expect(() => validateBranchName("-malicious")).toThrow(BadRequestException);
    });

    it("should reject names with double dots (range specification)", () => {
      expect(() => validateBranchName("feature..main")).toThrow(BadRequestException);
      expect(() => validateBranchName("..malicious")).toThrow(BadRequestException);
    });

    it("should reject names with path traversal patterns", () => {
      expect(() => validateBranchName("../etc/passwd")).toThrow(BadRequestException);
      expect(() => validateBranchName("feature/../main")).toThrow(BadRequestException);
      expect(() => validateBranchName("malicious/..")).toThrow(BadRequestException);
    });

    it("should reject names ending with .lock (reserved by git)", () => {
      expect(() => validateBranchName("feature.lock")).toThrow(BadRequestException);
      expect(() => validateBranchName("main.lock")).toThrow(BadRequestException);
    });

    it("should reject names with special shell characters", () => {
      expect(() => validateBranchName("feature;rm -rf /")).toThrow(BadRequestException);
      expect(() => validateBranchName("feature$malicious")).toThrow(BadRequestException);
      expect(() => validateBranchName("feature`whoami`")).toThrow(BadRequestException);
      expect(() => validateBranchName("feature$(whoami)")).toThrow(BadRequestException);
      expect(() => validateBranchName("feature|malicious")).toThrow(BadRequestException);
      expect(() => validateBranchName("feature&malicious")).toThrow(BadRequestException);
    });

    it("should reject names with control characters", () => {
      expect(() => validateBranchName("feature\x00malicious")).toThrow(BadRequestException);
      expect(() => validateBranchName("feature\x1Fmalicious")).toThrow(BadRequestException);
      expect(() => validateBranchName("feature\x7Fmalicious")).toThrow(BadRequestException);
    });

    it("should reject names exceeding maximum length", () => {
      // validateBranchName enforces git's 255-character ref-name limit.
      const longName = "a".repeat(256);
      expect(() => validateBranchName(longName)).toThrow(BadRequestException);
    });

    it("should reject names with spaces", () => {
      expect(() => validateBranchName("feature branch")).toThrow(BadRequestException);
      expect(() => validateBranchName("feature branch")).toThrow(BadRequestException);
    });
  });
});

describe("validateRepositoryUrl", () => {
  describe("Valid repository URLs", () => {
    it("should accept HTTPS URLs", () => {
      expect(() => validateRepositoryUrl("https://github.com/user/repo.git")).not.toThrow();
      expect(() => validateRepositoryUrl("https://gitlab.com/group/project.git")).not.toThrow();
      expect(() => validateRepositoryUrl("https://bitbucket.org/user/repo.git")).not.toThrow();
    });

    it("should accept HTTP URLs (for development)", () => {
      expect(() => validateRepositoryUrl("http://git.example.com/repo.git")).not.toThrow();
    });

    it("should accept SSH URLs with git@ format", () => {
      expect(() => validateRepositoryUrl("git@github.com:user/repo.git")).not.toThrow();
      expect(() => validateRepositoryUrl("ssh://git@gitlab.com/user/repo.git")).not.toThrow();
    });

    it("should accept git:// protocol", () => {
      expect(() => validateRepositoryUrl("git://github.com/user/repo.git")).not.toThrow();
    });
  });

  describe("Invalid repository URLs (Security Risks)", () => {
    it("should reject empty or whitespace-only URLs", () => {
      expect(() => validateRepositoryUrl("")).toThrow(BadRequestException);
      expect(() => validateRepositoryUrl(" ")).toThrow(BadRequestException);
    });

    it("should reject dangerous protocols (file://)", () => {
      expect(() => validateRepositoryUrl("file:///etc/passwd")).toThrow(BadRequestException);
      expect(() => validateRepositoryUrl("file://C:/Windows/System32")).toThrow(
        BadRequestException
      );
    });

    it("should reject dangerous protocols (javascript:, data:)", () => {
      expect(() => validateRepositoryUrl("javascript:alert('XSS')")).toThrow(BadRequestException);
      expect(() => validateRepositoryUrl("data:text/html,<script>alert('XSS')</script>")).toThrow(
        BadRequestException
      );
    });

    it("should reject localhost URLs (SSRF protection)", () => {
      expect(() => validateRepositoryUrl("https://localhost/repo.git")).toThrow(
        BadRequestException
      );
      expect(() => validateRepositoryUrl("https://127.0.0.1/repo.git")).toThrow(
        BadRequestException
      );
      expect(() => validateRepositoryUrl("https://0.0.0.0/repo.git")).toThrow(BadRequestException);
      expect(() => validateRepositoryUrl("http://::1/repo.git")).toThrow(BadRequestException);
    });

    it("should reject internal network URLs (SSRF protection)", () => {
      expect(() => validateRepositoryUrl("https://192.168.1.1/repo.git")).toThrow(
        BadRequestException
      );
      expect(() => validateRepositoryUrl("https://10.0.0.1/repo.git")).toThrow(BadRequestException);
      expect(() => validateRepositoryUrl("https://172.16.0.1/repo.git")).toThrow(
        BadRequestException
      );
    });

    it("should reject URLs with embedded credentials", () => {
      expect(() => validateRepositoryUrl("https://user:pass@github.com/repo.git")).toThrow(
        BadRequestException
      );
    });

    it("should reject URLs with shell special characters", () => {
      expect(() => validateRepositoryUrl("https://github.com/repo.git;whoami")).toThrow(
        BadRequestException
      );
      expect(() => validateRepositoryUrl("https://github.com/repo.git|malicious")).toThrow(
        BadRequestException
      );
      expect(() => validateRepositoryUrl("https://github.com/repo.git&malicious")).toThrow(
        BadRequestException
      );
      expect(() => validateRepositoryUrl("https://github.com/repo.git$malicious")).toThrow(
        BadRequestException
      );
      expect(() => validateRepositoryUrl("https://github.com/repo.git`whoami`")).toThrow(
        BadRequestException
      );
    });

    it("should reject URLs exceeding maximum length", () => {
      const longUrl = "https://github.com/" + "a".repeat(2000) + ".git";
      expect(() => validateRepositoryUrl(longUrl)).toThrow(BadRequestException);
    });

    it("should reject unknown/dangerous protocols", () => {
      expect(() => validateRepositoryUrl("ftp://example.com/repo.git")).toThrow(
        BadRequestException
      );
      expect(() => validateRepositoryUrl("telnet://example.com")).toThrow(BadRequestException);
    });
  });
});

describe("validateSpawnContext", () => {
  it("should validate both repository and branch", () => {
    expect(() =>
      validateSpawnContext({
        repository: "https://github.com/user/repo.git",
        branch: "feature/add-login",
      })
    ).not.toThrow();
  });

  it("should reject invalid repository", () => {
    expect(() =>
      validateSpawnContext({
        repository: "file:///etc/passwd",
        branch: "main",
      })
    ).toThrow(BadRequestException);
  });

  it("should reject invalid branch", () => {
    expect(() =>
      validateSpawnContext({
        repository: "https://github.com/user/repo.git",
        branch: "--config malicious",
      })
    ).toThrow(BadRequestException);
  });

  it("should reject both invalid repository and branch", () => {
    expect(() =>
      validateSpawnContext({
        repository: "javascript:alert('XSS')",
        branch: "$(whoami)",
      })
    ).toThrow(BadRequestException);
  });
});

View File

@@ -0,0 +1,219 @@
/**
* Git Input Validation Utility
*
* Provides strict validation for git references (branch names, repository URLs)
* to prevent command injection vulnerabilities.
*
* Security: Whitelist-based approach - only allow known-safe characters.
*/
import { BadRequestException, Logger } from "@nestjs/common";
const logger = new Logger("GitValidation");
/**
 * Validates a git branch name for safety.
 *
 * Allowed format: alphanumeric, hyphens, underscores, forward slashes, dots
 * Examples: "main", "feature/add-login", "fix/bug_123"
 *
 * Rejected: anything that could be interpreted as git syntax or shell input
 * Examples: "--option", "$(command)", ";malicious", "`command`"
 *
 * @param branchName - The branch name to validate
 * @throws BadRequestException if branch name is invalid
 */
export function validateBranchName(branchName: string): void {
  // Raise a validation failure with the given client-facing message.
  const reject = (message: string): never => {
    throw new BadRequestException(message);
  };
  // Emit a structured security-audit warning before rejecting.
  const audit = (event: string, reason: string): void => {
    logger.warn({
      event,
      input: branchName,
      reason,
      securityEvent: true,
      timestamp: new Date().toISOString(),
    });
  };

  if (!branchName || branchName.trim().length === 0) {
    reject("Branch name cannot be empty");
  }

  // git has a 255 char limit for ref names
  if (branchName.length > 255) {
    reject("Branch name exceeds maximum length (255 characters)");
  }

  // Whitelist: only alphanumeric, hyphens, underscores, forward slashes,
  // dots. This single check rules out every shell metacharacter.
  if (!/^[a-zA-Z0-9/_.-]+$/.test(branchName)) {
    audit("GIT_COMMAND_INJECTION_BLOCKED", "Invalid characters detected");
    reject(
      `Branch name contains invalid characters. Only alphanumeric, hyphens, underscores, slashes, and dots are allowed.`
    );
  }

  // A leading hyphen would let the name be parsed as a git option.
  if (branchName.startsWith("-")) {
    audit(
      "GIT_OPTION_INJECTION_BLOCKED",
      "Branch name starts with hyphen (option injection attempt)"
    );
    reject("Branch name cannot start with a hyphen (prevents option injection)");
  }

  // ".." is git's range-specification syntax.
  if (branchName.includes("..")) {
    audit("GIT_RANGE_INJECTION_BLOCKED", "Double dots detected (git range specification)");
    reject("Branch name cannot contain consecutive dots (..)");
  }

  // Guard against "../" style path traversal in any position.
  const traversal =
    branchName.includes("/../") || branchName.startsWith("../") || branchName.endsWith("/..");
  if (traversal) {
    audit("GIT_PATH_TRAVERSAL_BLOCKED", "Path traversal pattern detected");
    reject("Branch name cannot contain path traversal patterns");
  }

  // ".lock" suffix is reserved by git's ref storage.
  if (branchName.endsWith(".lock")) {
    reject("Branch name cannot end with .lock (reserved by git)");
  }

  // Defense in depth: control characters (already excluded by the whitelist).
  // eslint-disable-next-line no-control-regex
  if (/[\x00-\x1F\x7F]/.test(branchName)) {
    reject("Branch name cannot contain control characters");
  }
}
/**
 * Validates a git repository URL for safety
 *
 * Allowed protocols: https, http (dev only), ssh (git@host:repo and
 * ssh://git@host/repo forms), and git://
 * Prevents: file://, javascript:, data:, and other dangerous protocols,
 * localhost/private-network targets (SSRF), embedded credentials, and
 * shell metacharacters.
 *
 * @param repositoryUrl - The repository URL to validate
 * @throws BadRequestException if URL is invalid or uses dangerous protocol
 */
export function validateRepositoryUrl(repositoryUrl: string): void {
  // Check for empty or whitespace-only
  if (!repositoryUrl || repositoryUrl.trim().length === 0) {
    throw new BadRequestException("Repository URL cannot be empty");
  }
  // Check length (reasonable limit for URLs)
  if (repositoryUrl.length > 2000) {
    throw new BadRequestException("Repository URL exceeds maximum length (2000 characters)");
  }
  // Remove surrounding whitespace; all checks below run on the trimmed URL.
  const url = repositoryUrl.trim();
  // Whitelist of allowed protocol shapes. Anything that matches none of
  // these (and is not git://) is rejected before any further analysis.
  const httpsPattern = /^https:\/\//i;
  const httpPattern = /^http:\/\//i; // Only for development
  const sshGitPattern = /^git@[a-zA-Z0-9.-]+:/; // git@host:repo format
  const sshUrlPattern = /^ssh:\/\/git@[a-zA-Z0-9.-]+(\/|:)/; // ssh://git@host/repo or ssh://git@host:repo
  if (
    !httpsPattern.test(url) &&
    !httpPattern.test(url) &&
    !sshGitPattern.test(url) &&
    !sshUrlPattern.test(url) &&
    !url.startsWith("git://")
  ) {
    throw new BadRequestException(
      "Repository URL must use https://, http://, ssh://, git://, or git@ protocol"
    );
  }
  // Defense in depth: explicit denylist of dangerous protocols. None of
  // these can pass the whitelist above, so this loop is effectively
  // unreachable today — kept (with its security logging) in case the
  // whitelist is ever relaxed.
  const dangerousProtocols = [
    "file://",
    "javascript:",
    "data:",
    "vbscript:",
    "about:",
    "chrome:",
    "view-source:",
  ];
  for (const dangerous of dangerousProtocols) {
    if (url.toLowerCase().startsWith(dangerous)) {
      logger.warn({
        event: "GIT_DANGEROUS_PROTOCOL_BLOCKED",
        input: url,
        protocol: dangerous,
        reason: `Dangerous protocol detected: ${dangerous}`,
        securityEvent: true,
        timestamp: new Date().toISOString(),
      });
      throw new BadRequestException(
        `Repository URL cannot use ${dangerous} protocol (security risk)`
      );
    }
  }
  // Prevent localhost/internal network access (SSRF protection).
  // NOTE(review): these are prefix matches on http(s) URLs only — ssh://
  // and git:// URLs to internal hosts are not covered, and a public
  // hostname that happens to start with a private prefix (e.g.
  // "10.example.com") would be rejected. Confirm intended scope.
  const localhostPatterns = [
    /https?:\/\/(localhost|127\.0\.0\.1|0\.0\.0\.0|::1)/i,
    /https?:\/\/192\.168\./i,
    /https?:\/\/10\./i,
    /https?:\/\/172\.(1[6-9]|2\d|3[01])\./i,
  ];
  for (const pattern of localhostPatterns) {
    if (pattern.test(url)) {
      logger.warn({
        event: "GIT_SSRF_ATTEMPT_BLOCKED",
        input: url,
        reason: "Repository URL points to localhost or internal network",
        securityEvent: true,
        timestamp: new Date().toISOString(),
      });
      throw new BadRequestException(
        "Repository URL cannot point to localhost or internal networks (SSRF protection)"
      );
    }
  }
  // Prevent credential injection in the URL. SSH forms legitimately
  // contain "@" (git@host), so they are exempted.
  // NOTE(review): only "scheme://user:pass@host" is caught; a bare
  // username form ("https://user@host") still passes — confirm whether
  // that should also be rejected.
  if (url.includes("@") && !sshGitPattern.test(url) && !sshUrlPattern.test(url)) {
    // Extract the part before @ to check if it looks like credentials
    // (i.e. the authority section contains a ":" separating user:pass).
    const beforeAt = url.split("@")[0];
    if (beforeAt.includes("://") && beforeAt.split("://")[1].includes(":")) {
      throw new BadRequestException("Repository URL cannot contain embedded credentials");
    }
  }
  // Prevent control characters and shell metacharacters (backtick, $, ;,
  // |, &) anywhere in the URL — last line of defense against command
  // injection if the URL ever reaches a shell.
  // eslint-disable-next-line no-control-regex
  if (/[\x00-\x1F\x7F`$;|&]/.test(url)) {
    throw new BadRequestException("Repository URL contains invalid or dangerous characters");
  }
}
/**
 * Validates a complete agent spawn context.
 *
 * Delegates to validateRepositoryUrl and validateBranchName; the first
 * invalid field aborts validation.
 *
 * @param context - The spawn context with repository and branch
 * @throws BadRequestException if any field is invalid
 */
export function validateSpawnContext(context: { repository: string; branch: string }): void {
  const { repository, branch } = context;
  validateRepositoryUrl(repository);
  validateBranchName(branch);
}

View File

@@ -3,6 +3,7 @@ import { simpleGit, SimpleGit } from "simple-git";
import * as path from "path";
import { GitOperationsService } from "./git-operations.service";
import { WorktreeInfo, WorktreeError } from "./types";
import { validateBranchName } from "./git-validation.util";
/**
* Result of worktree cleanup operation
@@ -70,6 +71,10 @@ export class WorktreeManagerService {
throw new Error("taskId is required");
}
// Validate baseBranch to prevent command injection
// This is defense-in-depth - DTO validation should catch this first
validateBranchName(baseBranch);
const worktreePath = this.getWorktreePath(repoPath, agentId, taskId);
const branchName = this.getBranchName(agentId, taskId);
@@ -79,6 +84,7 @@ export class WorktreeManagerService {
const git = this.getGit(repoPath);
// Create worktree with new branch
// baseBranch is validated above to prevent command injection
await git.raw(["worktree", "add", worktreePath, "-b", branchName, baseBranch]);
this.logger.log(`Successfully created worktree at ${worktreePath}`);

View File

@@ -1,309 +0,0 @@
# Automated PR Merge System
## Overview
The Mosaic Stack automated PR merge system ensures all pull requests meet strict quality, security, and testing standards before being merged to `develop`. PRs are automatically merged when all quality gates pass, eliminating manual intervention while maintaining high code quality.
## Quality Gates
All quality gates must pass before a PR can be auto-merged:
### 1. Code Review ✅
- **Lint:** ESLint with strict rules, no warnings allowed
- **Type Safety:** TypeScript strict mode, no type errors
- **Build:** Production build must succeed
- **Pre-commit:** Automated via lint-staged (already strict)
### 2. Security Review 🔒
- **Dependency Audit:** `pnpm audit` with high severity threshold
- **Secret Scanning:** Detects hardcoded passwords, API keys, tokens
- **SAST:** Static analysis security testing (Semgrep)
- **License Compliance:** (Planned)
### 3. Quality Assurance 🧪
- **Unit Tests:** All tests must pass
- **Test Coverage:** ≥85% coverage requirement (enforced)
- **Integration Tests:** (Planned)
- **E2E Tests:** (Planned)
## How It Works
### 1. Developer Creates PR
```bash
# Create feature branch
git checkout -b feature/my-feature develop
# Make changes, commit
git add .
git commit -m "feat: add new feature"
# Push and create PR
git push -u origin feature/my-feature
tea pr create --base develop --title "feat: add new feature"
```
### 2. CI Pipeline Runs
Woodpecker CI automatically runs all quality gates:
```mermaid
graph TD
A[PR Created] --> B[Install Dependencies]
B --> C[Security Audit]
B --> D[Secret Scanning]
B --> E[SAST Scanning]
B --> F[Lint Check]
B --> G[Type Check]
B --> H[Unit Tests]
H --> I[Coverage Check]
C --> J{All Checks Pass?}
D --> J
E --> J
F --> J
G --> J
I --> J
J -->|Yes| K[Build Verification]
K --> L[Auto-Merge to develop]
J -->|No| M[Block Merge]
```
### 3. Automatic Merge
If all checks pass:
- ✅ PR automatically merges to `develop`
- ✅ Feature branch automatically deleted
- ✅ Developer notified of successful merge
If any check fails:
- ❌ PR blocked from merging
- ❌ Developer notified of failure
- ❌ Must fix issues before retry
## Configuration
### Woodpecker CI
The enhanced Woodpecker CI configuration is in `.woodpecker.enhanced.yml`. Key features:
```yaml
# Strict quality gates (all must pass)
lint:
failure: fail # Block on any lint error/warning
typecheck:
failure: fail # Block on any type error
test-unit:
failure: fail # Block on any test failure
# Security scanning
security-audit-deps:
failure: fail # Block on high/critical vulnerabilities
security-scan-secrets:
failure: fail # Block on hardcoded secrets
# Auto-merge step (runs after all checks pass)
pr-auto-merge:
when:
- event: pull_request
evaluate: 'CI_COMMIT_TARGET_BRANCH == "develop"'
depends_on:
- build
- test-unit
- lint
- typecheck
- security-audit-deps
```
### Required Secrets
Configure these in Gitea settings → Secrets:
- `gitea_token` - API token with repo write access
- `gitea_username` - Gitea username for Docker registry
### Branch Protection
Recommended Gitea branch protection for `develop`:
```json
{
"branch_name": "develop",
"enable_push": false,
"enable_push_whitelist": false,
"enable_merge_whitelist": false,
"enable_status_check": true,
"required_status_checks": [
"ci/woodpecker/pr/lint",
"ci/woodpecker/pr/typecheck",
"ci/woodpecker/pr/test-unit",
"ci/woodpecker/pr/security-audit-deps",
"ci/woodpecker/pr/build"
],
"enable_approvals_whitelist": false,
"required_approvals": 0
}
```
## Manual Auto-Merge
You can manually trigger auto-merge for a specific PR:
```bash
# Set Gitea API token
export GITEA_TOKEN="your-token-here"
# Merge PR #123 to develop
./scripts/ci/auto-merge-pr.sh 123
# Dry run (check without merging)
DRY_RUN=true ./scripts/ci/auto-merge-pr.sh 123
```
## Quality Gate Strictness Levels
### Current (Enhanced) Configuration
| Check | Status | Blocking | Notes |
| ---------------- | --------- | -------- | ---------------------------------------- |
| Dependency Audit | ✅ Active | Yes | Blocks on high+ vulnerabilities |
| Secret Scanning | ✅ Active | Yes | Blocks on hardcoded secrets |
| SAST (Semgrep) | ⚠️ Active | No\* | \*TODO: Enable after baseline cleanup |
| Lint | ✅ Active | Yes | Zero warnings/errors |
| TypeScript | ✅ Active | Yes | Strict mode, no errors |
| Unit Tests | ✅ Active | Yes | All tests must pass |
| Test Coverage | ⚠️ Active | No\* | \*TODO: Enable after implementing parser |
| Build | ✅ Active | Yes | Production build must succeed |
### Migration Plan
**Phase 1: Current State**
- Lint and tests non-blocking (`|| true`)
- Basic security audit
- Manual PR merging
**Phase 2: Enhanced (This PR)** ← WE ARE HERE
- All checks strict and blocking
- Security scanning (deps, secrets, SAST)
- Auto-merge enabled for clean PRs
**Phase 3: Future Enhancements**
- SAST fully enforced (after baseline cleanup)
- Test coverage threshold enforced (≥85%)
- Integration and E2E tests
- License compliance checking
- Performance regression testing
## Troubleshooting
### PR Not Auto-Merging
Check these common issues:
1. **Merge Conflicts**
```bash
# Rebase on develop
git fetch origin develop
git rebase origin/develop
git push --force-with-lease
```
2. **Failed Quality Gates**
```bash
# Check CI logs
woodpecker pipeline ls mosaic/stack
woodpecker log show mosaic/stack <pipeline-number>
```
3. **Missing Status Checks**
- Ensure all required checks are configured
- Verify Woodpecker CI is running
- Check webhook configuration
### Bypassing Auto-Merge
In rare cases where manual merge is needed:
```bash
# Manually merge via CLI (requires admin access)
tea pr merge <pr-number> --style merge
```
**⚠️ WARNING:** Manual merges bypass quality gates. Only use in emergencies.
## Best Practices
### For Developers
1. **Run checks locally before pushing:**
```bash
pnpm lint
pnpm typecheck
pnpm test
pnpm build
```
2. **Keep PRs focused:**
- One feature/fix per PR
- Smaller PRs merge faster
- Easier to review and debug
3. **Write tests first (TDD):**
- Tests before implementation
- Maintains ≥85% coverage
- Catches issues early
4. **Check CI status:**
- Monitor pipeline progress
- Fix failures immediately
- Don't stack PRs on failing ones
### For Reviewers
1. **Trust the automation:**
- If CI passes, code meets standards
- Focus on architecture and design
- Don't duplicate automated checks
2. **Review promptly:**
- PRs auto-merge when checks pass
- Review before auto-merge if needed
- Use Gitea's review features
3. **Provide constructive feedback:**
- Suggest improvements
- Link to documentation
- Explain reasoning
## Metrics & Monitoring
Track these metrics to measure effectiveness:
- **Auto-merge rate:** % of PRs merged automatically
- **Average time to merge:** From PR creation to merge
- **Quality gate failures:** Which checks fail most often
- **Rollback rate:** % of merges that need revert
## References
- [Quality Rails Status](docs/quality-rails-status.md)
- [Woodpecker CI Documentation](https://woodpecker-ci.org/docs)
- [Gitea API Documentation](https://docs.gitea.io/en-us/api-usage/)
- [Design Principles](docs/DESIGN-PRINCIPLES.md)
---
**Questions?** Contact the platform team or create an issue.

View File

@@ -1,366 +0,0 @@
# Migration Guide: Enhanced CI/CD with Auto-Merge
## Overview
This guide walks through migrating from the current Woodpecker CI configuration to the enhanced version with strict quality gates and automated PR merging.
## Pre-Migration Checklist
Before activating the enhanced pipeline, ensure:
- [ ] **All existing PRs are merged or closed**
- Enhanced pipeline has strict gates that may block old PRs
- Review and clean up pending PRs first
- [ ] **Baseline quality metrics recorded**
```bash
# Run on clean develop branch
pnpm lint 2>&1 | tee baseline-lint.txt
pnpm typecheck 2>&1 | tee baseline-typecheck.txt
pnpm test 2>&1 | tee baseline-tests.txt
```
- [ ] **Gitea API token created**
- Go to Settings → Applications → Generate New Token
- Scopes: `repo` (full control)
- Save token securely
- [ ] **Woodpecker secrets configured**
```bash
# Add gitea_token secret
woodpecker secret add \
--repository mosaic/stack \
--name gitea_token \
--value "your-token-here"
```
- [ ] **Team notified of change**
- Announce strict quality gates
- Share this migration guide
- Schedule migration during low-activity period
## Migration Steps
### Step 1: Backup Current Configuration
```bash
# Backup current .woodpecker.yml
cp .woodpecker.yml .woodpecker.yml.backup
# Backup current git state
git branch backup-pre-migration
git push origin backup-pre-migration
```
### Step 2: Activate Enhanced Configuration
```bash
# Replace .woodpecker.yml with enhanced version
cp .woodpecker.enhanced.yml .woodpecker.yml
# Review changes
git diff .woodpecker.yml.backup .woodpecker.yml
```
Key changes:
- ✅ Removed `|| true` from lint step (now strict)
- ✅ Removed `|| true` from test step (now strict)
- ✅ Added security scanning steps
- ✅ Added test coverage step
- ✅ Added pr-auto-merge step
### Step 3: Test with a Dry-Run PR
Create a test PR to verify the enhanced pipeline:
```bash
# Create test branch
git checkout -b test/enhanced-ci develop
# Make a trivial change
echo "# CI Test" >> README.md
git add README.md
git commit -m "test: verify enhanced CI pipeline"
# Push and create PR
git push -u origin test/enhanced-ci
tea pr create \
--base develop \
--title "test: Verify enhanced CI pipeline" \
--description "Test PR to verify all quality gates work correctly"
```
Monitor the pipeline:
```bash
# Watch pipeline status
woodpecker pipeline ls mosaic/stack
# View logs if needed
woodpecker log show mosaic/stack <pipeline-number>
```
Expected behavior:
- ✅ All quality gates run
- ✅ Security scans complete
- ✅ Tests and coverage checks run
- ✅ PR auto-merges if all checks pass
### Step 4: Configure Branch Protection
Set up branch protection for `develop`:
**Option A: Via Gitea Web UI**
1. Go to Settings → Branches
2. Add branch protection rule for `develop`
3. Enable: "Enable Status Check"
4. Add required checks:
- `ci/woodpecker/pr/lint`
- `ci/woodpecker/pr/typecheck`
- `ci/woodpecker/pr/test-unit`
- `ci/woodpecker/pr/security-audit-deps`
- `ci/woodpecker/pr/build`
**Option B: Via API**
```bash
curl -X POST "https://git.mosaicstack.dev/api/v1/repos/mosaic/stack/branch_protections" \
-H "Authorization: token $GITEA_TOKEN" \
-H "Content-Type: application/json" \
-d '{
"branch_name": "develop",
"enable_push": false,
"enable_status_check": true,
"status_check_contexts": [
"ci/woodpecker/pr/lint",
"ci/woodpecker/pr/typecheck",
"ci/woodpecker/pr/test-unit",
"ci/woodpecker/pr/security-audit-deps",
"ci/woodpecker/pr/build"
]
}'
```
### Step 5: Gradual Rollout
**Phase 1: Monitor (Week 1)**
- Enhanced CI active, auto-merge disabled
- Monitor quality gate failures
- Collect metrics on pass/fail rates
```yaml
# In .woodpecker.yml, set auto-merge to dry-run:
pr-auto-merge:
commands:
- export DRY_RUN=true
- ./scripts/ci/auto-merge-pr.sh
```
**Phase 2: Enable Auto-Merge (Week 2)**
- Remove DRY_RUN flag
- Enable auto-merge for clean PRs
- Monitor merge success rate
**Phase 3: Enforce Coverage (Week 3)**
- Enable test coverage threshold
- Set minimum to 85%
- Block PRs below threshold
**Phase 4: Full Enforcement (Week 4)**
- Enable SAST as blocking
- Enforce all quality gates
- Remove any remaining fallbacks
### Step 6: Cleanup
After successful migration:
```bash
# Remove backup files
rm .woodpecker.yml.backup
git branch -D backup-pre-migration
git push origin --delete backup-pre-migration
# Remove old test PR
tea pr close <test-pr-number>
```
## Rollback Plan
If issues arise during migration:
### Immediate Rollback
```bash
# Restore original configuration
cp .woodpecker.yml.backup .woodpecker.yml
git add .woodpecker.yml
git commit -m "rollback: Restore original CI configuration"
git push origin develop
```
### Partial Rollback
If only specific gates are problematic:
```yaml
# Make specific checks non-blocking temporarily
lint:
commands:
- pnpm lint || true # Non-blocking during stabilization
failure: ignore
```
## Post-Migration Verification
After migration, verify:
- [ ] **All quality gates run on PRs**
```bash
# Check recent PR pipelines
tea pr list --state all --limit 10
```
- [ ] **Auto-merge works correctly**
- Create test PR with passing checks
- Verify auto-merge occurs
- Check branch deletion
- [ ] **Failures block correctly**
- Create test PR with lint errors
- Verify PR is blocked
- Verify error messages are clear
- [ ] **Metrics tracked**
- Auto-merge rate
- Average time to merge
- Quality gate failure rate
## Troubleshooting
### Issue: PRs not auto-merging
**Diagnosis:**
```bash
# Check if pr-auto-merge step ran
woodpecker log show mosaic/stack <pipeline> | grep "pr-auto-merge"
# Check Gitea token permissions
curl -H "Authorization: token $GITEA_TOKEN" \
https://git.mosaicstack.dev/api/v1/user
```
**Solutions:**
1. Verify `gitea_token` secret is configured
2. Check token has `repo` scope
3. Ensure PR targets `develop`
4. Verify all dependencies passed
### Issue: Quality gates failing unexpectedly
**Diagnosis:**
```bash
# Run checks locally
pnpm lint
pnpm typecheck
pnpm test
# Compare with baseline
diff baseline-lint.txt <(pnpm lint 2>&1)
```
**Solutions:**
1. Fix actual code issues
2. Update baseline if needed
3. Temporarily make check non-blocking
4. Investigate CI environment differences
### Issue: Security scans too strict
**Diagnosis:**
```bash
# Run security scan locally
pnpm audit --audit-level=high
# Check specific vulnerability
pnpm audit --json | jq '.vulnerabilities'
```
**Solutions:**
1. Update dependencies: `pnpm update`
2. Add audit exceptions if false positive
3. Lower severity threshold temporarily
4. Fix actual vulnerabilities
## Success Criteria
Migration is successful when:
- **100% of clean PRs auto-merge**
  - No manual intervention needed
  - Merge within 5 minutes of CI completion
- **Zero false-positive blocks**
  - All blocked PRs have actual issues
  - No spurious failures
- **Developer satisfaction high**
  - Fast feedback loops
  - Clear error messages
  - Minimal friction
- **Quality maintained or improved**
  - No increase in bugs reaching develop
  - Test coverage ≥85%
  - Security vulnerabilities caught early
## Next Steps
After successful migration:
1. **Monitor and optimize**
- Track metrics weekly
- Identify bottlenecks
- Optimize slow steps
2. **Expand coverage**
- Add integration tests
- Add E2E tests
- Add performance tests
3. **Enhance security**
- Enable SAST fully
- Add license compliance
- Add container scanning
4. **Improve developer experience**
- Add pre-push hooks
- Create quality dashboard
- Automate changelog generation
## Support
- **Documentation:** [docs/AUTOMATED-PR-MERGE.md](AUTOMATED-PR-MERGE.md)
- **Issues:** https://git.mosaicstack.dev/mosaic/stack/issues
- **Team Chat:** #engineering on Mattermost
---
**Migration Owner:** Platform Team
**Last Updated:** 2026-02-03

View File

@@ -0,0 +1,80 @@
# Issue #274: Sanitize agent spawn command payloads (command injection risk)
## Objective
Add input validation and sanitization to agent spawn command payloads to prevent command injection vulnerabilities in git operations.
## Security Impact
**Severity:** P0 (Critical) - Blocks production deployment
**Attack Vector:** Federated instances can inject malicious commands via branch names
**Risk:** Command injection in git operations allowing arbitrary code execution
## Vulnerability Details
### Attack Flow
1. Attacker sends federation command with malicious branch name
2. Payload passes through command service without validation
3. Branch name used directly in `git worktree add` command
4. Malicious git syntax executed on orchestrator
### Vulnerable Code
**File:** `apps/orchestrator/src/git/worktree-manager.service.ts:82`
```typescript
await git.raw(["worktree", "add", worktreePath, "-b", branchName, baseBranch]);
```
**Input Source:** Federation command payload → no validation → git command
### Attack Example
```json
{
"commandType": "agent.spawn",
"payload": {
"context": {
"branch": "feature/--config user.core.sshCommand=malicious"
}
}
}
```
## Approach
### 1. Add Input Validation DTOs
- Strict regex for branch names (alphanumeric + hyphens + underscores + slashes)
- Repository URL validation (https/ssh only)
- Reject dangerous characters (`;`, `$`, `` ` ``, `--`, etc.)
### 2. Create Sanitization Utility
- Whitelist-based approach
- Validate before any git operation
- Clear error messages on rejection
### 3. Apply at Multiple Layers
- DTO validation (first line of defense)
- Service-level sanitization (defense in depth)
- Git operation wrapper (last resort)
## Progress
- [ ] Create validation utility
- [ ] Update SpawnAgentDto with strict validation
- [ ] Update SpawnAgentCommandPayload type
- [ ] Add sanitization in WorktreeManagerService
- [ ] Add tests for validation
- [ ] Add tests for sanitization
- [ ] Security vulnerability FIXED
- [ ] Create PR
- [ ] Merge to develop
- [ ] Close issue #274
## Implementation Status
**IN PROGRESS** - Adding input validation and sanitization

View File

@@ -0,0 +1,82 @@
# Issue #275: Fix silent connection initiation failures
## Objective
Fix silent connection initiation failures where HTTP errors are caught but success is returned to the user, leaving zombie connections in PENDING state forever.
## Location
`apps/api/src/federation/connection.service.ts:72-80`
## Problem
Current code:
```typescript
try {
await firstValueFrom(
this.httpService.post(`${remoteUrl}/api/v1/federation/incoming/connect`, signedRequest)
);
this.logger.log(`Connection request sent to ${remoteUrl}`);
} catch (error) {
this.logger.error(`Failed to send connection request to ${remoteUrl}`, error);
// Connection is still created in PENDING state, can be retried
}
return this.mapToConnectionDetails(connection);
```
Issues:
- Catches HTTP failures but returns success
- Connection stays in PENDING state forever
- Creates zombie connections
- User sees success message but connection actually failed
## Solution
1. Delete the failed connection from database
2. Throw exception with clear error message
3. User gets immediate feedback that connection failed
## Implementation
```typescript
try {
await firstValueFrom(
this.httpService.post(`${remoteUrl}/api/v1/federation/incoming/connect`, signedRequest)
);
this.logger.log(`Connection request sent to ${remoteUrl}`);
} catch (error) {
this.logger.error(`Failed to send connection request to ${remoteUrl}`, error);
// Delete the failed connection to prevent zombie connections
await this.prisma.federationConnection.delete({
where: { id: connection.id },
});
throw new BadRequestException(
`Failed to initiate connection to ${remoteUrl}: ${error instanceof Error ? error.message : "Unknown error"}`
);
}
```
## Testing
Test scenarios:
1. Remote instance is unreachable - should throw exception and delete connection
2. Remote instance returns error - should throw exception and delete connection
3. Remote instance times out - should throw exception and delete connection
4. Remote instance returns success - should create connection in PENDING state
## Progress
- [ ] Create scratchpad
- [ ] Implement fix in connection.service.ts
- [ ] Add/update tests
- [ ] Run quality gates
- [ ] Commit changes
- [ ] Create PR
- [ ] Merge to develop
- [ ] Close issue #275

View File

@@ -0,0 +1,149 @@
# Issue #276: Add workspace authorization on incoming connections
## Objective
Add proper workspace authorization and controls for incoming federation connections.
## Location
`apps/api/src/federation/federation.controller.ts:211-233`
## Current Problem
```typescript
@Post("incoming/connect")
@Throttle({ short: { limit: 3, ttl: 1000 } })
async handleIncomingConnection(
@Body() dto: IncomingConnectionRequestDto
): Promise<{ status: string; connectionId?: string }> {
this.logger.log(`Received connection request from ${dto.instanceId}`);
// LIMITATION: Incoming connections are created in a default workspace
const workspaceId = process.env.DEFAULT_WORKSPACE_ID ?? "default";
const connection = await this.connectionService.handleIncomingConnectionRequest(
workspaceId,
dto
);
return {
status: "pending",
connectionId: connection.id,
};
}
```
Issues:
- No authorization check - any remote instance can create connections
- No admin approval workflow
- Limited audit logging
- No allowlist/denylist checking
- Hardcoded default workspace
## Security Impact
- **Authorization bypass**: Remote instances can force connections without permission
- **Workspace pollution**: Unwanted connections clutter the default workspace
- **No control**: Administrators have no way to pre-approve or block instances
## Solution Approach
### Phase 1: Audit Logging (This fix)
Add comprehensive audit logging for all incoming connection attempts before implementing full authorization.
Changes:
1. Log all incoming connection requests with full details
2. Log successful connection creations
3. Log any validation failures
4. Include remote instance details in logs
### Phase 2: Authorization Framework (Future)
- Add workspace routing configuration
- Implement allowlist/denylist at instance level
- Add admin approval workflow
- Implement automatic approval for trusted instances
## Implementation (Phase 1)
Add comprehensive audit logging to connection.service.ts:
```typescript
async handleIncomingConnectionRequest(
workspaceId: string,
request: ConnectionRequest
): Promise<ConnectionDetails> {
// Audit log: Incoming connection attempt
this.auditService.logIncomingConnectionAttempt({
workspaceId,
remoteInstanceId: request.instanceId,
remoteUrl: request.instanceUrl,
timestamp: request.timestamp,
});
// Verify signature
const verification = this.signatureService.verifyConnectionRequest(request);
if (!verification.valid) {
// Audit log: Failed verification
this.auditService.logConnectionRejected({
workspaceId,
remoteInstanceId: request.instanceId,
reason: 'Invalid signature',
error: verification.error,
});
throw new UnauthorizedException(
`Invalid connection request signature: ${verification.error}`
);
}
// Create connection (existing logic)
const connection = await this.prisma.federationConnection.create({...});
// Audit log: Connection created
this.auditService.logIncomingConnectionCreated({
workspaceId,
connectionId: connection.id,
remoteInstanceId: request.instanceId,
remoteUrl: request.instanceUrl,
});
return this.mapToConnectionDetails(connection);
}
```
## Testing
Test scenarios:
1. Incoming connection with valid signature → logged and created
2. Incoming connection with invalid signature → logged and rejected
3. Verify all audit logs contain required fields
4. Verify workspace isolation in logs
## Progress
- [ ] Create scratchpad
- [ ] Add audit logging methods to FederationAuditService
- [ ] Update handleIncomingConnectionRequest with audit logging
- [ ] Add tests for audit logging
- [ ] Run quality gates
- [ ] Commit changes
- [ ] Create PR
- [ ] Merge to develop
- [ ] Close issue #276
- [ ] Create follow-up issue for Phase 2 (full authorization)
## Notes
This implements the audit logging requirement from the issue. Full authorization (allowlist/denylist, admin approval) will be implemented in a follow-up issue as it requires:
- Database schema changes (allowlist/denylist tables)
- New configuration endpoints
- Admin UI changes
- More extensive testing
Audit logging provides immediate visibility and security monitoring without requiring major architectural changes.

View File

@@ -0,0 +1,120 @@
# Issue #277: Add comprehensive audit logging for security events
## Objective
Add comprehensive audit logging for critical security events to enable forensic analysis and attack detection.
## Missing Logging Areas
### 1. Failed signature verifications
- **Current**: DEBUG level only
- **Location**: `signature.service.ts`
- **Required**: WARN level with full details
### 2. Failed OIDC validations
- **Current**: No details logged
- **Location**: `auth` module
- **Required**: Full validation failure details
### 3. Capability bypass attempts
- **Current**: Not logged
- **Location**: `capability.guard.ts`
- **Required**: Log all denied capabilities
### 4. Rate limit violations
- **Current**: Not logged
- **Location**: ThrottlerGuard
- **Required**: Log rate limit hits
### 5. Command injection attempts
- **Current**: Not logged
- **Location**: `git-validation.util.ts` (recently added)
- **Required**: Log validation rejections
## Already Implemented
From issue #276 (commit 744290a):
- ✅ Incoming connection attempts
- ✅ Failed signature verifications for connections
- ✅ Connection created events
From issue #274 (commit 7a84d96):
- ✅ Git command validation (but not logged)
## Implementation Plan
### Priority 1: Add missing audit methods
1. `logSignatureVerificationFailed()` - Failed signatures
2. `logRateLimitViolation()` - Rate limit hits
3. `logCommandInjectionAttempt()` - Malicious input attempts
### Priority 2: Update existing code
1. Add logging to signature.service.ts
2. Add logging to git-validation.util.ts (throw + log)
3. Document rate limit violations (if not already handled by NestJS)
### Priority 3: Review capability guard
1. Check if logCapabilityDenied is being called
2. Add calls if missing
## Status Assessment
After reviewing issue #276, we already have:
- ✅ logCapabilityDenied() method
- ✅ logIncomingConnectionAttempt()
- ✅ logIncomingConnectionRejected()
- ✅ Signature verification failures for connections
What's actually missing:
1. General signature verification failures (outside connection context)
2. Rate limit violation logging
3. Command injection attempt logging
## Implementation Approach
Focus on what's truly missing and actionable:
1. **Add command injection attempt logging**
- Update git-validation.util.ts to log before throwing
- Create logCommandInjectionAttempt() method
2. **Add rate limit logging**
- Check if NestJS throttler already logs
- Add custom logging if needed
3. **Verify capability logging**
- Check that capability.guard.ts calls logCapabilityDenied
## Progress
- [ ] Create scratchpad
- [ ] Add logCommandInjectionAttempt() to audit service
- [ ] Update git-validation.util.ts to log attempts
- [ ] Check capability guard logging
- [ ] Check rate limit logging
- [ ] Add tests
- [ ] Run quality gates
- [ ] Commit changes
- [ ] Push and close issue
## Notes
Some of the required logging may already be in place. Need to verify:
1. Capability guard usage
2. Rate limiter behavior
3. OIDC validation (may be in auth module, not federation)
Focus on concrete, implementable improvements rather than theoretical gaps.

View File

@@ -1,185 +0,0 @@
#!/usr/bin/env bash
#
# Auto-Merge PR Script
#
# Automatically merges a PR to develop if all quality gates pass.
# This script can be called from Woodpecker CI or manually.
#
# Usage:
# auto-merge-pr.sh <pr_number>
#
# Environment variables:
# GITEA_TOKEN - Gitea API token (required)
# GITEA_URL - Gitea instance URL (default: https://git.mosaicstack.dev)
# REPO_OWNER - Repository owner (default: mosaic)
# REPO_NAME - Repository name (default: stack)
# TARGET_BRANCH - Target branch for auto-merge (default: develop)
# DRY_RUN - If set, only check eligibility without merging (default: false)
# Fail fast: abort on any error, on use of an unset variable,
# and on failures anywhere in a pipeline.
set -euo pipefail

# Configuration — every setting may be overridden from the environment.
# `: "${VAR:=default}"` assigns the default only when VAR is unset/empty.
: "${GITEA_URL:=https://git.mosaicstack.dev}"
: "${REPO_OWNER:=mosaic}"
: "${REPO_NAME:=stack}"
: "${TARGET_BRANCH:=develop}"
: "${DRY_RUN:=false}"

# ANSI escape sequences used by the log_* helpers.
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m' # reset / no color
# Functions
# Print an informational message in blue; %b expands the escape
# sequences embedded in $BLUE/$NC (same as `echo -e`).
log_info() {
    printf '%b\n' "${BLUE} $1${NC}"
}
# Print a success message in green; %b expands the color escapes.
log_success() {
    printf '%b\n' "${GREEN}$1${NC}"
}
# Print a warning message in yellow with a warning-sign prefix.
log_warning() {
    printf '%b\n' "${YELLOW}⚠️ $1${NC}"
}
# Print an error message in red; %b expands the color escapes.
log_error() {
    printf '%b\n' "${RED}$1${NC}"
}
# ----------------------------------------------------------------------
# Preconditions: API token, a PR-number argument, and jq on the PATH
# (the whole script parses API responses with jq).
# ----------------------------------------------------------------------
if [ -z "${GITEA_TOKEN:-}" ]; then
    log_error "GITEA_TOKEN environment variable is required"
    exit 1
fi
if [ $# -lt 1 ]; then
    log_error "Usage: $0 <pr_number>"
    exit 1
fi
if ! command -v jq > /dev/null 2>&1; then
    log_error "jq is required but not installed"
    exit 1
fi

PR_NUMBER="$1"
API_BASE="$GITEA_URL/api/v1/repos/$REPO_OWNER/$REPO_NAME"

log_info "Checking PR #$PR_NUMBER for auto-merge eligibility..."

# Fetch PR details.
# BUGFIX: previously written as `PR_DATA=$(curl ... || { log_error; exit 1; })`.
# Inside a command substitution, `exit 1` only terminates the subshell and
# the log_error output is captured into PR_DATA instead of being printed;
# with `set -e` the script then died on the failed assignment with the
# error message swallowed. `if ! VAR=$(...)` handles the failure properly.
if ! PR_DATA=$(curl -sf -H "Authorization: token $GITEA_TOKEN" \
    "$API_BASE/pulls/$PR_NUMBER"); then
    log_error "Failed to fetch PR #$PR_NUMBER"
    exit 1
fi

# Extract PR information (every field defaults defensively so a missing
# key yields a harmless sentinel rather than a jq error).
PR_STATE=$(echo "$PR_DATA" | jq -r '.state // ""')
PR_MERGED=$(echo "$PR_DATA" | jq -r '.merged // false')
BASE_BRANCH=$(echo "$PR_DATA" | jq -r '.base.ref // ""')
HEAD_BRANCH=$(echo "$PR_DATA" | jq -r '.head.ref // ""')
HEAD_SHA=$(echo "$PR_DATA" | jq -r '.head.sha // ""')
IS_MERGEABLE=$(echo "$PR_DATA" | jq -r '.mergeable // false')
HAS_CONFLICTS=$(echo "$PR_DATA" | jq -r '.mergeable_state // "unknown"')
PR_TITLE=$(echo "$PR_DATA" | jq -r '.title // "Unknown"')

log_info "PR #$PR_NUMBER: $PR_TITLE"
log_info "  State: $PR_STATE"
log_info "  Base: $BASE_BRANCH ← Head: $HEAD_BRANCH"
log_info "  Mergeable: $IS_MERGEABLE ($HAS_CONFLICTS)"

# Already merged → nothing to do (success, not an error).
if [ "$PR_MERGED" = "true" ]; then
    log_success "PR is already merged"
    exit 0
fi

# Closed/other states → skip quietly so CI doesn't fail the pipeline.
if [ "$PR_STATE" != "open" ]; then
    log_warning "PR is not open (state: $PR_STATE)"
    exit 0
fi

# Only auto-merge PRs targeting the configured branch.
if [ "$BASE_BRANCH" != "$TARGET_BRANCH" ]; then
    log_warning "PR does not target $TARGET_BRANCH (targets: $BASE_BRANCH)"
    exit 0
fi

# Merge conflicts or other blockers are a hard failure.
if [ "$IS_MERGEABLE" != "true" ]; then
    log_error "PR is not mergeable (state: $HAS_CONFLICTS)"
    exit 1
fi

# Fetch CI status checks for the head commit. A fetch failure degrades
# to an empty list (treated as "no checks found") rather than aborting.
log_info "Checking CI status..."
STATUS_DATA=$(curl -sf -H "Authorization: token $GITEA_TOKEN" \
    "$API_BASE/statuses/$HEAD_SHA" || echo "[]")

# Tally the check states reported for the commit.
TOTAL_CHECKS=$(echo "$STATUS_DATA" | jq 'length')
SUCCESS_CHECKS=$(echo "$STATUS_DATA" | jq '[.[] | select(.state == "success")] | length')
PENDING_CHECKS=$(echo "$STATUS_DATA" | jq '[.[] | select(.state == "pending")] | length')
FAILURE_CHECKS=$(echo "$STATUS_DATA" | jq '[.[] | select(.state == "failure" or .state == "error")] | length')

log_info "  Total checks: $TOTAL_CHECKS"
log_info "  Success: $SUCCESS_CHECKS"
log_info "  Pending: $PENDING_CHECKS"
log_info "  Failed: $FAILURE_CHECKS"

# Any failed check blocks the merge; list the offenders for the log.
if [ "$FAILURE_CHECKS" -gt 0 ]; then
    log_error "PR has $FAILURE_CHECKS failed status checks"
    echo "$STATUS_DATA" | jq -r '.[] | select(.state == "failure" or .state == "error") | "  - \(.context): \(.description)"'
    exit 1
fi

# Pending checks → exit cleanly; a later pipeline run will retry.
if [ "$PENDING_CHECKS" -gt 0 ]; then
    log_warning "PR has $PENDING_CHECKS pending status checks - waiting..."
    exit 0
fi

# No checks, or a success count that doesn't cover every check, is
# suspicious but (per original policy) not blocking — warn only.
if [ "$TOTAL_CHECKS" -eq 0 ]; then
    log_warning "No status checks found - proceeding with caution"
elif [ "$SUCCESS_CHECKS" -ne "$TOTAL_CHECKS" ]; then
    log_warning "Not all status checks are successful ($SUCCESS_CHECKS/$TOTAL_CHECKS)"
fi

# All checks passed - ready to merge
log_success "All quality gates passed!"

if [ "$DRY_RUN" = "true" ]; then
    log_info "DRY RUN: Would merge PR #$PR_NUMBER to $TARGET_BRANCH"
    exit 0
fi

# Perform the merge (same subshell-exit fix as the PR fetch above).
log_info "Merging PR #$PR_NUMBER to $TARGET_BRANCH..."
if ! MERGE_RESULT=$(curl -sf -X POST \
    -H "Authorization: token $GITEA_TOKEN" \
    -H "Content-Type: application/json" \
    -d '{
        "Do": "merge",
        "MergeMessageField": "",
        "MergeTitleField": "",
        "delete_branch_after_merge": true,
        "force_merge": false,
        "merge_when_checks_succeed": false
    }' \
    "$API_BASE/pulls/$PR_NUMBER/merge"); then
    log_error "Failed to merge PR #$PR_NUMBER"
    exit 1
fi

# Check if merge was successful.
# NOTE(review): some Gitea versions return an empty body on a successful
# merge (HTTP 200 only) — verify `.merged` is present on the deployed
# version; `curl -f` above already fails the request on HTTP errors.
if echo "$MERGE_RESULT" | jq -e '.merged' > /dev/null 2>&1; then
    MERGE_SHA=$(echo "$MERGE_RESULT" | jq -r '.sha // "unknown"')
    log_success "Successfully merged PR #$PR_NUMBER to $TARGET_BRANCH"
    log_success "  Merge commit: $MERGE_SHA"
    log_success "  Branch $HEAD_BRANCH deleted"
else
    ERROR_MSG=$(echo "$MERGE_RESULT" | jq -r '.message // "Unknown error"')
    log_error "Merge failed: $ERROR_MSG"
    exit 1
fi