feat: integrate framework files into monorepo under packages/mosaic/framework/
All checks were successful
ci/woodpecker/push/ci Pipeline was successful
ci/woodpecker/pr/ci Pipeline was successful

Moves all Mosaic framework runtime files from the separate bootstrap repo
into the monorepo as canonical source. The @mosaic/mosaic npm package now
ships the complete framework — bin scripts, runtime configs, tools, and
templates — enabling standalone installation via npm install.

Structure:
  packages/mosaic/framework/
  ├── bin/          28 CLI scripts (mosaic, mosaic-doctor, mosaic-sync-skills, etc.)
  ├── runtime/      Runtime adapters (claude, codex, opencode, pi, mcp)
  ├── tools/        Shell tooling (git, prdy, orchestrator, quality, etc.)
  ├── templates/    Agent and repo templates
  ├── defaults/     Default identity files (AGENTS.md, STANDARDS.md, SOUL.md, etc.)
  ├── install.sh    Legacy bash installer
  └── remote-install.sh  One-liner remote installer

Key files with Pi support and recent fixes:
- bin/mosaic: launch_pi() with skills-local loop
- bin/mosaic-doctor: --fix auto-wiring for all 4 harnesses
- bin/mosaic-sync-skills: Pi as 4th link target, symlink-aware find
- bin/mosaic-link-runtime-assets: Pi settings.json patching
- bin/mosaic-migrate-local-skills: Pi skill roots, symlink find
- runtime/pi/RUNTIME.md + mosaic-extension.ts

Package ships 251 framework files in the npm tarball (278KB compressed).
This commit is contained in:
Jason Woltje
2026-04-01 21:19:21 -05:00
parent f3cb3e6852
commit b38cfac760
252 changed files with 31477 additions and 1 deletions

View File

@@ -0,0 +1,284 @@
#!/usr/bin/env bash
#
# credentials.sh — Shared credential loader for Mosaic tool suites
#
# Usage: source ~/.config/mosaic/tools/_lib/credentials.sh
# load_credentials <service-name>
#
# credentials.json is the single source of truth.
# For Woodpecker, credentials are also synced to ~/.woodpecker/<instance>.env.
#
# Supported services:
# portainer, coolify, authentik, glpi, github,
# gitea-mosaicstack, gitea-usc, woodpecker, cloudflare,
# turbo-cache, openbrain
#
# After loading, service-specific env vars are exported.
# Run `load_credentials --help` for details.
# Canonical credentials store; override with MOSAIC_CREDENTIALS_FILE.
MOSAIC_CREDENTIALS_FILE="${MOSAIC_CREDENTIALS_FILE:-$HOME/src/jarvis-brain/credentials.json}"
# _mosaic_require_jq — return 0 silently when jq is on PATH; otherwise
# print an error to stderr and return 1.
_mosaic_require_jq() {
  command -v jq &>/dev/null && return 0
  echo "Error: jq is required but not installed" >&2
  return 1
}
# _mosaic_read_cred <jq-path> — print the value at <jq-path> from the
# credentials file ('// empty' maps missing keys to an empty string).
# Returns 1 with a message on stderr when the file itself is absent.
_mosaic_read_cred() {
  local query="$1"
  if [[ -f "$MOSAIC_CREDENTIALS_FILE" ]]; then
    jq -r "$query // empty" "$MOSAIC_CREDENTIALS_FILE"
  else
    echo "Error: Credentials file not found: $MOSAIC_CREDENTIALS_FILE" >&2
    return 1
  fi
}
# _mosaic_sync_woodpecker_env <instance> <url> <token>
# Mirror Woodpecker credentials to ~/.woodpecker/<instance>.env so the wp
# CLI wrapper stays current. Only writes when the values differ, and only
# when ~/.woodpecker already exists (opt-in).
_mosaic_sync_woodpecker_env() {
  local instance="$1" url="$2" token="$3"
  local env_file="$HOME/.woodpecker/${instance}.env"
  [[ -d "$HOME/.woodpecker" ]] || return 0
  if [[ -f "$env_file" ]]; then
    local current_url current_token
    # Portable extraction of the quoted values (the previous grep -oP
    # lookaround form is GNU-only and fails on macOS/BSD grep).
    current_url=$(sed -n 's/^export WOODPECKER_SERVER="\(.*\)"$/\1/p' "$env_file" 2>/dev/null || true)
    current_token=$(sed -n 's/^export WOODPECKER_TOKEN="\(.*\)"$/\1/p' "$env_file" 2>/dev/null || true)
    [[ "$current_url" == "$url" && "$current_token" == "$token" ]] && return 0
  fi
  printf '# %s Woodpecker CI\nexport WOODPECKER_SERVER="%s"\nexport WOODPECKER_TOKEN="%s"\n' \
    "$instance" "$url" "$token" > "$env_file"
  # The file carries a CI token — keep it readable by the owner only.
  chmod 600 "$env_file"
}
# load_credentials <service> — export the environment variables for one
# service, reading missing values from credentials.json.
#
# Conventions used below:
#   - Most services use "${VAR:-$(_mosaic_read_cred ...)}", so an existing
#     env var takes precedence over the file.
#   - woodpecker-<name> and authentik-<name> always re-read from the file
#     (credentials.json is authoritative for named instances).
#   - URL variables get a single trailing slash stripped ("${VAR%/}").
#   - Missing required values → message on stderr, return 1.
load_credentials() {
local service="$1"
# No argument or --help: print the service → variable map and succeed.
if [[ -z "$service" || "$service" == "--help" ]]; then
cat <<'EOF'
Usage: load_credentials <service>
Services and exported variables:
portainer → PORTAINER_URL, PORTAINER_API_KEY
coolify → COOLIFY_URL, COOLIFY_TOKEN
authentik → AUTHENTIK_URL, AUTHENTIK_TOKEN, AUTHENTIK_TEST_USER, AUTHENTIK_TEST_PASSWORD (uses default instance)
authentik-<name> → AUTHENTIK_URL, AUTHENTIK_TOKEN, AUTHENTIK_TEST_USER, AUTHENTIK_TEST_PASSWORD (specific instance, e.g. authentik-usc)
glpi → GLPI_URL, GLPI_APP_TOKEN, GLPI_USER_TOKEN
github → GITHUB_TOKEN
gitea-mosaicstack → GITEA_URL, GITEA_TOKEN
gitea-usc → GITEA_URL, GITEA_TOKEN
woodpecker → WOODPECKER_URL, WOODPECKER_TOKEN (uses default instance)
woodpecker-<name> → WOODPECKER_URL, WOODPECKER_TOKEN (specific instance, e.g. woodpecker-usc)
cloudflare → CLOUDFLARE_API_TOKEN (uses default instance)
cloudflare-<name> → CLOUDFLARE_API_TOKEN (specific instance, e.g. cloudflare-personal)
turbo-cache → TURBO_API, TURBO_TOKEN, TURBO_TEAM
openbrain → OPENBRAIN_URL, OPENBRAIN_TOKEN
EOF
return 0
fi
# All branches below read credentials.json through jq.
_mosaic_require_jq || return 1
case "$service" in
portainer)
export PORTAINER_URL="${PORTAINER_URL:-$(_mosaic_read_cred '.portainer.url')}"
export PORTAINER_API_KEY="${PORTAINER_API_KEY:-$(_mosaic_read_cred '.portainer.api_key')}"
PORTAINER_URL="${PORTAINER_URL%/}"
[[ -n "$PORTAINER_URL" ]] || { echo "Error: portainer.url not found" >&2; return 1; }
[[ -n "$PORTAINER_API_KEY" ]] || { echo "Error: portainer.api_key not found" >&2; return 1; }
;;
coolify)
export COOLIFY_URL="${COOLIFY_URL:-$(_mosaic_read_cred '.coolify.url')}"
export COOLIFY_TOKEN="${COOLIFY_TOKEN:-$(_mosaic_read_cred '.coolify.app_token')}"
COOLIFY_URL="${COOLIFY_URL%/}"
[[ -n "$COOLIFY_URL" ]] || { echo "Error: coolify.url not found" >&2; return 1; }
[[ -n "$COOLIFY_TOKEN" ]] || { echo "Error: coolify.app_token not found" >&2; return 1; }
;;
# Named instance: always read from the file (env vars are NOT consulted,
# unlike the default-instance branches).
authentik-*)
local ak_instance="${service#authentik-}"
export AUTHENTIK_URL="$(_mosaic_read_cred ".authentik.${ak_instance}.url")"
export AUTHENTIK_TOKEN="$(_mosaic_read_cred ".authentik.${ak_instance}.token")"
export AUTHENTIK_TEST_USER="$(_mosaic_read_cred ".authentik.${ak_instance}.test_user.username")"
export AUTHENTIK_TEST_PASSWORD="$(_mosaic_read_cred ".authentik.${ak_instance}.test_user.password")"
export AUTHENTIK_INSTANCE="$ak_instance"
AUTHENTIK_URL="${AUTHENTIK_URL%/}"
# Only the URL is mandatory here; the token may be resolved later
# (e.g. by auth-token.sh).
[[ -n "$AUTHENTIK_URL" ]] || { echo "Error: authentik.${ak_instance}.url not found" >&2; return 1; }
;;
# Default instance: resolve .authentik.default (or $AUTHENTIK_INSTANCE)
# and recurse into the authentik-<name> branch above.
authentik)
local ak_default
ak_default="${AUTHENTIK_INSTANCE:-$(_mosaic_read_cred '.authentik.default')}"
if [[ -z "$ak_default" ]]; then
# Fallback: try legacy flat structure (.authentik.url)
local legacy_url
legacy_url="$(_mosaic_read_cred '.authentik.url')"
if [[ -n "$legacy_url" ]]; then
export AUTHENTIK_URL="${AUTHENTIK_URL:-$legacy_url}"
export AUTHENTIK_TOKEN="${AUTHENTIK_TOKEN:-$(_mosaic_read_cred '.authentik.token')}"
export AUTHENTIK_TEST_USER="${AUTHENTIK_TEST_USER:-$(_mosaic_read_cred '.authentik.test_user.username')}"
export AUTHENTIK_TEST_PASSWORD="${AUTHENTIK_TEST_PASSWORD:-$(_mosaic_read_cred '.authentik.test_user.password')}"
AUTHENTIK_URL="${AUTHENTIK_URL%/}"
[[ -n "$AUTHENTIK_URL" ]] || { echo "Error: authentik.url not found" >&2; return 1; }
else
echo "Error: authentik.default not set and no AUTHENTIK_INSTANCE env var" >&2
echo "Available instances: $(jq -r '.authentik | keys | join(", ")' "$MOSAIC_CREDENTIALS_FILE" 2>/dev/null)" >&2
return 1
fi
else
load_credentials "authentik-${ak_default}"
fi
;;
glpi)
export GLPI_URL="${GLPI_URL:-$(_mosaic_read_cred '.glpi.url')}"
export GLPI_APP_TOKEN="${GLPI_APP_TOKEN:-$(_mosaic_read_cred '.glpi.app_token')}"
export GLPI_USER_TOKEN="${GLPI_USER_TOKEN:-$(_mosaic_read_cred '.glpi.user_token')}"
GLPI_URL="${GLPI_URL%/}"
[[ -n "$GLPI_URL" ]] || { echo "Error: glpi.url not found" >&2; return 1; }
;;
github)
export GITHUB_TOKEN="${GITHUB_TOKEN:-$(_mosaic_read_cred '.github.token')}"
[[ -n "$GITHUB_TOKEN" ]] || { echo "Error: github.token not found" >&2; return 1; }
;;
gitea-mosaicstack)
export GITEA_URL="${GITEA_URL:-$(_mosaic_read_cred '.gitea.mosaicstack.url')}"
export GITEA_TOKEN="${GITEA_TOKEN:-$(_mosaic_read_cred '.gitea.mosaicstack.token')}"
GITEA_URL="${GITEA_URL%/}"
[[ -n "$GITEA_URL" ]] || { echo "Error: gitea.mosaicstack.url not found" >&2; return 1; }
[[ -n "$GITEA_TOKEN" ]] || { echo "Error: gitea.mosaicstack.token not found" >&2; return 1; }
;;
gitea-usc)
export GITEA_URL="${GITEA_URL:-$(_mosaic_read_cred '.gitea.usc.url')}"
export GITEA_TOKEN="${GITEA_TOKEN:-$(_mosaic_read_cred '.gitea.usc.token')}"
GITEA_URL="${GITEA_URL%/}"
[[ -n "$GITEA_URL" ]] || { echo "Error: gitea.usc.url not found" >&2; return 1; }
[[ -n "$GITEA_TOKEN" ]] || { echo "Error: gitea.usc.token not found" >&2; return 1; }
;;
woodpecker-*)
local wp_instance="${service#woodpecker-}"
# credentials.json is authoritative — always read from it, ignore env
export WOODPECKER_URL="$(_mosaic_read_cred ".woodpecker.${wp_instance}.url")"
export WOODPECKER_TOKEN="$(_mosaic_read_cred ".woodpecker.${wp_instance}.token")"
export WOODPECKER_INSTANCE="$wp_instance"
WOODPECKER_URL="${WOODPECKER_URL%/}"
[[ -n "$WOODPECKER_URL" ]] || { echo "Error: woodpecker.${wp_instance}.url not found" >&2; return 1; }
[[ -n "$WOODPECKER_TOKEN" ]] || { echo "Error: woodpecker.${wp_instance}.token not found" >&2; return 1; }
# Sync to ~/.woodpecker/<instance>.env so the wp CLI wrapper stays current
_mosaic_sync_woodpecker_env "$wp_instance" "$WOODPECKER_URL" "$WOODPECKER_TOKEN"
;;
woodpecker)
# Resolve default instance, then load it
local wp_default
wp_default="${WOODPECKER_INSTANCE:-$(_mosaic_read_cred '.woodpecker.default')}"
if [[ -z "$wp_default" ]]; then
# Fallback: try legacy flat structure (.woodpecker.url / .woodpecker.token)
local legacy_url
legacy_url="$(_mosaic_read_cred '.woodpecker.url')"
if [[ -n "$legacy_url" ]]; then
export WOODPECKER_URL="${WOODPECKER_URL:-$legacy_url}"
export WOODPECKER_TOKEN="${WOODPECKER_TOKEN:-$(_mosaic_read_cred '.woodpecker.token')}"
WOODPECKER_URL="${WOODPECKER_URL%/}"
[[ -n "$WOODPECKER_URL" ]] || { echo "Error: woodpecker.url not found" >&2; return 1; }
[[ -n "$WOODPECKER_TOKEN" ]] || { echo "Error: woodpecker.token not found" >&2; return 1; }
else
echo "Error: woodpecker.default not set and no WOODPECKER_INSTANCE env var" >&2
echo "Available instances: $(jq -r '.woodpecker | keys | join(", ")' "$MOSAIC_CREDENTIALS_FILE" 2>/dev/null)" >&2
return 1
fi
else
load_credentials "woodpecker-${wp_default}"
fi
;;
# Named instance; NOTE an existing CLOUDFLARE_API_TOKEN env var wins here,
# unlike the woodpecker-*/authentik-* branches which always re-read.
cloudflare-*)
local cf_instance="${service#cloudflare-}"
export CLOUDFLARE_API_TOKEN="${CLOUDFLARE_API_TOKEN:-$(_mosaic_read_cred ".cloudflare.${cf_instance}.api_token")}"
export CLOUDFLARE_INSTANCE="$cf_instance"
[[ -n "$CLOUDFLARE_API_TOKEN" ]] || { echo "Error: cloudflare.${cf_instance}.api_token not found" >&2; return 1; }
;;
cloudflare)
# Resolve default instance, then load it
local cf_default
cf_default="${CLOUDFLARE_INSTANCE:-$(_mosaic_read_cred '.cloudflare.default')}"
if [[ -z "$cf_default" ]]; then
echo "Error: cloudflare.default not set and no CLOUDFLARE_INSTANCE env var" >&2
return 1
fi
load_credentials "cloudflare-${cf_default}"
;;
turbo-cache)
export TURBO_API="${TURBO_API:-$(_mosaic_read_cred '.turbo_cache.api_url')}"
export TURBO_TOKEN="${TURBO_TOKEN:-$(_mosaic_read_cred '.turbo_cache.token')}"
export TURBO_TEAM="${TURBO_TEAM:-$(_mosaic_read_cred '.turbo_cache.team')}"
[[ -n "$TURBO_API" ]] || { echo "Error: turbo_cache.api_url not found" >&2; return 1; }
[[ -n "$TURBO_TOKEN" ]] || { echo "Error: turbo_cache.token not found" >&2; return 1; }
[[ -n "$TURBO_TEAM" ]] || { echo "Error: turbo_cache.team not found" >&2; return 1; }
;;
openbrain)
export OPENBRAIN_URL="${OPENBRAIN_URL:-$(_mosaic_read_cred '.openbrain.url')}"
export OPENBRAIN_TOKEN="${OPENBRAIN_TOKEN:-$(_mosaic_read_cred '.openbrain.api_key')}"
OPENBRAIN_URL="${OPENBRAIN_URL%/}"
[[ -n "$OPENBRAIN_URL" ]] || { echo "Error: openbrain.url not found" >&2; return 1; }
[[ -n "$OPENBRAIN_TOKEN" ]] || { echo "Error: openbrain.api_key not found" >&2; return 1; }
;;
*)
echo "Error: Unknown service '$service'" >&2
echo "Supported: portainer, coolify, authentik[-<name>], glpi, github, gitea-mosaicstack, gitea-usc, woodpecker[-<name>], cloudflare[-<name>], turbo-cache, openbrain" >&2
return 1
;;
esac
}
# mosaic_http <method> <endpoint> <auth_header> [base_url]
# Common HTTP helper — performs a curl request, prints the response body on
# stdout, and records the HTTP status in the global MOSAIC_HTTP_CODE.
# Usage: mosaic_http GET "/api/v1/endpoint" "Authorization: Bearer $TOKEN" [base_url]
# NOTE: -k disables TLS verification (kept from the original tooling).
mosaic_http() {
  local method="$1"
  local endpoint="$2"
  local auth_header="$3"
  local base_url="${4:-}"
  local response
  # -w appends the status code on its own line so body and code can be split.
  response=$(curl -sk -w "\n%{http_code}" -X "$method" \
    -H "$auth_header" \
    -H "Content-Type: application/json" \
    "${base_url}${endpoint}")
  # printf (not echo) — response bodies may start with '-' or contain
  # backslash escapes that echo would mangle.
  MOSAIC_HTTP_CODE=$(printf '%s\n' "$response" | tail -n1)
  printf '%s\n' "$response" | sed '$d'
}
# mosaic_http_post <endpoint> <auth_header> <json_data> [base_url]
# POST variant of mosaic_http with a JSON request body. Prints the response
# body on stdout and sets the global MOSAIC_HTTP_CODE.
# Usage: mosaic_http_post "/api/v1/endpoint" "Authorization: Bearer $TOKEN" '{"key":"val"}' [base_url]
mosaic_http_post() {
  local endpoint="$1"
  local auth_header="$2"
  local data="$3"
  local base_url="${4:-}"
  local response
  response=$(curl -sk -w "\n%{http_code}" -X POST \
    -H "$auth_header" \
    -H "Content-Type: application/json" \
    -d "$data" \
    "${base_url}${endpoint}")
  # printf (not echo) — bodies may begin with '-' or contain backslashes.
  MOSAIC_HTTP_CODE=$(printf '%s\n' "$response" | tail -n1)
  printf '%s\n' "$response" | sed '$d'
}
# mosaic_http_patch <endpoint> <auth_header> <json_data> [base_url]
# PATCH variant of mosaic_http with a JSON request body. Prints the response
# body on stdout and sets the global MOSAIC_HTTP_CODE.
mosaic_http_patch() {
  local endpoint="$1"
  local auth_header="$2"
  local data="$3"
  local base_url="${4:-}"
  local response
  response=$(curl -sk -w "\n%{http_code}" -X PATCH \
    -H "$auth_header" \
    -H "Content-Type: application/json" \
    -d "$data" \
    "${base_url}${endpoint}")
  # printf (not echo) — bodies may begin with '-' or contain backslashes.
  MOSAIC_HTTP_CODE=$(printf '%s\n' "$response" | tail -n1)
  printf '%s\n' "$response" | sed '$d'
}

View File

@@ -0,0 +1,60 @@
# Authentik Tool Suite
Manage Authentik identity provider (SSO, users, groups, applications, flows) via CLI.
## Prerequisites
- `jq` installed
- Authentik credentials in `~/src/jarvis-brain/credentials.json` (or `$MOSAIC_CREDENTIALS_FILE`)
- Required fields: `authentik.url` and `authentik.token` (or per-instance `authentik.<name>.url` / `authentik.<name>.token`)
## Authentication
Scripts use `auth-token.sh` to resolve the API token — from the cache at `~/.cache/mosaic/authentik-token` or from credentials.json — and validate it against the API on each use. There is no username/password flow: if no valid token is available, the script fails with setup instructions.
Create a long-lived API token in Authentik admin (Directory > Tokens) and add it to credentials.json (or export `$AUTHENTIK_TOKEN`) — the scripts will validate, cache, and use it directly.
## Scripts
| Script | Purpose |
| ----------------- | ------------------------------------------ |
| `auth-token.sh` | Authenticate and cache API token |
| `user-list.sh` | List users (search, filter by group) |
| `user-create.sh` | Create user with optional group assignment |
| `group-list.sh` | List groups |
| `app-list.sh` | List OAuth/SAML applications |
| `flow-list.sh` | List authentication flows |
| `admin-status.sh` | System health and version info |
## Common Options
All scripts support:
- `-f json` — JSON output (default: table)
- `-h` — Show help
## API Reference
- Base URL: `https://auth.diversecanvas.com`
- API prefix: `/api/v3/`
- OpenAPI schema: `/api/v3/schema/`
- Auth: Bearer token in `Authorization` header
## Examples
```bash
# List all users
~/.config/mosaic/tools/authentik/user-list.sh
# Search for a user
~/.config/mosaic/tools/authentik/user-list.sh -s "jason"
# Create a user in the admins group
~/.config/mosaic/tools/authentik/user-create.sh -u newuser -n "New User" -e new@example.com -g admins
# List OAuth applications as JSON
~/.config/mosaic/tools/authentik/app-list.sh -f json
# Check system health
~/.config/mosaic/tools/authentik/admin-status.sh
```

View File

@@ -0,0 +1,63 @@
#!/usr/bin/env bash
#
# admin-status.sh — Authentik system health and version info
#
# Usage: admin-status.sh [-f format] [-a instance]
#
# Options:
# -f format Output format: table (default), json
# -a instance Authentik instance name (e.g. usc, mosaic)
# -h Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Shared loader — provides load_credentials, which exports AUTHENTIK_URL etc.
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
FORMAT="table"
AK_INSTANCE=""
# -h replays this file's comment header as help text (hence head -13 above
# the grep — any new comments must stay below the first 13 lines).
while getopts "f:a:h" opt; do
case $opt in
f) FORMAT="$OPTARG" ;;
a) AK_INSTANCE="$OPTARG" ;;
h) head -13 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
*) echo "Usage: $0 [-f format] [-a instance]" >&2; exit 1 ;;
esac
done
# Load the named instance's credentials, or the configured default.
if [[ -n "$AK_INSTANCE" ]]; then
load_credentials "authentik-${AK_INSTANCE}"
else
load_credentials authentik
fi
# ${AK_INSTANCE:+...} forwards -a to auth-token.sh only when an instance was given.
TOKEN=$("$SCRIPT_DIR/auth-token.sh" -q ${AK_INSTANCE:+-a "$AK_INSTANCE"})
# curl -w appends the status code on its own line; split it from the body below.
response=$(curl -sk -w "\n%{http_code}" \
-H "Authorization: Bearer $TOKEN" \
"${AUTHENTIK_URL}/api/v3/admin/system/")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to get system status (HTTP $http_code)" >&2
exit 1
fi
# JSON mode: emit the raw API payload.
if [[ "$FORMAT" == "json" ]]; then
echo "$body" | jq '.'
exit 0
fi
# Table mode: pull selected fields, defaulting each to "unknown".
echo "Authentik System Status"
echo "======================="
echo "$body" | jq -r '
" URL: \(.http_host // "unknown")\n" +
" Version: \(.runtime.authentik_version // "unknown")\n" +
" Python: \(.runtime.python_version // "unknown")\n" +
" Workers: \(.runtime.gunicorn_workers // "unknown")\n" +
" Build Hash: \(.runtime.build_hash // "unknown")\n" +
" Embedded Outpost: \(.embedded_outpost_host // "unknown")"
'

View File

@@ -0,0 +1,70 @@
#!/usr/bin/env bash
#
# app-list.sh — List Authentik applications
#
# Usage: app-list.sh [-f format] [-s search] [-a instance]
#
# Options:
# -f format Output format: table (default), json
# -s search Search by application name
# -a instance Authentik instance name (e.g. usc, mosaic)
# -h Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Shared loader — provides load_credentials, which exports AUTHENTIK_URL etc.
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
FORMAT="table"
SEARCH=""
AK_INSTANCE=""
# -h replays the comment header (first 14 lines) as help text.
while getopts "f:s:a:h" opt; do
case $opt in
f) FORMAT="$OPTARG" ;;
s) SEARCH="$OPTARG" ;;
a) AK_INSTANCE="$OPTARG" ;;
h) head -14 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
*) echo "Usage: $0 [-f format] [-s search] [-a instance]" >&2; exit 1 ;;
esac
done
if [[ -n "$AK_INSTANCE" ]]; then
load_credentials "authentik-${AK_INSTANCE}"
else
load_credentials authentik
fi
TOKEN=$("$SCRIPT_DIR/auth-token.sh" -q ${AK_INSTANCE:+-a "$AK_INSTANCE"})
# NOTE(review): the search term is interpolated into the query string
# without URL-encoding — terms with spaces/special chars may break.
PARAMS="ordering=name"
[[ -n "$SEARCH" ]] && PARAMS="${PARAMS}&search=${SEARCH}"
# curl -w appends the status code on its own line; split it from the body.
response=$(curl -sk -w "\n%{http_code}" \
-H "Authorization: Bearer $TOKEN" \
"${AUTHENTIK_URL}/api/v3/core/applications/?${PARAMS}")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to list applications (HTTP $http_code)" >&2
exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
echo "$body" | jq '.results'
exit 0
fi
# Table mode: jq emits TSV rows; printf truncates (${var:0:N}) to keep
# fixed-width columns aligned.
echo "NAME SLUG PROVIDER LAUNCH URL"
echo "---------------------------- ---------------------------- ----------------- ----------------------------------------"
echo "$body" | jq -r '.results[] | [
.name,
.slug,
(.provider_obj.name // "none"),
(.launch_url // "—")
] | @tsv' | while IFS=$'\t' read -r name slug provider launch_url; do
printf "%-28s %-28s %-17s %s\n" \
"${name:0:28}" "${slug:0:28}" "${provider:0:17}" "$launch_url"
done

View File

@@ -0,0 +1,95 @@
#!/usr/bin/env bash
#
# auth-token.sh — Obtain and cache Authentik API token
#
# Usage: auth-token.sh [-f] [-q] [-a instance]
#
# Returns a valid Authentik API token. Checks in order:
# 1. Cached token at ~/.cache/mosaic/authentik-token-<instance> (if valid)
# 2. Pre-configured token from credentials.json (authentik.<instance>.token)
# 3. Fails with instructions to create a token in the admin UI
#
# Options:
# -f Force re-validation (ignore cached token)
# -q Quiet mode — only output the token
# -a instance Authentik instance name (e.g. usc, mosaic)
# -h Show this help
#
# Environment variables (or credentials.json):
# AUTHENTIK_URL — Authentik instance URL
# AUTHENTIK_TOKEN — Pre-configured API token (recommended)
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
FORCE=false
QUIET=false
AK_INSTANCE=""
# -h replays the comment header (first 22 lines) as help text — keep any
# new comments below that window.
while getopts "fqa:h" opt; do
case $opt in
f) FORCE=true ;;
q) QUIET=true ;;
a) AK_INSTANCE="$OPTARG" ;;
h) head -22 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
*) echo "Usage: $0 [-f] [-q] [-a instance]" >&2; exit 1 ;;
esac
done
if [[ -n "$AK_INSTANCE" ]]; then
load_credentials "authentik-${AK_INSTANCE}"
else
load_credentials authentik
fi
CACHE_DIR="$HOME/.cache/mosaic"
# Per-instance cache file; AUTHENTIK_INSTANCE is exported by load_credentials
# for instance-specific loads.
CACHE_FILE="$CACHE_DIR/authentik-token${AUTHENTIK_INSTANCE:+-$AUTHENTIK_INSTANCE}"
# _validate_token <token> — probe /core/users/me/; succeed only on HTTP 200.
_validate_token() {
  local token="$1"
  local http_code
  http_code=$(curl -sk -o /dev/null -w "%{http_code}" \
    --connect-timeout 5 --max-time 10 \
    -H "Authorization: Bearer $token" \
    "${AUTHENTIK_URL}/api/v3/core/users/me/")
  [[ "$http_code" == "200" ]]
}
# _cache_token <token> — persist the token owner-readable only. The umask
# runs in a subshell so the file is never created world-readable (the old
# write-then-chmod sequence left a brief exposure window).
_cache_token() {
  local token="$1"
  mkdir -p "$CACHE_DIR"
  (umask 077; printf '%s\n' "$token" > "$CACHE_FILE")
  chmod 600 "$CACHE_FILE"
}
# 1. Check cached token
if [[ "$FORCE" == "false" ]] && [[ -f "$CACHE_FILE" ]]; then
cached_token=$(<"$CACHE_FILE")
if [[ -n "$cached_token" ]] && _validate_token "$cached_token"; then
[[ "$QUIET" == "false" ]] && echo "Using cached token (valid)" >&2
printf '%s\n' "$cached_token"
exit 0
fi
[[ "$QUIET" == "false" ]] && echo "Cached token invalid, checking credentials..." >&2
fi
# 2. Use pre-configured token from credentials.json (exported by load_credentials)
if [[ -n "${AUTHENTIK_TOKEN:-}" ]]; then
if _validate_token "$AUTHENTIK_TOKEN"; then
# Cache it for faster future access
_cache_token "$AUTHENTIK_TOKEN"
[[ "$QUIET" == "false" ]] && echo "Token validated and cached at $CACHE_FILE" >&2
printf '%s\n' "$AUTHENTIK_TOKEN"
exit 0
else
echo "Error: Pre-configured AUTHENTIK_TOKEN is invalid (API returned non-200)" >&2
exit 1
fi
fi
# 3. No token available — print setup instructions and fail
echo "Error: No Authentik API token configured" >&2
echo "" >&2
echo "To create one:" >&2
echo " 1. Log into Authentik admin: ${AUTHENTIK_URL}/if/admin/#/core/tokens" >&2
echo " 2. Click 'Create' → set identifier (e.g., 'mosaic-agent')" >&2
echo " 3. Select 'API Token' intent, uncheck 'Expiring'" >&2
echo " 4. Copy the key and add to credentials.json:" >&2
echo " Add token to credentials.json under authentik.<instance>.token" >&2
exit 1

View File

@@ -0,0 +1,70 @@
#!/usr/bin/env bash
#
# flow-list.sh — List Authentik flows
#
# Usage: flow-list.sh [-f format] [-d designation] [-a instance]
#
# Options:
# -f format Output format: table (default), json
# -d designation Filter by designation (authentication, authorization, enrollment, etc.)
# -a instance Authentik instance name (e.g. usc, mosaic)
# -h Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Shared loader — provides load_credentials, which exports AUTHENTIK_URL etc.
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
FORMAT="table"
DESIGNATION=""
AK_INSTANCE=""
# -h replays the comment header (first 14 lines) as help text.
while getopts "f:d:a:h" opt; do
case $opt in
f) FORMAT="$OPTARG" ;;
d) DESIGNATION="$OPTARG" ;;
a) AK_INSTANCE="$OPTARG" ;;
h) head -14 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
*) echo "Usage: $0 [-f format] [-d designation] [-a instance]" >&2; exit 1 ;;
esac
done
if [[ -n "$AK_INSTANCE" ]]; then
load_credentials "authentik-${AK_INSTANCE}"
else
load_credentials authentik
fi
TOKEN=$("$SCRIPT_DIR/auth-token.sh" -q ${AK_INSTANCE:+-a "$AK_INSTANCE"})
# Query string: sort by slug; optionally filter on designation.
PARAMS="ordering=slug"
[[ -n "$DESIGNATION" ]] && PARAMS="${PARAMS}&designation=${DESIGNATION}"
# curl -w appends the status code on its own line; split it from the body.
response=$(curl -sk -w "\n%{http_code}" \
-H "Authorization: Bearer $TOKEN" \
"${AUTHENTIK_URL}/api/v3/flows/instances/?${PARAMS}")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to list flows (HTTP $http_code)" >&2
exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
echo "$body" | jq '.results'
exit 0
fi
# Table mode: jq emits TSV rows; printf truncates to fixed-width columns.
echo "NAME SLUG DESIGNATION TITLE"
echo "---------------------------- ---------------------------- ---------------- ----------------------------"
echo "$body" | jq -r '.results[] | [
.name,
.slug,
.designation,
(.title // "—")
] | @tsv' | while IFS=$'\t' read -r name slug designation title; do
printf "%-28s %-28s %-16s %s\n" \
"${name:0:28}" "${slug:0:28}" "$designation" "${title:0:28}"
done

View File

@@ -0,0 +1,69 @@
#!/usr/bin/env bash
#
# group-list.sh — List Authentik groups
#
# Usage: group-list.sh [-f format] [-s search] [-a instance]
#
# Options:
# -f format Output format: table (default), json
# -s search Search by group name
# -a instance Authentik instance name (e.g. usc, mosaic)
# -h Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
# Shared loader — provides load_credentials, which exports AUTHENTIK_URL etc.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
FORMAT="table"
SEARCH=""
AK_INSTANCE=""
# -h replays the comment header (first 13 lines) as help text.
while getopts "f:s:a:h" opt; do
case $opt in
f) FORMAT="$OPTARG" ;;
s) SEARCH="$OPTARG" ;;
a) AK_INSTANCE="$OPTARG" ;;
h) head -13 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
*) echo "Usage: $0 [-f format] [-s search] [-a instance]" >&2; exit 1 ;;
esac
done
if [[ -n "$AK_INSTANCE" ]]; then
load_credentials "authentik-${AK_INSTANCE}"
else
load_credentials authentik
fi
TOKEN=$("$SCRIPT_DIR/auth-token.sh" -q ${AK_INSTANCE:+-a "$AK_INSTANCE"})
# NOTE(review): search term is interpolated unencoded into the query string.
PARAMS="ordering=name"
[[ -n "$SEARCH" ]] && PARAMS="${PARAMS}&search=${SEARCH}"
# curl -w appends the status code on its own line; split it from the body.
response=$(curl -sk -w "\n%{http_code}" \
-H "Authorization: Bearer $TOKEN" \
"${AUTHENTIK_URL}/api/v3/core/groups/?${PARAMS}")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to list groups (HTTP $http_code)" >&2
exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
echo "$body" | jq '.results'
exit 0
fi
# Table mode: member count is the length of each group's .users array.
echo "NAME PK MEMBERS SUPERUSER"
echo "---------------------------- ------------------------------------ ------- ---------"
echo "$body" | jq -r '.results[] | [
.name,
.pk,
(.users | length | tostring),
(if .is_superuser then "yes" else "no" end)
] | @tsv' | while IFS=$'\t' read -r name pk members superuser; do
printf "%-28s %-36s %-7s %s\n" "${name:0:28}" "$pk" "$members" "$superuser"
done

View File

@@ -0,0 +1,100 @@
#!/usr/bin/env bash
#
# user-create.sh — Create an Authentik user
#
# Usage: user-create.sh -u <username> -n <name> -e <email> [-p password] [-g group] [-a instance]
#
# Options:
# -u username Username (required)
# -n name Display name (required)
# -e email Email address (required)
# -p password Initial password (optional — user gets set-password flow if omitted)
# -g group Group name to add user to (optional)
# -f format Output format: table (default), json
# -a instance Authentik instance name (e.g. usc, mosaic)
# -h Show this help
#
# Environment variables (or credentials.json):
# AUTHENTIK_URL — Authentik instance URL
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
USERNAME="" NAME="" EMAIL="" PASSWORD="" GROUP="" FORMAT="table" AK_INSTANCE=""
# -h replays the comment header (first 19 lines) as help text — keep any
# new comments below that window.
while getopts "u:n:e:p:g:f:a:h" opt; do
case $opt in
u) USERNAME="$OPTARG" ;;
n) NAME="$OPTARG" ;;
e) EMAIL="$OPTARG" ;;
p) PASSWORD="$OPTARG" ;;
g) GROUP="$OPTARG" ;;
f) FORMAT="$OPTARG" ;;
a) AK_INSTANCE="$OPTARG" ;;
h) head -19 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
*) echo "Usage: $0 -u <username> -n <name> -e <email> [-p password] [-g group] [-a instance]" >&2; exit 1 ;;
esac
done
if [[ -n "$AK_INSTANCE" ]]; then
load_credentials "authentik-${AK_INSTANCE}"
else
load_credentials authentik
fi
if [[ -z "$USERNAME" || -z "$NAME" || -z "$EMAIL" ]]; then
echo "Error: -u username, -n name, and -e email are required" >&2
exit 1
fi
TOKEN=$("$SCRIPT_DIR/auth-token.sh" -q ${AK_INSTANCE:+-a "$AK_INSTANCE"})
# Build user payload — jq --arg safely quotes all user-supplied values.
payload=$(jq -n \
--arg username "$USERNAME" \
--arg name "$NAME" \
--arg email "$EMAIL" \
'{username: $username, name: $name, email: $email, is_active: true}')
# Add password if provided
if [[ -n "$PASSWORD" ]]; then
payload=$(jq --arg pw "$PASSWORD" '. + {password: $pw}' <<<"$payload")
fi
# Add to group if provided
if [[ -n "$GROUP" ]]; then
# Look up group PK by name. -G + --data-urlencode URL-encodes the search
# term (names with spaces/special chars previously corrupted the query).
group_response=$(curl -sk -G \
-H "Authorization: Bearer $TOKEN" \
--data-urlencode "search=${GROUP}" \
"${AUTHENTIK_URL}/api/v3/core/groups/")
# Exact-name match via --arg — no shell interpolation into the jq filter
# (the old inline "$GROUP" broke on quotes). first()//empty also avoids
# the '| head -1' SIGPIPE risk under pipefail.
group_pk=$(jq -r --arg g "$GROUP" 'first(.results[] | select(.name == $g) | .pk) // empty' <<<"$group_response")
if [[ -n "$group_pk" ]]; then
payload=$(jq --arg gk "$group_pk" '. + {groups: [$gk]}' <<<"$payload")
else
echo "Warning: Group '$GROUP' not found — creating user without group" >&2
fi
fi
response=$(curl -sk -w "\n%{http_code}" -X POST \
-H "Authorization: Bearer $TOKEN" \
-H "Content-Type: application/json" \
-d "$payload" \
"${AUTHENTIK_URL}/api/v3/core/users/")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "201" ]]; then
echo "Error: Failed to create user (HTTP $http_code)" >&2
# Pretty-print the API error when it is JSON; fall back to the raw body so
# the diagnostic is never lost (and a jq failure cannot trip set -e).
jq '.' <<<"$body" >&2 2>/dev/null || printf '%s\n' "$body" >&2
exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
echo "$body" | jq '.'
else
echo "User created successfully:"
echo "$body" | jq -r '" Username: \(.username)\n Name: \(.name)\n Email: \(.email)\n PK: \(.pk)"'
fi

View File

@@ -0,0 +1,80 @@
#!/usr/bin/env bash
#
# user-list.sh — List Authentik users
#
# Usage: user-list.sh [-f format] [-s search] [-g group] [-a instance]
#
# Options:
# -f format Output format: table (default), json
# -s search Search term (matches username, name, email)
# -g group Filter by group name
# -a instance Authentik instance name (e.g. usc, mosaic)
# -h Show this help
#
# Environment variables (or credentials.json):
# AUTHENTIK_URL — Authentik instance URL
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Shared loader — provides load_credentials, which exports AUTHENTIK_URL etc.
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
FORMAT="table"
SEARCH=""
GROUP=""
AK_INSTANCE=""
# -h replays the comment header (first 15 lines) as help text.
while getopts "f:s:g:a:h" opt; do
case $opt in
f) FORMAT="$OPTARG" ;;
s) SEARCH="$OPTARG" ;;
g) GROUP="$OPTARG" ;;
a) AK_INSTANCE="$OPTARG" ;;
h) head -15 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
*) echo "Usage: $0 [-f format] [-s search] [-g group] [-a instance]" >&2; exit 1 ;;
esac
done
if [[ -n "$AK_INSTANCE" ]]; then
load_credentials "authentik-${AK_INSTANCE}"
else
load_credentials authentik
fi
TOKEN=$("$SCRIPT_DIR/auth-token.sh" -q ${AK_INSTANCE:+-a "$AK_INSTANCE"})
# Build query params
# NOTE(review): values are interpolated unencoded into the query string.
PARAMS="ordering=username"
[[ -n "$SEARCH" ]] && PARAMS="${PARAMS}&search=${SEARCH}"
[[ -n "$GROUP" ]] && PARAMS="${PARAMS}&groups_by_name=${GROUP}"
# curl -w appends the status code on its own line; split it from the body.
response=$(curl -sk -w "\n%{http_code}" \
-H "Authorization: Bearer $TOKEN" \
"${AUTHENTIK_URL}/api/v3/core/users/?${PARAMS}")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to list users (HTTP $http_code)" >&2
exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
echo "$body" | jq '.results'
exit 0
fi
# Table output
# last_login is reduced to its date part: (null // "never") then split at "T".
echo "USERNAME NAME EMAIL ACTIVE LAST LOGIN"
echo "-------------------- ---------------------------- ---------------------------- ------ ----------"
echo "$body" | jq -r '.results[] | [
.username,
.name,
.email,
(if .is_active then "yes" else "no" end),
(.last_login // "never" | split("T")[0])
] | @tsv' | while IFS=$'\t' read -r username name email active last_login; do
printf "%-20s %-28s %-28s %-6s %s\n" \
"${username:0:20}" "${name:0:28}" "${email:0:28}" "$active" "$last_login"
done

View File

@@ -0,0 +1,305 @@
#!/bin/bash
# agent-lint.sh — Audit agent configuration across all coding projects
#
# Usage:
# agent-lint.sh # Scan all projects in ~/src/
# agent-lint.sh --project <path> # Scan single project
# agent-lint.sh --json # Output JSON for jarvis-brain
# agent-lint.sh --verbose # Show per-check details
# agent-lint.sh --fix-hint # Show fix commands for failures
#
# Checks per project:
# 1. Has runtime context file (CLAUDE.md or RUNTIME.md)?
# 2. Has AGENTS.md?
# 3. Runtime context file references conditional context/guides?
# 4. Runtime context file has quality gates?
# 5. For monorepos: sub-directories have AGENTS.md?
set -euo pipefail
# Defaults
SRC_DIR="$HOME/src"
SINGLE_PROJECT=""
JSON_OUTPUT=false
VERBOSE=false
FIX_HINT=false
# Exclusion patterns (not coding projects) — matched as substrings of the
# directory basename by is_excluded().
EXCLUDE_PATTERNS=(
  "_worktrees"
  ".backup"
  "_old"
  "_bak"
  "junk"
  "traefik"
  "infrastructure"
)
# Parse args
while [[ $# -gt 0 ]]; do
  case "$1" in
    --project) SINGLE_PROJECT="$2"; shift 2 ;;
    --json) JSON_OUTPUT=true; shift ;;
    --verbose) VERBOSE=true; shift ;;
    --fix-hint) FIX_HINT=true; shift ;;
    --src-dir) SRC_DIR="$2"; shift 2 ;;
    -h|--help)
      echo "Usage: agent-lint.sh [--project <path>] [--json] [--verbose] [--fix-hint] [--src-dir <dir>]"
      exit 0
      ;;
    # Diagnostics go to stderr so they never pollute captured/JSON output.
    *) echo "Unknown option: $1" >&2; exit 1 ;;
  esac
done
# Colors (disabled for JSON mode)
if $JSON_OUTPUT; then
  GREEN="" RED="" YELLOW="" NC="" BOLD="" DIM=""
else
  GREEN='\033[0;32m' RED='\033[0;31m' YELLOW='\033[0;33m'
  NC='\033[0m' BOLD='\033[1m' DIM='\033[2m'
fi
# Determine if a directory is a coding project
is_coding_project() {
  # A directory counts as a coding project when it carries a recognized
  # build/package manifest, or a Makefile paired with a Rust src/main.rs.
  local candidate="$1"
  local manifest
  for manifest in package.json pyproject.toml Cargo.toml go.mod pom.xml build.gradle; do
    [[ -f "$candidate/$manifest" ]] && return 0
  done
  [[ -f "$candidate/Makefile" && -f "$candidate/src/main.rs" ]]
}
# Check if directory should be excluded
is_excluded() {
  # True when the directory's basename contains any exclusion substring.
  local base pattern
  base=$(basename "$1")
  for pattern in "${EXCLUDE_PATTERNS[@]}"; do
    case "$base" in
      *"$pattern"*) return 0 ;;
    esac
  done
  return 1
}
# Detect if project is a monorepo
is_monorepo() {
  # Monorepo markers: pnpm workspace, turbo, lerna, or an npm
  # "workspaces" key in package.json.
  local root="$1"
  if [[ -f "$root/pnpm-workspace.yaml" || -f "$root/turbo.json" || -f "$root/lerna.json" ]]; then
    return 0
  fi
  grep -q '"workspaces"' "$root/package.json" 2>/dev/null
}
# Resolve runtime context file (CLAUDE.md or RUNTIME.md)
runtime_context_file() {
  # Echo the project's runtime context file path; CLAUDE.md wins over
  # RUNTIME.md, and an empty string means neither exists.
  local root="$1" candidate
  for candidate in CLAUDE.md RUNTIME.md; do
    if [[ -f "$root/$candidate" ]]; then
      echo "$root/$candidate"
      return
    fi
  done
  echo ""
}
# Check for runtime context file
check_runtime_context() {
  # Passes when the project has a CLAUDE.md or RUNTIME.md.
  local resolved
  resolved=$(runtime_context_file "$1")
  [[ -n "$resolved" ]]
}
# Check for AGENTS.md
check_agents_md() {
  # Passes when a top-level AGENTS.md file exists in the project.
  local agents_path="$1/AGENTS.md"
  [[ -f "$agents_path" ]]
}
# Check conditional loading/context (references guides or conditional section)
check_conditional_loading() {
  # Passes when the runtime context file mentions any marker of
  # conditional guide/context loading.
  local ctx_path
  ctx_path="$(runtime_context_file "$1")"
  if [[ -z "$ctx_path" ]]; then
    return 1
  fi
  grep -qi "agent-guides\|~/.config/mosaic/guides\|conditional.*loading\|conditional.*documentation\|conditional.*context" "$ctx_path" 2>/dev/null
}
# Check quality gates
check_quality_gates() {
  # Passes when the runtime context file mentions quality gates.
  # Deliberately loose heuristic: bare "lint"/"typecheck"/"test" count too.
  local ctx_path
  ctx_path="$(runtime_context_file "$1")"
  if [[ -z "$ctx_path" ]]; then
    return 1
  fi
  grep -qi "quality.gates\|must pass before\|lint\|typecheck\|test" "$ctx_path" 2>/dev/null
}
# Check monorepo sub-AGENTS.md
check_monorepo_sub_agents() {
  # For monorepos, report sub-packages under apps/packages/services/plugins
  # that have their own manifest but no AGENTS.md.
  # Echoes: "N/A" (not a monorepo), "OK", or "MISS:<space-joined names>".
  local root="$1"
  local missing=()
  if ! is_monorepo "$root"; then
    echo "N/A"
    return
  fi
  local layer child
  for layer in apps packages services plugins; do
    [[ -d "$root/$layer" ]] || continue
    for child in "$root/$layer"/*/; do
      [[ -d "$child" ]] || continue
      # Only sub-dirs with their own manifest are expected to carry AGENTS.md.
      if [[ -f "$child/package.json" || -f "$child/pyproject.toml" ]]; then
        [[ -f "$child/AGENTS.md" ]] || missing+=("$(basename "$child")")
      fi
    done
  done
  if [[ ${#missing[@]} -eq 0 ]]; then
    echo "OK"
  else
    echo "MISS:${missing[*]}"
  fi
}
# Lint a single project
lint_project() {
  # Run all checks against one project and print a row (table or JSON).
  # Side effect: writes the numeric score to /tmp/agent-lint-score-$$ so
  # main() can aggregate it. NOTE(review): predictable per-PID /tmp name —
  # fine as single-invocation scratch, but not tamper-proof.
  local dir="$1"
  local name
  name=$(basename "$dir")
  local has_runtime has_agents has_guides has_quality mono_status
  local score=0 max_score=4
  check_runtime_context "$dir" && has_runtime="OK" || has_runtime="MISS"
  check_agents_md "$dir" && has_agents="OK" || has_agents="MISS"
  check_conditional_loading "$dir" && has_guides="OK" || has_guides="MISS"
  check_quality_gates "$dir" && has_quality="OK" || has_quality="MISS"
  mono_status=$(check_monorepo_sub_agents "$dir")
  # "|| true" guards keep set -e from tripping when a check is MISS.
  [[ "$has_runtime" == "OK" ]] && ((score++)) || true
  [[ "$has_agents" == "OK" ]] && ((score++)) || true
  [[ "$has_guides" == "OK" ]] && ((score++)) || true
  [[ "$has_quality" == "OK" ]] && ((score++)) || true
  if $JSON_OUTPUT; then
    cat <<JSONEOF
{
  "project": "$name",
  "path": "$dir",
  "runtime_context": "$has_runtime",
  "agents_md": "$has_agents",
  "conditional_loading": "$has_guides",
  "quality_gates": "$has_quality",
  "monorepo_sub_agents": "$mono_status",
  "score": $score,
  "max_score": $max_score
}
JSONEOF
  else
    # Color-code the status
    local c_runtime c_agents c_guides c_quality
    [[ "$has_runtime" == "OK" ]] && c_runtime="${GREEN} OK ${NC}" || c_runtime="${RED} MISS ${NC}"
    [[ "$has_agents" == "OK" ]] && c_agents="${GREEN} OK ${NC}" || c_agents="${RED} MISS ${NC}"
    [[ "$has_guides" == "OK" ]] && c_guides="${GREEN} OK ${NC}" || c_guides="${RED} MISS ${NC}"
    [[ "$has_quality" == "OK" ]] && c_quality="${GREEN} OK ${NC}" || c_quality="${RED} MISS ${NC}"
    local score_color="$RED"
    [[ $score -ge 3 ]] && score_color="$YELLOW"
    [[ $score -eq 4 ]] && score_color="$GREEN"
    printf " %-35s %b %b %b %b ${score_color}%d/%d${NC}" \
      "$name" "$c_runtime" "$c_agents" "$c_guides" "$c_quality" "$score" "$max_score"
    # Show monorepo status if applicable
    if [[ "$mono_status" != "N/A" && "$mono_status" != "OK" ]]; then
      printf " ${YELLOW}(mono: %s)${NC}" "$mono_status"
    fi
    echo ""
  fi
  if $VERBOSE && ! $JSON_OUTPUT; then
    # echo -e so the ${DIM}/${NC} escape sequences render as colors instead
    # of printing literally (plain echo does not interpret \033).
    [[ "$has_runtime" == "MISS" ]] && echo -e " ${DIM} Runtime context file missing (CLAUDE.md or RUNTIME.md)${NC}"
    [[ "$has_agents" == "MISS" ]] && echo -e " ${DIM} AGENTS.md missing${NC}"
    [[ "$has_guides" == "MISS" ]] && echo -e " ${DIM} No conditional context/loading section detected${NC}"
    [[ "$has_quality" == "MISS" ]] && echo -e " ${DIM} No quality gates section${NC}"
    if [[ "$mono_status" == MISS:* ]]; then
      echo -e " ${DIM} Monorepo sub-AGENTS.md missing: ${mono_status#MISS:}${NC}"
    fi
  fi
  if $FIX_HINT && ! $JSON_OUTPUT; then
    if [[ "$has_runtime" == "MISS" || "$has_agents" == "MISS" ]]; then
      echo -e " ${DIM}Fix: ~/.config/mosaic/tools/bootstrap/init-project.sh --name \"$name\" --type auto${NC}"
    elif [[ "$has_guides" == "MISS" ]]; then
      echo -e " ${DIM}Fix: ~/.config/mosaic/tools/bootstrap/agent-upgrade.sh $dir --section conditional-loading${NC}"
    fi
  fi
  # Return score for summary
  echo "$score" > /tmp/agent-lint-score-$$
}
# Main
main() {
  # Gather projects (all of $SRC_DIR, or the single --project target),
  # lint each, then emit either a JSON document or a summary table.
  local projects=()
  local total=0 passing=0 total_score=0
  if [[ -n "$SINGLE_PROJECT" ]]; then
    projects=("$SINGLE_PROJECT")
  else
    for dir in "$SRC_DIR"/*/; do
      [[ -d "$dir" ]] || continue
      is_excluded "$dir" && continue
      is_coding_project "$dir" && projects+=("${dir%/}")
    done
  fi
  if [[ ${#projects[@]} -eq 0 ]]; then
    echo "No coding projects found."
    exit 0
  fi
  if $JSON_OUTPUT; then
    # Quote the $(date -I) substitution so the emitted JSON line cannot be
    # mangled by word-splitting.
    echo '{ "audit_date": "'"$(date -I)"'", "projects": ['
    local first=true
    for dir in "${projects[@]}"; do
      $first || echo ","
      first=false
      lint_project "$dir"
    done
    echo '] }'
    # lint_project drops a per-PID scratch score file even in JSON mode;
    # remove it here too (previously only the table branch cleaned it up).
    rm -f /tmp/agent-lint-score-$$
  else
    echo ""
    echo -e "${BOLD}Agent Configuration Audit — $(date +%Y-%m-%d)${NC}"
    echo "========================================================"
    printf " %-35s %s %s %s %s %s\n" \
      "Project" "RUNTIME" "AGENTS" "Guides" "Quality" "Score"
    echo " -----------------------------------------------------------------------"
    for dir in "${projects[@]}"; do
      lint_project "$dir"
      local score
      score=$(cat /tmp/agent-lint-score-$$ 2>/dev/null || echo 0)
      ((total++)) || true
      ((total_score += score)) || true
      [[ $score -eq 4 ]] && ((passing++)) || true
    done
    rm -f /tmp/agent-lint-score-$$
    echo " -----------------------------------------------------------------------"
    local need_attention=$((total - passing))
    echo ""
    echo -e " ${BOLD}Summary:${NC} $total projects | ${GREEN}$passing pass${NC} | ${RED}$need_attention need attention${NC}"
    echo ""
    if [[ $need_attention -gt 0 ]] && ! $FIX_HINT; then
      echo -e " ${DIM}Run with --fix-hint for suggested fixes${NC}"
      echo -e " ${DIM}Run with --verbose for per-check details${NC}"
      echo ""
    fi
  fi
}
main

View File

@@ -0,0 +1,332 @@
#!/bin/bash
# agent-upgrade.sh — Non-destructively upgrade agent configuration in projects
#
# Usage:
# agent-upgrade.sh <project-path> # Upgrade one project
# agent-upgrade.sh --all # Upgrade all projects in ~/src/
# agent-upgrade.sh --all --dry-run # Preview what would change
# agent-upgrade.sh <path> --section conditional-loading # Inject specific section
# agent-upgrade.sh <path> --create-agents # Create AGENTS.md if missing
# agent-upgrade.sh <path> --monorepo-scan # Create sub-AGENTS.md for monorepo dirs
#
# Safety:
# - Creates .bak backup before any modification
# - Append-only — never modifies existing sections
# - --dry-run shows what would change without writing
set -euo pipefail
# Defaults
SRC_DIR="$HOME/src"
FRAGMENTS_DIR="$HOME/.config/mosaic/templates/agent/fragments"
TEMPLATES_DIR="$HOME/.config/mosaic/templates/agent"
DRY_RUN=false
ALL_PROJECTS=false
TARGET_PATH=""
SECTION_ONLY=""
CREATE_AGENTS=false
MONOREPO_SCAN=false
# Exclusion patterns (same as agent-lint.sh)
EXCLUDE_PATTERNS=(
  "_worktrees"
  ".backup"
  "_old"
  "_bak"
  "junk"
  "traefik"
  "infrastructure"
)
# Colors
GREEN='\033[0;32m' RED='\033[0;31m' YELLOW='\033[0;33m'
NC='\033[0m' BOLD='\033[1m' DIM='\033[2m'
# Parse args
while [[ $# -gt 0 ]]; do
  case "$1" in
    --all) ALL_PROJECTS=true; shift ;;
    --dry-run) DRY_RUN=true; shift ;;
    --section) SECTION_ONLY="$2"; shift 2 ;;
    --create-agents) CREATE_AGENTS=true; shift ;;
    --monorepo-scan) MONOREPO_SCAN=true; shift ;;
    --src-dir) SRC_DIR="$2"; shift 2 ;;
    -h|--help)
      echo "Usage: agent-upgrade.sh [<project-path>|--all] [--dry-run] [--section <name>] [--create-agents] [--monorepo-scan]"
      echo ""
      echo "Options:"
      echo " --all Upgrade all projects in ~/src/"
      echo " --dry-run Preview changes without writing"
      echo " --section <name> Inject only a specific fragment (conditional-loading, commit-format, secrets, multi-agent, code-review, campsite-rule)"
      echo " --create-agents Create AGENTS.md if missing"
      echo " --monorepo-scan Create sub-AGENTS.md for monorepo directories"
      exit 0
      ;;
    *)
      # Bare argument: treat an existing directory as the target project.
      if [[ -d "$1" ]]; then
        TARGET_PATH="$1"
      else
        # Diagnostics go to stderr so they don't pollute captured output.
        echo "Unknown option or invalid path: $1" >&2
        exit 1
      fi
      shift
      ;;
  esac
done
if ! $ALL_PROJECTS && [[ -z "$TARGET_PATH" ]]; then
  echo "Error: Specify a project path or use --all" >&2
  exit 1
fi
# Helpers
is_coding_project() {
  # Any recognized build/package manifest makes this a coding project.
  local root="$1" manifest
  for manifest in package.json pyproject.toml Cargo.toml go.mod pom.xml build.gradle; do
    [[ -f "$root/$manifest" ]] && return 0
  done
  return 1
}
is_excluded() {
  # True when the directory's basename contains any exclusion substring.
  local base pat
  base=$(basename "$1")
  for pat in "${EXCLUDE_PATTERNS[@]}"; do
    case "$base" in
      *"$pat"*) return 0 ;;
    esac
  done
  return 1
}
is_monorepo() {
  # Monorepo markers: pnpm workspace file, turbo, lerna, or an npm
  # "workspaces" key in package.json.
  local root="$1" marker
  for marker in pnpm-workspace.yaml turbo.json lerna.json; do
    [[ -f "$root/$marker" ]] && return 0
  done
  grep -q '"workspaces"' "$root/package.json" 2>/dev/null
}
has_section() {
  # True when file $1 exists and matches grep pattern $2 (case-insensitive).
  local target="$1" needle="$2"
  [[ -f "$target" ]] || return 1
  grep -qi "$needle" "$target" 2>/dev/null
}
runtime_context_file() {
  # Prefer CLAUDE.md, then RUNTIME.md; if neither exists, fall back to the
  # CLAUDE.md path so callers have a stable injection target to create.
  local root="$1" candidate
  for candidate in CLAUDE.md RUNTIME.md; do
    if [[ -f "$root/$candidate" ]]; then
      echo "$root/$candidate"
      return
    fi
  done
  echo "$root/CLAUDE.md"
}
backup_file() {
  # Copy $1 to $1.bak before modification; no-op in dry-run mode or when
  # the file does not exist yet. Always returns 0.
  local target="$1"
  if $DRY_RUN; then
    return 0
  fi
  [[ -f "$target" ]] && cp "$target" "${target}.bak"
  return 0
}
# Inject a fragment into CLAUDE.md if the section doesn't exist
inject_fragment() {
  # Append the named fragment ($2, a file in $FRAGMENTS_DIR) to the runtime
  # context file of project $1, unless a matching section is already present
  # (each fragment has a grep detection pattern). Honors $DRY_RUN and backs
  # the file up via backup_file before writing. Returns 1 on unknown
  # fragment or missing fragment file; 0 otherwise (including skips).
  local project_dir="$1"
  local fragment_name="$2"
  local ctx_file
  ctx_file="$(runtime_context_file "$project_dir")"
  local fragment_file="$FRAGMENTS_DIR/$fragment_name.md"
  if [[ ! -f "$fragment_file" ]]; then
    echo -e " ${RED}Fragment not found: $fragment_file${NC}"
    return 1
  fi
  # Determine detection pattern for this fragment
  local detect_pattern
  case "$fragment_name" in
    conditional-loading) detect_pattern="agent-guides\|~/.config/mosaic/guides\|Conditional.*Loading\|Conditional.*Documentation\|Conditional.*Context" ;;
    commit-format) detect_pattern="<type>.*#issue\|Types:.*feat.*fix" ;;
    secrets) detect_pattern="NEVER hardcode secrets\|\.env.example.*committed" ;;
    multi-agent) detect_pattern="Multi-Agent Coordination\|pull --rebase.*before" ;;
    code-review) detect_pattern="codex-code-review\|codex-security-review\|Code Review" ;;
    campsite-rule) detect_pattern="Campsite Rule\|Touching it makes it yours\|was already there.*NEVER" ;;
    *) echo "Unknown fragment: $fragment_name"; return 1 ;;
  esac
  # runtime_context_file falls back to a (possibly nonexistent) CLAUDE.md
  # path, so a missing file here means the project has no context file yet.
  if [[ ! -f "$ctx_file" ]]; then
    echo -e " ${YELLOW}No runtime context file (CLAUDE.md/RUNTIME.md) — skipping fragment injection${NC}"
    return 0
  fi
  if has_section "$ctx_file" "$detect_pattern"; then
    echo -e " ${DIM}$fragment_name already present${NC}"
    return 0
  fi
  if $DRY_RUN; then
    echo -e " ${GREEN}Would inject: $fragment_name${NC}"
  else
    backup_file "$ctx_file"
    # Blank lines around the fragment keep markdown sections separated.
    echo "" >> "$ctx_file"
    cat "$fragment_file" >> "$ctx_file"
    echo "" >> "$ctx_file"
    echo -e " ${GREEN}Injected: $fragment_name${NC}"
  fi
}
# Create AGENTS.md from template
create_agents_md() {
  # Create a top-level AGENTS.md from the generic template, detecting the
  # project's quality-gate commands from its manifest. Honors $DRY_RUN and
  # never overwrites an existing AGENTS.md.
  local project_dir="$1"
  local agents_md="$project_dir/AGENTS.md"
  if [[ -f "$agents_md" ]]; then
    echo -e " ${DIM}AGENTS.md already exists${NC}"
    return 0
  fi
  local project_name
  project_name=$(basename "$project_dir")
  # Detect project type for quality gates
  local quality_gates="# Add quality gate commands here"
  if [[ -f "$project_dir/package.json" ]]; then
    quality_gates="npm run lint && npm run typecheck && npm test"
    if grep -q '"pnpm"' "$project_dir/package.json" 2>/dev/null || [[ -f "$project_dir/pnpm-lock.yaml" ]]; then
      quality_gates="pnpm lint && pnpm typecheck && pnpm test"
    fi
  elif [[ -f "$project_dir/pyproject.toml" ]]; then
    quality_gates="uv run ruff check src/ tests/ && uv run mypy src/ && uv run pytest --cov"
  fi
  if $DRY_RUN; then
    echo -e " ${GREEN}Would create: AGENTS.md${NC}"
  else
    # Use '|' as the sed delimiter (quality gates contain '/') and escape
    # '\', '&', and '|' in all replacement text: an unescaped '&' re-inserts
    # the matched placeholder, so "npm run lint && npm test" was previously
    # rendered as "npm run lint ${QUALITY_GATES}${QUALITY_GATES} npm test",
    # and the '/' in "ruff check src/" broke the s/// expression outright.
    local qg_esc=${quality_gates//\\/\\\\}
    qg_esc=${qg_esc//&/\\&}
    qg_esc=${qg_esc//|/\\|}
    local name_esc=${project_name//\\/\\\\}
    name_esc=${name_esc//&/\\&}
    name_esc=${name_esc//|/\\|}
    local prefix_esc=${name_esc^^}
    sed -e "s|\${PROJECT_NAME}|$name_esc|g" \
      -e "s|\${QUALITY_GATES}|$qg_esc|g" \
      -e "s|\${TASK_PREFIX}|$prefix_esc|g" \
      -e "s|\${SOURCE_DIR}|src|g" \
      "$TEMPLATES_DIR/AGENTS.md.template" > "$agents_md"
    echo -e " ${GREEN}Created: AGENTS.md${NC}"
  fi
}
# Create sub-AGENTS.md for monorepo directories
create_sub_agents() {
  # For monorepos, create AGENTS.md in each apps/packages/services/plugins
  # sub-directory that has its own manifest but no AGENTS.md yet. Honors
  # $DRY_RUN; renders sub-agents.md.template with sed substitutions.
  local project_dir="$1"
  if ! is_monorepo "$project_dir"; then
    echo -e " ${DIM}Not a monorepo — skipping sub-AGENTS scan${NC}"
    return 0
  fi
  local created=0
  for subdir_type in apps packages services plugins; do
    if [[ -d "$project_dir/$subdir_type" ]]; then
      for subdir in "$project_dir/$subdir_type"/*/; do
        [[ -d "$subdir" ]] || continue
        # Only if it has its own manifest
        if [[ -f "$subdir/package.json" ]] || [[ -f "$subdir/pyproject.toml" ]]; then
          if [[ ! -f "$subdir/AGENTS.md" ]]; then
            local dir_name
            dir_name=$(basename "$subdir")
            if $DRY_RUN; then
              echo -e " ${GREEN}Would create: $subdir_type/$dir_name/AGENTS.md${NC}"
            else
              # NOTE(review): sed replacement text is not escaped; a dir name
              # containing '&', '\', or the '/' delimiter would corrupt the
              # output — confirm whether such names can occur here.
              sed -e "s/\${DIRECTORY_NAME}/$dir_name/g" \
                -e "s/\${DIRECTORY_PURPOSE}/Part of the $subdir_type layer./g" \
                "$TEMPLATES_DIR/sub-agents.md.template" > "${subdir}AGENTS.md"
              echo -e " ${GREEN}Created: $subdir_type/$dir_name/AGENTS.md${NC}"
            fi
            ((created++)) || true
          fi
        fi
      done
    fi
  done
  if [[ $created -eq 0 ]]; then
    echo -e " ${DIM}All monorepo sub-AGENTS.md present${NC}"
  fi
}
# Upgrade a single project
upgrade_project() {
  # Apply the full upgrade pass to one project: inject fragments into the
  # runtime context file, then ensure AGENTS.md and (for monorepos)
  # sub-AGENTS.md exist. With --section, only that fragment is injected.
  local dir="$1"
  local name
  name=$(basename "$dir")
  echo -e "\n${BOLD}$name${NC} ${DIM}($dir)${NC}"
  if [[ -n "$SECTION_ONLY" ]]; then
    inject_fragment "$dir" "$SECTION_ONLY"
    return
  fi
  # Always try conditional-loading (highest impact)
  inject_fragment "$dir" "conditional-loading"
  # Try other fragments if runtime context exists
  if [[ -f "$dir/CLAUDE.md" || -f "$dir/RUNTIME.md" ]]; then
    inject_fragment "$dir" "commit-format"
    inject_fragment "$dir" "secrets"
    inject_fragment "$dir" "multi-agent"
    inject_fragment "$dir" "code-review"
    inject_fragment "$dir" "campsite-rule"
  fi
  # Create AGENTS.md if missing (always unless --section was used)
  # NOTE(review): SECTION_ONLY is always empty past the early return above,
  # so both guards below are effectively unconditional.
  if $CREATE_AGENTS || [[ -z "$SECTION_ONLY" ]]; then
    create_agents_md "$dir"
  fi
  # Monorepo sub-AGENTS.md
  if $MONOREPO_SCAN || [[ -z "$SECTION_ONLY" ]]; then
    create_sub_agents "$dir"
  fi
}
# Main
main() {
  # Collect target projects (all of $SRC_DIR, or the single path given),
  # then run the upgrade pass over each and print a footer.
  local targets=()
  if $ALL_PROJECTS; then
    local candidate
    for candidate in "$SRC_DIR"/*/; do
      [[ -d "$candidate" ]] || continue
      if is_excluded "$candidate"; then
        continue
      fi
      if is_coding_project "$candidate"; then
        targets+=("${candidate%/}")
      fi
    done
  else
    targets=("$TARGET_PATH")
  fi
  if [[ ${#targets[@]} -eq 0 ]]; then
    echo "No coding projects found."
    exit 0
  fi
  local mode="LIVE"
  $DRY_RUN && mode="DRY RUN"
  echo -e "${BOLD}Agent Configuration Upgrade — $(date +%Y-%m-%d) [$mode]${NC}"
  echo "========================================================"
  local project
  for project in "${targets[@]}"; do
    upgrade_project "$project"
  done
  echo ""
  echo -e "${BOLD}Done.${NC}"
  if $DRY_RUN; then
    echo -e "${DIM}Run without --dry-run to apply changes.${NC}"
  else
    echo -e "${DIM}Backups saved as .bak files. Run agent-lint.sh to verify.${NC}"
  fi
}
main

View File

@@ -0,0 +1,493 @@
#!/bin/bash
# init-project.sh - Bootstrap a project for AI-assisted development
# Usage: init-project.sh [OPTIONS]
#
# Creates CLAUDE.md, AGENTS.md, and standard directories using templates.
# Optionally initializes git labels and milestones.
set -e
# Helper locations relative to the Mosaic install.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
TEMPLATE_DIR="$HOME/.config/mosaic/templates/agent"
GIT_SCRIPT_DIR="$HOME/.config/mosaic/tools/git"
SEQUENTIAL_MCP_SCRIPT="$HOME/.config/mosaic/bin/mosaic-ensure-sequential-thinking"
# Defaults
PROJECT_NAME=""
PROJECT_TYPE=""
REPO_URL=""
TASK_PREFIX=""
PROJECT_DESCRIPTION=""
SKIP_LABELS=false
SKIP_CI=false
CICD_DOCKER=false
DRY_RUN=false
# Docker CI services as "name:dockerfile-path" entries (repeatable flag).
declare -a CICD_SERVICES=()
CICD_BRANCHES="main,develop"
show_help() {
  # Print usage text (quoted heredoc: no expansion) and exit successfully.
  cat <<'EOF'
Usage: init-project.sh [OPTIONS]
Bootstrap a project for AI-assisted development.
Options:
-n, --name <name> Project name (required)
-t, --type <type> Project type: nestjs-nextjs, django, generic (default: auto-detect)
-r, --repo <url> Git remote URL
-p, --prefix <prefix> Orchestrator task prefix (e.g., MS, UC)
-d, --description <desc> One-line project description
--skip-labels Skip creating git labels and milestones
--skip-ci Skip copying CI pipeline files
--cicd-docker Generate Docker build/push/link pipeline steps
--cicd-service <name:path> Service for Docker CI (repeatable, requires --cicd-docker)
--cicd-branches <list> Branches for Docker builds (default: main,develop)
--dry-run Show what would be created without creating anything
-h, --help Show this help
Examples:
# Full bootstrap with auto-detection
init-project.sh --name "My App" --description "A web application"
# Specific type
init-project.sh --name "My API" --type django --prefix MA
# Dry run
init-project.sh --name "Test" --type generic --dry-run
# With Docker CI/CD pipeline
init-project.sh --name "My App" --cicd-docker \
--cicd-service "my-api:src/api/Dockerfile" \
--cicd-service "my-web:src/web/Dockerfile"
Project Types:
nestjs-nextjs NestJS + Next.js monorepo (pnpm + TurboRepo)
django Django project (pytest + ruff + mypy)
typescript Standalone TypeScript/Next.js project
python-fastapi Python FastAPI project (pytest + ruff + mypy + uv)
python-library Python library/SDK (pytest + ruff + mypy + uv)
generic Generic project (uses base templates)
auto Auto-detect from project files (default)
EOF
  exit 0
}
# Parse arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    -n|--name)
      PROJECT_NAME="$2"
      shift 2
      ;;
    -t|--type)
      PROJECT_TYPE="$2"
      shift 2
      ;;
    -r|--repo)
      REPO_URL="$2"
      shift 2
      ;;
    -p|--prefix)
      TASK_PREFIX="$2"
      shift 2
      ;;
    -d|--description)
      PROJECT_DESCRIPTION="$2"
      shift 2
      ;;
    --skip-labels)
      SKIP_LABELS=true
      shift
      ;;
    --skip-ci)
      SKIP_CI=true
      shift
      ;;
    --cicd-docker)
      CICD_DOCKER=true
      shift
      ;;
    --cicd-service)
      # Repeatable: accumulates "name:dockerfile-path" entries.
      CICD_SERVICES+=("$2")
      shift 2
      ;;
    --cicd-branches)
      CICD_BRANCHES="$2"
      shift 2
      ;;
    --dry-run)
      DRY_RUN=true
      shift
      ;;
    -h|--help)
      show_help
      ;;
    *)
      echo "Unknown option: $1" >&2
      echo "Run with --help for usage" >&2
      exit 1
      ;;
  esac
done
# Validate required args
if [[ -z "$PROJECT_NAME" ]]; then
  echo "Error: --name is required" >&2
  exit 1
fi
# Auto-detect project type if not specified
detect_project_type() {
  # Inspect marker files in the current directory and echo the best-match
  # project type, checking from most to least specific.
  # Monorepo tooling (pnpm/turbo) or npm workspaces imply the NestJS+Next stack.
  if [[ -f "pnpm-workspace.yaml" || -f "turbo.json" ]]; then
    echo "nestjs-nextjs"
    return
  fi
  if [[ -f "package.json" ]] && grep -q '"workspaces"' package.json 2>/dev/null; then
    echo "nestjs-nextjs"
    return
  fi
  # Django: manage.py alongside pyproject.toml.
  if [[ -f "manage.py" && -f "pyproject.toml" ]]; then
    echo "django"
    return
  fi
  # FastAPI mentioned anywhere in pyproject metadata.
  if [[ -f "pyproject.toml" ]] && grep -q "fastapi" pyproject.toml 2>/dev/null; then
    echo "python-fastapi"
    return
  fi
  # Standalone TypeScript project.
  if [[ -f "tsconfig.json" && -f "package.json" ]]; then
    echo "typescript"
    return
  fi
  # Any remaining pyproject.toml: a Python library/tool.
  if [[ -f "pyproject.toml" ]]; then
    echo "python-library"
    return
  fi
  echo "generic"
}
if [[ -z "$PROJECT_TYPE" || "$PROJECT_TYPE" == "auto" ]]; then
  PROJECT_TYPE=$(detect_project_type)
  echo "Auto-detected project type: $PROJECT_TYPE"
fi
# Derive defaults
if [[ -z "$REPO_URL" ]]; then
  REPO_URL=$(git remote get-url origin 2>/dev/null || echo "")
fi
if [[ -z "$TASK_PREFIX" ]]; then
  # Generate prefix from project name initials
  TASK_PREFIX=$(echo "$PROJECT_NAME" | sed 's/[^A-Za-z ]//g' | awk '{for(i=1;i<=NF;i++) printf toupper(substr($i,1,1))}')
  if [[ -z "$TASK_PREFIX" ]]; then
    TASK_PREFIX="PRJ"
  fi
fi
if [[ -z "$PROJECT_DESCRIPTION" ]]; then
  PROJECT_DESCRIPTION="$PROJECT_NAME"
fi
PROJECT_DIR=$(basename "$(pwd)")
# Detect quality gates, source dir, and stack info based on type.
# Values are exported because the templates are rendered with envsubst.
case "$PROJECT_TYPE" in
  nestjs-nextjs)
    export QUALITY_GATES="pnpm typecheck && pnpm lint && pnpm test"
    export SOURCE_DIR="apps"
    export BUILD_COMMAND="pnpm build"
    export TEST_COMMAND="pnpm test"
    export LINT_COMMAND="pnpm lint"
    export TYPECHECK_COMMAND="pnpm typecheck"
    export FRONTEND_STACK="Next.js + React + TailwindCSS + Shadcn/ui"
    export BACKEND_STACK="NestJS + Prisma ORM"
    export DATABASE_STACK="PostgreSQL"
    export TESTING_STACK="Vitest + Playwright"
    export DEPLOYMENT_STACK="Docker + docker-compose"
    export CONFIG_FILES="turbo.json, pnpm-workspace.yaml, tsconfig.json"
    ;;
  django)
    export QUALITY_GATES="ruff check . && mypy . && pytest tests/"
    export SOURCE_DIR="src"
    export BUILD_COMMAND="pip install -e ."
    export TEST_COMMAND="pytest tests/"
    export LINT_COMMAND="ruff check ."
    export TYPECHECK_COMMAND="mypy ."
    export FRONTEND_STACK="N/A"
    export BACKEND_STACK="Django / Django REST Framework"
    export DATABASE_STACK="PostgreSQL"
    export TESTING_STACK="pytest + pytest-django"
    export DEPLOYMENT_STACK="Docker + docker-compose"
    export CONFIG_FILES="pyproject.toml"
    # Lowercase underscore-separated identifier for package/module naming.
    export PROJECT_SLUG=$(echo "$PROJECT_NAME" | tr '[:upper:]' '[:lower:]' | tr ' ' '_' | sed 's/[^a-z0-9_]//g')
    ;;
  typescript)
    # Pick the package manager from the lockfile present.
    PKG_MGR="npm"
    [[ -f "pnpm-lock.yaml" ]] && PKG_MGR="pnpm"
    [[ -f "yarn.lock" ]] && PKG_MGR="yarn"
    export QUALITY_GATES="$PKG_MGR run lint && $PKG_MGR run typecheck && $PKG_MGR test"
    export SOURCE_DIR="src"
    export BUILD_COMMAND="$PKG_MGR run build"
    export TEST_COMMAND="$PKG_MGR test"
    export LINT_COMMAND="$PKG_MGR run lint"
    export TYPECHECK_COMMAND="npx tsc --noEmit"
    export FRAMEWORK="TypeScript"
    export PACKAGE_MANAGER="$PKG_MGR"
    export FRONTEND_STACK="N/A"
    export BACKEND_STACK="N/A"
    export DATABASE_STACK="N/A"
    export TESTING_STACK="Vitest or Jest"
    export DEPLOYMENT_STACK="TBD"
    export CONFIG_FILES="tsconfig.json, package.json"
    # Detect Next.js
    if grep -q '"next"' package.json 2>/dev/null; then
      export FRAMEWORK="Next.js"
      export FRONTEND_STACK="Next.js + React"
    fi
    ;;
  python-fastapi)
    export PROJECT_SLUG=$(echo "$PROJECT_NAME" | tr '[:upper:]' '[:lower:]' | tr ' ' '_' | sed 's/[^a-z0-9_]//g')
    export QUALITY_GATES="uv run ruff check src/ tests/ && uv run ruff format --check src/ && uv run mypy src/ && uv run pytest --cov"
    export SOURCE_DIR="src"
    export BUILD_COMMAND="uv sync --all-extras"
    export TEST_COMMAND="uv run pytest --cov"
    export LINT_COMMAND="uv run ruff check src/ tests/"
    export TYPECHECK_COMMAND="uv run mypy src/"
    export FRONTEND_STACK="N/A"
    export BACKEND_STACK="FastAPI"
    export DATABASE_STACK="TBD"
    export TESTING_STACK="pytest + httpx"
    export DEPLOYMENT_STACK="Docker"
    export CONFIG_FILES="pyproject.toml"
    ;;
  python-library)
    export PROJECT_SLUG=$(echo "$PROJECT_NAME" | tr '[:upper:]' '[:lower:]' | tr ' ' '_' | sed 's/[^a-z0-9_]//g')
    export QUALITY_GATES="uv run ruff check src/ tests/ && uv run ruff format --check src/ && uv run mypy src/ && uv run pytest --cov"
    export SOURCE_DIR="src"
    export BUILD_COMMAND="uv sync --all-extras"
    export TEST_COMMAND="uv run pytest --cov"
    export LINT_COMMAND="uv run ruff check src/ tests/"
    export TYPECHECK_COMMAND="uv run mypy src/"
    export BUILD_SYSTEM="hatchling"
    export FRONTEND_STACK="N/A"
    export BACKEND_STACK="N/A"
    export DATABASE_STACK="N/A"
    export TESTING_STACK="pytest"
    export DEPLOYMENT_STACK="PyPI / Gitea Packages"
    export CONFIG_FILES="pyproject.toml"
    ;;
  *)
    # Generic fallback: placeholder commands prompt manual follow-up.
    export QUALITY_GATES="echo 'No quality gates configured — update CLAUDE.md'"
    export SOURCE_DIR="src"
    export BUILD_COMMAND="echo 'No build command configured'"
    export TEST_COMMAND="echo 'No test command configured'"
    export LINT_COMMAND="echo 'No lint command configured'"
    export TYPECHECK_COMMAND="echo 'No typecheck command configured'"
    export FRONTEND_STACK="TBD"
    export BACKEND_STACK="TBD"
    export DATABASE_STACK="TBD"
    export TESTING_STACK="TBD"
    export DEPLOYMENT_STACK="TBD"
    export CONFIG_FILES="TBD"
    ;;
esac
# Export common variables
export PROJECT_NAME
export PROJECT_DESCRIPTION
export PROJECT_DIR
export REPO_URL
export TASK_PREFIX
echo "=== Project Bootstrap ==="
echo " Name: $PROJECT_NAME"
echo " Type: $PROJECT_TYPE"
echo " Prefix: $TASK_PREFIX"
echo " Description: $PROJECT_DESCRIPTION"
echo " Repo: ${REPO_URL:-'(not set)'}"
echo " Directory: $(pwd)"
echo ""
# Select template directory: stack-specific if present, else generic.
STACK_TEMPLATE_DIR="$TEMPLATE_DIR/projects/$PROJECT_TYPE"
if [[ ! -d "$STACK_TEMPLATE_DIR" ]]; then
  STACK_TEMPLATE_DIR="$TEMPLATE_DIR"
  echo "No stack-specific templates found for '$PROJECT_TYPE', using generic templates."
fi
if [[ "$DRY_RUN" == true ]]; then
  # Dry-run: describe everything that would be created, then stop.
  echo "[DRY RUN] Would create:"
  echo " - Validate sequential-thinking MCP hard requirement"
  echo " - CLAUDE.md (from $STACK_TEMPLATE_DIR/CLAUDE.md.template)"
  echo " - AGENTS.md (from $STACK_TEMPLATE_DIR/AGENTS.md.template)"
  echo " - docs/scratchpads/"
  echo " - docs/reports/qa-automation/{pending,in-progress,done,escalated}"
  echo " - docs/reports/deferred/"
  echo " - docs/tasks/"
  echo " - docs/releases/"
  echo " - docs/templates/"
  if [[ "$SKIP_CI" != true ]]; then
    echo " - .woodpecker/codex-review.yml"
    echo " - .woodpecker/schemas/*.json"
  fi
  if [[ "$SKIP_LABELS" != true ]]; then
    echo " - Standard git labels (epic, feature, bug, task, documentation, security, breaking)"
    echo " - Milestone: 0.0.1 - Pre-MVP Foundation"
    echo " - Milestone policy: 0.0.x pre-MVP, 0.1.0 for MVP release"
  fi
  if [[ "$CICD_DOCKER" == true ]]; then
    echo " - Docker build/push/link steps appended to .woodpecker.yml"
    for svc in "${CICD_SERVICES[@]}"; do
      # Service entries are "name:path"; report just the name portion.
      echo " - docker-build-${svc%%:*}"
    done
    echo " - link-packages"
  fi
  exit 0
fi
# Enforce sequential-thinking MCP hard requirement.
if [[ ! -x "$SEQUENTIAL_MCP_SCRIPT" ]]; then
  echo "Error: Missing sequential-thinking setup helper: $SEQUENTIAL_MCP_SCRIPT" >&2
  echo "Install/repair Mosaic at ~/.config/mosaic before bootstrapping projects." >&2
  exit 1
fi
if "$SEQUENTIAL_MCP_SCRIPT" >/dev/null 2>&1; then
  echo "Verified sequential-thinking MCP configuration"
else
  echo "Error: sequential-thinking MCP setup failed (hard requirement)." >&2
  echo "Run: $SEQUENTIAL_MCP_SCRIPT" >&2
  exit 1
fi
# Create CLAUDE.md — rendered with envsubst from the exported variables above.
if [[ -f "CLAUDE.md" ]]; then
  echo "CLAUDE.md already exists — skipping (rename or delete to recreate)"
else
  if [[ -f "$STACK_TEMPLATE_DIR/CLAUDE.md.template" ]]; then
    envsubst < "$STACK_TEMPLATE_DIR/CLAUDE.md.template" > CLAUDE.md
    echo "Created CLAUDE.md"
  else
    echo "Warning: No CLAUDE.md template found at $STACK_TEMPLATE_DIR" >&2
  fi
fi
# Create AGENTS.md
if [[ -f "AGENTS.md" ]]; then
  echo "AGENTS.md already exists — skipping (rename or delete to recreate)"
else
  if [[ -f "$STACK_TEMPLATE_DIR/AGENTS.md.template" ]]; then
    envsubst < "$STACK_TEMPLATE_DIR/AGENTS.md.template" > AGENTS.md
    echo "Created AGENTS.md"
  else
    echo "Warning: No AGENTS.md template found at $STACK_TEMPLATE_DIR" >&2
  fi
fi
# Create directories
mkdir -p \
  docs/scratchpads \
  docs/reports/qa-automation/pending \
  docs/reports/qa-automation/in-progress \
  docs/reports/qa-automation/done \
  docs/reports/qa-automation/escalated \
  docs/reports/deferred \
  docs/tasks \
  docs/releases \
  docs/templates
echo "Created docs/scratchpads/, docs/reports/*, docs/tasks/, docs/releases/, docs/templates/"
# Set up CI/CD pipeline from the shared Codex pipeline templates.
if [[ "$SKIP_CI" != true ]]; then
  CODEX_DIR="$HOME/.config/mosaic/tools/codex"
  if [[ -d "$CODEX_DIR/woodpecker" ]]; then
    mkdir -p .woodpecker/schemas
    cp "$CODEX_DIR/woodpecker/codex-review.yml" .woodpecker/
    cp "$CODEX_DIR/schemas/"*.json .woodpecker/schemas/
    echo "Created .woodpecker/ with Codex review pipeline"
  else
    echo "Codex pipeline templates not found — skipping CI setup"
  fi
fi
# Generate Docker build/push/link pipeline steps
if [[ "$CICD_DOCKER" == true ]]; then
  CICD_SCRIPT="$HOME/.config/mosaic/tools/cicd/generate-docker-steps.sh"
  if [[ -x "$CICD_SCRIPT" ]]; then
    # Parse org and repo from git remote
    CICD_REGISTRY=""
    CICD_ORG=""
    CICD_REPO_NAME=""
    if [[ -n "$REPO_URL" ]]; then
      # Extract host from https://host/org/repo.git or git@host:org/repo.git
      CICD_REGISTRY=$(echo "$REPO_URL" | sed -E 's|https?://([^/]+)/.*|\1|; s|git@([^:]+):.*|\1|')
      CICD_ORG=$(echo "$REPO_URL" | sed -E 's|https?://[^/]+/([^/]+)/.*|\1|; s|git@[^:]+:([^/]+)/.*|\1|')
      CICD_REPO_NAME=$(echo "$REPO_URL" | sed -E 's|\.git$||' | sed -E 's|.*/([^/]+)$|\1|')
    fi
    if [[ -n "$CICD_REGISTRY" && -n "$CICD_ORG" && -n "$CICD_REPO_NAME" && ${#CICD_SERVICES[@]} -gt 0 ]]; then
      # Build service args as an array so entries survive intact; the old
      # string-concat + unquoted expansion broke any --cicd-service value
      # containing whitespace.
      SVC_ARGS=()
      for svc in "${CICD_SERVICES[@]}"; do
        SVC_ARGS+=(--service "$svc")
      done
      echo ""
      echo "Generating Docker CI/CD pipeline steps..."
      # Add kaniko_setup anchor to variables section if .woodpecker.yml exists
      if [[ -f ".woodpecker.yml" ]]; then
        # Append Docker steps to existing pipeline
        "$CICD_SCRIPT" \
          --registry "$CICD_REGISTRY" \
          --org "$CICD_ORG" \
          --repo "$CICD_REPO_NAME" \
          "${SVC_ARGS[@]}" \
          --branches "$CICD_BRANCHES" >> .woodpecker.yml
        echo "Appended Docker build/push/link steps to .woodpecker.yml"
      else
        echo "Warning: No .woodpecker.yml found — generate quality gates first, then re-run with --cicd-docker" >&2
      fi
    else
      if [[ ${#CICD_SERVICES[@]} -eq 0 ]]; then
        echo "Warning: --cicd-docker requires at least one --cicd-service" >&2
      else
        echo "Warning: Could not parse registry/org/repo from git remote — specify --repo" >&2
      fi
    fi
  else
    echo "Docker CI/CD generator not found at $CICD_SCRIPT — skipping" >&2
  fi
fi
# Initialize labels and milestones via the sibling helper script.
if [[ "$SKIP_LABELS" != true ]]; then
  LABEL_SCRIPT="$SCRIPT_DIR/init-repo-labels.sh"
  if [[ -x "$LABEL_SCRIPT" ]]; then
    echo ""
    echo "Initializing git labels and milestones..."
    "$LABEL_SCRIPT"
  else
    echo "Label init script not found — skipping label setup"
  fi
fi
echo ""
echo "=== Bootstrap Complete ==="
echo ""
echo "Next steps:"
echo " 1. Review and customize CLAUDE.md"
echo " 2. Review and customize AGENTS.md"
echo " 3. Update quality gate commands if needed"
echo " 4. Commit: git add CLAUDE.md AGENTS.md docs/ .woodpecker/ && git commit -m 'feat: Bootstrap project for AI development'"
if [[ "$SKIP_CI" != true ]]; then
  echo " 5. Add 'codex_api_key' secret to Woodpecker CI"
fi
if [[ "$CICD_DOCKER" == true ]]; then
  echo " 6. Add 'gitea_username' and 'gitea_token' secrets to Woodpecker CI"
  echo " (token needs package:write scope)"
fi
View File

@@ -0,0 +1,123 @@
#!/bin/bash
# init-repo-labels.sh - Create standard labels and initial milestone for a repository
# Usage: init-repo-labels.sh [--skip-milestone]
#
# Works with both Gitea (tea) and GitHub (gh).
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Shared git helpers; detect-platform.sh provides detect_platform,
# get_repo_owner and get_repo_name used below.
GIT_SCRIPT_DIR="$HOME/.config/mosaic/tools/git"
source "$GIT_SCRIPT_DIR/detect-platform.sh"
SKIP_MILESTONE=false
# Parse command-line flags.
while [[ $# -gt 0 ]]; do
case $1 in
--skip-milestone)
SKIP_MILESTONE=true
shift
;;
-h|--help)
echo "Usage: $(basename "$0") [--skip-milestone]"
echo ""
echo "Create standard labels and initial milestone for the current repository."
echo ""
echo "Options:"
echo "  --skip-milestone  Skip creating the 0.0.1 pre-MVP milestone"
echo "  -h, --help        Show this help"
exit 0
;;
*)
echo "Unknown option: $1" >&2
exit 1
;;
esac
done
# Platform and repo coordinates come from the sourced detect-platform.sh.
PLATFORM=$(detect_platform)
OWNER=$(get_repo_owner)
REPO=$(get_repo_name)
echo "Platform: $PLATFORM"
echo "Repository: $OWNER/$REPO"
echo ""
# Standard labels with colors
# Format: "name|color|description"
LABELS=(
"epic|3E4B9E|Large feature spanning multiple issues"
"feature|0E8A16|New functionality"
"bug|D73A4A|Defect fix"
"task|0075CA|General work item"
"documentation|0075CA|Documentation updates"
"security|B60205|Security-related"
"breaking|D93F0B|Breaking change"
)
# create_label_github — idempotently create one GitHub label via the gh CLI.
# Args: $1 label name, $2 hex color (no '#'), $3 description.
# Reads globals OWNER and REPO. Always returns 0; prints a status line.
create_label_github() {
  local lbl="$1" clr="$2" desc="$3"
  local existing
  # List current label names; an unreachable API simply yields an empty list.
  existing=$(gh label list --repo "$OWNER/$REPO" --json name -q ".[].name" 2>/dev/null || true)
  if printf '%s\n' "$existing" | grep -qx "$lbl"; then
    echo "  [skip] '$lbl' already exists"
    return 0
  fi
  if gh label create "$lbl" \
    --repo "$OWNER/$REPO" \
    --color "$clr" \
    --description "$desc" 2>/dev/null; then
    echo "  [created] '$lbl'"
  else
    echo "  [error] Failed to create '$lbl'"
  fi
}
# create_label_gitea — idempotently create one Gitea label via the tea CLI.
# Args: $1 label name, $2 hex color (no '#'), $3 description.
# Always returns 0; prints a status line per label.
create_label_gitea() {
local name="$1" color="$2" description="$3"
# Check if label already exists.
# Fix: the previous check was `grep -q "$name"`, a regex *substring* match,
# so an existing label that merely contained the name (e.g. "debugger"
# containing "bug") wrongly reported "already exists" and skipped creation.
# Use a fixed-string whole-word match instead.
# NOTE(review): tea prints a table; a whole-word match could still collide
# with hyphenated label names — confirm against tea's actual output format.
if tea labels list 2>/dev/null | grep -Fqw -- "$name"; then
echo "  [skip] '$name' already exists"
return 0
fi
tea labels create --name "$name" --color "#$color" --description "$description" 2>/dev/null && \
echo "  [created] '$name'" || \
echo "  [error] Failed to create '$name'"
}
# Create every standard label using the platform-appropriate CLI helper.
echo "Creating labels..."
for label_def in "${LABELS[@]}"; do
# Split the "name|color|description" triple.
IFS='|' read -r name color description <<< "$label_def"
case "$PLATFORM" in
github)
create_label_github "$name" "$color" "$description"
;;
gitea)
create_label_gitea "$name" "$color" "$description"
;;
*)
echo "Error: Unsupported platform '$PLATFORM'" >&2
exit 1
;;
esac
done
echo ""
# Create initial pre-MVP milestone
if [[ "$SKIP_MILESTONE" != true ]]; then
echo "Creating initial pre-MVP milestone..."
# Failure is tolerated: the milestone may already exist.
"$GIT_SCRIPT_DIR/milestone-create.sh" -t "0.0.1" -d "Pre-MVP - Foundation Sprint" 2>/dev/null && \
echo "  [created] Milestone '0.0.1 - Pre-MVP'" || \
echo "  [skip] Milestone may already exist or creation failed"
echo "  [note] Reserve 0.1.0 for MVP release milestone"
echo ""
fi
echo "Label initialization complete."

View File

@@ -0,0 +1,379 @@
#!/bin/bash
# generate-docker-steps.sh - Generate Woodpecker CI pipeline steps for Docker build/push/link
#
# Outputs valid Woodpecker YAML for:
# - Kaniko Docker build & push steps (one per service)
# - Gitea package linking step
# - npm package publish step (optional)
#
# Usage:
# generate-docker-steps.sh \
# --registry git.uscllc.com \
# --org usc \
# --repo uconnect \
# --service backend-api:src/backend-api/Dockerfile \
# --service web-portal:src/web-portal/Dockerfile \
# --branches main,develop \
# [--build-arg backend-api:NEXT_PUBLIC_API_URL=https://api.example.com] \
# [--npm-package @uconnect/schemas:src/schemas] \
# [--npm-registry https://git.uscllc.com/api/packages/usc/npm/] \
# [--depends-on build]
set -e
# Defaults
REGISTRY=""
ORG=""
REPO=""
BRANCHES="main,develop"
DEPENDS_ON="build"
# Accumulators for repeatable options:
# SERVICES: "name:dockerfile", BUILD_ARGS: "service:KEY=VAL",
# NPM_PACKAGES: "pkg:path".
declare -a SERVICES=()
declare -a BUILD_ARGS=()
declare -a NPM_PACKAGES=()
NPM_REGISTRY=""
# show_help — print usage text and exit 0.
# The quoted 'EOF' delimiter keeps the example commands literal (no expansion).
show_help() {
cat <<'EOF'
Usage: generate-docker-steps.sh [OPTIONS]
Generate Woodpecker CI YAML for Docker build/push/link via Kaniko.
Required:
  --registry <host>             Gitea hostname (e.g., git.uscllc.com)
  --org <name>                  Gitea organization (e.g., usc)
  --repo <name>                 Repository name (e.g., uconnect)
  --service <name:dockerfile>   Service to build (repeatable)
Optional:
  --branches <list>             Comma-separated branches (default: main,develop)
  --depends-on <step>           Step name Docker builds depend on (default: build)
  --build-arg <service:KEY=VAL> Build arg for a service (repeatable)
  --npm-package <pkg:path>      npm package to publish (repeatable)
  --npm-registry <url>          npm registry URL for publishing
  --kaniko-setup-only           Output just the kaniko_setup YAML anchor
  -h, --help                    Show this help
Examples:
  # Mosaic Stack pattern
  generate-docker-steps.sh \
    --registry git.mosaicstack.dev --org mosaic --repo stack \
    --service stack-api:apps/api/Dockerfile \
    --service stack-web:apps/web/Dockerfile \
    --build-arg stack-web:NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev
  # U-Connect pattern
  generate-docker-steps.sh \
    --registry git.uscllc.com --org usc --repo uconnect \
    --service uconnect-backend-api:src/backend-api/Dockerfile \
    --service uconnect-web-portal:src/web-portal/Dockerfile \
    --service uconnect-ingest-api:src/ingest-api/Dockerfile \
    --branches main,develop
EOF
exit 0
}
KANIKO_SETUP_ONLY=false
# Parse arguments
while [[ $# -gt 0 ]]; do
case $1 in
--registry) REGISTRY="$2"; shift 2 ;;
--org) ORG="$2"; shift 2 ;;
--repo) REPO="$2"; shift 2 ;;
--service) SERVICES+=("$2"); shift 2 ;;
--branches) BRANCHES="$2"; shift 2 ;;
--depends-on) DEPENDS_ON="$2"; shift 2 ;;
--build-arg) BUILD_ARGS+=("$2"); shift 2 ;;
--npm-package) NPM_PACKAGES+=("$2"); shift 2 ;;
--npm-registry) NPM_REGISTRY="$2"; shift 2 ;;
--kaniko-setup-only) KANIKO_SETUP_ONLY=true; shift ;;
-h|--help) show_help ;;
*) echo "Unknown option: $1" >&2; exit 1 ;;
esac
done
# Validate required args
if [[ -z "$REGISTRY" ]]; then echo "Error: --registry is required" >&2; exit 1; fi
if [[ -z "$ORG" ]]; then echo "Error: --org is required" >&2; exit 1; fi
if [[ -z "$REPO" ]]; then echo "Error: --repo is required" >&2; exit 1; fi
# Services are not needed when only the kaniko anchor is requested.
if [[ ${#SERVICES[@]} -eq 0 && "$KANIKO_SETUP_ONLY" != true ]]; then
echo "Error: at least one --service is required" >&2; exit 1
fi
# Parse branches into YAML list
# e.g. "main,develop" -> "[main, develop]" (YAML flow sequence).
IFS=',' read -ra BRANCH_LIST <<< "$BRANCHES"
BRANCH_YAML="["
for i in "${!BRANCH_LIST[@]}"; do
if [[ $i -gt 0 ]]; then BRANCH_YAML="$BRANCH_YAML, "; fi
BRANCH_YAML="$BRANCH_YAML${BRANCH_LIST[$i]}"
done
BRANCH_YAML="$BRANCH_YAML]"
# get_build_args_for_service — print the KEY=VAL build args registered for
# one service as a single space-separated line (empty output if none).
# Reads the global BUILD_ARGS array of "service:KEY=VAL" entries.
get_build_args_for_service() {
  local target="$1"
  local entry
  local matched=()
  for entry in "${BUILD_ARGS[@]}"; do
    # Entry format is "<service>:<KEY=VAL>"; split on the first colon so
    # values containing ':' stay intact.
    if [[ "${entry%%:*}" == "$target" ]]; then
      matched+=("${entry#*:}")
    fi
  done
  echo "${matched[@]}"
}
# get_context — derive the Kaniko build context for a Dockerfile path.
# Dockerfiles at the repo root or under a standard monorepo layout dir
# (apps/, src/, packages/) build from the repo root "."; anything else
# builds from its own directory.
#   apps/api/Dockerfile        -> .
#   docker/postgres/Dockerfile -> docker/postgres
get_context() {
  local df_dir
  df_dir=$(dirname "$1")
  case "$df_dir" in
    .|apps/*|src/*|packages/*) echo "." ;;
    *) echo "$df_dir" ;;
  esac
}
# ============================================================
# Output: YAML anchor for kaniko setup
# ============================================================
# emit_kaniko_anchor — print the shared &kaniko_setup YAML anchor that writes
# a registry auth config for Kaniko. The \$GITEA_USER/\$GITEA_TOKEN escapes
# survive this heredoc so the variables expand at CI time, not here.
emit_kaniko_anchor() {
cat <<EOF
# Kaniko base command setup
- &kaniko_setup |
mkdir -p /kaniko/.docker
echo "{\\"auths\\":{\\"${REGISTRY}\\":{\\"username\\":\\"\$GITEA_USER\\",\\"password\\":\\"\$GITEA_TOKEN\\"}}}" > /kaniko/.docker/config.json
EOF
}
# With --kaniko-setup-only we emit just the anchor and stop.
if [[ "$KANIKO_SETUP_ONLY" == true ]]; then
emit_kaniko_anchor
exit 0
fi
# ============================================================
# Output: Header comment
# ============================================================
cat <<EOF
# ======================
# Docker Build & Push (${BRANCHES} only)
# ======================
# Generated by: generate-docker-steps.sh
# Registry: ${REGISTRY}/${ORG}
# Requires secrets: gitea_username, gitea_token
#
# Tagging Strategy:
# - Always: commit SHA (first 8 chars)
EOF
for b in "${BRANCH_LIST[@]}"; do
case "$b" in
main) echo " # - main branch: 'latest'" ;;
develop) echo " # - develop branch: 'dev'" ;;
*) echo " # - ${b} branch: '${b}'" ;;
esac
done
echo " # - git tags: version tag (e.g., v1.0.0)"
echo ""
# ============================================================
# Output: Kaniko build step for each service
# ============================================================
# Each "--service name:dockerfile" becomes one docker-build-<name> step.
# The emitted step's shell assembles --destination flags at CI time; the
# \$-escaped variables below are expanded by Woodpecker/the step, not here.
for svc in "${SERVICES[@]}"; do
SVC_NAME="${svc%%:*}"
DOCKERFILE="${svc#*:}"
CONTEXT=$(get_context "$DOCKERFILE")
SVC_BUILD_ARGS=$(get_build_args_for_service "$SVC_NAME")
# Build the kaniko command with build args
KANIKO_EXTRA=""
if [[ -n "$SVC_BUILD_ARGS" ]]; then
for arg in $SVC_BUILD_ARGS; do
KANIKO_EXTRA="$KANIKO_EXTRA --build-arg ${arg}"
done
fi
cat <<EOF
# Build and push ${SVC_NAME}
docker-build-${SVC_NAME}:
image: gcr.io/kaniko-project/executor:debug
environment:
GITEA_USER:
from_secret: gitea_username
GITEA_TOKEN:
from_secret: gitea_token
CI_COMMIT_BRANCH: \${CI_COMMIT_BRANCH}
CI_COMMIT_TAG: \${CI_COMMIT_TAG}
CI_COMMIT_SHA: \${CI_COMMIT_SHA}
commands:
- *kaniko_setup
- |
DESTINATIONS="--destination ${REGISTRY}/${ORG}/${SVC_NAME}:\${CI_COMMIT_SHA:0:8}"
EOF
# Branch-specific tags
# main -> :latest, develop -> :dev, any other branch -> :<branch>.
for b in "${BRANCH_LIST[@]}"; do
case "$b" in
main)
cat <<EOF
if [ "\$CI_COMMIT_BRANCH" = "main" ]; then
DESTINATIONS="\$DESTINATIONS --destination ${REGISTRY}/${ORG}/${SVC_NAME}:latest"
fi
EOF
;;
develop)
cat <<EOF
if [ "\$CI_COMMIT_BRANCH" = "develop" ]; then
DESTINATIONS="\$DESTINATIONS --destination ${REGISTRY}/${ORG}/${SVC_NAME}:dev"
fi
EOF
;;
*)
cat <<EOF
if [ "\$CI_COMMIT_BRANCH" = "${b}" ]; then
DESTINATIONS="\$DESTINATIONS --destination ${REGISTRY}/${ORG}/${SVC_NAME}:${b}"
fi
EOF
;;
esac
done
# Version tag
cat <<EOF
if [ -n "\$CI_COMMIT_TAG" ]; then
DESTINATIONS="\$DESTINATIONS --destination ${REGISTRY}/${ORG}/${SVC_NAME}:\$CI_COMMIT_TAG"
fi
/kaniko/executor --context ${CONTEXT} --dockerfile ${DOCKERFILE}${KANIKO_EXTRA} \$DESTINATIONS
when:
- branch: ${BRANCH_YAML}
event: [push, manual, tag]
depends_on:
- ${DEPENDS_ON}
EOF
done
# ============================================================
# Output: Package linking step
# ============================================================
cat <<EOF
# ======================
# Link Packages to Repository
# ======================
link-packages:
image: alpine:3
environment:
GITEA_TOKEN:
from_secret: gitea_token
commands:
- apk add --no-cache curl
- echo "Waiting 10 seconds for packages to be indexed in registry..."
- sleep 10
- |
set -e
link_package() {
PKG="\$\$1"
echo "Linking \$\$PKG..."
for attempt in 1 2 3; do
STATUS=\$\$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \\
-H "Authorization: token \$\$GITEA_TOKEN" \\
"https://${REGISTRY}/api/v1/packages/${ORG}/container/\$\$PKG/-/link/${REPO}")
if [ "\$\$STATUS" = "201" ] || [ "\$\$STATUS" = "204" ]; then
echo " Linked \$\$PKG"
return 0
elif [ "\$\$STATUS" = "400" ]; then
echo " \$\$PKG already linked"
return 0
elif [ "\$\$STATUS" = "404" ] && [ \$\$attempt -lt 3 ]; then
echo " \$\$PKG not found yet, waiting 5s (attempt \$\$attempt/3)..."
sleep 5
else
echo " FAILED: \$\$PKG status \$\$STATUS"
cat /tmp/link-response.txt
return 1
fi
done
}
EOF
# List all services to link
for svc in "${SERVICES[@]}"; do
SVC_NAME="${svc%%:*}"
echo " link_package \"${SVC_NAME}\""
done
# Close the link step
cat <<EOF
when:
- branch: ${BRANCH_YAML}
event: [push, manual, tag]
depends_on:
EOF
for svc in "${SERVICES[@]}"; do
SVC_NAME="${svc%%:*}"
echo " - docker-build-${SVC_NAME}"
done
echo ""
# ============================================================
# Output: npm publish step (if requested)
# ============================================================
if [[ ${#NPM_PACKAGES[@]} -gt 0 && -n "$NPM_REGISTRY" ]]; then
cat <<EOF
# ======================
# Publish npm Packages
# ======================
publish-packages:
image: node:20-alpine
environment:
GITEA_TOKEN:
from_secret: gitea_token
commands:
- |
echo "//${NPM_REGISTRY#https://}:_authToken=\$\$GITEA_TOKEN" > .npmrc
EOF
# Detect scope from first package
FIRST_PKG="${NPM_PACKAGES[0]}"
PKG_NAME="${FIRST_PKG%%:*}"
SCOPE="${PKG_NAME%%/*}"
if [[ "$SCOPE" == @* ]]; then
echo " echo \"${SCOPE}:registry=${NPM_REGISTRY}\" >> .npmrc"
fi
for pkg in "${NPM_PACKAGES[@]}"; do
PKG_NAME="${pkg%%:*}"
PKG_PATH="${pkg#*:}"
cat <<EOF
- |
CURRENT=\$\$(node -p "require('./${PKG_PATH}/package.json').version")
PUBLISHED=\$\$(npm view ${PKG_NAME} version 2>/dev/null || echo "0.0.0")
if [ "\$\$CURRENT" = "\$\$PUBLISHED" ]; then
echo "${PKG_NAME}@\$\$CURRENT already published, skipping"
else
echo "Publishing ${PKG_NAME}@\$\$CURRENT (was \$\$PUBLISHED)"
npm publish -w ${PKG_NAME}
fi
EOF
done
cat <<EOF
when:
- branch: [main]
event: [push, manual, tag]
depends_on:
- ${DEPENDS_ON}
EOF
fi

View File

@@ -0,0 +1,67 @@
#!/usr/bin/env bash
#
# _lib.sh — Shared helpers for Cloudflare tool scripts
#
# Usage: source "$(dirname "$0")/_lib.sh"
#
# Provides:
# CF_API — Base API URL
# cf_auth — Authorization header value
# cf_load_instance <instance> — Load credentials for a specific or default instance
# cf_resolve_zone <name_or_id> — Resolves a zone name to its ID (passes IDs through)
# Base URL for all Cloudflare v4 API calls.
CF_API="https://api.cloudflare.com/client/v4"

# cf_auth — print the Authorization header value for the loaded API token.
cf_auth() {
  printf 'Bearer %s\n' "$CLOUDFLARE_API_TOKEN"
}
# cf_load_instance — load credentials for a Cloudflare instance.
# An empty instance name selects the default "cloudflare" credential set;
# otherwise the "cloudflare-<instance>" set is loaded via load_credentials
# (provided by the sourced credentials.sh).
cf_load_instance() {
  local inst="$1"
  local cred_name="cloudflare"
  if [[ -n "$inst" ]]; then
    cred_name="cloudflare-${inst}"
  fi
  load_credentials "$cred_name"
}
# Resolve a zone name (e.g. "mosaicstack.dev") to its zone ID.
# If the input is already a 32-char hex ID, passes it through.
# Prints the ID on stdout; on failure prints to stderr and returns 1.
cf_resolve_zone() {
local input="$1"
# If it looks like a zone ID (32 hex chars), pass through
if [[ "$input" =~ ^[0-9a-f]{32}$ ]]; then
echo "$input"
return 0
fi
# Resolve by name
# NOTE(review): the name is interpolated into the query string unencoded;
# safe for plain hostnames, which is all zone names are in practice.
local response
response=$(curl -s -w "\n%{http_code}" \
-H "Authorization: $(cf_auth)" \
-H "Content-Type: application/json" \
"${CF_API}/zones?name=${input}&status=active")
# curl -w appended the HTTP status as a trailing line; split it off.
local http_code
http_code=$(echo "$response" | tail -n1)
local body
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to resolve zone '$input' (HTTP $http_code)" >&2
return 1
fi
# Take the first matching zone's ID, if any match was returned.
local zone_id
zone_id=$(echo "$body" | jq -r '.result[0].id // empty')
if [[ -z "$zone_id" ]]; then
echo "Error: Zone '$input' not found" >&2
return 1
fi
echo "$zone_id"
}

View File

@@ -0,0 +1,86 @@
#!/usr/bin/env bash
#
# record-create.sh — Create a DNS record in a Cloudflare zone
#
# Usage: record-create.sh -z <zone> -t <type> -n <name> -c <content> [-a instance] [-l ttl] [-p] [-P priority]
#
# Options:
#   -z zone       Zone name or ID (required)
#   -t type       Record type: A, AAAA, CNAME, MX, TXT, etc. (required)
#   -n name       Record name, e.g. "app" or "app.example.com" (required)
#   -c content    Record value/content (required)
#   -a instance   Cloudflare instance name (default: uses credentials default)
#   -l ttl        TTL in seconds (default: 1 = auto)
#   -p            Enable Cloudflare proxy (orange cloud)
#   -P priority   MX/SRV priority (default: 10)
#   -h            Show this help
set -euo pipefail
# Shared credential loader plus the Cloudflare helper lib
# (CF_API, cf_auth, cf_load_instance, cf_resolve_zone).
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
source "$(dirname "$0")/_lib.sh"
ZONE=""
INSTANCE=""
TYPE=""
NAME=""
CONTENT=""
TTL=1
PROXIED=false
PRIORITY=""
while getopts "z:a:t:n:c:l:pP:h" opt; do
case $opt in
z) ZONE="$OPTARG" ;;
a) INSTANCE="$OPTARG" ;;
t) TYPE="$OPTARG" ;;
n) NAME="$OPTARG" ;;
c) CONTENT="$OPTARG" ;;
l) TTL="$OPTARG" ;;
p) PROXIED=true ;;
P) PRIORITY="$OPTARG" ;;
# -h prints this file's leading comment block as the help text.
h) head -18 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
*) echo "Usage: $0 -z <zone> -t <type> -n <name> -c <content> [-a instance] [-l ttl] [-p] [-P priority]" >&2; exit 1 ;;
esac
done
if [[ -z "$ZONE" || -z "$TYPE" || -z "$NAME" || -z "$CONTENT" ]]; then
echo "Error: -z, -t, -n, and -c are all required" >&2
exit 1
fi
cf_load_instance "$INSTANCE"
# Resolve the zone to an ID (names are looked up via the API).
ZONE_ID=$(cf_resolve_zone "$ZONE") || exit 1
# Build JSON payload with jq so all values are safely quoted/typed.
payload=$(jq -n \
--arg type "$TYPE" \
--arg name "$NAME" \
--arg content "$CONTENT" \
--argjson ttl "$TTL" \
--argjson proxied "$PROXIED" \
'{type: $type, name: $name, content: $content, ttl: $ttl, proxied: $proxied}')
# Add priority for MX/SRV records
if [[ -n "$PRIORITY" ]]; then
payload=$(echo "$payload" | jq --argjson priority "$PRIORITY" '. + {priority: $priority}')
fi
response=$(curl -s -w "\n%{http_code}" \
-X POST \
-H "Authorization: $(cf_auth)" \
-H "Content-Type: application/json" \
-d "$payload" \
"${CF_API}/zones/${ZONE_ID}/dns_records")
# Split curl's trailing status-code line from the JSON body.
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to create record (HTTP $http_code)" >&2
echo "$body" | jq -r '.errors[]?.message // empty' 2>/dev/null >&2
exit 1
fi
record_id=$(echo "$body" | jq -r '.result.id')
# Fix: the success message ran $NAME and $CONTENT together with no
# separator; separate them explicitly so the output is readable.
echo "Created $TYPE record: $NAME -> $CONTENT (ID: $record_id)"

View File

@@ -0,0 +1,55 @@
#!/usr/bin/env bash
#
# record-delete.sh — Delete a DNS record from a Cloudflare zone
#
# Usage: record-delete.sh -z <zone> -r <record-id> [-a instance]
#
# Options:
#   -z zone        Zone name or ID (required)
#   -r record-id   DNS record ID (required)
#   -a instance    Cloudflare instance name (default: uses credentials default)
#   -h             Show this help
set -euo pipefail
# Shared credential loader plus the Cloudflare helper lib.
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
source "$(dirname "$0")/_lib.sh"
ZONE=""
INSTANCE=""
RECORD_ID=""
while getopts "z:a:r:h" opt; do
case $opt in
z) ZONE="$OPTARG" ;;
a) INSTANCE="$OPTARG" ;;
r) RECORD_ID="$OPTARG" ;;
# -h prints this file's leading comment block as the help text.
h) head -11 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
*) echo "Usage: $0 -z <zone> -r <record-id> [-a instance]" >&2; exit 1 ;;
esac
done
if [[ -z "$ZONE" || -z "$RECORD_ID" ]]; then
echo "Error: -z and -r are both required" >&2
exit 1
fi
cf_load_instance "$INSTANCE"
ZONE_ID=$(cf_resolve_zone "$ZONE") || exit 1
response=$(curl -s -w "\n%{http_code}" \
-X DELETE \
-H "Authorization: $(cf_auth)" \
-H "Content-Type: application/json" \
"${CF_API}/zones/${ZONE_ID}/dns_records/${RECORD_ID}")
# Split curl's trailing status-code line from the JSON body.
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to delete record (HTTP $http_code)" >&2
echo "$body" | jq -r '.errors[]?.message // empty' 2>/dev/null >&2
exit 1
fi
echo "Deleted DNS record $RECORD_ID from zone $ZONE"

View File

@@ -0,0 +1,81 @@
#!/usr/bin/env bash
#
# record-list.sh — List DNS records for a Cloudflare zone
#
# Usage: record-list.sh -z <zone> [-a instance] [-t type] [-n name] [-f format]
#
# Options:
#   -z zone       Zone name or ID (required)
#   -a instance   Cloudflare instance name (default: uses credentials default)
#   -t type       Filter by record type (A, AAAA, CNAME, MX, TXT, etc.)
#   -n name       Filter by record name
#   -f format     Output format: table (default), json
#   -h            Show this help
set -euo pipefail
# Shared credential loader plus the Cloudflare helper lib.
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
source "$(dirname "$0")/_lib.sh"
ZONE=""
INSTANCE=""
TYPE=""
NAME=""
FORMAT="table"
while getopts "z:a:t:n:f:h" opt; do
case $opt in
z) ZONE="$OPTARG" ;;
a) INSTANCE="$OPTARG" ;;
t) TYPE="$OPTARG" ;;
n) NAME="$OPTARG" ;;
f) FORMAT="$OPTARG" ;;
# -h prints this file's leading comment block as the help text.
h) head -14 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
*) echo "Usage: $0 -z <zone> [-a instance] [-t type] [-n name] [-f format]" >&2; exit 1 ;;
esac
done
if [[ -z "$ZONE" ]]; then
echo "Error: -z zone is required" >&2
exit 1
fi
cf_load_instance "$INSTANCE"
ZONE_ID=$(cf_resolve_zone "$ZONE") || exit 1
# Build query params
# NOTE(review): only the first 100 records are fetched (no pagination), and
# filter values go into the URL unencoded — fine for hostnames/record types.
params="per_page=100"
[[ -n "$TYPE" ]] && params="${params}&type=${TYPE}"
[[ -n "$NAME" ]] && params="${params}&name=${NAME}"
response=$(curl -s -w "\n%{http_code}" \
-H "Authorization: $(cf_auth)" \
-H "Content-Type: application/json" \
"${CF_API}/zones/${ZONE_ID}/dns_records?${params}")
# Split curl's trailing status-code line from the JSON body.
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to list records (HTTP $http_code)" >&2
echo "$body" | jq -r '.errors[]?.message // empty' 2>/dev/null >&2
exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
echo "$body" | jq '.result'
exit 0
fi
# Fixed-width table output; long names/contents are truncated to fit.
echo "RECORD ID                        TYPE  NAME                                   CONTENT                         PROXIED TTL"
echo "-------------------------------- ----- -------------------------------------- ------------------------------- ------- -----"
echo "$body" | jq -r '.result[] | [
.id,
.type,
.name,
.content,
(if .proxied then "yes" else "no" end),
(if .ttl == 1 then "auto" else (.ttl | tostring) end)
] | @tsv' | while IFS=$'\t' read -r id type name content proxied ttl; do
printf "%-32s %-5s %-38s %-31s %-7s %s\n" "$id" "$type" "${name:0:38}" "${content:0:31}" "$proxied" "$ttl"
done

View File

@@ -0,0 +1,86 @@
#!/usr/bin/env bash
#
# record-update.sh — Update a DNS record in a Cloudflare zone
#
# Usage: record-update.sh -z <zone> -r <record-id> -t <type> -n <name> -c <content> [-a instance] [-l ttl] [-p] [-P priority]
#
# Options:
#   -z zone        Zone name or ID (required)
#   -r record-id   DNS record ID (required)
#   -t type        Record type: A, AAAA, CNAME, MX, TXT, etc. (required)
#   -n name        Record name (required)
#   -c content     Record value/content (required)
#   -a instance    Cloudflare instance name (default: uses credentials default)
#   -l ttl         TTL in seconds (default: 1 = auto)
#   -p             Enable Cloudflare proxy (orange cloud)
#   -P priority    MX/SRV priority
#   -h             Show this help
set -euo pipefail
# Shared credential loader plus the Cloudflare helper lib.
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
source "$(dirname "$0")/_lib.sh"
ZONE=""
INSTANCE=""
RECORD_ID=""
TYPE=""
NAME=""
CONTENT=""
TTL=1
PROXIED=false
PRIORITY=""
while getopts "z:a:r:t:n:c:l:pP:h" opt; do
case $opt in
z) ZONE="$OPTARG" ;;
a) INSTANCE="$OPTARG" ;;
r) RECORD_ID="$OPTARG" ;;
t) TYPE="$OPTARG" ;;
n) NAME="$OPTARG" ;;
c) CONTENT="$OPTARG" ;;
l) TTL="$OPTARG" ;;
p) PROXIED=true ;;
P) PRIORITY="$OPTARG" ;;
# -h prints this file's leading comment block as the help text.
h) head -18 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
*) echo "Usage: $0 -z <zone> -r <record-id> -t <type> -n <name> -c <content> [-a instance]" >&2; exit 1 ;;
esac
done
if [[ -z "$ZONE" || -z "$RECORD_ID" || -z "$TYPE" || -z "$NAME" || -z "$CONTENT" ]]; then
echo "Error: -z, -r, -t, -n, and -c are all required" >&2
exit 1
fi
cf_load_instance "$INSTANCE"
ZONE_ID=$(cf_resolve_zone "$ZONE") || exit 1
# Build the full replacement record as JSON (PUT replaces the record).
payload=$(jq -n \
--arg type "$TYPE" \
--arg name "$NAME" \
--arg content "$CONTENT" \
--argjson ttl "$TTL" \
--argjson proxied "$PROXIED" \
'{type: $type, name: $name, content: $content, ttl: $ttl, proxied: $proxied}')
# MX/SRV records additionally carry a priority field.
if [[ -n "$PRIORITY" ]]; then
payload=$(echo "$payload" | jq --argjson priority "$PRIORITY" '. + {priority: $priority}')
fi
response=$(curl -s -w "\n%{http_code}" \
-X PUT \
-H "Authorization: $(cf_auth)" \
-H "Content-Type: application/json" \
-d "$payload" \
"${CF_API}/zones/${ZONE_ID}/dns_records/${RECORD_ID}")
# Split curl's trailing status-code line from the JSON body.
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to update record (HTTP $http_code)" >&2
echo "$body" | jq -r '.errors[]?.message // empty' 2>/dev/null >&2
exit 1
fi
# Fix: the success message ran $NAME and $CONTENT together with no
# separator; separate them explicitly so the output is readable.
echo "Updated $TYPE record: $NAME -> $CONTENT (ID: $RECORD_ID)"

View File

@@ -0,0 +1,59 @@
#!/usr/bin/env bash
#
# zone-list.sh — List Cloudflare zones (domains)
#
# Usage: zone-list.sh [-a instance] [-f format]
#
# Options:
#   -a instance   Cloudflare instance name (default: uses credentials default)
#   -f format     Output format: table (default), json
#   -h            Show this help
set -euo pipefail
# Shared credential loader plus the Cloudflare helper lib.
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
source "$(dirname "$0")/_lib.sh"
INSTANCE=""
FORMAT="table"
while getopts "a:f:h" opt; do
case $opt in
a) INSTANCE="$OPTARG" ;;
f) FORMAT="$OPTARG" ;;
# -h prints this file's leading comment block as the help text.
h) head -10 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
*) echo "Usage: $0 [-a instance] [-f format]" >&2; exit 1 ;;
esac
done
cf_load_instance "$INSTANCE"
# NOTE(review): only the first 50 zones are fetched (no pagination).
response=$(curl -s -w "\n%{http_code}" \
-H "Authorization: $(cf_auth)" \
-H "Content-Type: application/json" \
"${CF_API}/zones?per_page=50")
# Split curl's trailing status-code line from the JSON body.
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to list zones (HTTP $http_code)" >&2
echo "$body" | jq -r '.errors[]?.message // empty' 2>/dev/null >&2
exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
echo "$body" | jq '.result'
exit 0
fi
# Fixed-width table output matching the printf column widths below.
echo "ZONE ID                          NAME                         STATUS   PLAN"
echo "-------------------------------- ---------------------------- -------- ----------"
echo "$body" | jq -r '.result[] | [
.id,
.name,
.status,
.plan.name
] | @tsv' | while IFS=$'\t' read -r id name status plan; do
printf "%-32s %-28s %-8s %s\n" "$id" "$name" "$status" "$plan"
done

View File

@@ -0,0 +1,279 @@
# Codex CLI Review Scripts
AI-powered code review and security review scripts using OpenAI's Codex CLI.
These scripts provide **independent** code analysis separate from Claude sessions, giving you a second AI perspective on code changes to catch issues that might be missed.
## Prerequisites
```bash
# Install Codex CLI
npm i -g @openai/codex
# Verify installation
codex --version
# Authenticate (first run)
codex # Will prompt for ChatGPT account or API key
# Verify jq is installed (for JSON processing)
jq --version
```
## Scripts
### `codex-code-review.sh`
General code quality review focusing on:
- **Correctness** — logic errors, edge cases, error handling
- **Code Quality** — complexity, duplication, naming, dead code
- **Testing** — coverage, test quality
- **Performance** — N+1 queries, blocking operations, resource cleanup
- **Dependencies** — deprecated packages
- **Documentation** — comments, public API docs
**Output:** Structured JSON with findings categorized as `blocker`, `should-fix`, or `suggestion`.
### `codex-security-review.sh`
Security vulnerability review focusing on:
- **OWASP Top 10** — injection, broken auth, XSS, CSRF, SSRF, etc.
- **Secrets Detection** — hardcoded credentials, API keys, tokens
- **Injection Flaws** — SQL, NoSQL, OS command, LDAP
- **Auth/Authz Gaps** — missing checks, privilege escalation, IDOR
- **Data Exposure** — logging sensitive data, information disclosure
- **Supply Chain** — vulnerable dependencies, typosquatting
**Output:** Structured JSON with findings categorized as `critical`, `high`, `medium`, or `low` with CWE IDs and OWASP categories.
## Usage
### Review Uncommitted Changes
```bash
# Code review
~/.config/mosaic/tools/codex/codex-code-review.sh --uncommitted
# Security review
~/.config/mosaic/tools/codex/codex-security-review.sh --uncommitted
```
### Review a Pull Request
```bash
# Review and post findings as a PR comment
~/.config/mosaic/tools/codex/codex-code-review.sh -n 42
# Security review and post to PR
~/.config/mosaic/tools/codex/codex-security-review.sh -n 42
```
### Review Against Base Branch
```bash
# Code review changes vs main
~/.config/mosaic/tools/codex/codex-code-review.sh -b main
# Security review changes vs develop
~/.config/mosaic/tools/codex/codex-security-review.sh -b develop
```
### Review a Specific Commit
```bash
~/.config/mosaic/tools/codex/codex-code-review.sh -c abc123f
~/.config/mosaic/tools/codex/codex-security-review.sh -c abc123f
```
### Save Results to File
```bash
# Save JSON output
~/.config/mosaic/tools/codex/codex-code-review.sh --uncommitted -o review-results.json
~/.config/mosaic/tools/codex/codex-security-review.sh --uncommitted -o security-results.json
```
## Options
Both scripts support the same options:
| Option | Description |
| --------------------- | ---------------------------------------------------------- |
| `-n, --pr <number>` | PR number (auto-enables posting to PR) |
| `-b, --base <branch>` | Base branch to diff against (default: main) |
| `-c, --commit <sha>` | Review a specific commit |
| `-o, --output <path>` | Write JSON results to file |
| `--post-to-pr` | Post findings as PR comment (requires -n) |
| `--uncommitted` | Review uncommitted changes (staged + unstaged + untracked) |
| `-h, --help` | Show help |
## Woodpecker CI Integration
Automated PR reviews in CI pipelines.
### Setup
1. **Copy the pipeline template to your repo:**
```bash
cp ~/.config/mosaic/tools/codex/woodpecker/codex-review.yml your-repo/.woodpecker/
```
2. **Copy the schemas directory:**
```bash
cp -r ~/.config/mosaic/tools/codex/schemas your-repo/.woodpecker/
```
3. **Add Codex API key to Woodpecker:**
- Go to your repo in Woodpecker CI
- Settings → Secrets
- Add secret: `codex_api_key` with your OpenAI API key
4. **Commit and push:**
```bash
cd your-repo
git add .woodpecker/
git commit -m "feat: Add Codex AI review pipeline"
git push
```
### Pipeline Behavior
- **Triggers on:** Pull requests
- **Runs:** Code review + Security review in parallel
- **Fails if:**
- Code review finds blockers
- Security review finds critical or high severity issues
- **Outputs:** Structured JSON results in CI logs
## Output Format
### Code Review JSON
```json
{
"summary": "Overall assessment...",
"verdict": "approve|request-changes|comment",
"confidence": 0.85,
"findings": [
{
"severity": "blocker",
"title": "SQL injection vulnerability",
"file": "src/api/users.ts",
"line_start": 42,
"line_end": 45,
"description": "User input directly interpolated into SQL query",
"suggestion": "Use parameterized queries"
}
],
"stats": {
"files_reviewed": 5,
"blockers": 1,
"should_fix": 3,
"suggestions": 8
}
}
```
### Security Review JSON
```json
{
"summary": "Security assessment...",
"risk_level": "high",
"confidence": 0.9,
"findings": [
{
"severity": "high",
"title": "Hardcoded API key",
"file": "src/config.ts",
"line_start": 10,
"description": "API key hardcoded in source",
"cwe_id": "CWE-798",
"owasp_category": "A02:2021-Cryptographic Failures",
"remediation": "Move to environment variables or secrets manager"
}
],
"stats": {
"files_reviewed": 5,
"critical": 0,
"high": 1,
"medium": 2,
"low": 3
}
}
```
## Platform Support
Works with both **GitHub** and **Gitea** via the shared `~/.config/mosaic/tools/git/` infrastructure:
- Auto-detects platform from git remote
- Posts PR comments using `gh` (GitHub) or `tea` (Gitea)
- Unified interface across both platforms
## Architecture
```
codex-code-review.sh
codex-security-review.sh
common.sh
↓ sources
../git/detect-platform.sh (platform detection)
../git/pr-review.sh (post PR comments)
↓ uses
gh (GitHub) or tea (Gitea)
```
## Troubleshooting
### "codex: command not found"
```bash
npm i -g @openai/codex
```
### "jq: command not found"
```bash
# Arch Linux
sudo pacman -S jq
# Debian/Ubuntu
sudo apt install jq
```
### "Error: Not inside a git repository"
Run the script from inside a git repository.
### "No changes found to review"
The specified mode (--uncommitted, --base, etc.) found no changes to review.
### "Codex produced no output"
Check your Codex API key and authentication:
```bash
codex # Re-authenticate if needed
```
## Model Configuration
By default, scripts use the model configured in `~/.codex/config.toml`:
- **Model:** `gpt-5.3-codex` (recommended for code review)
- **Reasoning effort:** `high`
For best results, use `gpt-5.3-codex` (the configured default above) or newer for the strongest review accuracy.
## See Also
- `~/.config/mosaic/guides/CODE-REVIEW.md` — Manual code review checklist
- `~/.config/mosaic/tools/git/` — Git helper scripts (issue/PR management)
- OpenAI Codex CLI docs: https://developers.openai.com/codex/cli/

View File

@@ -0,0 +1,238 @@
#!/bin/bash
# codex-code-review.sh - Run an AI-powered code quality review using Codex CLI
# Usage: codex-code-review.sh [OPTIONS]
#
# Runs codex exec in read-only sandbox mode with a structured code review prompt.
# Outputs findings as JSON and optionally posts them to a PR.

# Fail fast, and propagate failures from any pipeline stage: without pipefail a
# `codex exec` failure would be masked by the `| while` logging loop downstream.
set -eo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/common.sh"
# Defaults for CLI options (filled in by the argument parser below).
PR_NUMBER=""        # PR number to review / post to
BASE_BRANCH="main"  # base branch for --base diffs
COMMIT_SHA=""       # commit for --commit reviews
OUTPUT_FILE=""      # optional path for the JSON results
POST_TO_PR=false    # whether to post findings as a PR comment
UNCOMMITTED=false   # review working-tree changes instead of a ref
REVIEW_MODE=""      # one of: uncommitted | base | commit | pr
# Print the CLI usage text on stdout, then terminate the script successfully.
show_help() {
  cat <<'USAGE'
Usage: codex-code-review.sh [OPTIONS]
Run an AI-powered code quality review using OpenAI Codex CLI.
Options:
-n, --pr <number> PR number (auto-enables posting findings to PR)
-b, --base <branch> Base branch to diff against (default: main)
-c, --commit <sha> Review a specific commit
-o, --output <path> Write JSON results to file
--post-to-pr Post findings as PR comment (requires -n)
--uncommitted Review uncommitted changes (staged + unstaged + untracked)
-h, --help Show this help
Examples:
# Review uncommitted changes
codex-code-review.sh --uncommitted
# Review a PR and post findings as a comment
codex-code-review.sh -n 42
# Review changes against main, save JSON
codex-code-review.sh -b main -o review.json
# Review a specific commit
codex-code-review.sh -c abc123f
USAGE
  exit 0
}
# Parse arguments.
# NOTE(review): mode flags are not mutually exclusive — if several are given,
# the last one parsed wins REVIEW_MODE.
while [[ $# -gt 0 ]]; do
  case $1 in
    -n|--pr)
      # Reviewing a PR implies posting the findings back to it.
      PR_NUMBER="$2"
      POST_TO_PR=true
      REVIEW_MODE="pr"
      shift 2
      ;;
    -b|--base)
      BASE_BRANCH="$2"
      REVIEW_MODE="base"
      shift 2
      ;;
    -c|--commit)
      COMMIT_SHA="$2"
      REVIEW_MODE="commit"
      shift 2
      ;;
    -o|--output)
      OUTPUT_FILE="$2"
      shift 2
      ;;
    --post-to-pr)
      POST_TO_PR=true
      shift
      ;;
    --uncommitted)
      UNCOMMITTED=true
      REVIEW_MODE="uncommitted"
      shift
      ;;
    -h|--help)
      show_help
      ;;
    *)
      echo "Unknown option: $1" >&2
      echo "Run with --help for usage" >&2
      exit 1
      ;;
  esac
done
# Validate: a review mode is mandatory; posting requires a PR number.
if [[ -z "$REVIEW_MODE" ]]; then
  echo "Error: Specify a review mode: --uncommitted, --base <branch>, --commit <sha>, or --pr <number>" >&2
  exit 1
fi
if [[ "$POST_TO_PR" == true && -z "$PR_NUMBER" ]]; then
  echo "Error: --post-to-pr requires -n <pr_number>" >&2
  exit 1
fi
# Required external tools (helpers from common.sh; they exit when missing).
check_codex
check_jq
# Verify we're in a git repo
if ! git rev-parse --is-inside-work-tree &>/dev/null; then
  echo "Error: Not inside a git repository" >&2
  exit 1
fi
# Get the diff context for the selected mode (empty => nothing to review).
echo "Gathering diff context..." >&2
case "$REVIEW_MODE" in
  uncommitted) DIFF_CONTEXT=$(build_diff_context "uncommitted" "") ;;
  base) DIFF_CONTEXT=$(build_diff_context "base" "$BASE_BRANCH") ;;
  commit) DIFF_CONTEXT=$(build_diff_context "commit" "$COMMIT_SHA") ;;
  pr) DIFF_CONTEXT=$(build_diff_context "pr" "$PR_NUMBER") ;;
esac
# An empty diff is a clean no-op, not an error.
if [[ -z "$DIFF_CONTEXT" ]]; then
  echo "No changes found to review." >&2
  exit 0
fi
# Build the review prompt (single-quoted heredoc: contents are literal).
REVIEW_PROMPT=$(cat <<'PROMPT'
You are an expert code reviewer. Review the following code changes thoroughly.
Focus on issues that are ACTIONABLE and IMPORTANT. Do not flag trivial style issues.
## Review Checklist
### Correctness
- Code does what it claims to do
- Edge cases are handled
- Error conditions are managed properly
- No obvious bugs or logic errors
### Code Quality
- Functions are focused and reasonably sized
- No unnecessary complexity
- DRY - no significant duplication
- Clear naming for variables and functions
- No dead code or commented-out code
### Testing
- Tests exist for new functionality
- Tests cover happy path AND error cases
- No flaky tests introduced
### Performance
- No obvious N+1 queries
- No blocking operations in hot paths
- Resource cleanup (connections, file handles)
### Dependencies
- No deprecated packages
- No unnecessary new dependencies
### Documentation
- Complex logic has explanatory comments
- Public APIs are documented
## Severity Guide
- **blocker**: Must fix before merge (bugs, correctness issues, missing error handling)
- **should-fix**: Important but not blocking (code quality, minor issues)
- **suggestion**: Optional improvements (nice-to-haves)
Only report findings you are confident about (confidence > 0.7).
If the code looks good, say so — don't manufacture issues.
PROMPT
)
# Set up temp files for output and diff; removed on any exit path via trap.
TEMP_OUTPUT=$(mktemp /tmp/codex-review-XXXXXX.json)
TEMP_DIFF=$(mktemp /tmp/codex-diff-XXXXXX.txt)
trap 'rm -f "$TEMP_OUTPUT" "$TEMP_DIFF"' EXIT
SCHEMA_FILE="$SCRIPT_DIR/schemas/code-review-schema.json"
# Write diff to temp file
echo "$DIFF_CONTEXT" > "$TEMP_DIFF"
echo "Running Codex code review..." >&2
echo " Diff size: $(wc -l < "$TEMP_DIFF") lines" >&2
# Build full prompt with diff reference
FULL_PROMPT="${REVIEW_PROMPT}
Here are the code changes to review:
\`\`\`diff
$(cat "$TEMP_DIFF")
\`\`\`"
# Run codex exec with prompt from stdin to avoid arg length limits.
# NOTE(review): unless pipefail is enabled, the `| while` logging loop masks
# codex's own exit status; the empty-output check below is the backstop.
echo "$FULL_PROMPT" | codex exec \
  --sandbox read-only \
  --output-schema "$SCHEMA_FILE" \
  -o "$TEMP_OUTPUT" \
  - 2>&1 | while IFS= read -r line; do
    echo " [codex] $line" >&2
  done
# Check output was produced
if [[ ! -s "$TEMP_OUTPUT" ]]; then
  echo "Error: Codex produced no output" >&2
  exit 1
fi
# Validate JSON before handing it to jq consumers downstream.
if ! jq empty "$TEMP_OUTPUT" 2>/dev/null; then
  echo "Error: Codex output is not valid JSON" >&2
  cat "$TEMP_OUTPUT" >&2
  exit 1
fi
# Save output if requested
if [[ -n "$OUTPUT_FILE" ]]; then
  cp "$TEMP_OUTPUT" "$OUTPUT_FILE"
  echo "Results saved to: $OUTPUT_FILE" >&2
fi
# Post to PR if requested
if [[ "$POST_TO_PR" == true && -n "$PR_NUMBER" ]]; then
  echo "Posting findings to PR #$PR_NUMBER..." >&2
  post_to_pr "$PR_NUMBER" "$TEMP_OUTPUT" "code"
  echo "Posted review to PR #$PR_NUMBER" >&2
fi
# Always print results to stdout
print_results "$TEMP_OUTPUT" "code"

View File

@@ -0,0 +1,235 @@
#!/bin/bash
# codex-security-review.sh - Run an AI-powered security vulnerability review using Codex CLI
# Usage: codex-security-review.sh [OPTIONS]
#
# Runs codex exec in read-only sandbox mode with a security-focused review prompt.
# Outputs findings as JSON and optionally posts them to a PR.

# Fail fast, and propagate failures from any pipeline stage: without pipefail a
# `codex exec` failure would be masked by the `| while` logging loop downstream.
set -eo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/common.sh"
# Defaults for CLI options (filled in by the argument parser below).
PR_NUMBER=""        # PR number to review / post to
BASE_BRANCH="main"  # base branch for --base diffs
COMMIT_SHA=""       # commit for --commit reviews
OUTPUT_FILE=""      # optional path for the JSON results
POST_TO_PR=false    # whether to post findings as a PR comment
UNCOMMITTED=false   # review working-tree changes instead of a ref
REVIEW_MODE=""      # one of: uncommitted | base | commit | pr
# Print the CLI usage text on stdout, then terminate the script successfully.
show_help() {
  cat <<'USAGE'
Usage: codex-security-review.sh [OPTIONS]
Run an AI-powered security vulnerability review using OpenAI Codex CLI.
Options:
-n, --pr <number> PR number (auto-enables posting findings to PR)
-b, --base <branch> Base branch to diff against (default: main)
-c, --commit <sha> Review a specific commit
-o, --output <path> Write JSON results to file
--post-to-pr Post findings as PR comment (requires -n)
--uncommitted Review uncommitted changes (staged + unstaged + untracked)
-h, --help Show this help
Examples:
# Security review uncommitted changes
codex-security-review.sh --uncommitted
# Security review a PR and post findings
codex-security-review.sh -n 42
# Security review against main, save JSON
codex-security-review.sh -b main -o security.json
# Security review a specific commit
codex-security-review.sh -c abc123f
USAGE
  exit 0
}
# Parse arguments.
# NOTE(review): mode flags are not mutually exclusive — if several are given,
# the last one parsed wins REVIEW_MODE.
while [[ $# -gt 0 ]]; do
  case $1 in
    -n|--pr)
      # Reviewing a PR implies posting the findings back to it.
      PR_NUMBER="$2"
      POST_TO_PR=true
      REVIEW_MODE="pr"
      shift 2
      ;;
    -b|--base)
      BASE_BRANCH="$2"
      REVIEW_MODE="base"
      shift 2
      ;;
    -c|--commit)
      COMMIT_SHA="$2"
      REVIEW_MODE="commit"
      shift 2
      ;;
    -o|--output)
      OUTPUT_FILE="$2"
      shift 2
      ;;
    --post-to-pr)
      POST_TO_PR=true
      shift
      ;;
    --uncommitted)
      UNCOMMITTED=true
      REVIEW_MODE="uncommitted"
      shift
      ;;
    -h|--help)
      show_help
      ;;
    *)
      echo "Unknown option: $1" >&2
      echo "Run with --help for usage" >&2
      exit 1
      ;;
  esac
done
# Validate: a review mode is mandatory; posting requires a PR number.
if [[ -z "$REVIEW_MODE" ]]; then
  echo "Error: Specify a review mode: --uncommitted, --base <branch>, --commit <sha>, or --pr <number>" >&2
  exit 1
fi
if [[ "$POST_TO_PR" == true && -z "$PR_NUMBER" ]]; then
  echo "Error: --post-to-pr requires -n <pr_number>" >&2
  exit 1
fi
# Required external tools (helpers from common.sh; they exit when missing).
check_codex
check_jq
# Verify we're in a git repo
if ! git rev-parse --is-inside-work-tree &>/dev/null; then
  echo "Error: Not inside a git repository" >&2
  exit 1
fi
# Get the diff context for the selected mode (empty => nothing to review).
echo "Gathering diff context..." >&2
case "$REVIEW_MODE" in
  uncommitted) DIFF_CONTEXT=$(build_diff_context "uncommitted" "") ;;
  base) DIFF_CONTEXT=$(build_diff_context "base" "$BASE_BRANCH") ;;
  commit) DIFF_CONTEXT=$(build_diff_context "commit" "$COMMIT_SHA") ;;
  pr) DIFF_CONTEXT=$(build_diff_context "pr" "$PR_NUMBER") ;;
esac
# An empty diff is a clean no-op, not an error.
if [[ -z "$DIFF_CONTEXT" ]]; then
  echo "No changes found to review." >&2
  exit 0
fi
# Build the security review prompt (single-quoted heredoc: contents literal).
REVIEW_PROMPT=$(cat <<'PROMPT'
You are an expert application security engineer performing a security-focused code review.
Your goal is to identify vulnerabilities, security anti-patterns, and data exposure risks.
## Security Review Scope
### OWASP Top 10 (2021)
- A01: Broken Access Control — missing authorization checks, IDOR, privilege escalation
- A02: Cryptographic Failures — weak algorithms, plaintext secrets, missing encryption
- A03: Injection — SQL, NoSQL, OS command, LDAP, XPath injection
- A04: Insecure Design — missing threat modeling, unsafe business logic
- A05: Security Misconfiguration — debug mode, default credentials, unnecessary features
- A06: Vulnerable Components — known CVEs in dependencies
- A07: Authentication Failures — weak auth, missing MFA, session issues
- A08: Data Integrity Failures — deserialization, unsigned updates
- A09: Logging Failures — sensitive data in logs, missing audit trails
- A10: SSRF — unvalidated URLs, internal service access
### Additional Checks
- Hardcoded secrets, API keys, tokens, passwords
- Insecure direct object references
- Missing input validation at trust boundaries
- Cross-Site Scripting (XSS) — reflected, stored, DOM-based
- Cross-Site Request Forgery (CSRF) protection
- Insecure file handling (path traversal, unrestricted upload)
- Race conditions and TOCTOU vulnerabilities
- Information disclosure (stack traces, verbose errors)
- Supply chain risks (typosquatting, dependency confusion)
## Severity Guide
- **critical**: Exploitable vulnerability with immediate impact (RCE, auth bypass, data breach)
- **high**: Significant vulnerability requiring prompt fix (injection, XSS, secrets exposure)
- **medium**: Vulnerability with limited exploitability or impact (missing headers, weak config)
- **low**: Minor security concern or hardening opportunity (informational, defense-in-depth)
## Rules
- Include CWE IDs when applicable
- Include OWASP category when applicable
- Provide specific remediation steps for every finding
- Only report findings you are confident about
- Do NOT flag non-security code quality issues
- If no security issues found, say so clearly
PROMPT
)
# Set up temp files for output and diff; removed on any exit path via trap.
TEMP_OUTPUT=$(mktemp /tmp/codex-security-XXXXXX.json)
TEMP_DIFF=$(mktemp /tmp/codex-diff-XXXXXX.txt)
trap 'rm -f "$TEMP_OUTPUT" "$TEMP_DIFF"' EXIT
SCHEMA_FILE="$SCRIPT_DIR/schemas/security-review-schema.json"
# Write diff to temp file
echo "$DIFF_CONTEXT" > "$TEMP_DIFF"
echo "Running Codex security review..." >&2
echo " Diff size: $(wc -l < "$TEMP_DIFF") lines" >&2
# Build full prompt with diff reference
FULL_PROMPT="${REVIEW_PROMPT}
Here are the code changes to security review:
\`\`\`diff
$(cat "$TEMP_DIFF")
\`\`\`"
# Run codex exec with prompt from stdin to avoid arg length limits.
# NOTE(review): unless pipefail is enabled, the `| while` logging loop masks
# codex's own exit status; the empty-output check below is the backstop.
echo "$FULL_PROMPT" | codex exec \
  --sandbox read-only \
  --output-schema "$SCHEMA_FILE" \
  -o "$TEMP_OUTPUT" \
  - 2>&1 | while IFS= read -r line; do
    echo " [codex] $line" >&2
  done
# Check output was produced
if [[ ! -s "$TEMP_OUTPUT" ]]; then
  echo "Error: Codex produced no output" >&2
  exit 1
fi
# Validate JSON before handing it to jq consumers downstream.
if ! jq empty "$TEMP_OUTPUT" 2>/dev/null; then
  echo "Error: Codex output is not valid JSON" >&2
  cat "$TEMP_OUTPUT" >&2
  exit 1
fi
# Save output if requested
if [[ -n "$OUTPUT_FILE" ]]; then
  cp "$TEMP_OUTPUT" "$OUTPUT_FILE"
  echo "Results saved to: $OUTPUT_FILE" >&2
fi
# Post to PR if requested
if [[ "$POST_TO_PR" == true && -n "$PR_NUMBER" ]]; then
  echo "Posting findings to PR #$PR_NUMBER..." >&2
  post_to_pr "$PR_NUMBER" "$TEMP_OUTPUT" "security"
  echo "Posted security review to PR #$PR_NUMBER" >&2
fi
# Always print results to stdout
print_results "$TEMP_OUTPUT" "security"

View File

@@ -0,0 +1,191 @@
#!/bin/bash
# common.sh - Shared utilities for Codex review scripts
# Source this file from review scripts: source "$(dirname "${BASH_SOURCE[0]}")/common.sh"

# NOTE(review): `set -e` in a sourced library also changes the *caller's*
# shell options. Both review scripts already run with `set -e`, so this is a
# no-op for them, but beware when sourcing from other contexts.
set -e
# Directory of this library; the git helper suite lives in the sibling ../git.
CODEX_SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
GIT_SCRIPT_DIR="$CODEX_SCRIPT_DIR/../git"
# Source platform detection (expected to provide detect_platform, PLATFORM
# and get_repo_info used below — confirm against detect-platform.sh).
source "$GIT_SCRIPT_DIR/detect-platform.sh"
# Ensure the Codex CLI is on PATH; abort with install instructions otherwise.
check_codex() {
  command -v codex &>/dev/null && return 0
  echo "Error: codex CLI not found. Install with: npm i -g @openai/codex" >&2
  exit 1
}
# Ensure jq (needed for all JSON processing) is on PATH; abort otherwise.
check_jq() {
  command -v jq &>/dev/null && return 0
  echo "Error: jq not found. Install with your package manager." >&2
  exit 1
}
# Build the diff text for the requested review mode.
# Arguments: $1=mode (uncommitted|base|commit|pr), $2=value (branch/sha/PR number)
# Outputs:   diff text on stdout; empty when there is nothing to review or the
#            underlying git/tea/curl call fails. Callers run under `set -e` and
#            explicitly treat an empty result as "nothing to review", so every
#            command substitution below is guarded with `|| true` instead of
#            letting a failure kill the calling script.
build_diff_context() {
  local mode="$1"
  local value="$2"
  local diff_text=""
  case "$mode" in
    uncommitted)
      # Unstaged + staged changes, plus full contents of untracked files.
      diff_text=$({ git diff HEAD 2>/dev/null; git diff --cached 2>/dev/null; git ls-files --others --exclude-standard 2>/dev/null | while read -r f; do echo "=== NEW FILE: $f ==="; cat "$f" 2>/dev/null; done; } || true)
      ;;
    base)
      diff_text=$(git diff "${value}...HEAD" 2>/dev/null || true)
      ;;
    commit)
      diff_text=$(git show "$value" 2>/dev/null || true)
      ;;
    pr)
      # For PRs we need to fetch the PR diff from the hosting platform.
      detect_platform
      if [[ "$PLATFORM" == "github" ]]; then
        diff_text=$(gh pr diff "$value" 2>/dev/null || true)
      elif [[ "$PLATFORM" == "gitea" ]]; then
        # tea doesn't have a direct pr diff command, use git.
        # Match the PR index as a whole field — the previous `grep "^$value"`
        # prefix match would also hit PR 42 when looking for PR 4.
        local pr_base
        pr_base=$(tea pr list --fields index,base --output simple 2>/dev/null | awk -v pr="$value" '$1 == pr { print $2; exit }' || true)
        if [[ -n "$pr_base" ]]; then
          diff_text=$(git diff "${pr_base}...HEAD" 2>/dev/null || true)
        else
          # Fallback: resolve the PR's diff URL via the Gitea REST API.
          local repo_info
          repo_info=$(get_repo_info 2>/dev/null || true)
          local remote_url
          remote_url=$(git remote get-url origin 2>/dev/null || true)
          # Extract the host from either https://host/... or git@host:... URLs.
          local host
          host=$(echo "$remote_url" | sed -E 's|.*://([^/]+).*|\1|; s|.*@([^:]+).*|\1|')
          diff_text=$(curl -s "https://${host}/api/v1/repos/${repo_info}/pulls/${value}" \
            -H "Authorization: token $(tea login list --output simple 2>/dev/null | head -1 | awk '{print $2}')" \
            2>/dev/null | jq -r '.diff_url // empty' || true)
          if [[ -n "$diff_text" && "$diff_text" != "null" ]]; then
            diff_text=$(curl -s "$diff_text" 2>/dev/null || true)
          else
            # Last resort: assume the PR targets main.
            diff_text=$(git diff "main...HEAD" 2>/dev/null || true)
          fi
        fi
      fi
      ;;
  esac
  echo "$diff_text"
}
# Format JSON review results as a markdown document for PR comments.
# Arguments: $1=json_file, $2=review_type (code|security)
# Outputs:   complete markdown body on stdout
# Returns:   1 if the JSON file does not exist
format_findings_as_markdown() {
  local json_file="$1"
  local review_type="$2"
  if [[ ! -f "$json_file" ]]; then
    echo "Error: JSON file not found: $json_file" >&2
    return 1
  fi
  # Header fields shared by both review types.
  local summary verdict confidence
  summary=$(jq -r '.summary' "$json_file")
  confidence=$(jq -r '.confidence' "$json_file")
  if [[ "$review_type" == "code" ]]; then
    # Code reviews carry a verdict plus blocker/should-fix/suggestion counts.
    verdict=$(jq -r '.verdict' "$json_file")
    local blockers should_fix suggestions files_reviewed
    blockers=$(jq -r '.stats.blockers' "$json_file")
    should_fix=$(jq -r '.stats.should_fix' "$json_file")
    suggestions=$(jq -r '.stats.suggestions' "$json_file")
    files_reviewed=$(jq -r '.stats.files_reviewed' "$json_file")
    # Unquoted heredoc: shell variables expand into the markdown.
    cat <<EOF
## Codex Code Review
**Verdict:** ${verdict} | **Confidence:** ${confidence} | **Files reviewed:** ${files_reviewed}
**Findings:** ${blockers} blockers, ${should_fix} should-fix, ${suggestions} suggestions
### Summary
${summary}
EOF
  else
    # Security reviews carry a risk level plus per-severity counts.
    local risk_level critical high medium low files_reviewed
    risk_level=$(jq -r '.risk_level' "$json_file")
    critical=$(jq -r '.stats.critical' "$json_file")
    high=$(jq -r '.stats.high' "$json_file")
    medium=$(jq -r '.stats.medium' "$json_file")
    low=$(jq -r '.stats.low' "$json_file")
    files_reviewed=$(jq -r '.stats.files_reviewed' "$json_file")
    cat <<EOF
## Codex Security Review
**Risk Level:** ${risk_level} | **Confidence:** ${confidence} | **Files reviewed:** ${files_reviewed}
**Findings:** ${critical} critical, ${high} high, ${medium} medium, ${low} low
### Summary
${summary}
EOF
  fi
  # Output findings
  local finding_count
  finding_count=$(jq '.findings | length' "$json_file")
  if [[ "$finding_count" -gt 0 ]]; then
    echo "### Findings"
    echo ""
    # One markdown section per finding. Optional fields (line range,
    # suggestion, CWE, OWASP category, remediation) are only emitted when
    # present in the finding object.
    jq -r '.findings[] | "#### [\(.severity | ascii_upcase)] \(.title)\n- **File:** `\(.file)`\(if .line_start then " (L\(.line_start)\(if .line_end and .line_end != .line_start then "-L\(.line_end)" else "" end))" else "" end)\n- \(.description)\(if .suggestion then "\n- **Suggestion:** \(.suggestion)" else "" end)\(if .cwe_id then "\n- **CWE:** \(.cwe_id)" else "" end)\(if .owasp_category then "\n- **OWASP:** \(.owasp_category)" else "" end)\(if .remediation then "\n- **Remediation:** \(.remediation)" else "" end)\n"' "$json_file"
  else
    echo "*No issues found.*"
  fi
  echo "---"
  # Footer records the reviewing codex version (best-effort).
  echo "*Reviewed by Codex ($(codex --version 2>/dev/null || echo "unknown"))*"
}
# Render findings ($2, JSON file) as markdown and submit them as a review on
# PR $1. $3 selects the review flavor: "code" or "security".
post_to_pr() {
  local pr_num="$1"
  local results_json="$2"
  local kind="$3"
  local body
  body=$(format_findings_as_markdown "$results_json" "$kind")
  detect_platform
  # Map the findings onto a review action (approve/request-changes/comment).
  local action
  if [[ "$kind" == "code" ]]; then
    # Code reviews carry an explicit verdict in the payload.
    action=$(jq -r '.verdict' "$results_json")
  else
    # Security reviews escalate to request-changes for high-risk results;
    # anything else (medium/low/none/unknown) is posted as a plain comment.
    case "$(jq -r '.risk_level' "$results_json")" in
      critical|high) action="request-changes" ;;
      *) action="comment" ;;
    esac
  fi
  # Submit via the shared platform-aware PR review helper.
  "$GIT_SCRIPT_DIR/pr-review.sh" -n "$pr_num" -a "$action" -c "$body"
}
# Emit the formatted review results on stdout.
# Arguments: $1=json_file, $2=review_type (code|security)
print_results() {
  # Pure delegation to the shared markdown renderer.
  format_findings_as_markdown "$1" "$2"
}

View File

@@ -0,0 +1,92 @@
{
"type": "object",
"additionalProperties": false,
"properties": {
"summary": {
"type": "string",
"description": "Brief overall assessment of the code changes"
},
"verdict": {
"type": "string",
"enum": ["approve", "request-changes", "comment"],
"description": "Overall review verdict"
},
"confidence": {
"type": "number",
"minimum": 0,
"maximum": 1,
"description": "Confidence score for the review (0-1)"
},
"findings": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": false,
"properties": {
"severity": {
"type": "string",
"enum": ["blocker", "should-fix", "suggestion"],
"description": "Finding severity: blocker (must fix), should-fix (important), suggestion (optional)"
},
"title": {
"type": "string",
"description": "Short title describing the issue"
},
"file": {
"type": "string",
"description": "File path where the issue was found"
},
"line_start": {
"type": "integer",
"description": "Starting line number"
},
"line_end": {
"type": "integer",
"description": "Ending line number"
},
"description": {
"type": "string",
"description": "Detailed explanation of the issue"
},
"suggestion": {
"type": "string",
"description": "Suggested fix or improvement"
}
},
"required": [
"severity",
"title",
"file",
"line_start",
"line_end",
"description",
"suggestion"
]
}
},
"stats": {
"type": "object",
"additionalProperties": false,
"properties": {
"files_reviewed": {
"type": "integer",
"description": "Number of files reviewed"
},
"blockers": {
"type": "integer",
"description": "Count of blocker findings"
},
"should_fix": {
"type": "integer",
"description": "Count of should-fix findings"
},
"suggestions": {
"type": "integer",
"description": "Count of suggestion findings"
}
},
"required": ["files_reviewed", "blockers", "should_fix", "suggestions"]
}
},
"required": ["summary", "verdict", "confidence", "findings", "stats"]
}

View File

@@ -0,0 +1,106 @@
{
"type": "object",
"additionalProperties": false,
"properties": {
"summary": {
"type": "string",
"description": "Brief overall security assessment of the code changes"
},
"risk_level": {
"type": "string",
"enum": ["critical", "high", "medium", "low", "none"],
"description": "Overall security risk level"
},
"confidence": {
"type": "number",
"minimum": 0,
"maximum": 1,
"description": "Confidence score for the review (0-1)"
},
"findings": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": false,
"properties": {
"severity": {
"type": "string",
"enum": ["critical", "high", "medium", "low"],
"description": "Vulnerability severity level"
},
"title": {
"type": "string",
"description": "Short title describing the vulnerability"
},
"file": {
"type": "string",
"description": "File path where the vulnerability was found"
},
"line_start": {
"type": "integer",
"description": "Starting line number"
},
"line_end": {
"type": "integer",
"description": "Ending line number"
},
"description": {
"type": "string",
"description": "Detailed explanation of the vulnerability"
},
"cwe_id": {
"type": "string",
"description": "CWE identifier if applicable (e.g., CWE-79)"
},
"owasp_category": {
"type": "string",
"description": "OWASP Top 10 category if applicable (e.g., A03:2021-Injection)"
},
"remediation": {
"type": "string",
"description": "Specific remediation steps to fix the vulnerability"
}
},
"required": [
"severity",
"title",
"file",
"line_start",
"line_end",
"description",
"cwe_id",
"owasp_category",
"remediation"
]
}
},
"stats": {
"type": "object",
"additionalProperties": false,
"properties": {
"files_reviewed": {
"type": "integer",
"description": "Number of files reviewed"
},
"critical": {
"type": "integer",
"description": "Count of critical findings"
},
"high": {
"type": "integer",
"description": "Count of high findings"
},
"medium": {
"type": "integer",
"description": "Count of medium findings"
},
"low": {
"type": "integer",
"description": "Count of low findings"
}
},
"required": ["files_reviewed", "critical", "high", "medium", "low"]
}
},
"required": ["summary", "risk_level", "confidence", "findings", "stats"]
}

View File

@@ -0,0 +1,90 @@
# Codex AI Review Pipeline for Woodpecker CI
# Drop this into your repo's .woodpecker/ directory to enable automated
# code and security reviews on every pull request.
#
# Required secrets:
#   - codex_api_key: OpenAI API key or Codex-compatible key
#
# Optional secrets:
#   - gitea_token: Gitea API token for posting PR comments (if not using tea CLI auth)

# Only run for pull requests — reviews are pointless on direct pushes.
when:
  event: pull_request

variables:
  - &node_image 'node:22-slim'
  - &install_codex 'npm i -g @openai/codex'

steps:
  # --- Code Quality Review ---
  # Fails the pipeline when any "blocker" finding is reported.
  code-review:
    image: *node_image
    environment:
      # NOTE(review): confirm the Codex CLI reads CODEX_API_KEY — some
      # setups expect OPENAI_API_KEY instead.
      CODEX_API_KEY:
        from_secret: codex_api_key
    commands:
      - *install_codex
      - apt-get update -qq && apt-get install -y -qq jq git > /dev/null 2>&1
      # Generate the diff
      - git fetch origin ${CI_COMMIT_TARGET_BRANCH:-main}
      - DIFF=$(git diff origin/${CI_COMMIT_TARGET_BRANCH:-main}...HEAD)
      # Run code review with structured output
      - |
        codex exec \
          --sandbox read-only \
          --output-schema .woodpecker/schemas/code-review-schema.json \
          -o /tmp/code-review.json \
          "You are an expert code reviewer. Review the following code changes for correctness, code quality, testing, performance, and documentation issues. Only flag actionable, important issues. Categorize as blocker/should-fix/suggestion. If code looks good, say so.
        Changes:
        $DIFF"
      # Output summary
      - echo "=== Code Review Results ==="
      - jq '.' /tmp/code-review.json
      - |
        BLOCKERS=$(jq '.stats.blockers // 0' /tmp/code-review.json)
        if [ "$BLOCKERS" -gt 0 ]; then
          echo "FAIL: $BLOCKERS blocker(s) found"
          exit 1
        fi
        echo "PASS: No blockers found"
  # --- Security Review ---
  # Fails the pipeline on any critical or high severity finding.
  security-review:
    image: *node_image
    environment:
      CODEX_API_KEY:
        from_secret: codex_api_key
    commands:
      - *install_codex
      - apt-get update -qq && apt-get install -y -qq jq git > /dev/null 2>&1
      # Generate the diff
      - git fetch origin ${CI_COMMIT_TARGET_BRANCH:-main}
      - DIFF=$(git diff origin/${CI_COMMIT_TARGET_BRANCH:-main}...HEAD)
      # Run security review with structured output
      - |
        codex exec \
          --sandbox read-only \
          --output-schema .woodpecker/schemas/security-review-schema.json \
          -o /tmp/security-review.json \
          "You are an expert application security engineer. Review the following code changes for security vulnerabilities including OWASP Top 10, hardcoded secrets, injection flaws, auth/authz gaps, XSS, CSRF, SSRF, path traversal, and supply chain risks. Include CWE IDs and remediation steps. Only flag real security issues, not code quality.
        Changes:
        $DIFF"
      # Output summary
      - echo "=== Security Review Results ==="
      - jq '.' /tmp/security-review.json
      - |
        CRITICAL=$(jq '.stats.critical // 0' /tmp/security-review.json)
        HIGH=$(jq '.stats.high // 0' /tmp/security-review.json)
        if [ "$CRITICAL" -gt 0 ] || [ "$HIGH" -gt 0 ]; then
          echo "FAIL: $CRITICAL critical, $HIGH high severity finding(s)"
          exit 1
        fi
        echo "PASS: No critical or high severity findings"

View File

@@ -0,0 +1,64 @@
#!/usr/bin/env bash
# mosaic-context-loader.sh — SessionStart hook for Claude Code
# Injects mandatory Mosaic config files into agent context at session init.
# Stdout from this script is added to Claude's context before processing.
set -euo pipefail
# Root of the Mosaic config tree (overridable for tests / alternate installs).
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
# Mandatory load order (per AGENTS.md contract)
MANDATORY_FILES=(
  "$MOSAIC_HOME/SOUL.md"
  "$MOSAIC_HOME/USER.md"
  "$MOSAIC_HOME/STANDARDS.md"
  "$MOSAIC_HOME/AGENTS.md"
  "$MOSAIC_HOME/TOOLS.md"
)
# E2E delivery guide (canonical uppercase path). The loop currently has a
# single candidate; the shape allows appending fallback paths later, and the
# first existing candidate wins.
E2E_DELIVERY=""
for candidate in \
  "$MOSAIC_HOME/guides/E2E-DELIVERY.md"; do
  if [[ -f "$candidate" ]]; then
    E2E_DELIVERY="$candidate"
    break
  fi
done
# Runtime-specific reference
RUNTIME_FILE="$MOSAIC_HOME/runtime/claude/RUNTIME.md"
# Project-local AGENTS.md (cwd at session start)
PROJECT_AGENTS=""
if [[ -f "./AGENTS.md" ]]; then
  PROJECT_AGENTS="./AGENTS.md"
fi
# Print a labelled context section for file $1; the label ($2) defaults to the
# file's basename. Missing files are skipped silently so optional documents
# never break session startup.
emit_file() {
  local path="$1"
  local label="${2:-${path##*/}}"
  [[ -f "$path" ]] || return 0
  echo "=== MOSAIC: $label ==="
  cat "$path"
  echo ""
}
# Emit every context section between sentinel markers so the harness (and a
# human debugging a session) can recognize the injected region.
echo "=== MOSAIC CONTEXT INJECTION (SessionStart) ==="
echo ""
# Mandatory identity files first, in contract order.
for doc in "${MANDATORY_FILES[@]}"; do
  emit_file "$doc"
done
# Optional extras: delivery guide and project-local agent contract.
if [[ -n "$E2E_DELIVERY" ]]; then
  emit_file "$E2E_DELIVERY" "E2E-DELIVERY.md"
fi
if [[ -n "$PROJECT_AGENTS" ]]; then
  emit_file "$PROJECT_AGENTS" "Project AGENTS.md ($(pwd))"
fi
# Runtime-specific notes last.
emit_file "$RUNTIME_FILE" "Claude RUNTIME.md"
echo "=== END MOSAIC CONTEXT INJECTION ==="

View File

@@ -0,0 +1,66 @@
# Coolify Tool Suite
Manage Coolify container deployment platform (projects, services, deployments, environment variables).
## Prerequisites
- `jq` and `curl` installed
- Coolify credentials in the Mosaic `credentials.json` store loaded by `~/.config/mosaic/tools/_lib/credentials.sh` (path overridable via `$MOSAIC_CREDENTIALS_FILE`)
- Required fields: `coolify.url`, `coolify.app_token`
## Scripts
| Script | Purpose |
| ------------------- | ------------------------------------- |
| `team-list.sh` | List teams |
| `project-list.sh` | List projects |
| `service-list.sh` | List all services |
| `service-status.sh` | Get service details and status |
| `deploy.sh` | Trigger service deployment |
| `env-set.sh` | Set environment variable on a service |
## Common Options
- `-f json` — JSON output (default: table)
- `-u uuid` — Service UUID (for service-specific operations)
- `-h` — Show help
## API Reference
- Base URL: `http://10.1.1.44:8000`
- API prefix: `/api/v1/`
- Auth: Bearer token in `Authorization` header
- Rate limit: 200 requests per interval
## Known Limitations
- **FQDN updates on compose sub-apps not supported via API.** Workaround: update directly in Coolify's PostgreSQL DB (`coolify-db` container, `service_applications` table).
- **Compose must be base64-encoded** in `docker_compose_raw` field when creating services via API.
- **Don't send `type` with `docker_compose_raw`** — API rejects payloads with both fields.
## Coolify Magic Variables
Coolify reads special env vars from compose files:
- `SERVICE_FQDN_{NAME}_{PORT}` — assigns a domain to a compose service
- `SERVICE_URL_{NAME}_{PORT}` — internal URL reference
- Must use list-style env syntax (`- SERVICE_FQDN_API_3001`), NOT dict-style.
## Examples
```bash
# List all projects
~/.config/mosaic/tools/coolify/project-list.sh
# List services as JSON
~/.config/mosaic/tools/coolify/service-list.sh -f json
# Check service status
~/.config/mosaic/tools/coolify/service-status.sh -u <uuid>
# Set an env var
~/.config/mosaic/tools/coolify/env-set.sh -u <uuid> -k DATABASE_URL -v "postgres://..."
# Deploy a service
~/.config/mosaic/tools/coolify/deploy.sh -u <uuid>
```

View File

@@ -0,0 +1,61 @@
#!/usr/bin/env bash
#
# deploy.sh — Trigger Coolify service deployment
#
# Usage: deploy.sh -u <uuid> [-f]
#
# Options:
#   -u uuid   Service UUID (required)
#   -f        Force restart (stop then start)
#   -h        Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
# Loads COOLIFY_URL / COOLIFY_TOKEN from the shared credential store.
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials coolify
UUID=""
FORCE=false
while getopts "u:fh" opt; do
  case $opt in
    u) UUID="$OPTARG" ;;
    f) FORCE=true ;;
    # Help text = this file's comment header (first 11 lines); keep the
    # header and this line count in sync.
    h) head -11 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 -u <uuid> [-f]" >&2; exit 1 ;;
  esac
done
if [[ -z "$UUID" ]]; then
  echo "Error: -u uuid is required" >&2
  exit 1
fi
if [[ "$FORCE" == "true" ]]; then
  # Best-effort stop: response body and HTTP status are intentionally
  # ignored — a failed stop still allows the start below to proceed.
  echo "Stopping service $UUID..."
  curl -s -o /dev/null -w "" \
    -X POST \
    -H "Authorization: Bearer $COOLIFY_TOKEN" \
    -H "Content-Type: application/json" \
    "${COOLIFY_URL}/api/v1/services/${UUID}/stop"
  # Give Coolify a moment to process the stop before restarting.
  sleep 2
fi
echo "Starting service $UUID..."
# Capture body and HTTP status together; status is the trailing line.
response=$(curl -s -w "\n%{http_code}" \
  -X POST \
  -H "Authorization: Bearer $COOLIFY_TOKEN" \
  -H "Content-Type: application/json" \
  "${COOLIFY_URL}/api/v1/services/${UUID}/start")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
# Coolify may answer 200/201/202 for an accepted deployment request.
if [[ "$http_code" != "200" && "$http_code" != "201" && "$http_code" != "202" ]]; then
  echo "Error: Deployment failed (HTTP $http_code)" >&2
  echo "$body" | jq -r '.' 2>/dev/null >&2 || echo "$body" >&2
  exit 1
fi
echo "Deployment triggered successfully for service $UUID"
# Surface the API's message when present; never fail on a missing field.
echo "$body" | jq -r '.message // empty' 2>/dev/null || true

View File

@@ -0,0 +1,65 @@
#!/usr/bin/env bash
#
# env-set.sh — Set environment variable on a Coolify service
#
# Usage: env-set.sh -u <uuid> -k <key> -v <value> [--preview]
#
# Options:
#   -u uuid     Service UUID (required)
#   -k key      Environment variable name (required)
#   -v value    Environment variable value (required)
#   --preview   Set as preview-only variable
#   -h          Show this help
#
# Note: Changes take effect on next deploy/restart.
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
# Loads COOLIFY_URL / COOLIFY_TOKEN from the shared credential store.
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials coolify
UUID=""
KEY=""
VALUE=""
IS_PREVIEW="false"
# Manual parsing (not getopts) because of the long --preview flag.
while [[ $# -gt 0 ]]; do
  case $1 in
    -u) UUID="$2"; shift 2 ;;
    -k) KEY="$2"; shift 2 ;;
    -v) VALUE="$2"; shift 2 ;;
    --preview) IS_PREVIEW="true"; shift ;;
    # Help text = this file's comment header (first 15 lines); keep in sync.
    -h) head -15 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 -u <uuid> -k <key> -v <value> [--preview]" >&2; exit 1 ;;
  esac
done
if [[ -z "$UUID" || -z "$KEY" || -z "$VALUE" ]]; then
  echo "Error: -u uuid, -k key, and -v value are required" >&2
  exit 1
fi
# Build the JSON payload with jq so key/value are safely escaped.
payload=$(jq -n \
  --arg key "$KEY" \
  --arg value "$VALUE" \
  --argjson preview "$IS_PREVIEW" \
  '{key: $key, value: $value, is_preview: $preview}')
response=$(curl -s -w "\n%{http_code}" \
  -X PATCH \
  -H "Authorization: Bearer $COOLIFY_TOKEN" \
  -H "Content-Type: application/json" \
  -d "$payload" \
  "${COOLIFY_URL}/api/v1/services/${UUID}/envs")
# HTTP status is the trailing line; everything above it is the body.
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" && "$http_code" != "201" ]]; then
  echo "Error: Failed to set environment variable (HTTP $http_code)" >&2
  echo "$body" | jq -r '.' 2>/dev/null >&2 || echo "$body" >&2
  exit 1
fi
echo "Set $KEY on service $UUID"
echo "Note: Redeploy the service to apply the change"

View File

@@ -0,0 +1,52 @@
#!/usr/bin/env bash
#
# project-list.sh — List Coolify projects
#
# Usage: project-list.sh [-f format]
#
# Options:
#   -f format   Output format: table (default), json
#   -h          Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
# load_credentials populates COOLIFY_URL and COOLIFY_TOKEN from credentials.json.
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials coolify
FORMAT="table"
while getopts "f:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    # Help is the comment header above; `tail -n +2` skips the shebang line,
    # which `grep "^#"` would otherwise pass through mangled.
    h) head -10 "$0" | tail -n +2 | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format]" >&2; exit 1 ;;
  esac
done
# Reject unknown formats instead of silently falling back to table output.
if [[ "$FORMAT" != "table" && "$FORMAT" != "json" ]]; then
  echo "Error: unknown format '$FORMAT' (expected: table or json)" >&2
  exit 1
fi
response=$(curl -s -w "\n%{http_code}" \
  -H "Authorization: Bearer $COOLIFY_TOKEN" \
  -H "Content-Type: application/json" \
  "${COOLIFY_URL}/api/v1/projects")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to list projects (HTTP $http_code)" >&2
  exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
  exit 0
fi
# Table output: jq emits TSV rows, printf renders fixed-width truncated columns.
echo "UUID NAME DESCRIPTION"
echo "------------------------------------ ---------------------------- ----------------------------------------"
echo "$body" | jq -r '.[] | [
  .uuid,
  .name,
  (.description // "—")
] | @tsv' | while IFS=$'\t' read -r uuid name desc; do
  printf "%-36s %-28s %s\n" "$uuid" "${name:0:28}" "${desc:0:40}"
done

View File

@@ -0,0 +1,53 @@
#!/usr/bin/env bash
#
# service-list.sh — List Coolify services
#
# Usage: service-list.sh [-f format]
#
# Options:
#   -f format   Output format: table (default), json
#   -h          Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
# load_credentials populates COOLIFY_URL and COOLIFY_TOKEN from credentials.json.
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials coolify
FORMAT="table"
while getopts "f:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    # Help is the comment header above; `tail -n +2` skips the shebang line,
    # which `grep "^#"` would otherwise pass through mangled.
    h) head -10 "$0" | tail -n +2 | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format]" >&2; exit 1 ;;
  esac
done
# Reject unknown formats instead of silently falling back to table output.
if [[ "$FORMAT" != "table" && "$FORMAT" != "json" ]]; then
  echo "Error: unknown format '$FORMAT' (expected: table or json)" >&2
  exit 1
fi
response=$(curl -s -w "\n%{http_code}" \
  -H "Authorization: Bearer $COOLIFY_TOKEN" \
  -H "Content-Type: application/json" \
  "${COOLIFY_URL}/api/v1/services")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to list services (HTTP $http_code)" >&2
  exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
  exit 0
fi
# Table output: jq emits TSV rows, printf renders fixed-width truncated columns.
echo "UUID NAME TYPE STATUS"
echo "------------------------------------ ---------------------------- ------------ ----------"
echo "$body" | jq -r '.[] | [
  .uuid,
  .name,
  (.type // "unknown"),
  (.status // "unknown")
] | @tsv' | while IFS=$'\t' read -r uuid name type status; do
  printf "%-36s %-28s %-12s %s\n" "$uuid" "${name:0:28}" "${type:0:12}" "$status"
done

View File

@@ -0,0 +1,62 @@
#!/usr/bin/env bash
#
# service-status.sh — Get Coolify service status and details
#
# Usage: service-status.sh -u <uuid> [-f format]
#
# Options:
#   -u uuid     Service UUID (required)
#   -f format   Output format: table (default), json
#   -h          Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
# load_credentials populates COOLIFY_URL and COOLIFY_TOKEN from credentials.json.
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials coolify
UUID=""
FORMAT="table"
while getopts "u:f:h" opt; do
  case $opt in
    u) UUID="$OPTARG" ;;
    f) FORMAT="$OPTARG" ;;
    # Help is the comment header above; `tail -n +2` skips the shebang line,
    # which `grep "^#"` would otherwise pass through mangled.
    h) head -12 "$0" | tail -n +2 | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 -u <uuid> [-f format]" >&2; exit 1 ;;
  esac
done
if [[ -z "$UUID" ]]; then
  echo "Error: -u uuid is required" >&2
  exit 1
fi
# Reject unknown formats instead of silently falling back to table output.
if [[ "$FORMAT" != "table" && "$FORMAT" != "json" ]]; then
  echo "Error: unknown format '$FORMAT' (expected: table or json)" >&2
  exit 1
fi
response=$(curl -s -w "\n%{http_code}" \
  -H "Authorization: Bearer $COOLIFY_TOKEN" \
  -H "Content-Type: application/json" \
  "${COOLIFY_URL}/api/v1/services/${UUID}")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to get service status (HTTP $http_code)" >&2
  exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
  exit 0
fi
echo "Service Details"
echo "==============="
# jq builds the whole detail card in one expression; missing fields fall back
# to "unknown"/"none" via //.
echo "$body" | jq -r '
  " UUID: \(.uuid)\n" +
  " Name: \(.name)\n" +
  " Type: \(.type // "unknown")\n" +
  " Status: \(.status // "unknown")\n" +
  " FQDN: \(.fqdn // "none")\n" +
  " Created: \(.created_at // "unknown")\n" +
  " Updated: \(.updated_at // "unknown")"
'

View File

@@ -0,0 +1,52 @@
#!/usr/bin/env bash
#
# team-list.sh — List Coolify teams
#
# Usage: team-list.sh [-f format]
#
# Options:
#   -f format   Output format: table (default), json
#   -h          Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
# load_credentials populates COOLIFY_URL and COOLIFY_TOKEN from credentials.json.
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials coolify
FORMAT="table"
while getopts "f:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    # Help is the comment header above; `tail -n +2` skips the shebang line,
    # which `grep "^#"` would otherwise pass through mangled.
    h) head -10 "$0" | tail -n +2 | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format]" >&2; exit 1 ;;
  esac
done
# Reject unknown formats instead of silently falling back to table output.
if [[ "$FORMAT" != "table" && "$FORMAT" != "json" ]]; then
  echo "Error: unknown format '$FORMAT' (expected: table or json)" >&2
  exit 1
fi
response=$(curl -s -w "\n%{http_code}" \
  -H "Authorization: Bearer $COOLIFY_TOKEN" \
  -H "Content-Type: application/json" \
  "${COOLIFY_URL}/api/v1/teams")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to list teams (HTTP $http_code)" >&2
  exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
  exit 0
fi
# Table output: team IDs are numeric, so tostring before @tsv.
echo "ID NAME DESCRIPTION"
echo "---- ---------------------------- ----------------------------------------"
echo "$body" | jq -r '.[] | [
  (.id | tostring),
  .name,
  (.description // "—")
] | @tsv' | while IFS=$'\t' read -r id name desc; do
  printf "%-4s %-28s %s\n" "$id" "${name:0:28}" "${desc:0:40}"
done

View File

@@ -0,0 +1 @@
node_modules/

View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Launcher for Excalidraw MCP stdio server.
# Resolves its own directory so the loader/server paths work regardless of CWD.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# NOTE(review): --loader is deprecated in newer Node releases in favor of
# --import + module.register(); still works but may warn — confirm target
# Node version before changing.
exec node --loader "$SCRIPT_DIR/loader.mjs" "$SCRIPT_DIR/server.mjs"

View File

@@ -0,0 +1,76 @@
/**
* Custom ESM loader to fix missing .js extensions in @excalidraw/excalidraw deps.
*
* Problems patched:
* 1. excalidraw imports 'roughjs/bin/rough' (and other roughjs/* paths) without .js
* 2. roughjs/* files import sibling modules as './canvas' (relative, no .js)
* 3. JSON files need { type: 'json' } import attribute in Node.js v22+
*
* Usage: node --loader ./loader.mjs server.mjs [args...]
*/
import { fileURLToPath, pathToFileURL } from 'url';
import { dirname, resolve as pathResolve } from 'path';
// ESM has no __dirname; derive it from this module's own URL.
const __dirname = dirname(fileURLToPath(import.meta.url));
// Modules that have incompatible ESM format — redirect to local stubs.
// Keys are bare import specifiers; values are file:// URLs returned by resolve().
const STUBS = {
  '@excalidraw/laser-pointer': pathToFileURL(pathResolve(__dirname, 'stubs/laser-pointer.mjs')).href,
};
/**
 * ESM resolve hook. Checks run in priority order:
 *   1. stubbed packages  2. bare roughjs/bin/* without .js
 *   3. extensionless relative imports  4. .json import attributes
 *   5. default resolution.
 */
export async function resolve(specifier, context, nextResolve) {
  // Incompatible packages are redirected to local stand-ins.
  const stubUrl = STUBS[specifier];
  if (stubUrl) {
    return { url: stubUrl, shortCircuit: true };
  }

  // excalidraw imports e.g. 'roughjs/bin/rough' — append the missing .js.
  if (/^roughjs\/bin\/[a-z-]+$/.test(specifier)) {
    return nextResolve(`${specifier}.js`, context);
  }

  // Relative siblings (e.g. './canvas' inside roughjs) may also lack .js:
  // retry with the extension only when plain resolution reports not-found.
  const isRelative = specifier.startsWith('./') || specifier.startsWith('../');
  if (isRelative) {
    try {
      return await nextResolve(specifier, context);
    } catch (err) {
      if (err.code === 'ERR_MODULE_NOT_FOUND') {
        try {
          return await nextResolve(`${specifier}.js`, context);
        } catch {
          // Ignore — surface the original failure below.
        }
      }
      throw err;
    }
  }

  // Node v22+ requires the { type: 'json' } import attribute for JSON modules.
  if (specifier.endsWith('.json')) {
    const resolved = await nextResolve(specifier, context);
    if (resolved.importAttributes?.type) {
      return resolved;
    }
    return {
      ...resolved,
      importAttributes: { ...resolved.importAttributes, type: 'json' },
    };
  }

  return nextResolve(specifier, context);
}
/**
 * ESM load hook: force the 'json' import attribute on .json files so Node's
 * JSON module loader accepts them; everything else passes through untouched.
 */
export async function load(url, context, nextLoad) {
  if (!url.endsWith('.json')) {
    return nextLoad(url, context);
  }
  const jsonAttrs = { ...context.importAttributes, type: 'json' };
  return nextLoad(url, { ...context, importAttributes: jsonAttrs });
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,11 @@
{
"name": "excalidraw-mcp",
"version": "1.0.0",
"type": "module",
"private": true,
"dependencies": {
"@modelcontextprotocol/sdk": "^1.12.0",
"@excalidraw/excalidraw": "^0.18.0",
"jsdom": "^25.0.1"
}
}

View File

@@ -0,0 +1,323 @@
#!/usr/bin/env node
/**
* Excalidraw MCP stdio server
* Provides headless .excalidraw → SVG export via @excalidraw/excalidraw.
* Optional: diagram generation via EXCALIDRAW_GEN_PATH (excalidraw_gen.py).
*/
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import { z } from "zod/v3";
import { readFileSync, writeFileSync, existsSync } from 'fs';
import { resolve } from 'path';
import { spawnSync } from 'child_process';
import { JSDOM } from 'jsdom';
// ---------------------------------------------------------------------------
// 1. DOM environment — must be established BEFORE importing excalidraw
// ---------------------------------------------------------------------------
// Minimal jsdom document: excalidraw only needs a DOM tree plus visual APIs
// (pretendToBeVisual), not real rendering.
const dom = new JSDOM('<!DOCTYPE html><html><body></body></html>', {
  url: 'http://localhost/',
  pretendToBeVisual: true,
});
const { window } = dom;
/**
 * Install `value` as global[key], overriding read-only accessor globals
 * (e.g. `navigator` in Node v22). Undefined values are skipped; failures on
 * non-configurable slots are ignored so an existing global survives.
 */
function defineGlobal(key, value) {
  if (value === undefined) {
    return;
  }
  const descriptor = { value, writable: true, configurable: true };
  try {
    Object.defineProperty(global, key, descriptor);
  } catch {
    // Non-configurable and already present — leave the existing global alone.
  }
}
// Core DOM globals mirrored from the jsdom window onto Node's global object.
defineGlobal('window', window);
defineGlobal('document', window.document);
defineGlobal('navigator', window.navigator);
defineGlobal('location', window.location);
defineGlobal('history', window.history);
defineGlobal('screen', window.screen);
// Element / event / observer interfaces referenced by excalidraw at import
// or export time; missing ones are skipped by defineGlobal's undefined check.
for (const key of [
  'Node', 'Element', 'HTMLElement', 'SVGElement', 'SVGSVGElement',
  'HTMLCanvasElement', 'HTMLImageElement', 'Image',
  'Event', 'CustomEvent', 'MouseEvent', 'PointerEvent',
  'KeyboardEvent', 'TouchEvent', 'WheelEvent', 'InputEvent',
  'MutationObserver', 'ResizeObserver', 'IntersectionObserver',
  'XMLHttpRequest', 'XMLSerializer',
  'DOMParser', 'Range',
  'getComputedStyle', 'matchMedia',
]) {
  defineGlobal(key, window[key]);
}
// Animation frame stubs (jsdom doesn't implement them): async setTimeout
// with a timestamp argument, cancellable via clearTimeout.
global.requestAnimationFrame = (fn) => setTimeout(() => fn(Date.now()), 0);
global.cancelAnimationFrame = (id) => clearTimeout(id);
// CSS Font Loading API stub — jsdom has no FontFace; excalidraw only checks
// load status, so every face reports itself as already loaded.
class FontFaceStub {
  // Mirrors the FontFace constructor surface; descriptors are ignored.
  constructor(family, source, _descriptors) {
    Object.assign(this, {
      family,
      source,
      status: 'loaded',
    });
    this.loaded = Promise.resolve(this);
  }

  // Real FontFace.load() fetches the font; here it resolves immediately.
  load() {
    return Promise.resolve(this);
  }
}
defineGlobal('FontFace', FontFaceStub);
// FontFaceSet stub for document.fonts — reports everything as loaded so
// font-readiness checks resolve immediately; iteration yields nothing.
const fontFaceSet = {
  add: () => {},
  delete: () => {},
  has: () => false,
  clear: () => {},
  load: () => Promise.resolve([]),
  check: () => true,
  ready: Promise.resolve(),
  status: 'loaded',
  forEach: () => {},
  [Symbol.iterator]: function*() {},
};
Object.defineProperty(window.document, 'fonts', {
  value: fontFaceSet,
  writable: true,
  configurable: true,
});
// Canvas stub — excalidraw's exportToSvg doesn't need real canvas rendering,
// but the class must exist for isinstance checks.
if (!global.HTMLCanvasElement) {
  defineGlobal('HTMLCanvasElement', window.HTMLCanvasElement ?? class HTMLCanvasElement {});
}
// Device pixel ratio
global.devicePixelRatio = 1;
// ---------------------------------------------------------------------------
// 1b. Stub canvas getContext — excalidraw calls this at module init time.
// jsdom throws "Not implemented" by default; we return a no-op 2D stub.
// ---------------------------------------------------------------------------
// measureText approximates 8px per character so text bounding boxes are
// non-zero; exact metrics don't matter for SVG export.
const _canvasCtx = {
  canvas: { width: 800, height: 600 },
  fillRect: () => {}, clearRect: () => {}, strokeRect: () => {},
  getImageData: (x, y, w, h) => ({ data: new Uint8ClampedArray(w * h * 4), width: w, height: h }),
  putImageData: () => {}, createImageData: () => ({ data: new Uint8ClampedArray(0) }),
  setTransform: () => {}, resetTransform: () => {}, transform: () => {},
  drawImage: () => {}, save: () => {}, restore: () => {},
  scale: () => {}, rotate: () => {}, translate: () => {},
  beginPath: () => {}, closePath: () => {}, moveTo: () => {}, lineTo: () => {},
  bezierCurveTo: () => {}, quadraticCurveTo: () => {},
  arc: () => {}, arcTo: () => {}, ellipse: () => {}, rect: () => {},
  fill: () => {}, stroke: () => {}, clip: () => {},
  fillText: () => {}, strokeText: () => {},
  measureText: (t) => ({ width: t.length * 8, actualBoundingBoxAscent: 12, actualBoundingBoxDescent: 3, fontBoundingBoxAscent: 14, fontBoundingBoxDescent: 4 }),
  createLinearGradient: () => ({ addColorStop: () => {} }),
  createRadialGradient: () => ({ addColorStop: () => {} }),
  createPattern: () => null,
  setLineDash: () => {}, getLineDash: () => [],
  isPointInPath: () => false, isPointInStroke: () => false,
  getContextAttributes: () => ({ alpha: true, desynchronized: false }),
  font: '10px sans-serif', fillStyle: '#000', strokeStyle: '#000',
  lineWidth: 1, lineCap: 'butt', lineJoin: 'miter',
  textAlign: 'start', textBaseline: 'alphabetic',
  globalAlpha: 1, globalCompositeOperation: 'source-over',
  shadowOffsetX: 0, shadowOffsetY: 0, shadowBlur: 0, shadowColor: 'transparent',
  miterLimit: 10, lineDashOffset: 0, filter: 'none', imageSmoothingEnabled: true,
};
// Patch before excalidraw import so module-level canvas calls get the stub;
// non-2d contexts (webgl etc.) report as unavailable.
if (window.HTMLCanvasElement) {
  window.HTMLCanvasElement.prototype.getContext = function (type) {
    if (type === '2d') return _canvasCtx;
    return null;
  };
}
// ---------------------------------------------------------------------------
// 2. Load excalidraw (dynamic import so globals are set first)
// ---------------------------------------------------------------------------
// Dynamic import is deliberate: a static import would hoist above the DOM
// shims and excalidraw's module-level init would crash.
let exportToSvg;
try {
  const excalidraw = await import('@excalidraw/excalidraw');
  exportToSvg = excalidraw.exportToSvg;
  if (!exportToSvg) throw new Error('exportToSvg not found in package exports');
} catch (err) {
  // Fail fast: nothing works without the export function.
  process.stderr.write(`FATAL: Failed to load @excalidraw/excalidraw: ${err.message}\n`);
  process.exit(1);
}
// ---------------------------------------------------------------------------
// 3. SVG export helper
// ---------------------------------------------------------------------------
/**
 * Render excalidraw scene data to an SVG markup string.
 * Caller-supplied appState entries override the white-background defaults;
 * elements/files default to empty when absent.
 */
async function renderToSvg(elements, appState, files) {
  const mergedAppState = {
    exportWithDarkMode: false,
    exportBackground: true,
    viewBackgroundColor: '#ffffff',
    ...appState,
  };
  const svgNode = await exportToSvg({
    elements: elements ?? [],
    appState: mergedAppState,
    files: files ?? {},
  });
  return new window.XMLSerializer().serializeToString(svgNode);
}
// ---------------------------------------------------------------------------
// 4. Gen subprocess helper (optional — requires EXCALIDRAW_GEN_PATH)
// ---------------------------------------------------------------------------
/** Path to excalidraw_gen.py, or null when generation is not configured. */
function requireGenPath() {
  return process.env.EXCALIDRAW_GEN_PATH || null;
}
/**
 * Run excalidraw_gen.py synchronously with the given args.
 * Returns { ok, text }: trimmed stdout on success, a diagnostic otherwise.
 * Never throws — callers inspect `ok`.
 */
function spawnGen(args) {
  const genPath = requireGenPath();
  if (!genPath) {
    return {
      ok: false,
      text: 'EXCALIDRAW_GEN_PATH is not set. Set it to the path of excalidraw_gen.py to use diagram generation.',
    };
  }
  const proc = spawnSync('python3', [genPath, ...args], { encoding: 'utf8' });
  if (proc.error) {
    return { ok: false, text: `spawn error: ${proc.error.message}` };
  }
  if (proc.status !== 0) {
    return { ok: false, text: proc.stderr || 'subprocess failed' };
  }
  return { ok: true, text: proc.stdout.trim() };
}
// ---------------------------------------------------------------------------
// 5. MCP Server
// ---------------------------------------------------------------------------
const server = new McpServer({
  name: "excalidraw",
  version: "1.0.0",
});
// --- Tool: excalidraw_to_svg ---
// Pure in-memory conversion: elements JSON in, SVG markup string out.
server.tool(
  "excalidraw_to_svg",
  "Convert Excalidraw elements JSON to SVG string",
  {
    elements: z.string().describe("JSON string of Excalidraw elements array"),
    app_state: z.string().optional().describe("JSON string of appState overrides"),
  },
  async ({ elements, app_state }) => {
    let parsed;
    try {
      parsed = JSON.parse(elements);
    } catch (err) {
      throw new Error(`Invalid elements JSON: ${err.message}`);
    }
    // NOTE(review): app_state is parsed without a try/catch, unlike elements —
    // a malformed app_state surfaces as a raw SyntaxError. Confirm intended.
    const appState = app_state ? JSON.parse(app_state) : {};
    const svg = await renderToSvg(parsed, appState, {});
    return { content: [{ type: "text", text: svg }] };
  }
);
// --- Tool: excalidraw_file_to_svg ---
// Reads a scene file from disk and writes the SVG next to it.
server.tool(
  "excalidraw_file_to_svg",
  "Convert an .excalidraw file to SVG (writes .svg alongside the input file)",
  {
    file_path: z.string().describe("Absolute or relative path to .excalidraw file"),
  },
  async ({ file_path }) => {
    const absPath = resolve(file_path);
    if (!existsSync(absPath)) {
      throw new Error(`File not found: ${absPath}`);
    }
    const raw = JSON.parse(readFileSync(absPath, 'utf8'));
    const svg = await renderToSvg(raw.elements, raw.appState, raw.files);
    // NOTE(review): if the input lacks a .excalidraw extension, replace() is a
    // no-op and the SVG overwrites the input file — confirm inputs are always
    // *.excalidraw.
    const outPath = absPath.replace(/\.excalidraw$/, '.svg');
    writeFileSync(outPath, svg, 'utf8');
    return {
      content: [{ type: "text", text: `SVG written to: ${outPath}\n\n${svg}` }],
    };
  }
);
// --- Tool: list_diagrams ---
// Delegates to excalidraw_gen.py; when EXCALIDRAW_GEN_PATH is unset, spawnGen
// returns an explanatory message rather than throwing, so it is passed through.
server.tool(
  "list_diagrams",
  "List available diagram templates from the DIAGRAMS registry (requires EXCALIDRAW_GEN_PATH)",
  {},
  async () => {
    const res = spawnGen(['--list']);
    return { content: [{ type: "text", text: res.text }] };
  }
);
// --- Tool: generate_diagram ---
server.tool(
  "generate_diagram",
  "Generate an .excalidraw file from a named diagram template (requires EXCALIDRAW_GEN_PATH)",
  {
    name: z.string().describe("Diagram template name (from list_diagrams)"),
    output_path: z.string().optional().describe("Output path for the .excalidraw file"),
  },
  async ({ name, output_path }) => {
    const args = [name];
    if (output_path) args.push('--output', output_path);
    const res = spawnGen(args);
    if (!res.ok) throw new Error(res.text);
    return { content: [{ type: "text", text: res.text }] };
  }
);
// --- Tool: generate_and_export ---
// Two-step pipeline: the generator subprocess prints the .excalidraw path on
// stdout, then that file is exported to SVG in-process.
server.tool(
  "generate_and_export",
  "Generate an .excalidraw file and immediately export it to SVG (requires EXCALIDRAW_GEN_PATH)",
  {
    name: z.string().describe("Diagram template name (from list_diagrams)"),
    output_path: z.string().optional().describe("Output path for the .excalidraw file (SVG written alongside)"),
  },
  async ({ name, output_path }) => {
    const genArgs = [name];
    if (output_path) genArgs.push('--output', output_path);
    const genRes = spawnGen(genArgs);
    if (!genRes.ok) throw new Error(genRes.text);
    const excalidrawPath = genRes.text;
    if (!existsSync(excalidrawPath)) {
      throw new Error(`Generated file not found: ${excalidrawPath}`);
    }
    const raw = JSON.parse(readFileSync(excalidrawPath, 'utf8'));
    const svg = await renderToSvg(raw.elements, raw.appState, raw.files);
    const svgPath = excalidrawPath.replace(/\.excalidraw$/, '.svg');
    writeFileSync(svgPath, svg, 'utf8');
    return {
      content: [{ type: "text", text: `Generated: ${excalidrawPath}\nExported SVG: ${svgPath}` }],
    };
  }
);
// --- Start ---
// stdio transport: the MCP client owns this process's stdin/stdout.
const transport = new StdioServerTransport();
await server.connect(transport);

View File

@@ -0,0 +1,7 @@
/**
 * Stub for @excalidraw/laser-pointer
 * The real package uses a Parcel bundle format that Node.js ESM can't consume.
 * For headless SVG export, the laser pointer feature is not needed.
 * Both named and default exports are provided to satisfy either import style.
 */
export class LaserPointer {}
export default { LaserPointer };

View File

@@ -0,0 +1,247 @@
# ci-queue-wait.ps1 - Wait until project CI queue is clear (no running/queued pipeline on branch head)
# Usage: .\ci-queue-wait.ps1 [-Branch main] [-TimeoutSeconds 900] [-IntervalSeconds 15] [-Purpose merge] [-RequireStatus]
[CmdletBinding()]
# Script parameters; aliases mirror the bash version's short flags.
param(
    [Alias("B")]
    [string]$Branch = "main",
    [Alias("t")]
    [int]$TimeoutSeconds = 900,
    [Alias("i")]
    [int]$IntervalSeconds = 15,
    [ValidateSet("push", "merge")]
    [string]$Purpose = "merge",
    [switch]$RequireStatus,
    [Alias("h")]
    [switch]$Help
)
# Pull in Get-GitPlatform / Get-GitRepoOwner / Get-GitRepoName helpers.
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$ScriptDir\detect-platform.ps1"
# Prints CLI usage text to the pipeline.
function Show-Usage {
    @"
Usage: ci-queue-wait.ps1 [-Branch main] [-TimeoutSeconds 900] [-IntervalSeconds 15] [-Purpose push|merge] [-RequireStatus]
Options:
  -Branch, -B BRANCH Branch head to inspect (default: main)
  -TimeoutSeconds, -t SECONDS Max wait time (default: 900)
  -IntervalSeconds, -i SECONDS Poll interval (default: 15)
  -Purpose VALUE push or merge (default: merge)
  -RequireStatus Fail if no CI status contexts are present
  -Help, -h Show help
"@
}
if ($Help) {
    Show-Usage
    exit 0
}
# Validate numeric inputs up front so the polling arithmetic can't misbehave.
if ($TimeoutSeconds -lt 1 -or $IntervalSeconds -lt 1) {
    Write-Error "TimeoutSeconds and IntervalSeconds must be positive integers."
    exit 1
}
# Extract the host name from the origin remote URL (https or scp-style ssh).
function Get-RemoteHost {
    $originUrl = git remote get-url origin 2>$null
    if ([string]::IsNullOrEmpty($originUrl)) {
        return $null
    }
    switch -Regex ($originUrl) {
        "^https?://([^/]+)/" { return $Matches[1] }
        "^git@([^:]+):"      { return $Matches[1] }
    }
    return $null
}
# Resolve a Gitea API token: GITEA_TOKEN env var first, then ~/.git-credentials.
# NOTE: the parameter is deliberately NOT named $Host — PowerShell's automatic
# $Host variable is read-only, and binding a parameter named Host throws
# "Cannot overwrite variable Host because it is read-only or constant".
# [Alias("Host")] keeps existing `-Host` call sites working unchanged.
function Get-GiteaToken {
    param(
        [Alias("Host")]
        [string]$RemoteHost
    )
    if ($env:GITEA_TOKEN) { return $env:GITEA_TOKEN }
    $credPath = Join-Path $HOME ".git-credentials"
    if (-not (Test-Path $credPath)) { return $null }
    $line = Get-Content $credPath | Where-Object { $_ -like "*$RemoteHost*" } | Select-Object -First 1
    if (-not $line) { return $null }
    # Credential line shape: https://user:token@host — capture the token segment.
    if ($line -match 'https?://[^@]*:([^@/]+)@') {
        return $Matches[1]
    }
    return $null
}
# Classify a commit-status payload into one queue state:
#   pending | terminal-failure | terminal-success | no-status | unknown
# The top-level .state wins when recognized; otherwise the individual
# .statuses[] entries are aggregated (ordering matters: any pending entry
# beats any failure entry, which beats all-success).
function Get-QueueState {
    param([object]$Payload)
    $pending = @("pending", "queued", "running", "waiting")
    $failure = @("failure", "error", "failed")
    $success = @("success")
    $state = ""
    if ($null -ne $Payload.state) {
        $state = "$($Payload.state)".ToLowerInvariant()
    }
    if ($pending -contains $state) { return "pending" }
    if ($failure -contains $state) { return "terminal-failure" }
    if ($success -contains $state) { return "terminal-success" }
    # Fall back to per-context statuses (some payloads omit a usable .state).
    $values = @()
    $statuses = @()
    if ($null -ne $Payload.statuses) { $statuses = @($Payload.statuses) }
    foreach ($s in $statuses) {
        if ($null -eq $s) { continue }
        $v = ""
        # Gitea uses .status, GitHub uses .state on individual contexts.
        if ($null -ne $s.status) { $v = "$($s.status)".ToLowerInvariant() }
        elseif ($null -ne $s.state) { $v = "$($s.state)".ToLowerInvariant() }
        if (-not [string]::IsNullOrEmpty($v)) { $values += $v }
    }
    if ($values.Count -eq 0 -and [string]::IsNullOrEmpty($state)) { return "no-status" }
    if (($values | Where-Object { $pending -contains $_ }).Count -gt 0) { return "pending" }
    if (($values | Where-Object { $failure -contains $_ }).Count -gt 0) { return "terminal-failure" }
    if ($values.Count -gt 0 -and ($values | Where-Object { -not ($success -contains $_) }).Count -eq 0) { return "terminal-success" }
    return "unknown"
}
# Print each status context still in a pending-like state (with its target
# URL when available) for progress visibility while polling.
function Print-PendingContexts {
    param([object]$Payload)
    $pending = @("pending", "queued", "running", "waiting")
    $statuses = @()
    if ($null -ne $Payload.statuses) { $statuses = @($Payload.statuses) }
    if ($statuses.Count -eq 0) {
        Write-Host "[ci-queue-wait] no status contexts reported"
        return
    }
    $found = $false
    foreach ($s in $statuses) {
        if ($null -eq $s) { continue }
        $name = if ($s.context) { $s.context } elseif ($s.name) { $s.name } else { "unknown-context" }
        $value = if ($s.status) { "$($s.status)".ToLowerInvariant() } elseif ($s.state) { "$($s.state)".ToLowerInvariant() } else { "unknown" }
        $target = if ($s.target_url) { $s.target_url } elseif ($s.url) { $s.url } else { "" }
        if ($pending -contains $value) {
            $found = $true
            if ($target) {
                Write-Host "[ci-queue-wait] pending: $name=$value ($target)"
            }
            else {
                Write-Host "[ci-queue-wait] pending: $name=$value"
            }
        }
    }
    if (-not $found) {
        Write-Host "[ci-queue-wait] no pending contexts"
    }
}
# ---- Resolve repo coordinates and branch head SHA ----
$platform = Get-GitPlatform
$owner = Get-GitRepoOwner
$repo = Get-GitRepoName
if ([string]::IsNullOrEmpty($owner) -or [string]::IsNullOrEmpty($repo)) {
    Write-Error "Could not determine repository owner/name from git remote."
    exit 1
}
$headSha = $null
# Named $remoteHost rather than $host: $Host is a read-only PowerShell
# automatic variable, and assigning to it throws
# "Cannot overwrite variable Host because it is read-only or constant".
$remoteHost = $null
$giteaToken = $null
switch ($platform) {
    "github" {
        if (-not (Get-Command gh -ErrorAction SilentlyContinue)) {
            Write-Error "gh CLI is required for GitHub CI queue guard."
            exit 1
        }
        $headSha = (& gh api "repos/$owner/$repo/branches/$Branch" --jq ".commit.sha").Trim()
        if ([string]::IsNullOrEmpty($headSha)) {
            Write-Error "Could not resolve $Branch head SHA."
            exit 1
        }
        Write-Host "[ci-queue-wait] platform=github purpose=$Purpose branch=$Branch sha=$headSha"
    }
    "gitea" {
        $remoteHost = Get-RemoteHost
        if ([string]::IsNullOrEmpty($remoteHost)) {
            Write-Error "Could not determine remote host."
            exit 1
        }
        $giteaToken = Get-GiteaToken -Host $remoteHost
        if ([string]::IsNullOrEmpty($giteaToken)) {
            Write-Error "Gitea token not found. Set GITEA_TOKEN or configure ~/.git-credentials."
            exit 1
        }
        try {
            $branchUrl = "https://$remoteHost/api/v1/repos/$owner/$repo/branches/$Branch"
            $branchPayload = Invoke-RestMethod -Method Get -Uri $branchUrl -Headers @{ Authorization = "token $giteaToken" }
            $headSha = ($branchPayload.commit.id | Out-String).Trim()
        }
        catch {
            Write-Error "Could not resolve $Branch head SHA from Gitea API."
            exit 1
        }
        if ([string]::IsNullOrEmpty($headSha)) {
            Write-Error "Could not resolve $Branch head SHA."
            exit 1
        }
        Write-Host "[ci-queue-wait] platform=gitea purpose=$Purpose branch=$Branch sha=$headSha"
    }
    default {
        Write-Error "Unsupported platform '$platform'."
        exit 1
    }
}
# ---- Poll the head commit's combined status until clear or deadline ----
$deadline = (Get-Date).AddSeconds($TimeoutSeconds)
while ($true) {
    if ((Get-Date) -gt $deadline) {
        Write-Error "Timed out waiting for CI queue to clear on $Branch after ${TimeoutSeconds}s."
        exit 124
    }
    try {
        if ($platform -eq "github") {
            # gh emits the body as an array of lines; join before parsing so
            # Windows PowerShell 5.1's ConvertFrom-Json sees one JSON document
            # (PS 5.1 parses each pipeline item separately and fails otherwise).
            $statusJson = & gh api "repos/$owner/$repo/commits/$headSha/status"
            $payload = ($statusJson -join "`n") | ConvertFrom-Json
        }
        else {
            $statusUrl = "https://$remoteHost/api/v1/repos/$owner/$repo/commits/$headSha/status"
            $payload = Invoke-RestMethod -Method Get -Uri $statusUrl -Headers @{ Authorization = "token $giteaToken" }
        }
    }
    catch {
        Write-Error "Failed to query commit status for queue guard."
        exit 1
    }
    $state = Get-QueueState -Payload $payload
    Write-Host "[ci-queue-wait] state=$state purpose=$Purpose branch=$Branch"
    # Only pending-like states block; terminal/unknown states let the caller proceed.
    switch ($state) {
        "pending" {
            Print-PendingContexts -Payload $payload
            Start-Sleep -Seconds $IntervalSeconds
        }
        "no-status" {
            if ($RequireStatus) {
                Write-Error "No CI status contexts found while -RequireStatus is set."
                exit 1
            }
            Write-Host "[ci-queue-wait] no status contexts present; proceeding."
            exit 0
        }
        "terminal-success" { exit 0 }
        "terminal-failure" { exit 0 }
        "unknown" { exit 0 }
        default { exit 0 }
    }
}

View File

@@ -0,0 +1,273 @@
#!/bin/bash
# ci-queue-wait.sh - Wait until project CI queue is clear (no running/queued pipeline on branch head)
# Usage: ci-queue-wait.sh [-B branch] [-t timeout_sec] [-i interval_sec] [--purpose push|merge] [--require-status]
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"
BRANCH="main"
TIMEOUT_SEC=900
INTERVAL_SEC=15
PURPOSE="merge"
REQUIRE_STATUS=0
# Print CLI usage/help to stdout.
usage() {
  cat <<EOF
Usage: $(basename "$0") [-B branch] [-t timeout_sec] [-i interval_sec] [--purpose push|merge] [--require-status]
Options:
  -B, --branch BRANCH Branch head to inspect (default: main)
  -t, --timeout SECONDS Max wait time in seconds (default: 900)
  -i, --interval SECONDS Poll interval in seconds (default: 15)
  --purpose VALUE Log context: push|merge (default: merge)
  --require-status Fail if no CI status contexts are present
  -h, --help Show this help
Examples:
  $(basename "$0")
  $(basename "$0") --purpose push -B main -t 600 -i 10
EOF
}
# get_remote_host and get_gitea_token are provided by detect-platform.sh
# Classify the commit-status JSON on stdin into one queue state:
#   pending | terminal-failure | terminal-success | no-status | unknown
# The top-level .state wins when recognized; otherwise per-context .statuses
# entries are aggregated. Result is a single word on stdout.
get_state_from_status_json() {
  python3 - <<'PY'
import json
import sys
try:
    payload = json.load(sys.stdin)
except Exception:
    print("unknown")
    raise SystemExit(0)
statuses = payload.get("statuses") or []
state = (payload.get("state") or "").lower()
pending_values = {"pending", "queued", "running", "waiting"}
failure_values = {"failure", "error", "failed"}
success_values = {"success"}
if state in pending_values:
    print("pending")
    raise SystemExit(0)
if state in failure_values:
    print("terminal-failure")
    raise SystemExit(0)
if state in success_values:
    print("terminal-success")
    raise SystemExit(0)
values = []
for item in statuses:
    if not isinstance(item, dict):
        continue
    value = (item.get("status") or item.get("state") or "").lower()
    if value:
        values.append(value)
if not values and not state:
    print("no-status")
elif any(v in pending_values for v in values):
    print("pending")
elif any(v in failure_values for v in values):
    print("terminal-failure")
elif values and all(v in success_values for v in values):
    print("terminal-success")
else:
    print("unknown")
PY
}
# Print "[ci-queue-wait] pending: <context>=<state>" for each context still in
# a pending-like state (reads the status JSON from stdin).
print_pending_contexts() {
  python3 - <<'PY'
import json
import sys
try:
    payload = json.load(sys.stdin)
except Exception:
    print("[ci-queue-wait] unable to decode status payload")
    raise SystemExit(0)
statuses = payload.get("statuses") or []
if not statuses:
    print("[ci-queue-wait] no status contexts reported")
    raise SystemExit(0)
pending_values = {"pending", "queued", "running", "waiting"}
found = False
for item in statuses:
    if not isinstance(item, dict):
        continue
    name = item.get("context") or item.get("name") or "unknown-context"
    value = (item.get("status") or item.get("state") or "unknown").lower()
    target = item.get("target_url") or item.get("url") or ""
    if value in pending_values:
        found = True
        if target:
            print(f"[ci-queue-wait] pending: {name}={value} ({target})")
        else:
            print(f"[ci-queue-wait] pending: {name}={value}")
if not found:
    print("[ci-queue-wait] no pending contexts")
PY
}
# --- Platform API helpers --------------------------------------------------
# Each helper prints its result on stdout; callers capture it with $(...).

# Print the head commit SHA of a branch via the GitHub API.
github_get_branch_head_sha() {
  local owner="$1" repo="$2" branch="$3"
  gh api "repos/${owner}/${repo}/branches/${branch}" --jq '.commit.sha'
}

# Print the combined commit-status JSON document via the GitHub API.
github_get_commit_status_json() {
  local owner="$1" repo="$2" sha="$3"
  gh api "repos/${owner}/${repo}/commits/${sha}/status"
}

# Print the head commit SHA of a branch via the Gitea API
# (repo argument is "owner/name"; token is a Gitea access token).
gitea_get_branch_head_sha() {
  local host="$1" repo="$2" branch="$3" token="$4"
  curl -fsS -H "Authorization: token ${token}" \
    "https://${host}/api/v1/repos/${repo}/branches/${branch}" | python3 -c '
import json, sys
data = json.load(sys.stdin)
commit = data.get("commit") or {}
print((commit.get("id") or "").strip())
'
}

# Print the raw commit-status JSON document via the Gitea API.
gitea_get_commit_status_json() {
  local host="$1" repo="$2" sha="$3" token="$4"
  curl -fsS -H "Authorization: token ${token}" \
    "https://${host}/api/v1/repos/${repo}/commits/${sha}/status"
}
# ---- CLI argument parsing (manual loop: long options rule out getopts) ----
while [[ $# -gt 0 ]]; do
  case "$1" in
    -B|--branch)
      BRANCH="$2"
      shift 2
      ;;
    -t|--timeout)
      TIMEOUT_SEC="$2"
      shift 2
      ;;
    -i|--interval)
      INTERVAL_SEC="$2"
      shift 2
      ;;
    --purpose)
      PURPOSE="$2"
      shift 2
      ;;
    --require-status)
      REQUIRE_STATUS=1
      shift
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage >&2
      exit 1
      ;;
  esac
done
# Validate numeric inputs up front so the sleep/deadline arithmetic can't blow up.
if ! [[ "$TIMEOUT_SEC" =~ ^[0-9]+$ ]] || ! [[ "$INTERVAL_SEC" =~ ^[0-9]+$ ]]; then
  echo "Error: timeout and interval must be integer seconds." >&2
  exit 1
fi
# ---- Resolve repo coordinates and platform ----
OWNER=$(get_repo_owner)
REPO=$(get_repo_name)
# NOTE(review): detect_platform's stdout is discarded; this relies on it also
# setting the PLATFORM variable as a side effect — confirm in detect-platform.sh.
detect_platform > /dev/null
PLATFORM="${PLATFORM:-unknown}"
if [[ "$PLATFORM" == "github" ]]; then
  if ! command -v gh >/dev/null 2>&1; then
    echo "Error: gh CLI is required for GitHub CI queue guard." >&2
    exit 1
  fi
  HEAD_SHA=$(github_get_branch_head_sha "$OWNER" "$REPO" "$BRANCH")
  if [[ -z "$HEAD_SHA" ]]; then
    echo "Error: Could not resolve ${BRANCH} head SHA." >&2
    exit 1
  fi
  echo "[ci-queue-wait] platform=github purpose=${PURPOSE} branch=${BRANCH} sha=${HEAD_SHA}"
elif [[ "$PLATFORM" == "gitea" ]]; then
  # get_remote_host / get_gitea_token come from the sourced detect-platform.sh.
  HOST=$(get_remote_host) || {
    echo "Error: Could not determine remote host." >&2
    exit 1
  }
  TOKEN=$(get_gitea_token "$HOST") || {
    echo "Error: Gitea token not found. Set GITEA_TOKEN or configure ~/.git-credentials." >&2
    exit 1
  }
  HEAD_SHA=$(gitea_get_branch_head_sha "$HOST" "$OWNER/$REPO" "$BRANCH" "$TOKEN")
  if [[ -z "$HEAD_SHA" ]]; then
    echo "Error: Could not resolve ${BRANCH} head SHA." >&2
    exit 1
  fi
  echo "[ci-queue-wait] platform=gitea purpose=${PURPOSE} branch=${BRANCH} sha=${HEAD_SHA}"
else
  echo "Error: Unsupported platform '${PLATFORM}'." >&2
  exit 1
fi
# ---- Poll the head commit's combined status until clear or deadline ----
START_TS=$(date +%s)
DEADLINE_TS=$((START_TS + TIMEOUT_SEC))
while true; do
  NOW_TS=$(date +%s)
  if (( NOW_TS > DEADLINE_TS )); then
    # 124 matches timeout(1)'s convention for "deadline exceeded".
    echo "Error: Timed out waiting for CI queue to clear on ${BRANCH} after ${TIMEOUT_SEC}s." >&2
    exit 124
  fi
  if [[ "$PLATFORM" == "github" ]]; then
    STATUS_JSON=$(github_get_commit_status_json "$OWNER" "$REPO" "$HEAD_SHA")
  else
    STATUS_JSON=$(gitea_get_commit_status_json "$HOST" "$OWNER/$REPO" "$HEAD_SHA" "$TOKEN")
  fi
  STATE=$(printf '%s' "$STATUS_JSON" | get_state_from_status_json)
  echo "[ci-queue-wait] state=${STATE} purpose=${PURPOSE} branch=${BRANCH}"
  case "$STATE" in
    pending)
      printf '%s' "$STATUS_JSON" | print_pending_contexts
      sleep "$INTERVAL_SEC"
      ;;
    no-status)
      if [[ "$REQUIRE_STATUS" -eq 1 ]]; then
        echo "Error: No CI status contexts found for ${BRANCH} while --require-status is set." >&2
        exit 1
      fi
      echo "[ci-queue-wait] no status contexts present; proceeding."
      exit 0
      ;;
    terminal-success|terminal-failure|unknown)
      # Queue guard only blocks on pending/running/queued states.
      exit 0
      ;;
    *)
      echo "[ci-queue-wait] unrecognized state '${STATE}', proceeding conservatively."
      exit 0
      ;;
  esac
done

View File

@@ -0,0 +1,83 @@
# detect-platform.ps1 - Detect git platform (Gitea or GitHub) for current repo
# Usage: . .\detect-platform.ps1; Get-GitPlatform
# or: .\detect-platform.ps1 (prints platform name)
function Get-GitPlatform {
    <#
    .SYNOPSIS
    Classify the origin remote as "github", "gitea", or "unknown".
    .NOTES
    Writes an error and returns $null when there is no origin remote.
    #>
    [CmdletBinding()]
    param()
    $originUrl = git remote get-url origin 2>$null
    if ([string]::IsNullOrEmpty($originUrl)) {
        Write-Error "Not a git repository or no origin remote"
        return $null
    }
    if ($originUrl -match "github\.com") {
        return "github"
    }
    # Any remote that is not a known hosted platform (gitlab/bitbucket) is
    # assumed to be a self-hosted Gitea instance.
    if (($originUrl -notmatch "gitlab\.com") -and ($originUrl -notmatch "bitbucket\.org")) {
        return "gitea"
    }
    return "unknown"
}
function Get-GitRepoInfo {
    <#
    .SYNOPSIS
    Return "owner/repo" extracted from the origin remote URL.
    .NOTES
    Handles git@host:owner/repo.git, https://host/owner/repo.git and
    https://host/owner/repo forms.
    #>
    [CmdletBinding()]
    param()
    $originUrl = git remote get-url origin 2>$null
    if ([string]::IsNullOrEmpty($originUrl)) {
        Write-Error "Not a git repository or no origin remote"
        return $null
    }
    # SSH form uses a colon separator; HTTP(S) forms strip protocol + host.
    if ($originUrl -match "^git@") {
        $ownerRepo = ($originUrl -split ":")[1]
    } else {
        $ownerRepo = $originUrl -replace "^https?://[^/]+/", ""
    }
    # Drop an optional trailing .git.
    return ($ownerRepo -replace "\.git$", "")
}
function Get-GitRepoOwner {
    <#
    .SYNOPSIS
    Return the owner segment (text before the first '/') of the origin repo.
    #>
    [CmdletBinding()]
    param()
    $ownerRepo = Get-GitRepoInfo
    if (-not $ownerRepo) {
        return $null
    }
    return ($ownerRepo -split "/")[0]
}
function Get-GitRepoName {
    <#
    .SYNOPSIS
    Return the repository name (text after the last '/') of the origin repo.
    #>
    [CmdletBinding()]
    param()
    $ownerRepo = Get-GitRepoInfo
    if (-not $ownerRepo) {
        return $null
    }
    return ($ownerRepo -split "/")[-1]
}
# If script is run directly (not dot-sourced), output the platform
# (dot-sourcing sets InvocationName to "."; direct invocation does not).
if ($MyInvocation.InvocationName -ne ".") {
    Get-GitPlatform
}

View File

@@ -0,0 +1,149 @@
#!/bin/bash
# detect-platform.sh - Detect git platform (Gitea or GitHub) for current repo
# Usage: source detect-platform.sh && detect_platform
# or: ./detect-platform.sh (prints platform name)
# Classify the origin remote as github/gitea/unknown, export PLATFORM,
# and echo the result. Returns non-zero when unknown or no remote exists.
detect_platform() {
    local origin_url
    origin_url=$(git remote get-url origin 2>/dev/null)
    if [[ -z "$origin_url" ]]; then
        echo "error: not a git repository or no origin remote" >&2
        return 1
    fi
    case "$origin_url" in
        *github.com*)
            PLATFORM="github"
            ;;
        *gitlab.com*|*bitbucket.org*)
            # Known platforms we do not support via gh/tea.
            PLATFORM="unknown"
            ;;
        *)
            # Assume any other (self-hosted) remote is Gitea.
            PLATFORM="gitea"
            ;;
    esac
    export PLATFORM
    echo "$PLATFORM"
    [[ "$PLATFORM" != "unknown" ]]
}
# Print "owner/repo" derived from the origin remote URL.
# Handles: git@host:owner/repo.git, https://host/owner/repo.git, https://host/owner/repo
get_repo_info() {
    local url path
    url=$(git remote get-url origin 2>/dev/null)
    if [[ -z "$url" ]]; then
        echo "error: not a git repository or no origin remote" >&2
        return 1
    fi
    case "$url" in
        git@*)
            # scp-like SSH form: everything after the first colon.
            path=${url#*:}
            ;;
        *)
            # URL form: strip protocol, then the host segment.
            path=${url#*://}
            path=${path#*/}
            ;;
    esac
    # Drop an optional trailing .git.
    printf '%s\n' "${path%.git}"
}
# Print the owner portion (before the first '/') of the origin repo.
get_repo_owner() {
    local owner_repo
    owner_repo=$(get_repo_info)
    printf '%s\n' "${owner_repo%%/*}"
}
# Print the repository name (after the last '/') of the origin repo.
get_repo_name() {
    local owner_repo
    owner_repo=$(get_repo_info)
    printf '%s\n' "${owner_repo##*/}"
}
# Print the host of the origin remote. Supports http(s)://host/... and
# scp-like git@host:... forms; returns 1 for anything else (including no remote).
get_remote_host() {
    local url
    url=$(git remote get-url origin 2>/dev/null || true)
    [[ -n "$url" ]] || return 1
    case "$url" in
        http://*/*|https://*/*)
            url=${url#*://}
            printf '%s\n' "${url%%/*}"
            ;;
        git@*:*)
            url=${url#git@}
            printf '%s\n' "${url%%:*}"
            ;;
        *)
            return 1
            ;;
    esac
}
# Resolve a Gitea API token for the given host.
# Priority: Mosaic credential loader → GITEA_TOKEN env → ~/.git-credentials
#
# Arguments: $1 - remote host name (e.g. git.mosaicstack.dev)
# Outputs:   token on stdout
# Returns:   0 when a token was found, 1 otherwise
get_gitea_token() {
    local host="$1"
    local script_dir
    script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
    local cred_loader="$script_dir/../_lib/credentials.sh"
    # 1. Mosaic credential loader (host → service mapping, run in subshell to avoid polluting env)
    if [[ -f "$cred_loader" ]]; then
        local token
        token=$(
            source "$cred_loader"
            case "$host" in
                git.mosaicstack.dev) load_credentials gitea-mosaicstack 2>/dev/null ;;
                git.uscllc.com) load_credentials gitea-usc 2>/dev/null ;;
                *)
                    # Unknown host: probe each known service and keep the first
                    # one whose GITEA_URL mentions this host.
                    for svc in gitea-mosaicstack gitea-usc; do
                        load_credentials "$svc" 2>/dev/null || continue
                        [[ "${GITEA_URL:-}" == *"$host"* ]] && break
                        unset GITEA_TOKEN GITEA_URL
                    done
                    ;;
            esac
            echo "${GITEA_TOKEN:-}"
        )
        if [[ -n "$token" ]]; then
            echo "$token"
            return 0
        fi
    fi
    # 2. GITEA_TOKEN env var (may be set by caller)
    if [[ -n "${GITEA_TOKEN:-}" ]]; then
        echo "$GITEA_TOKEN"
        return 0
    fi
    # 3. ~/.git-credentials file
    # NOTE(review): grep -F matches the host anywhere in the line, so a host
    # that is a substring of another stored host could match the wrong entry.
    local creds="$HOME/.git-credentials"
    if [[ -f "$creds" ]]; then
        local token
        # Extract the password field from https://user:password@host lines.
        token=$(grep -F "$host" "$creds" 2>/dev/null | sed -n 's#https\?://[^@]*:\([^@/]*\)@.*#\1#p' | head -n 1)
        if [[ -n "$token" ]]; then
            echo "$token"
            return 0
        fi
    fi
    return 1
}
# If script is run directly (not sourced), output the platform
# (BASH_SOURCE[0] equals $0 only when executed, not when sourced).
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
    detect_platform
fi

View File

@@ -0,0 +1,111 @@
# issue-assign.ps1 - Assign issues on Gitea or GitHub
# Usage: .\issue-assign.ps1 -Issue ISSUE_NUMBER [-Assignee assignee] [-Labels labels] [-Milestone milestone]
[CmdletBinding()]
param(
    [Parameter(Mandatory=$true)]
    [Alias("i")]
    [int]$Issue,
    [Alias("a")]
    [string]$Assignee,
    [Alias("l")]
    [string]$Labels,
    [Alias("m")]
    [string]$Milestone,
    [Alias("r")]
    [switch]$RemoveAssignee,
    [Alias("h")]
    [switch]$Help
)
# Dot-source shared platform helpers (Get-GitPlatform) from this script's directory.
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$ScriptDir\detect-platform.ps1"
# Print usage and exit non-zero (also serves the -Help switch).
function Show-Usage {
    @"
Usage: issue-assign.ps1 [OPTIONS]
Assign or update an issue on the current repository (Gitea or GitHub).
Options:
  -Issue, -i NUMBER       Issue number (required)
  -Assignee, -a USER      Assign to user (use @me for self)
  -Labels, -l LABELS      Add comma-separated labels
  -Milestone, -m NAME     Set milestone
  -RemoveAssignee, -r     Remove current assignee
  -Help, -h               Show this help message
Examples:
  .\issue-assign.ps1 -i 42 -a "username"
  .\issue-assign.ps1 -i 42 -l "in-progress" -m "0.2.0"
  .\issue-assign.ps1 -i 42 -a @me
"@
    exit 1
}
if ($Help) {
    Show-Usage
}
$platform = Get-GitPlatform
switch ($platform) {
    "github" {
        if ($Assignee) {
            gh issue edit $Issue --add-assignee $Assignee
        }
        if ($RemoveAssignee) {
            # Collect every current assignee, then remove them in one edit call.
            $current = gh issue view $Issue --json assignees -q '.assignees[].login' 2>$null
            if ($current) {
                $assignees = ($current -split "`n") -join ","
                gh issue edit $Issue --remove-assignee $assignees
            }
        }
        if ($Labels) {
            gh issue edit $Issue --add-label $Labels
        }
        if ($Milestone) {
            gh issue edit $Issue --milestone $Milestone
        }
        Write-Host "Issue #$Issue updated successfully"
    }
    "gitea" {
        # NOTE(review): -RemoveAssignee is not handled in this branch — it is
        # silently ignored for Gitea.
        $needsEdit = $false
        $cmd = @("tea", "issue", "edit", $Issue)
        if ($Assignee) {
            $cmd += @("--assignees", $Assignee)
            $needsEdit = $true
        }
        if ($Labels) {
            $cmd += @("--labels", $Labels)
            $needsEdit = $true
        }
        if ($Milestone) {
            # NOTE(review): milestone is resolved by regex over `tea milestones
            # list` output; a title containing regex metacharacters, or one
            # that is a substring of another title, may mis-match — confirm.
            $milestoneList = tea milestones list 2>$null
            $milestoneId = ($milestoneList | Select-String "^\s*(\d+).*$Milestone" | ForEach-Object { $_.Matches.Groups[1].Value } | Select-Object -First 1)
            if ($milestoneId) {
                $cmd += @("--milestone", $milestoneId)
                $needsEdit = $true
            } else {
                Write-Warning "Could not find milestone '$Milestone'"
            }
        }
        if ($needsEdit) {
            # Native commands flatten the PowerShell array into separate argv entries.
            & $cmd[0] $cmd[1..($cmd.Length-1)]
            Write-Host "Issue #$Issue updated successfully"
        } else {
            Write-Host "No changes specified"
        }
    }
    default {
        Write-Error "Could not detect git platform"
        exit 1
    }
}

View File

@@ -0,0 +1,135 @@
#!/bin/bash
# issue-assign.sh - Assign issues on Gitea or GitHub
# Usage: issue-assign.sh -i ISSUE_NUMBER [-a assignee] [-l labels] [-m milestone]
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform().
source "$SCRIPT_DIR/detect-platform.sh"
# Default values
ISSUE=""
ASSIGNEE=""
LABELS=""
MILESTONE=""
REMOVE_ASSIGNEE=false
# Print usage and exit non-zero.
usage() {
    cat <<EOF
Usage: $(basename "$0") [OPTIONS]
Assign or update an issue on the current repository (Gitea or GitHub).
Options:
  -i, --issue NUMBER Issue number (required)
  -a, --assignee USER Assign to user (use @me for self)
  -l, --labels LABELS Add comma-separated labels
  -m, --milestone NAME Set milestone
  -r, --remove-assignee Remove current assignee
  -h, --help Show this help message
Examples:
  $(basename "$0") -i 42 -a "username"
  $(basename "$0") -i 42 -l "in-progress" -m "0.2.0"
  $(basename "$0") -i 42 -a @me
EOF
    exit 1
}
# Parse arguments
while [[ $# -gt 0 ]]; do
    case $1 in
        -i|--issue)
            ISSUE="$2"
            shift 2
            ;;
        -a|--assignee)
            ASSIGNEE="$2"
            shift 2
            ;;
        -l|--labels)
            LABELS="$2"
            shift 2
            ;;
        -m|--milestone)
            MILESTONE="$2"
            shift 2
            ;;
        -r|--remove-assignee)
            REMOVE_ASSIGNEE=true
            shift
            ;;
        -h|--help)
            usage
            ;;
        *)
            echo "Unknown option: $1" >&2
            usage
            ;;
    esac
done
if [[ -z "$ISSUE" ]]; then
    echo "Error: Issue number is required (-i)" >&2
    usage
fi
PLATFORM=$(detect_platform)
case "$PLATFORM" in
    github)
        if [[ -n "$ASSIGNEE" ]]; then
            gh issue edit "$ISSUE" --add-assignee "$ASSIGNEE"
        fi
        if [[ "$REMOVE_ASSIGNEE" == true ]]; then
            # Get current assignees and remove them in one edit call.
            CURRENT=$(gh issue view "$ISSUE" --json assignees -q '.assignees[].login' 2>/dev/null | tr '\n' ',')
            if [[ -n "$CURRENT" ]]; then
                gh issue edit "$ISSUE" --remove-assignee "${CURRENT%,}"
            fi
        fi
        if [[ -n "$LABELS" ]]; then
            gh issue edit "$ISSUE" --add-label "$LABELS"
        fi
        if [[ -n "$MILESTONE" ]]; then
            gh issue edit "$ISSUE" --milestone "$MILESTONE"
        fi
        echo "Issue #$ISSUE updated successfully"
        ;;
    gitea)
        # Build argv as an array instead of eval on an interpolated string so
        # values containing spaces/quotes cannot break or inject into the command.
        TEA_ARGS=(tea issue edit "$ISSUE")
        NEEDS_EDIT=false
        if [[ -n "$ASSIGNEE" ]]; then
            # tea uses --assignees flag
            TEA_ARGS+=(--assignees "$ASSIGNEE")
            NEEDS_EDIT=true
        fi
        if [[ -n "$LABELS" ]]; then
            # tea uses --labels flag (replaces existing)
            TEA_ARGS+=(--labels "$LABELS")
            NEEDS_EDIT=true
        fi
        if [[ "$REMOVE_ASSIGNEE" == true ]]; then
            # Surface the unsupported flag instead of silently ignoring -r.
            echo "Warning: --remove-assignee is not supported for Gitea; ignoring." >&2
        fi
        if [[ -n "$MILESTONE" ]]; then
            # Resolve milestone title to its numeric ID from the list output.
            MILESTONE_ID=$(tea milestones list 2>/dev/null | grep -E "^\s*[0-9]+" | grep -- "$MILESTONE" | awk '{print $1}' | head -1)
            if [[ -n "$MILESTONE_ID" ]]; then
                TEA_ARGS+=(--milestone "$MILESTONE_ID")
                NEEDS_EDIT=true
            else
                echo "Warning: Could not find milestone '$MILESTONE'" >&2
            fi
        fi
        if [[ "$NEEDS_EDIT" == true ]]; then
            "${TEA_ARGS[@]}"
            echo "Issue #$ISSUE updated successfully"
        else
            echo "No changes specified"
        fi
        ;;
    *)
        echo "Error: Could not detect git platform" >&2
        exit 1
        ;;
esac
View File

@@ -0,0 +1,64 @@
#!/bin/bash
# issue-close.sh - Close an issue on GitHub or Gitea
# Usage: issue-close.sh -i <issue_number> [-c <comment>]
set -e
# Source platform detection (provides detect_platform, which sets $PLATFORM)
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"
# Parse arguments
ISSUE_NUMBER=""
COMMENT=""
while [[ $# -gt 0 ]]; do
    case $1 in
        -i|--issue)
            ISSUE_NUMBER="$2"
            shift 2
            ;;
        -c|--comment)
            COMMENT="$2"
            shift 2
            ;;
        -h|--help)
            echo "Usage: issue-close.sh -i <issue_number> [-c <comment>]"
            echo ""
            echo "Options:"
            echo " -i, --issue Issue number (required)"
            echo " -c, --comment Comment to add before closing (optional)"
            echo " -h, --help Show this help"
            exit 0
            ;;
        *)
            # Diagnostics go to stderr so stdout stays clean for scripting.
            echo "Unknown option: $1" >&2
            exit 1
            ;;
    esac
done
if [[ -z "$ISSUE_NUMBER" ]]; then
    echo "Error: Issue number is required (-i)" >&2
    exit 1
fi
# Detect platform and close issue
detect_platform
if [[ "$PLATFORM" == "github" ]]; then
    if [[ -n "$COMMENT" ]]; then
        gh issue comment "$ISSUE_NUMBER" --body "$COMMENT"
    fi
    gh issue close "$ISSUE_NUMBER"
    echo "Closed GitHub issue #$ISSUE_NUMBER"
elif [[ "$PLATFORM" == "gitea" ]]; then
    if [[ -n "$COMMENT" ]]; then
        tea issue comment "$ISSUE_NUMBER" "$COMMENT"
    fi
    tea issue close "$ISSUE_NUMBER"
    echo "Closed Gitea issue #$ISSUE_NUMBER"
else
    echo "Error: Unknown platform" >&2
    exit 1
fi

View File

@@ -0,0 +1,61 @@
#!/bin/bash
# issue-comment.sh - Add a comment to an issue on GitHub or Gitea
# Usage: issue-comment.sh -i <issue_number> -c <comment>
set -e
# Provides detect_platform (sets $PLATFORM).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"
# Parse arguments
ISSUE_NUMBER=""
COMMENT=""
while [[ $# -gt 0 ]]; do
    case $1 in
        -i|--issue)
            ISSUE_NUMBER="$2"
            shift 2
            ;;
        -c|--comment)
            COMMENT="$2"
            shift 2
            ;;
        -h|--help)
            echo "Usage: issue-comment.sh -i <issue_number> -c <comment>"
            echo ""
            echo "Options:"
            echo " -i, --issue Issue number (required)"
            echo " -c, --comment Comment text (required)"
            echo " -h, --help Show this help"
            exit 0
            ;;
        *)
            # Diagnostics go to stderr so stdout stays clean for scripting.
            echo "Unknown option: $1" >&2
            exit 1
            ;;
    esac
done
if [[ -z "$ISSUE_NUMBER" ]]; then
    echo "Error: Issue number is required (-i)" >&2
    exit 1
fi
if [[ -z "$COMMENT" ]]; then
    echo "Error: Comment is required (-c)" >&2
    exit 1
fi
detect_platform
if [[ "$PLATFORM" == "github" ]]; then
    gh issue comment "$ISSUE_NUMBER" --body "$COMMENT"
    echo "Added comment to GitHub issue #$ISSUE_NUMBER"
elif [[ "$PLATFORM" == "gitea" ]]; then
    tea issue comment "$ISSUE_NUMBER" "$COMMENT"
    echo "Added comment to Gitea issue #$ISSUE_NUMBER"
else
    echo "Error: Unknown platform" >&2
    exit 1
fi

View File

@@ -0,0 +1,80 @@
# issue-create.ps1 - Create issues on Gitea or GitHub
# Usage: .\issue-create.ps1 -Title "Title" [-Body "Body"] [-Labels "label1,label2"] [-Milestone "milestone"]
[CmdletBinding()]
param(
    [Parameter(Mandatory=$true)]
    [Alias("t")]
    [string]$Title,
    [Alias("b")]
    [string]$Body,
    [Alias("l")]
    [string]$Labels,
    [Alias("m")]
    [string]$Milestone,
    [Alias("h")]
    [switch]$Help
)
# Dot-source shared platform helpers (Get-GitPlatform).
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$ScriptDir\detect-platform.ps1"
# Print usage and exit non-zero (also serves the -Help switch).
function Show-Usage {
    @"
Usage: issue-create.ps1 [OPTIONS]
Create an issue on the current repository (Gitea or GitHub).
Options:
  -Title, -t TITLE        Issue title (required)
  -Body, -b BODY          Issue body/description
  -Labels, -l LABELS      Comma-separated labels (e.g., "bug,feature")
  -Milestone, -m NAME     Milestone name to assign
  -Help, -h               Show this help message
Examples:
  .\issue-create.ps1 -Title "Fix login bug" -Labels "bug,priority-high"
  .\issue-create.ps1 -t "Add dark mode" -b "Implement theme switching" -m "0.2.0"
"@
    exit 1
}
if ($Help) {
    Show-Usage
}
$platform = Get-GitPlatform
switch ($platform) {
    "github" {
        # Native commands flatten the PowerShell array into separate argv entries.
        $cmd = @("gh", "issue", "create", "--title", $Title)
        if ($Body) { $cmd += @("--body", $Body) }
        if ($Labels) { $cmd += @("--label", $Labels) }
        if ($Milestone) { $cmd += @("--milestone", $Milestone) }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    "gitea" {
        $cmd = @("tea", "issue", "create", "--title", $Title)
        if ($Body) { $cmd += @("--description", $Body) }
        if ($Labels) { $cmd += @("--labels", $Labels) }
        if ($Milestone) {
            # Try to get milestone ID by name
            # NOTE(review): regex over list output; titles containing regex
            # metacharacters or substrings of other titles may mis-match.
            $milestoneList = tea milestones list 2>$null
            $milestoneId = ($milestoneList | Select-String "^\s*(\d+).*$Milestone" | ForEach-Object { $_.Matches.Groups[1].Value } | Select-Object -First 1)
            if ($milestoneId) {
                $cmd += @("--milestone", $milestoneId)
            } else {
                Write-Warning "Could not find milestone '$Milestone', creating without milestone"
            }
        }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    default {
        Write-Error "Could not detect git platform"
        exit 1
    }
}

View File

@@ -0,0 +1,139 @@
#!/bin/bash
# issue-create.sh - Create issues on Gitea or GitHub
# Usage: issue-create.sh -t "Title" [-b "Body"] [-l "label1,label2"] [-m "milestone"]
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"
# Default values
TITLE=""
BODY=""
LABELS=""
MILESTONE=""
# get_remote_host and get_gitea_token are provided by detect-platform.sh
# Fallback: create the issue via the Gitea REST API when tea is missing or
# fails. Applies title/body only; labels/milestone need an authenticated tea.
gitea_issue_create_api() {
    local host repo token url payload
    host=$(get_remote_host) || {
        echo "Error: could not determine remote host for API fallback" >&2
        return 1
    }
    repo=$(get_repo_info) || {
        echo "Error: could not determine repo owner/name for API fallback" >&2
        return 1
    }
    token=$(get_gitea_token "$host") || {
        echo "Error: Gitea token not found for API fallback (set GITEA_TOKEN or configure ~/.git-credentials)" >&2
        return 1
    }
    if [[ -n "$LABELS" || -n "$MILESTONE" ]]; then
        echo "Warning: API fallback currently applies title/body only; labels/milestone require authenticated tea setup." >&2
    fi
    # Build the JSON document with python3 so quotes/backslashes in the
    # title or body cannot corrupt the payload.
    payload=$(TITLE="$TITLE" BODY="$BODY" python3 - <<'PY'
import json
import os
payload = {"title": os.environ["TITLE"]}
body = os.environ.get("BODY", "")
if body:
    payload["body"] = body
print(json.dumps(payload))
PY
)
    url="https://${host}/api/v1/repos/${repo}/issues"
    curl -fsS -X POST \
        -H "Authorization: token ${token}" \
        -H "Content-Type: application/json" \
        -d "$payload" \
        "$url"
}
# Print usage and exit non-zero.
usage() {
    cat <<EOF
Usage: $(basename "$0") [OPTIONS]
Create an issue on the current repository (Gitea or GitHub).
Options:
  -t, --title TITLE Issue title (required)
  -b, --body BODY Issue body/description
  -l, --labels LABELS Comma-separated labels (e.g., "bug,feature")
  -m, --milestone NAME Milestone name to assign
  -h, --help Show this help message
Examples:
  $(basename "$0") -t "Fix login bug" -l "bug,priority-high"
  $(basename "$0") -t "Add dark mode" -b "Implement theme switching" -m "0.2.0"
EOF
    exit 1
}
# Parse arguments
while [[ $# -gt 0 ]]; do
    case $1 in
        -t|--title)
            TITLE="$2"
            shift 2
            ;;
        -b|--body)
            BODY="$2"
            shift 2
            ;;
        -l|--labels)
            LABELS="$2"
            shift 2
            ;;
        -m|--milestone)
            MILESTONE="$2"
            shift 2
            ;;
        -h|--help)
            usage
            ;;
        *)
            echo "Unknown option: $1" >&2
            usage
            ;;
    esac
done
if [[ -z "$TITLE" ]]; then
    echo "Error: Title is required (-t)" >&2
    usage
fi
PLATFORM=$(detect_platform)
case "$PLATFORM" in
    github)
        # Build argv as an array; the previous eval-on-string form broke on
        # (and could inject via) titles/bodies containing quotes or $(...).
        GH_ARGS=(gh issue create --title "$TITLE")
        [[ -n "$BODY" ]] && GH_ARGS+=(--body "$BODY")
        [[ -n "$LABELS" ]] && GH_ARGS+=(--label "$LABELS")
        [[ -n "$MILESTONE" ]] && GH_ARGS+=(--milestone "$MILESTONE")
        "${GH_ARGS[@]}"
        ;;
    gitea)
        if command -v tea >/dev/null 2>&1; then
            TEA_ARGS=(tea issue create --title "$TITLE")
            [[ -n "$BODY" ]] && TEA_ARGS+=(--description "$BODY")
            [[ -n "$LABELS" ]] && TEA_ARGS+=(--labels "$LABELS")
            # tea accepts milestone by name directly (verified 2026-02-05)
            [[ -n "$MILESTONE" ]] && TEA_ARGS+=(--milestone "$MILESTONE")
            if "${TEA_ARGS[@]}"; then
                exit 0
            fi
            echo "Warning: tea issue create failed, trying Gitea API fallback..." >&2
        fi
        gitea_issue_create_api
        ;;
    *)
        echo "Error: Could not detect git platform" >&2
        exit 1
        ;;
esac

View File

@@ -0,0 +1,84 @@
#!/bin/bash
# issue-edit.sh - Edit an issue on GitHub or Gitea
# Usage: issue-edit.sh -i <issue_number> [-t <title>] [-b <body>] [-l <labels>] [-m <milestone>]
set -e
# Provides detect_platform (sets $PLATFORM).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"
# Parse arguments
ISSUE_NUMBER=""
TITLE=""
BODY=""
LABELS=""
MILESTONE=""
while [[ $# -gt 0 ]]; do
    case $1 in
        -i|--issue)
            ISSUE_NUMBER="$2"
            shift 2
            ;;
        -t|--title)
            TITLE="$2"
            shift 2
            ;;
        -b|--body)
            BODY="$2"
            shift 2
            ;;
        -l|--labels)
            LABELS="$2"
            shift 2
            ;;
        -m|--milestone)
            MILESTONE="$2"
            shift 2
            ;;
        -h|--help)
            echo "Usage: issue-edit.sh -i <issue_number> [-t <title>] [-b <body>] [-l <labels>] [-m <milestone>]"
            echo ""
            echo "Options:"
            echo " -i, --issue Issue number (required)"
            echo " -t, --title New title"
            echo " -b, --body New body/description"
            echo " -l, --labels Labels (comma-separated, replaces existing)"
            echo " -m, --milestone Milestone name"
            echo " -h, --help Show this help"
            exit 0
            ;;
        *)
            # Diagnostics go to stderr so stdout stays clean for scripting.
            echo "Unknown option: $1" >&2
            exit 1
            ;;
    esac
done
if [[ -z "$ISSUE_NUMBER" ]]; then
    echo "Error: Issue number is required (-i)" >&2
    exit 1
fi
detect_platform
# Build argv as arrays; the previous eval-on-string form (unquoted eval $CMD)
# broke on — and could inject via — values containing quotes or $(...).
if [[ "$PLATFORM" == "github" ]]; then
    ARGS=(gh issue edit "$ISSUE_NUMBER")
    [[ -n "$TITLE" ]] && ARGS+=(--title "$TITLE")
    [[ -n "$BODY" ]] && ARGS+=(--body "$BODY")
    [[ -n "$LABELS" ]] && ARGS+=(--add-label "$LABELS")
    [[ -n "$MILESTONE" ]] && ARGS+=(--milestone "$MILESTONE")
    "${ARGS[@]}"
    echo "Updated GitHub issue #$ISSUE_NUMBER"
elif [[ "$PLATFORM" == "gitea" ]]; then
    ARGS=(tea issue edit "$ISSUE_NUMBER")
    [[ -n "$TITLE" ]] && ARGS+=(--title "$TITLE")
    [[ -n "$BODY" ]] && ARGS+=(--description "$BODY")
    [[ -n "$LABELS" ]] && ARGS+=(--add-labels "$LABELS")
    [[ -n "$MILESTONE" ]] && ARGS+=(--milestone "$MILESTONE")
    "${ARGS[@]}"
    echo "Updated Gitea issue #$ISSUE_NUMBER"
else
    echo "Error: Unknown platform" >&2
    exit 1
fi

View File

@@ -0,0 +1,78 @@
# issue-list.ps1 - List issues on Gitea or GitHub
# Usage: .\issue-list.ps1 [-State state] [-Label label] [-Milestone milestone] [-Assignee assignee]
[CmdletBinding()]
param(
    [Alias("s")]
    [ValidateSet("open", "closed", "all")]
    [string]$State = "open",
    [Alias("l")]
    [string]$Label,
    [Alias("m")]
    [string]$Milestone,
    [Alias("a")]
    [string]$Assignee,
    [Alias("n")]
    [int]$Limit = 100,
    [Alias("h")]
    [switch]$Help
)
# Dot-source shared platform helpers (Get-GitPlatform).
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$ScriptDir\detect-platform.ps1"
# Print usage and exit non-zero (also serves the -Help switch).
function Show-Usage {
    @"
Usage: issue-list.ps1 [OPTIONS]
List issues from the current repository (Gitea or GitHub).
Options:
  -State, -s STATE        Filter by state: open, closed, all (default: open)
  -Label, -l LABEL        Filter by label
  -Milestone, -m NAME     Filter by milestone name
  -Assignee, -a USER      Filter by assignee
  -Limit, -n N            Maximum issues to show (default: 100)
  -Help, -h               Show this help message
Examples:
  .\issue-list.ps1 # List open issues
  .\issue-list.ps1 -s all -l bug # All issues with 'bug' label
  .\issue-list.ps1 -m "0.2.0" # Issues in milestone 0.2.0
"@
    exit 1
}
if ($Help) {
    Show-Usage
}
$platform = Get-GitPlatform
switch ($platform) {
    "github" {
        # Native commands flatten the PowerShell array into separate argv entries.
        $cmd = @("gh", "issue", "list", "--state", $State, "--limit", $Limit)
        if ($Label) { $cmd += @("--label", $Label) }
        if ($Milestone) { $cmd += @("--milestone", $Milestone) }
        if ($Assignee) { $cmd += @("--assignee", $Assignee) }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    "gitea" {
        $cmd = @("tea", "issues", "list", "--state", $State, "--limit", $Limit)
        if ($Label) { $cmd += @("--labels", $Label) }
        if ($Milestone) { $cmd += @("--milestones", $Milestone) }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
        # No tea assignee flag is used here, so the filter is advisory only.
        if ($Assignee) {
            Write-Warning "Assignee filtering may require manual review for Gitea"
        }
    }
    default {
        Write-Error "Could not detect git platform"
        exit 1
    }
}

View File

@@ -0,0 +1,96 @@
#!/bin/bash
# issue-list.sh - List issues on Gitea or GitHub
# Usage: issue-list.sh [-s state] [-l label] [-m milestone] [-a assignee]
set -e
# Provides detect_platform.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"
# Default values
STATE="open"
LABEL=""
MILESTONE=""
ASSIGNEE=""
LIMIT=100
# Print usage and exit non-zero.
usage() {
    cat <<EOF
Usage: $(basename "$0") [OPTIONS]
List issues from the current repository (Gitea or GitHub).
Options:
  -s, --state STATE Filter by state: open, closed, all (default: open)
  -l, --label LABEL Filter by label
  -m, --milestone NAME Filter by milestone name
  -a, --assignee USER Filter by assignee
  -n, --limit N Maximum issues to show (default: 100)
  -h, --help Show this help message
Examples:
  $(basename "$0") # List open issues
  $(basename "$0") -s all -l bug # All issues with 'bug' label
  $(basename "$0") -m "0.2.0" # Issues in milestone 0.2.0
EOF
    exit 1
}
# Parse arguments
while [[ $# -gt 0 ]]; do
    case $1 in
        -s|--state)
            STATE="$2"
            shift 2
            ;;
        -l|--label)
            LABEL="$2"
            shift 2
            ;;
        -m|--milestone)
            MILESTONE="$2"
            shift 2
            ;;
        -a|--assignee)
            ASSIGNEE="$2"
            shift 2
            ;;
        -n|--limit)
            LIMIT="$2"
            shift 2
            ;;
        -h|--help)
            usage
            ;;
        *)
            echo "Unknown option: $1" >&2
            usage
            ;;
    esac
done
PLATFORM=$(detect_platform)
case "$PLATFORM" in
    github)
        # Build argv as an array; the previous eval-on-string form broke on
        # (and could inject via) filter values containing quotes or $(...).
        ARGS=(gh issue list --state "$STATE" --limit "$LIMIT")
        [[ -n "$LABEL" ]] && ARGS+=(--label "$LABEL")
        [[ -n "$MILESTONE" ]] && ARGS+=(--milestone "$MILESTONE")
        [[ -n "$ASSIGNEE" ]] && ARGS+=(--assignee "$ASSIGNEE")
        "${ARGS[@]}"
        ;;
    gitea)
        ARGS=(tea issues list --state "$STATE" --limit "$LIMIT")
        [[ -n "$LABEL" ]] && ARGS+=(--labels "$LABEL")
        [[ -n "$MILESTONE" ]] && ARGS+=(--milestones "$MILESTONE")
        # Note: tea may not support assignee filter directly
        "${ARGS[@]}"
        if [[ -n "$ASSIGNEE" ]]; then
            echo "Note: Assignee filtering may require manual review for Gitea" >&2
        fi
        ;;
    *)
        echo "Error: Could not detect git platform" >&2
        exit 1
        ;;
esac

View File

@@ -0,0 +1,62 @@
#!/bin/bash
# issue-reopen.sh - Reopen a closed issue on GitHub or Gitea
# Usage: issue-reopen.sh -i <issue_number> [-c <comment>]
set -e
# Provides detect_platform (sets $PLATFORM).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"
# Parse arguments
ISSUE_NUMBER=""
COMMENT=""
while [[ $# -gt 0 ]]; do
    case $1 in
        -i|--issue)
            ISSUE_NUMBER="$2"
            shift 2
            ;;
        -c|--comment)
            COMMENT="$2"
            shift 2
            ;;
        -h|--help)
            echo "Usage: issue-reopen.sh -i <issue_number> [-c <comment>]"
            echo ""
            echo "Options:"
            echo " -i, --issue Issue number (required)"
            echo " -c, --comment Comment to add when reopening (optional)"
            echo " -h, --help Show this help"
            exit 0
            ;;
        *)
            # Diagnostics go to stderr so stdout stays clean for scripting.
            echo "Unknown option: $1" >&2
            exit 1
            ;;
    esac
done
if [[ -z "$ISSUE_NUMBER" ]]; then
    echo "Error: Issue number is required (-i)" >&2
    exit 1
fi
detect_platform
if [[ "$PLATFORM" == "github" ]]; then
    if [[ -n "$COMMENT" ]]; then
        gh issue comment "$ISSUE_NUMBER" --body "$COMMENT"
    fi
    gh issue reopen "$ISSUE_NUMBER"
    echo "Reopened GitHub issue #$ISSUE_NUMBER"
elif [[ "$PLATFORM" == "gitea" ]]; then
    if [[ -n "$COMMENT" ]]; then
        tea issue comment "$ISSUE_NUMBER" "$COMMENT"
    fi
    tea issue reopen "$ISSUE_NUMBER"
    echo "Reopened Gitea issue #$ISSUE_NUMBER"
else
    echo "Error: Unknown platform" >&2
    exit 1
fi

View File

@@ -0,0 +1,79 @@
#!/bin/bash
# issue-view.sh - View issue details on GitHub or Gitea
# Usage: issue-view.sh -i <issue_number>
set -e
# Provides detect_platform, get_remote_host, get_repo_info, get_gitea_token.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"
# Parse arguments
ISSUE_NUMBER=""
# get_remote_host and get_gitea_token are provided by detect-platform.sh
# Fallback: fetch the issue via the Gitea REST API when tea is missing/broken.
# Pretty-prints with python3 when available, raw JSON otherwise.
gitea_issue_view_api() {
    local host repo token url
    host=$(get_remote_host) || {
        echo "Error: could not determine remote host for API fallback" >&2
        return 1
    }
    repo=$(get_repo_info) || {
        echo "Error: could not determine repo owner/name for API fallback" >&2
        return 1
    }
    token=$(get_gitea_token "$host") || {
        echo "Error: Gitea token not found for API fallback (set GITEA_TOKEN or configure ~/.git-credentials)" >&2
        return 1
    }
    url="https://${host}/api/v1/repos/${repo}/issues/${ISSUE_NUMBER}"
    if command -v python3 >/dev/null 2>&1; then
        curl -fsS -H "Authorization: token ${token}" "$url" | python3 -m json.tool
    else
        curl -fsS -H "Authorization: token ${token}" "$url"
    fi
}
while [[ $# -gt 0 ]]; do
    case $1 in
        -i|--issue)
            ISSUE_NUMBER="$2"
            shift 2
            ;;
        -h|--help)
            echo "Usage: issue-view.sh -i <issue_number>"
            echo ""
            echo "Options:"
            echo " -i, --issue Issue number (required)"
            echo " -h, --help Show this help"
            exit 0
            ;;
        *)
            # Diagnostics go to stderr so stdout stays clean for scripting.
            echo "Unknown option: $1" >&2
            exit 1
            ;;
    esac
done
if [[ -z "$ISSUE_NUMBER" ]]; then
    echo "Error: Issue number is required (-i)" >&2
    exit 1
fi
detect_platform
if [[ "$PLATFORM" == "github" ]]; then
    gh issue view "$ISSUE_NUMBER"
elif [[ "$PLATFORM" == "gitea" ]]; then
    if command -v tea >/dev/null 2>&1; then
        if tea issue "$ISSUE_NUMBER"; then
            exit 0
        fi
        echo "Warning: tea issue view failed, trying Gitea API fallback..." >&2
    fi
    gitea_issue_view_api
else
    echo "Error: Unknown platform" >&2
    exit 1
fi

View File

@@ -0,0 +1,50 @@
#!/bin/bash
# milestone-close.sh - Close a milestone on GitHub or Gitea
# Usage: milestone-close.sh -t <title>
set -e
# Provides detect_platform (sets $PLATFORM).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"
# Parse arguments
TITLE=""
while [[ $# -gt 0 ]]; do
    case $1 in
        -t|--title)
            TITLE="$2"
            shift 2
            ;;
        -h|--help)
            echo "Usage: milestone-close.sh -t <title>"
            echo ""
            echo "Options:"
            echo " -t, --title Milestone title (required)"
            echo " -h, --help Show this help"
            exit 0
            ;;
        *)
            echo "Unknown option: $1" >&2
            exit 1
            ;;
    esac
done
if [[ -z "$TITLE" ]]; then
    echo "Error: Milestone title is required (-t)" >&2
    exit 1
fi
detect_platform
if [[ "$PLATFORM" == "github" ]]; then
    # Resolve the milestone number first so a missing title fails with a clear
    # error instead of issuing a PATCH to an empty milestone URL.
    # NOTE(review): $TITLE is interpolated into the jq program; titles that
    # contain double quotes would break the filter — confirm titles are
    # simple version strings.
    MILESTONE_NUMBER=$(gh api "/repos/{owner}/{repo}/milestones" --jq ".[] | select(.title==\"$TITLE\") | .number")
    if [[ -z "$MILESTONE_NUMBER" ]]; then
        echo "Error: Milestone '$TITLE' not found" >&2
        exit 1
    fi
    gh api -X PATCH "/repos/{owner}/{repo}/milestones/$MILESTONE_NUMBER" -f state=closed
    echo "Closed GitHub milestone: $TITLE"
elif [[ "$PLATFORM" == "gitea" ]]; then
    tea milestone close "$TITLE"
    echo "Closed Gitea milestone: $TITLE"
else
    echo "Error: Unknown platform" >&2
    exit 1
fi

View File

@@ -0,0 +1,98 @@
# milestone-create.ps1 - Create milestones on Gitea or GitHub
# Usage: .\milestone-create.ps1 -Title "Title" [-Description "Description"] [-Due "YYYY-MM-DD"]
[CmdletBinding()]
param(
    [Alias("t")]
    [string]$Title,
    [Alias("d")]
    [string]$Description,
    [string]$Due,
    [switch]$List,
    [Alias("h")]
    [switch]$Help
)
# Dot-source shared platform helpers (Get-GitPlatform).
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$ScriptDir\detect-platform.ps1"
# Print usage and exit non-zero (also serves the -Help switch).
function Show-Usage {
    @"
Usage: milestone-create.ps1 [OPTIONS]
Create or list milestones on the current repository (Gitea or GitHub).
Versioning Convention:
  - Features get dedicated milestones
  - Pre-MVP milestones MUST use 0.0.x and MUST start at 0.0.1
  - 0.1.0 is reserved for MVP release
  - After MVP, continue semantic progression (0.1.x, 0.2.x, ...)
Options:
  -Title, -t TITLE        Milestone title/version (e.g., "0.0.1")
  -Description, -d DESC   Milestone description
  -Due DATE               Due date (YYYY-MM-DD format)
  -List                   List existing milestones
  -Help, -h               Show this help message
Examples:
  .\milestone-create.ps1 -List
  .\milestone-create.ps1 -t "0.0.1" -d "Pre-MVP Foundation Sprint"
  .\milestone-create.ps1 -t "0.1.0" -d "MVP Release" -Due "2025-03-01"
"@
    exit 1
}
if ($Help) {
    Show-Usage
}
$platform = Get-GitPlatform
if ($List) {
    switch ($platform) {
        "github" {
            # gh has no milestone subcommand; go through the REST API.
            gh api repos/:owner/:repo/milestones --jq '.[] | "\(.number)`t\(.title)`t\(.state)`t\(.open_issues)/\(.closed_issues) issues"'
        }
        "gitea" {
            tea milestones list
        }
        default {
            Write-Error "Could not detect git platform"
            exit 1
        }
    }
    exit 0
}
if (-not $Title) {
    Write-Error "Title is required (-t) for creating milestones"
    Show-Usage
}
switch ($platform) {
    "github" {
        # Build the payload as a hashtable and let ConvertTo-Json do the
        # escaping, so quotes in the description cannot corrupt the document.
        $payload = @{ title = $Title }
        if ($Description) { $payload.description = $Description }
        if ($Due) { $payload.due_on = "${Due}T00:00:00Z" }
        $json = $payload | ConvertTo-Json -Compress
        $json | gh api repos/:owner/:repo/milestones --method POST --input -
        Write-Host "Milestone '$Title' created successfully"
    }
    "gitea" {
        $cmd = @("tea", "milestones", "create", "--title", $Title)
        if ($Description) { $cmd += @("--description", $Description) }
        if ($Due) { $cmd += @("--deadline", $Due) }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
        Write-Host "Milestone '$Title' created successfully"
    }
    default {
        Write-Error "Could not detect git platform"
        exit 1
    }
}

View File

@@ -0,0 +1,117 @@
#!/bin/bash
# milestone-create.sh - Create milestones on Gitea or GitHub
# Usage: milestone-create.sh -t "Title" [-d "Description"] [--due "YYYY-MM-DD"]
set -e
# Provides detect_platform.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"
# Default values
TITLE=""
DESCRIPTION=""
DUE_DATE=""
LIST_ONLY=false
# Print usage and exit non-zero.
usage() {
    cat <<EOF
Usage: $(basename "$0") [OPTIONS]
Create or list milestones on the current repository (Gitea or GitHub).
Versioning Convention:
  - Features get dedicated milestones
  - Pre-MVP milestones MUST use 0.0.x and MUST start at 0.0.1
  - 0.1.0 is reserved for MVP release
  - After MVP, continue semantic progression (0.1.x, 0.2.x, ...)
Options:
  -t, --title TITLE Milestone title/version (e.g., "0.0.1")
  -d, --desc DESCRIPTION Milestone description
  --due DATE Due date (YYYY-MM-DD format)
  --list List existing milestones
  -h, --help Show this help message
Examples:
  $(basename "$0") --list
  $(basename "$0") -t "0.0.1" -d "Pre-MVP Foundation Sprint"
  $(basename "$0") -t "0.1.0" -d "MVP Release" --due "2025-03-01"
EOF
    exit 1
}
# Parse arguments
while [[ $# -gt 0 ]]; do
    case $1 in
        -t|--title)
            TITLE="$2"
            shift 2
            ;;
        -d|--desc)
            DESCRIPTION="$2"
            shift 2
            ;;
        --due)
            DUE_DATE="$2"
            shift 2
            ;;
        --list)
            LIST_ONLY=true
            shift
            ;;
        -h|--help)
            usage
            ;;
        *)
            echo "Unknown option: $1" >&2
            usage
            ;;
    esac
done
PLATFORM=$(detect_platform)
if [[ "$LIST_ONLY" == true ]]; then
    case "$PLATFORM" in
        github)
            gh api repos/:owner/:repo/milestones --jq '.[] | "\(.number)\t\(.title)\t\(.state)\t\(.open_issues)/\(.closed_issues) issues"'
            ;;
        gitea)
            tea milestones list
            ;;
        *)
            echo "Error: Could not detect git platform" >&2
            exit 1
            ;;
    esac
    exit 0
fi
if [[ -z "$TITLE" ]]; then
    echo "Error: Title is required (-t) for creating milestones" >&2
    usage
fi
case "$PLATFORM" in
    github)
        # GitHub uses the API for milestone creation. Build the JSON with
        # python3 so quotes/backslashes in the title or description cannot
        # corrupt the document (previously it was hand-concatenated; this
        # also matches how issue-create.sh builds its payload).
        JSON_PAYLOAD=$(TITLE="$TITLE" DESCRIPTION="$DESCRIPTION" DUE_DATE="$DUE_DATE" python3 - <<'PY'
import json
import os
payload = {"title": os.environ["TITLE"]}
if os.environ.get("DESCRIPTION"):
    payload["description"] = os.environ["DESCRIPTION"]
if os.environ.get("DUE_DATE"):
    payload["due_on"] = os.environ["DUE_DATE"] + "T00:00:00Z"
print(json.dumps(payload))
PY
)
        gh api repos/:owner/:repo/milestones --method POST --input - <<< "$JSON_PAYLOAD"
        echo "Milestone '$TITLE' created successfully"
        ;;
    gitea)
        # Array argv instead of eval on an interpolated string so titles and
        # descriptions containing quotes cannot break or inject into tea.
        ARGS=(tea milestones create --title "$TITLE")
        [[ -n "$DESCRIPTION" ]] && ARGS+=(--description "$DESCRIPTION")
        [[ -n "$DUE_DATE" ]] && ARGS+=(--deadline "$DUE_DATE")
        "${ARGS[@]}"
        echo "Milestone '$TITLE' created successfully"
        ;;
    *)
        echo "Error: Could not detect git platform" >&2
        exit 1
        ;;
esac

View File

@@ -0,0 +1,43 @@
#!/bin/bash
# milestone-list.sh - List milestones on GitHub or Gitea
# Usage: milestone-list.sh [-s <state>]
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform (sets $PLATFORM).
source "$SCRIPT_DIR/detect-platform.sh"

# ── Argument parsing ──
STATE="open"
while [[ $# -gt 0 ]]; do
  case $1 in
    -s|--state)
      STATE="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: milestone-list.sh [-s <state>]"
      echo ""
      echo "Options:"
      echo "  -s, --state   Filter by state: open, closed, all (default: open)"
      echo "  -h, --help    Show this help"
      exit 0
      ;;
    *)
      # Diagnostics belong on stderr so callers capturing stdout see a clean list.
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

# Discard detect_platform's stdout (it echoes the platform name, which would
# otherwise pollute the listing) — consistent with pr-diff.sh / pr-metadata.sh.
detect_platform > /dev/null

if [[ "$PLATFORM" == "github" ]]; then
  gh api "/repos/{owner}/{repo}/milestones?state=$STATE" --jq '.[] | "\(.title) (\(.state)) - \(.open_issues) open, \(.closed_issues) closed"'
elif [[ "$PLATFORM" == "gitea" ]]; then
  # NOTE(review): -s/--state is not forwarded to tea, and sibling scripts use
  # "tea milestones" (plural) — confirm the installed tea's subcommand name
  # and whether it supports a --state flag.
  tea milestone list
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

View File

@@ -0,0 +1,239 @@
#!/bin/bash
# pr-ci-wait.sh - Wait for PR CI status to reach terminal state (GitHub/Gitea)
# Usage: pr-ci-wait.sh -n <pr_number> [-t timeout_sec] [-i interval_sec]
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"
PR_NUMBER=""
TIMEOUT_SEC=1800
INTERVAL_SEC=15
# Print CLI usage for pr-ci-wait.sh on stdout.
usage() {
  local self
  self=$(basename "$0")
  cat <<HELP
Usage: ${self} -n <pr_number> [-t timeout_sec] [-i interval_sec]
Options:
  -n, --number NUMBER     PR number (required)
  -t, --timeout SECONDS   Max wait time in seconds (default: 1800)
  -i, --interval SECONDS  Poll interval in seconds (default: 15)
  -h, --help              Show this help
Examples:
  ${self} -n 643
  ${self} -n 643 -t 900 -i 10
HELP
}
# get_remote_host and get_gitea_token are provided by detect-platform.sh
extract_state_from_status_json() {
python3 - <<'PY'
import json
import sys
try:
payload = json.load(sys.stdin)
except Exception:
print("unknown")
raise SystemExit(0)
state = (payload.get("state") or "").lower()
if state in {"success", "pending", "failure", "error"}:
print(state)
raise SystemExit(0)
statuses = payload.get("statuses") or []
values = []
for item in statuses:
if not isinstance(item, dict):
continue
value = (item.get("status") or item.get("state") or "").lower()
if value:
values.append(value)
if any(v in {"failure", "error"} for v in values):
print("failure")
elif values and all(v == "success" for v in values):
print("success")
elif any(v in {"pending", "running", "queued", "waiting"} for v in values):
print("pending")
else:
print("unknown")
PY
}
print_status_summary() {
python3 - <<'PY'
import json
import sys
try:
payload = json.load(sys.stdin)
except Exception:
print("[pr-ci-wait] status payload unavailable")
raise SystemExit(0)
statuses = payload.get("statuses") or []
if not statuses:
print("[pr-ci-wait] no status contexts reported yet")
raise SystemExit(0)
for item in statuses:
if not isinstance(item, dict):
continue
name = item.get("context") or item.get("name") or "unknown-context"
state = item.get("status") or item.get("state") or "unknown-state"
target = item.get("target_url") or item.get("url") or ""
if target:
print(f"[pr-ci-wait] {name}: {state} ({target})")
else:
print(f"[pr-ci-wait] {name}: {state}")
PY
}
# Resolve the head commit SHA of $PR_NUMBER via the gh CLI (GitHub only).
github_get_pr_head_sha() {
  gh pr view "$PR_NUMBER" --json headRefOid --jq '.headRefOid'
}
# Fetch the combined commit-status JSON for one commit from the GitHub API.
# $1=owner $2=repo $3=commit sha
github_get_commit_status_json() {
  local owner="$1"
  local repo="$2"
  local sha="$3"
  gh api "repos/${owner}/${repo}/commits/${sha}/status"
}
# Resolve the head commit SHA of a Gitea PR via the REST API.
# $1=host $2=owner/repo $3=api token; reads $PR_NUMBER from script scope.
# Prints the SHA, or an empty string when the field is missing.
gitea_get_pr_head_sha() {
  local host="$1"
  local repo="$2"
  local token="$3"
  local url="https://${host}/api/v1/repos/${repo}/pulls/${PR_NUMBER}"
  curl -fsS -H "Authorization: token ${token}" "$url" | python3 -c '
import json, sys
data = json.load(sys.stdin)
print((data.get("head") or {}).get("sha", ""))
'
}
# Fetch the combined commit-status JSON for a commit from the Gitea API.
# $1=host $2=owner/repo $3=api token $4=commit sha
gitea_get_commit_status_json() {
  local host="$1"
  local repo="$2"
  local token="$3"
  local sha="$4"
  local url="https://${host}/api/v1/repos/${repo}/commits/${sha}/status"
  curl -fsS -H "Authorization: token ${token}" "$url"
}
# ── Argument parsing ─────────────────────────────────────────────
while [[ $# -gt 0 ]]; do
  case "$1" in
    -n|--number)
      PR_NUMBER="$2"
      shift 2
      ;;
    -t|--timeout)
      TIMEOUT_SEC="$2"
      shift 2
      ;;
    -i|--interval)
      INTERVAL_SEC="$2"
      shift 2
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage >&2
      exit 1
      ;;
  esac
done

# Validate inputs before doing any network work.
if [[ -z "$PR_NUMBER" ]]; then
  echo "Error: PR number is required (-n)." >&2
  usage >&2
  exit 1
fi
if ! [[ "$TIMEOUT_SEC" =~ ^[0-9]+$ ]] || ! [[ "$INTERVAL_SEC" =~ ^[0-9]+$ ]]; then
  echo "Error: timeout and interval must be integer seconds." >&2
  exit 1
fi

# detect_platform sets $PLATFORM; its stdout echo is discarded.
detect_platform > /dev/null
OWNER=$(get_repo_owner)
REPO=$(get_repo_name)

# Absolute wall-clock deadline for the polling loop below.
START_TS=$(date +%s)
DEADLINE_TS=$((START_TS + TIMEOUT_SEC))

# Per-platform setup: resolve the PR's current head SHA (statuses are
# attached to commits, not to the PR itself).
if [[ "$PLATFORM" == "github" ]]; then
  if ! command -v gh >/dev/null 2>&1; then
    echo "Error: gh CLI is required for GitHub CI status polling." >&2
    exit 1
  fi
  HEAD_SHA=$(github_get_pr_head_sha)
  if [[ -z "$HEAD_SHA" ]]; then
    echo "Error: Could not resolve head SHA for PR #$PR_NUMBER." >&2
    exit 1
  fi
  echo "[pr-ci-wait] Platform=github PR=#${PR_NUMBER} head_sha=${HEAD_SHA}"
elif [[ "$PLATFORM" == "gitea" ]]; then
  HOST=$(get_remote_host) || {
    echo "Error: Could not determine remote host." >&2
    exit 1
  }
  TOKEN=$(get_gitea_token "$HOST") || {
    echo "Error: Gitea token not found. Set GITEA_TOKEN or configure ~/.git-credentials." >&2
    exit 1
  }
  HEAD_SHA=$(gitea_get_pr_head_sha "$HOST" "$OWNER/$REPO" "$TOKEN")
  if [[ -z "$HEAD_SHA" ]]; then
    echo "Error: Could not resolve head SHA for PR #$PR_NUMBER." >&2
    exit 1
  fi
  echo "[pr-ci-wait] Platform=gitea host=${HOST} PR=#${PR_NUMBER} head_sha=${HEAD_SHA}"
else
  echo "Error: Unsupported platform '${PLATFORM}'." >&2
  exit 1
fi

# Poll until the combined status reaches a terminal state or we hit the
# deadline. Exit codes: 0 = success, 1 = CI failure, 124 = timeout.
while true; do
  NOW_TS=$(date +%s)
  if (( NOW_TS > DEADLINE_TS )); then
    echo "Error: Timed out waiting for CI status on PR #$PR_NUMBER after ${TIMEOUT_SEC}s." >&2
    exit 124
  fi
  if [[ "$PLATFORM" == "github" ]]; then
    STATUS_JSON=$(github_get_commit_status_json "$OWNER" "$REPO" "$HEAD_SHA")
  else
    STATUS_JSON=$(gitea_get_commit_status_json "$HOST" "$OWNER/$REPO" "$TOKEN" "$HEAD_SHA")
  fi
  STATE=$(printf '%s' "$STATUS_JSON" | extract_state_from_status_json)
  echo "[pr-ci-wait] state=${STATE} pr=#${PR_NUMBER} sha=${HEAD_SHA}"
  case "$STATE" in
    success)
      printf '%s' "$STATUS_JSON" | print_status_summary
      echo "[pr-ci-wait] CI is green for PR #$PR_NUMBER."
      exit 0
      ;;
    failure|error)
      printf '%s' "$STATUS_JSON" | print_status_summary
      echo "Error: CI reported ${STATE} for PR #$PR_NUMBER." >&2
      exit 1
      ;;
    pending|unknown)
      sleep "$INTERVAL_SEC"
      ;;
    *)
      # Defensive: extract_state_from_status_json only emits known words,
      # but keep polling rather than abort if that ever changes.
      echo "[pr-ci-wait] Unrecognized state '${STATE}', continuing to poll..."
      sleep "$INTERVAL_SEC"
      ;;
  esac
done

View File

@@ -0,0 +1,62 @@
#!/bin/bash
# pr-close.sh - Close a pull request without merging on GitHub or Gitea
# Usage: pr-close.sh -n <pr_number> [-c <comment>]
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"

# ── Argument parsing ──
PR_NUMBER=""
COMMENT=""
while [[ $# -gt 0 ]]; do
  case $1 in
    -n|--number)
      PR_NUMBER="$2"
      shift 2
      ;;
    -c|--comment)
      COMMENT="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: pr-close.sh -n <pr_number> [-c <comment>]"
      echo ""
      echo "Options:"
      echo "  -n, --number   PR number (required)"
      echo "  -c, --comment  Comment before closing (optional)"
      echo "  -h, --help     Show this help"
      exit 0
      ;;
    *)
      # Diagnostics go to stderr so callers capturing stdout stay clean.
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$PR_NUMBER" ]]; then
  echo "Error: PR number is required (-n)" >&2
  exit 1
fi

# detect_platform sets $PLATFORM; discard its stdout echo so it doesn't mix
# with this script's output (consistent with pr-diff.sh / pr-metadata.sh).
detect_platform > /dev/null

if [[ "$PLATFORM" == "github" ]]; then
  if [[ -n "$COMMENT" ]]; then
    gh pr comment "$PR_NUMBER" --body "$COMMENT"
  fi
  gh pr close "$PR_NUMBER"
  echo "Closed GitHub PR #$PR_NUMBER"
elif [[ "$PLATFORM" == "gitea" ]]; then
  if [[ -n "$COMMENT" ]]; then
    tea pr comment "$PR_NUMBER" "$COMMENT"
  fi
  tea pr close "$PR_NUMBER"
  echo "Closed Gitea PR #$PR_NUMBER"
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

View File

@@ -0,0 +1,130 @@
# pr-create.ps1 - Create pull requests on Gitea or GitHub
# Usage: .\pr-create.ps1 -Title "Title" [-Body "Body"] [-Base base] [-Head head] [-Labels "labels"] [-Milestone "milestone"]
[CmdletBinding()]
param(
    [Alias("t")]
    [string]$Title,
    [Alias("b")]
    [string]$Body,
    # FIX: no single-letter alias here — PowerShell parameter aliases are
    # case-insensitive, so [Alias("B")] collides with -Body's "b" and the
    # script fails to load with a duplicate-alias error. Use the full -Base.
    [string]$Base,
    [Alias("H")]
    [string]$Head,
    [Alias("l")]
    [string]$Labels,
    [Alias("m")]
    [string]$Milestone,
    [Alias("i")]
    [int]$Issue,
    [Alias("d")]
    [switch]$Draft,
    [switch]$Help
)
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$ScriptDir\detect-platform.ps1"

# Print CLI help and exit non-zero.
function Show-Usage {
    @"
Usage: pr-create.ps1 [OPTIONS]
Create a pull request on the current repository (Gitea or GitHub).
Options:
  -Title, -t TITLE     PR title (required, or use -Issue)
  -Body, -b BODY       PR description/body
  -Base BRANCH         Base branch to merge into (default: main/master)
  -Head, -H BRANCH     Head branch with changes (default: current branch)
  -Labels, -l LABELS   Comma-separated labels
  -Milestone, -m NAME  Milestone name
  -Issue, -i NUMBER    Link to issue (auto-generates title if not provided)
  -Draft, -d           Create as draft PR
  -Help                Show this help message
Examples:
  .\pr-create.ps1 -Title "Add login feature" -Body "Implements user authentication"
  .\pr-create.ps1 -t "Fix bug" -Base main -H feature/fix-123
  .\pr-create.ps1 -i 42 -b "Implements the feature described in #42"
  .\pr-create.ps1 -t "WIP: New feature" -Draft
"@
    exit 1
}

if ($Help) {
    Show-Usage
}

# An issue number alone is enough: synthesize a title from it.
if (-not $Title -and $Issue) {
    $Title = "Fixes #$Issue"
}
if (-not $Title) {
    Write-Error "Title is required (-t) or provide an issue (-i)"
    Show-Usage
}

# Default head branch to the currently checked-out branch.
if (-not $Head) {
    $Head = git branch --show-current
}

# Append a closing keyword so the platform links/auto-closes the issue.
if ($Issue) {
    if ($Body) {
        $Body = "$Body`n`nFixes #$Issue"
    } else {
        $Body = "Fixes #$Issue"
    }
}

$platform = Get-GitPlatform
switch ($platform) {
    "github" {
        $cmd = @("gh", "pr", "create", "--title", $Title)
        if ($Body) { $cmd += @("--body", $Body) }
        if ($Base) { $cmd += @("--base", $Base) }
        if ($Head) { $cmd += @("--head", $Head) }
        if ($Labels) { $cmd += @("--label", $Labels) }
        if ($Milestone) { $cmd += @("--milestone", $Milestone) }
        if ($Draft) { $cmd += "--draft" }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    "gitea" {
        $cmd = @("tea", "pr", "create", "--title", $Title)
        if ($Body) { $cmd += @("--description", $Body) }
        if ($Base) { $cmd += @("--base", $Base) }
        if ($Head) { $cmd += @("--head", $Head) }
        if ($Labels) { $cmd += @("--labels", $Labels) }
        if ($Milestone) {
            # tea wants a milestone ID, not a name — resolve it from the list.
            $milestoneList = tea milestones list 2>$null
            $milestoneId = ($milestoneList | Select-String "^\s*(\d+).*$Milestone" | ForEach-Object { $_.Matches.Groups[1].Value } | Select-Object -First 1)
            if ($milestoneId) {
                $cmd += @("--milestone", $milestoneId)
            } else {
                Write-Warning "Could not find milestone '$Milestone', creating without milestone"
            }
        }
        if ($Draft) {
            Write-Warning "Draft PR may not be supported by your tea version"
        }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    default {
        Write-Error "Could not detect git platform"
        exit 1
    }
}

View File

@@ -0,0 +1,164 @@
#!/bin/bash
# pr-create.sh - Create pull requests on Gitea or GitHub
# Usage: pr-create.sh -t "Title" [-b "Body"] [-B base] [-H head] [-l "labels"] [-m "milestone"]
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform and related repo helpers.
source "$SCRIPT_DIR/detect-platform.sh"

# Default values (populated by the argument parser that follows).
TITLE=""
BODY=""
BASE_BRANCH=""
HEAD_BRANCH=""
LABELS=""
MILESTONE=""
DRAFT=false
ISSUE=""

# Print usage and exit non-zero (also used by -h/--help).
usage() {
  cat <<EOF
Usage: $(basename "$0") [OPTIONS]
Create a pull request on the current repository (Gitea or GitHub).
Options:
  -t, --title TITLE      PR title (required, or use --issue)
  -b, --body BODY        PR description/body
  -B, --base BRANCH      Base branch to merge into (default: main/master)
  -H, --head BRANCH      Head branch with changes (default: current branch)
  -l, --labels LABELS    Comma-separated labels
  -m, --milestone NAME   Milestone name
  -i, --issue NUMBER     Link to issue (auto-generates title if not provided)
  -d, --draft            Create as draft PR
  -h, --help             Show this help message
Examples:
  $(basename "$0") -t "Add login feature" -b "Implements user authentication"
  $(basename "$0") -t "Fix bug" -B main -H feature/fix-123
  $(basename "$0") -i 42 -b "Implements the feature described in #42"
  $(basename "$0") -t "WIP: New feature" --draft
EOF
  exit 1
}
# ── Argument parsing ──
while [[ $# -gt 0 ]]; do
  case $1 in
    -t|--title)
      TITLE="$2"
      shift 2
      ;;
    -b|--body)
      BODY="$2"
      shift 2
      ;;
    -B|--base)
      BASE_BRANCH="$2"
      shift 2
      ;;
    -H|--head)
      HEAD_BRANCH="$2"
      shift 2
      ;;
    -l|--labels)
      LABELS="$2"
      shift 2
      ;;
    -m|--milestone)
      MILESTONE="$2"
      shift 2
      ;;
    -i|--issue)
      ISSUE="$2"
      shift 2
      ;;
    -d|--draft)
      DRAFT=true
      shift
      ;;
    -h|--help)
      usage
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage
      ;;
  esac
done

# An issue number alone is enough: synthesize a title from it.
if [[ -z "$TITLE" ]] && [[ -n "$ISSUE" ]]; then
  TITLE="Fixes #$ISSUE"
fi
if [[ -z "$TITLE" ]]; then
  echo "Error: Title is required (-t) or provide an issue (-i)" >&2
  usage
fi

# Default head branch to the currently checked-out branch.
if [[ -z "$HEAD_BRANCH" ]]; then
  HEAD_BRANCH=$(git branch --show-current)
fi

# Append a closing keyword so the platform links/auto-closes the issue.
if [[ -n "$ISSUE" ]]; then
  if [[ -n "$BODY" ]]; then
    BODY="$BODY

Fixes #$ISSUE"
  else
    BODY="Fixes #$ISSUE"
  fi
fi

PLATFORM=$(detect_platform)
case "$PLATFORM" in
  github)
    # Build argv as an array instead of eval'ing a string: titles/bodies
    # containing quotes, backticks or $(...) are passed verbatim instead of
    # being re-parsed (and potentially executed) by the shell.
    CMD=(gh pr create --title "$TITLE")
    [[ -n "$BODY" ]] && CMD+=(--body "$BODY")
    [[ -n "$BASE_BRANCH" ]] && CMD+=(--base "$BASE_BRANCH")
    [[ -n "$HEAD_BRANCH" ]] && CMD+=(--head "$HEAD_BRANCH")
    [[ -n "$LABELS" ]] && CMD+=(--label "$LABELS")
    [[ -n "$MILESTONE" ]] && CMD+=(--milestone "$MILESTONE")
    [[ "$DRAFT" == true ]] && CMD+=(--draft)
    "${CMD[@]}"
    ;;
  gitea)
    # Same array-based invocation for tea.
    CMD=(tea pr create --title "$TITLE")
    [[ -n "$BODY" ]] && CMD+=(--description "$BODY")
    [[ -n "$BASE_BRANCH" ]] && CMD+=(--base "$BASE_BRANCH")
    [[ -n "$HEAD_BRANCH" ]] && CMD+=(--head "$HEAD_BRANCH")
    [[ -n "$LABELS" ]] && CMD+=(--labels "$LABELS")
    # tea wants a milestone ID, not a name — resolve it from the list.
    if [[ -n "$MILESTONE" ]]; then
      MILESTONE_ID=$(tea milestones list 2>/dev/null | grep -E "^\s*[0-9]+" | grep "$MILESTONE" | awk '{print $1}' | head -1)
      if [[ -n "$MILESTONE_ID" ]]; then
        CMD+=(--milestone "$MILESTONE_ID")
      else
        echo "Warning: Could not find milestone '$MILESTONE', creating without milestone" >&2
      fi
    fi
    # Note: tea may not support --draft flag in all versions
    if [[ "$DRAFT" == true ]]; then
      echo "Note: Draft PR may not be supported by your tea version" >&2
    fi
    "${CMD[@]}"
    ;;
  *)
    echo "Error: Could not detect git platform" >&2
    exit 1
    ;;
esac

View File

@@ -0,0 +1,87 @@
#!/bin/bash
# pr-diff.sh - Get the diff for a pull request on GitHub or Gitea
# Usage: pr-diff.sh -n <pr_number> [-o <output_file>]
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"

# ── Argument parsing ──
PR_NUMBER=""
OUTPUT_FILE=""
while [[ $# -gt 0 ]]; do
  case $1 in
    -n|--number)
      PR_NUMBER="$2"
      shift 2
      ;;
    -o|--output)
      OUTPUT_FILE="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: pr-diff.sh -n <pr_number> [-o <output_file>]"
      echo ""
      echo "Options:"
      echo "  -n, --number   PR number (required)"
      echo "  -o, --output   Output file (optional, prints to stdout if omitted)"
      echo "  -h, --help     Show this help"
      exit 0
      ;;
    *)
      # Diagnostics belong on stderr — stdout carries the diff itself.
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$PR_NUMBER" ]]; then
  echo "Error: PR number is required (-n)" >&2
  exit 1
fi

# Sets $PLATFORM; stdout echo suppressed so only the diff is emitted.
detect_platform > /dev/null

if [[ "$PLATFORM" == "github" ]]; then
  if [[ -n "$OUTPUT_FILE" ]]; then
    gh pr diff "$PR_NUMBER" > "$OUTPUT_FILE"
  else
    gh pr diff "$PR_NUMBER"
  fi
elif [[ "$PLATFORM" == "gitea" ]]; then
  # tea has no diff command — fetch the raw diff from the REST API.
  OWNER=$(get_repo_owner)
  REPO=$(get_repo_name)
  REMOTE_URL=$(git remote get-url origin 2>/dev/null)
  # Extract host from the https:// or git@ remote URL.
  if [[ "$REMOTE_URL" == https://* ]]; then
    HOST=$(echo "$REMOTE_URL" | sed -E 's|https://([^/]+)/.*|\1|')
  elif [[ "$REMOTE_URL" == git@* ]]; then
    HOST=$(echo "$REMOTE_URL" | sed -E 's|git@([^:]+):.*|\1|')
  else
    echo "Error: Cannot determine host from remote URL" >&2
    exit 1
  fi
  DIFF_URL="https://${HOST}/api/v1/repos/${OWNER}/${REPO}/pulls/${PR_NUMBER}.diff"
  GITEA_API_TOKEN=$(get_gitea_token "$HOST" || true)
  # -f: fail (and abort via set -e) on HTTP errors instead of silently
  # emitting an HTML error page as if it were a diff.
  if [[ -n "$GITEA_API_TOKEN" ]]; then
    DIFF_CONTENT=$(curl -fsS -H "Authorization: token $GITEA_API_TOKEN" "$DIFF_URL")
  else
    DIFF_CONTENT=$(curl -fsS "$DIFF_URL")
  fi
  # printf, not echo: arbitrary diff text may start with '-' or contain
  # backslash sequences that echo implementations mangle.
  if [[ -n "$OUTPUT_FILE" ]]; then
    printf '%s\n' "$DIFF_CONTENT" > "$OUTPUT_FILE"
  else
    printf '%s\n' "$DIFF_CONTENT"
  fi
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

View File

@@ -0,0 +1,76 @@
# pr-list.ps1 - List pull requests on Gitea or GitHub
# Usage: .\pr-list.ps1 [-State state] [-Label label] [-Author author]
[CmdletBinding()]
param(
    [Alias("s")]
    [ValidateSet("open", "closed", "merged", "all")]
    [string]$State = "open",
    [Alias("l")]
    [string]$Label,
    [Alias("a")]
    [string]$Author,
    [Alias("n")]
    [int]$Limit = 100,
    [Alias("h")]
    [switch]$Help
)
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$ScriptDir\detect-platform.ps1"

# Print CLI help and exit non-zero.
function Show-Usage {
    @"
Usage: pr-list.ps1 [OPTIONS]
List pull requests from the current repository (Gitea or GitHub).
Options:
  -State, -s STATE   Filter by state: open, closed, merged, all (default: open)
  -Label, -l LABEL   Filter by label
  -Author, -a USER   Filter by author
  -Limit, -n N       Maximum PRs to show (default: 100)
  -Help, -h          Show this help message
Examples:
  .\pr-list.ps1                          # List open PRs
  .\pr-list.ps1 -s all                   # All PRs
  .\pr-list.ps1 -s merged -a username    # Merged PRs by user
"@
    exit 1
}

if ($Help) { Show-Usage }

$platform = Get-GitPlatform
switch ($platform) {
    "github" {
        # Collect the CLI arguments, then splat them onto the native command.
        $cliArgs = @("pr", "list", "--state", $State, "--limit", $Limit)
        if ($Label)  { $cliArgs += @("--label", $Label) }
        if ($Author) { $cliArgs += @("--author", $Author) }
        & gh @cliArgs
    }
    "gitea" {
        $cliArgs = @("pr", "list", "--state", $State, "--limit", $Limit)
        if ($Label) {
            Write-Warning "Label filtering may require manual review for Gitea"
        }
        if ($Author) {
            Write-Warning "Author filtering may require manual review for Gitea"
        }
        & tea @cliArgs
    }
    default {
        Write-Error "Could not detect git platform"
        exit 1
    }
}

View File

@@ -0,0 +1,93 @@
#!/bin/bash
# pr-list.sh - List pull requests on Gitea or GitHub
# Usage: pr-list.sh [-s state] [-l label] [-a author]
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform and related repo helpers.
source "$SCRIPT_DIR/detect-platform.sh"

# Default values (overridden by the argument parser below).
STATE="open"
LABEL=""
AUTHOR=""
LIMIT=100

# Print usage and exit non-zero (also used by -h/--help).
usage() {
  cat <<EOF
Usage: $(basename "$0") [OPTIONS]
List pull requests from the current repository (Gitea or GitHub).
Options:
  -s, --state STATE    Filter by state: open, closed, merged, all (default: open)
  -l, --label LABEL    Filter by label
  -a, --author USER    Filter by author
  -n, --limit N        Maximum PRs to show (default: 100)
  -h, --help           Show this help message
Examples:
  $(basename "$0")                       # List open PRs
  $(basename "$0") -s all                # All PRs
  $(basename "$0") -s merged -a username # Merged PRs by user
EOF
  exit 1
}
# ── Argument parsing ──
while [[ $# -gt 0 ]]; do
  case $1 in
    -s|--state)
      STATE="$2"
      shift 2
      ;;
    -l|--label)
      LABEL="$2"
      shift 2
      ;;
    -a|--author)
      AUTHOR="$2"
      shift 2
      ;;
    -n|--limit)
      LIMIT="$2"
      shift 2
      ;;
    -h|--help)
      usage
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage
      ;;
  esac
done

PLATFORM=$(detect_platform)
case "$PLATFORM" in
  github)
    # Build argv as an array rather than eval'ing a string: label/author
    # values containing quotes or $(...) are passed verbatim, not re-parsed.
    CMD=(gh pr list --state "$STATE" --limit "$LIMIT")
    [[ -n "$LABEL" ]] && CMD+=(--label "$LABEL")
    [[ -n "$AUTHOR" ]] && CMD+=(--author "$AUTHOR")
    "${CMD[@]}"
    ;;
  gitea)
    # tea's filtering is limited; warn (on stderr) instead of failing.
    if [[ -n "$LABEL" ]]; then
      echo "Note: Label filtering may require manual review for Gitea" >&2
    fi
    if [[ -n "$AUTHOR" ]]; then
      echo "Note: Author filtering may require manual review for Gitea" >&2
    fi
    tea pr list --state "$STATE" --limit "$LIMIT"
    ;;
  *)
    echo "Error: Could not detect git platform" >&2
    exit 1
    ;;
esac

View File

@@ -0,0 +1,98 @@
# pr-merge.ps1 - Merge pull requests on Gitea or GitHub
# Usage: .\pr-merge.ps1 -Number PR_NUMBER [-Method squash] [-DeleteBranch]
[CmdletBinding()]
param(
    [Parameter(Mandatory=$true)]
    [Alias("n")]
    [int]$Number,
    [Alias("m")]
    [string]$Method = "squash",
    [Alias("d")]
    [switch]$DeleteBranch,
    [switch]$SkipQueueGuard,
    [Alias("h")]
    [switch]$Help
)
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$ScriptDir\detect-platform.ps1"

# Print CLI help and exit non-zero.
function Show-Usage {
    @"
Usage: pr-merge.ps1 [OPTIONS]
Merge a pull request on the current repository (Gitea or GitHub).
Options:
  -Number, -n NUMBER   PR number to merge (required)
  -Method, -m METHOD   Merge method: squash only (default: squash)
  -DeleteBranch, -d    Delete the head branch after merge
  -SkipQueueGuard      Skip CI queue guard wait before merge
  -Help, -h            Show this help message
Examples:
  .\pr-merge.ps1 -n 42                   # Merge PR #42
  .\pr-merge.ps1 -n 42 -m squash         # Squash merge
  .\pr-merge.ps1 -n 42 -d                # Squash merge and delete branch
"@
    exit 1
}

if ($Help) {
    Show-Usage
}

# Policy gate: -Method exists only for CLI symmetry; anything but squash fails.
if ($Method -ne "squash") {
    Write-Error "Mosaic policy enforces squash merge only. Received '$Method'."
    exit 1
}

$platform = Get-GitPlatform
switch ($platform) {
    "github" {
        # Policy gate: only PRs targeting main may be merged.
        $baseRef = (& gh pr view $Number --json baseRefName --jq ".baseRefName").Trim()
        if ($baseRef -ne "main") {
            Write-Error "Mosaic policy allows merges only for PRs targeting 'main' (found '$baseRef')."
            exit 1
        }
        # Wait for the CI queue to drain unless explicitly skipped; env vars
        # override the 900s timeout / 15s poll defaults.
        if (-not $SkipQueueGuard) {
            $timeout = if ($env:MOSAIC_CI_QUEUE_TIMEOUT_SEC) { [int]$env:MOSAIC_CI_QUEUE_TIMEOUT_SEC } else { 900 }
            $interval = if ($env:MOSAIC_CI_QUEUE_POLL_SEC) { [int]$env:MOSAIC_CI_QUEUE_POLL_SEC } else { 15 }
            & "$ScriptDir\ci-queue-wait.ps1" -Purpose merge -Branch $baseRef -TimeoutSeconds $timeout -IntervalSeconds $interval
            if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE }
        }
        $cmd = @("gh", "pr", "merge", $Number, "--squash")
        if ($DeleteBranch) { $cmd += "--delete-branch" }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    "gitea" {
        # NOTE(review): unlike the GitHub branch (and pr-merge.sh), this path
        # does not verify the PR targets 'main' before merging — confirm
        # whether skipping the base-branch policy check for Gitea is intended.
        if (-not $SkipQueueGuard) {
            $timeout = if ($env:MOSAIC_CI_QUEUE_TIMEOUT_SEC) { [int]$env:MOSAIC_CI_QUEUE_TIMEOUT_SEC } else { 900 }
            $interval = if ($env:MOSAIC_CI_QUEUE_POLL_SEC) { [int]$env:MOSAIC_CI_QUEUE_POLL_SEC } else { 15 }
            & "$ScriptDir\ci-queue-wait.ps1" -Purpose merge -Branch "main" -TimeoutSeconds $timeout -IntervalSeconds $interval
            if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE }
        }
        $cmd = @("tea", "pr", "merge", $Number, "--style", "squash")
        if ($DeleteBranch) {
            Write-Warning "Branch deletion after merge may need to be done separately with tea"
        }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    default {
        Write-Error "Could not detect git platform"
        exit 1
    }
}
Write-Host "PR #$Number merged successfully"

View File

@@ -0,0 +1,116 @@
#!/bin/bash
# pr-merge.sh - Merge pull requests on Gitea or GitHub
# Usage: pr-merge.sh -n PR_NUMBER [-m squash] [-d] [--skip-queue-guard]
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform and related repo helpers.
source "$SCRIPT_DIR/detect-platform.sh"

# Default values (overridden by the argument parser below).
PR_NUMBER=""
MERGE_METHOD="squash"
DELETE_BRANCH=false
SKIP_QUEUE_GUARD=false

# Print usage and exit non-zero (also used by -h/--help).
usage() {
  cat <<EOF
Usage: $(basename "$0") [OPTIONS]
Merge a pull request on the current repository (Gitea or GitHub).
Options:
  -n, --number NUMBER    PR number to merge (required)
  -m, --method METHOD    Merge method: squash only (default: squash)
  -d, --delete-branch    Delete the head branch after merge
  --skip-queue-guard     Skip CI queue guard wait before merge
  -h, --help             Show this help message
Examples:
  $(basename "$0") -n 42                    # Merge PR #42
  $(basename "$0") -n 42 -m squash          # Squash merge
  $(basename "$0") -n 42 -d                 # Squash merge and delete branch
  $(basename "$0") -n 42 --skip-queue-guard # Skip queue guard wait
EOF
  exit 1
}
# ── Argument parsing ──
while [[ $# -gt 0 ]]; do
  case $1 in
    -n|--number)
      PR_NUMBER="$2"
      shift 2
      ;;
    -m|--method)
      MERGE_METHOD="$2"
      shift 2
      ;;
    -d|--delete-branch)
      DELETE_BRANCH=true
      shift
      ;;
    --skip-queue-guard)
      SKIP_QUEUE_GUARD=true
      shift
      ;;
    -h|--help)
      usage
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage
      ;;
  esac
done

if [[ -z "$PR_NUMBER" ]]; then
  echo "Error: PR number is required (-n)" >&2
  usage
fi

# Policy gate: -m exists only for CLI symmetry; anything but squash fails.
if [[ "$MERGE_METHOD" != "squash" ]]; then
  echo "Error: Mosaic policy enforces squash merge only. Received '$MERGE_METHOD'." >&2
  exit 1
fi

# Policy gate: only PRs targeting main may be merged (base ref comes from
# pr-metadata.sh, which normalizes both platforms).
BASE_BRANCH="$("$SCRIPT_DIR/pr-metadata.sh" -n "$PR_NUMBER" | python3 -c 'import json, sys; print((json.load(sys.stdin).get("baseRefName") or "").strip())')"
if [[ "$BASE_BRANCH" != "main" ]]; then
  echo "Error: Mosaic policy allows merges only for PRs targeting 'main' (found '$BASE_BRANCH')." >&2
  exit 1
fi

# Wait for the CI queue to drain unless explicitly skipped; env vars override
# the 900s timeout / 15s poll defaults.
if [[ "$SKIP_QUEUE_GUARD" != true ]]; then
  "$SCRIPT_DIR/ci-queue-wait.sh" \
    --purpose merge \
    -B "$BASE_BRANCH" \
    -t "${MOSAIC_CI_QUEUE_TIMEOUT_SEC:-900}" \
    -i "${MOSAIC_CI_QUEUE_POLL_SEC:-15}"
fi

PLATFORM=$(detect_platform)
case "$PLATFORM" in
  github)
    # Invoke directly via an argv array instead of eval: no re-parsing.
    CMD=(gh pr merge "$PR_NUMBER" --squash)
    [[ "$DELETE_BRANCH" == true ]] && CMD+=(--delete-branch)
    "${CMD[@]}"
    ;;
  gitea)
    if [[ "$DELETE_BRANCH" == true ]]; then
      echo "Note: Branch deletion after merge may need to be done separately with tea" >&2
    fi
    tea pr merge "$PR_NUMBER" --style squash
    ;;
  *)
    echo "Error: Could not detect git platform" >&2
    exit 1
    ;;
esac
echo "PR #$PR_NUMBER merged successfully"

View File

@@ -0,0 +1,113 @@
#!/bin/bash
# pr-metadata.sh - Get PR metadata as JSON on GitHub or Gitea
# Usage: pr-metadata.sh -n <pr_number> [-o <output_file>]
#
# Emits one JSON object with the same field names on both platforms
# (number, title, state, headRefName, baseRefName, labels, ...), so
# downstream scripts (e.g. pr-merge.sh) can parse either.
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"

# ── Argument parsing ──
PR_NUMBER=""
OUTPUT_FILE=""
while [[ $# -gt 0 ]]; do
  case $1 in
    -n|--number)
      PR_NUMBER="$2"
      shift 2
      ;;
    -o|--output)
      OUTPUT_FILE="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: pr-metadata.sh -n <pr_number> [-o <output_file>]"
      echo ""
      echo "Options:"
      echo "  -n, --number   PR number (required)"
      echo "  -o, --output   Output file (optional, prints to stdout if omitted)"
      echo "  -h, --help     Show this help"
      exit 0
      ;;
    *)
      # Diagnostics belong on stderr — stdout must stay pure JSON for callers.
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$PR_NUMBER" ]]; then
  echo "Error: PR number is required (-n)" >&2
  exit 1
fi

# Sets $PLATFORM; stdout echo suppressed so only JSON reaches stdout.
detect_platform > /dev/null

if [[ "$PLATFORM" == "github" ]]; then
  METADATA=$(gh pr view "$PR_NUMBER" --json number,title,body,state,author,headRefName,baseRefName,files,labels,assignees,milestone,createdAt,updatedAt,url,isDraft)
  if [[ -n "$OUTPUT_FILE" ]]; then
    printf '%s\n' "$METADATA" > "$OUTPUT_FILE"
  else
    printf '%s\n' "$METADATA"
  fi
elif [[ "$PLATFORM" == "gitea" ]]; then
  OWNER=$(get_repo_owner)
  REPO=$(get_repo_name)
  REMOTE_URL=$(git remote get-url origin 2>/dev/null)
  # Extract host from the https:// or git@ remote URL.
  if [[ "$REMOTE_URL" == https://* ]]; then
    HOST=$(echo "$REMOTE_URL" | sed -E 's|https://([^/]+)/.*|\1|')
  elif [[ "$REMOTE_URL" == git@* ]]; then
    HOST=$(echo "$REMOTE_URL" | sed -E 's|git@([^:]+):.*|\1|')
  else
    echo "Error: Cannot determine host from remote URL" >&2
    exit 1
  fi
  API_URL="https://${HOST}/api/v1/repos/${OWNER}/${REPO}/pulls/${PR_NUMBER}"
  GITEA_API_TOKEN=$(get_gitea_token "$HOST" || true)
  # -f: fail (and abort via set -e) on HTTP errors instead of handing an
  # HTML error page to the JSON normalizer below.
  if [[ -n "$GITEA_API_TOKEN" ]]; then
    RAW=$(curl -fsS -H "Authorization: token $GITEA_API_TOKEN" "$API_URL")
  else
    RAW=$(curl -fsS "$API_URL")
  fi
  # Normalize Gitea's response to the same schema the GitHub branch emits.
  METADATA=$(echo "$RAW" | python3 -c "
import json, sys
data = json.load(sys.stdin)
normalized = {
    'number': data.get('number'),
    'title': data.get('title'),
    'body': data.get('body', ''),
    'state': data.get('state'),
    'author': data.get('user', {}).get('login', ''),
    'headRefName': data.get('head', {}).get('ref', ''),
    'baseRefName': data.get('base', {}).get('ref', ''),
    'labels': [l.get('name', '') for l in data.get('labels', [])],
    'assignees': [a.get('login', '') for a in data.get('assignees', [])],
    'milestone': data.get('milestone', {}).get('title', '') if data.get('milestone') else '',
    'createdAt': data.get('created_at', ''),
    'updatedAt': data.get('updated_at', ''),
    'url': data.get('html_url', ''),
    'isDraft': data.get('draft', False),
    'mergeable': data.get('mergeable'),
    'diffUrl': data.get('diff_url', ''),
}
json.dump(normalized, sys.stdout, indent=2)
")
  if [[ -n "$OUTPUT_FILE" ]]; then
    printf '%s\n' "$METADATA" > "$OUTPUT_FILE"
  else
    printf '%s\n' "$METADATA"
  fi
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

View File

@@ -0,0 +1,115 @@
#!/bin/bash
# pr-review.sh - Review a pull request on GitHub or Gitea
# Usage: pr-review.sh -n <pr_number> -a <action> [-c <comment>]
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"

# ── Argument parsing ──
PR_NUMBER=""
ACTION=""
COMMENT=""
while [[ $# -gt 0 ]]; do
  case $1 in
    -n|--number)
      PR_NUMBER="$2"
      shift 2
      ;;
    -a|--action)
      ACTION="$2"
      shift 2
      ;;
    -c|--comment)
      COMMENT="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: pr-review.sh -n <pr_number> -a <action> [-c <comment>]"
      echo ""
      echo "Options:"
      echo "  -n, --number   PR number (required)"
      echo "  -a, --action   Review action: approve, request-changes, comment (required)"
      echo "  -c, --comment  Review comment (required for request-changes)"
      echo "  -h, --help     Show this help"
      exit 0
      ;;
    *)
      # Diagnostics go to stderr so callers capturing stdout stay clean.
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$PR_NUMBER" ]]; then
  echo "Error: PR number is required (-n)" >&2
  exit 1
fi
if [[ -z "$ACTION" ]]; then
  echo "Error: Action is required (-a): approve, request-changes, comment" >&2
  exit 1
fi

# detect_platform sets $PLATFORM; discard its stdout echo for clean output.
detect_platform > /dev/null

if [[ "$PLATFORM" == "github" ]]; then
  case $ACTION in
    approve)
      # Append --body only when a comment was given; an argv array avoids
      # relying on unquoted ${var:+...} word-splitting behavior.
      ARGS=("$PR_NUMBER" --approve)
      [[ -n "$COMMENT" ]] && ARGS+=(--body "$COMMENT")
      gh pr review "${ARGS[@]}"
      echo "Approved GitHub PR #$PR_NUMBER"
      ;;
    request-changes)
      if [[ -z "$COMMENT" ]]; then
        echo "Error: Comment required for request-changes" >&2
        exit 1
      fi
      gh pr review "$PR_NUMBER" --request-changes --body "$COMMENT"
      echo "Requested changes on GitHub PR #$PR_NUMBER"
      ;;
    comment)
      if [[ -z "$COMMENT" ]]; then
        echo "Error: Comment required" >&2
        exit 1
      fi
      gh pr review "$PR_NUMBER" --comment --body "$COMMENT"
      echo "Added review comment to GitHub PR #$PR_NUMBER"
      ;;
    *)
      echo "Error: Unknown action: $ACTION" >&2
      exit 1
      ;;
  esac
elif [[ "$PLATFORM" == "gitea" ]]; then
  case $ACTION in
    approve)
      ARGS=("$PR_NUMBER")
      [[ -n "$COMMENT" ]] && ARGS+=(--comment "$COMMENT")
      tea pr approve "${ARGS[@]}"
      echo "Approved Gitea PR #$PR_NUMBER"
      ;;
    request-changes)
      if [[ -z "$COMMENT" ]]; then
        echo "Error: Comment required for request-changes" >&2
        exit 1
      fi
      tea pr reject "$PR_NUMBER" --comment "$COMMENT"
      echo "Requested changes on Gitea PR #$PR_NUMBER"
      ;;
    comment)
      if [[ -z "$COMMENT" ]]; then
        echo "Error: Comment required" >&2
        exit 1
      fi
      tea pr comment "$PR_NUMBER" "$COMMENT"
      echo "Added comment to Gitea PR #$PR_NUMBER"
      ;;
    *)
      echo "Error: Unknown action: $ACTION" >&2
      exit 1
      ;;
  esac
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

View File

@@ -0,0 +1,48 @@
#!/bin/bash
# pr-view.sh - View pull request details on GitHub or Gitea
# Usage: pr-view.sh -n <pr_number>
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"

# ── Argument parsing ──
PR_NUMBER=""
while [[ $# -gt 0 ]]; do
  case $1 in
    -n|--number)
      PR_NUMBER="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: pr-view.sh -n <pr_number>"
      echo ""
      echo "Options:"
      echo "  -n, --number   PR number (required)"
      echo "  -h, --help     Show this help"
      exit 0
      ;;
    *)
      # Diagnostics go to stderr so callers capturing stdout stay clean.
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$PR_NUMBER" ]]; then
  echo "Error: PR number is required (-n)" >&2
  exit 1
fi

# detect_platform sets $PLATFORM; discard its stdout echo so the PR details
# are the only output (consistent with pr-diff.sh / pr-metadata.sh).
detect_platform > /dev/null

if [[ "$PLATFORM" == "github" ]]; then
  gh pr view "$PR_NUMBER"
elif [[ "$PLATFORM" == "gitea" ]]; then
  tea pr "$PR_NUMBER"
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

View File

@@ -0,0 +1,56 @@
# GLPI Tool Suite
Manage resources in the GLPI IT service management platform (tickets, computers/assets, users).
## Prerequisites
- `jq` and `curl` installed
- GLPI credentials in `~/src/jarvis-brain/credentials.json` (or `$MOSAIC_CREDENTIALS_FILE`)
- Required fields: `glpi.url`, `glpi.app_token`, `glpi.user_token`
## Authentication
GLPI uses a two-step auth flow:
1. `session-init.sh` exchanges app_token + user_token for a session_token
2. All subsequent calls use the session_token + app_token
The session token is cached at `~/.cache/mosaic/glpi-session` and auto-refreshed when expired.
## Scripts
| Script | Purpose |
| ------------------ | -------------------------------- |
| `session-init.sh` | Initialize and cache API session |
| `computer-list.sh` | List computers/IT assets |
| `ticket-list.sh` | List tickets (filter by status) |
| `ticket-create.sh` | Create a new ticket |
| `user-list.sh` | List users |
## Common Options
- `-f json` — JSON output (default: table)
- `-l limit` — Result count (default: 50)
- `-h` — Show help
## API Reference
- Base URL: `https://help.uscllc.com/apirest.php`
- Auth headers: `App-Token` + `Session-Token`
- Pattern: RESTful item-based (`/ItemType/{id}`)
## Examples
```bash
# List all tickets
~/.config/mosaic/tools/glpi/ticket-list.sh
# List only open tickets
~/.config/mosaic/tools/glpi/ticket-list.sh -s new
# Create a ticket
~/.config/mosaic/tools/glpi/ticket-create.sh -t "Server down" -c "Web server unresponsive" -p 4
# List computers as JSON
~/.config/mosaic/tools/glpi/computer-list.sh -f json
```

View File

@@ -0,0 +1,59 @@
#!/usr/bin/env bash
#
# computer-list.sh — List GLPI computers/assets
#
# Usage: computer-list.sh [-f format] [-l limit]
#
# Options:
#   -f format   Output format: table (default), json
#   -l limit    Number of results (default: 50)
#   -h          Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials glpi

FORMAT="table"
LIMIT=50
while getopts "f:l:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    l) LIMIT="$OPTARG" ;;
    h) head -11 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format] [-l limit]" >&2; exit 1 ;;
  esac
done

# -l is interpolated into the API range parameter; reject non-numeric
# values early rather than sending a malformed request.
if ! [[ "$LIMIT" =~ ^[0-9]+$ ]]; then
  echo "Error: -l limit must be a number" >&2
  exit 1
fi

# Reuse (or create) a cached GLPI session via the shared helper.
SESSION_TOKEN=$("$SCRIPT_DIR/session-init.sh" -q)

response=$(curl -sk -w "\n%{http_code}" \
  -H "App-Token: $GLPI_APP_TOKEN" \
  -H "Session-Token: $SESSION_TOKEN" \
  "${GLPI_URL}/Computer?range=0-${LIMIT}")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

# GLPI answers 200 for a complete result set and 206 (Partial Content)
# when more items exist beyond the requested range — both are success.
if [[ "$http_code" != "200" && "$http_code" != "206" ]]; then
  echo "Error: Failed to list computers (HTTP $http_code)" >&2
  exit 1
fi

if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
  exit 0
fi

echo "ID     NAME                         SERIAL             STATUS"
echo "------ ---------------------------- ------------------ ----------"
echo "$body" | jq -r '.[] | [
  (.id | tostring),
  .name,
  (.serial // "—"),
  (.states_id | tostring)
] | @tsv' | while IFS=$'\t' read -r id name serial states_id; do
  printf "%-6s %-28s %-18s %s\n" "$id" "${name:0:28}" "${serial:0:18}" "$states_id"
done

View File

@@ -0,0 +1,85 @@
#!/usr/bin/env bash
#
# session-init.sh — Initialize GLPI API session
#
# Usage: session-init.sh [-f] [-q]
#
# Authenticates with GLPI and caches the session token at
# ~/.cache/mosaic/glpi-session.
#
# Options:
#   -f    Force re-authentication (ignore cached session)
#   -q    Quiet mode — only output the session token
#   -h    Show this help
#
# Environment variables (or credentials.json):
#   GLPI_URL        — GLPI API base URL
#   GLPI_APP_TOKEN  — GLPI application token
#   GLPI_USER_TOKEN — GLPI user token
set -euo pipefail
# NOTE: the -h help above is produced by `head -18` on this very file, so the
# header comment block must stay within the first 18 lines.
#
# Output contract: the session token is the ONLY thing written to stdout;
# all status messages go to stderr so callers can do TOKEN=$(session-init.sh -q).
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials glpi
CACHE_DIR="$HOME/.cache/mosaic"
CACHE_FILE="$CACHE_DIR/glpi-session"
FORCE=false
QUIET=false
while getopts "fqh" opt; do
  case $opt in
    f) FORCE=true ;;
    q) QUIET=true ;;
    h) head -18 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f] [-q]" >&2; exit 1 ;;
  esac
done
# Check cached session validity
if [[ "$FORCE" == "false" ]] && [[ -f "$CACHE_FILE" ]]; then
  cached_token=$(cat "$CACHE_FILE")
  if [[ -n "$cached_token" ]]; then
    # Validate with a lightweight call
    http_code=$(curl -sk -o /dev/null -w "%{http_code}" \
      -H "App-Token: $GLPI_APP_TOKEN" \
      -H "Session-Token: $cached_token" \
      "${GLPI_URL}/getMyEntities")
    if [[ "$http_code" == "200" ]]; then
      [[ "$QUIET" == "false" ]] && echo "Using cached session (valid)" >&2
      echo "$cached_token"
      exit 0
    fi
    # Any non-200 (expired session, auth change) falls through to re-auth.
    [[ "$QUIET" == "false" ]] && echo "Cached session expired, re-authenticating..." >&2
  fi
fi
# Initialize session
# GLPI two-step auth: app_token + user_token are exchanged for a short-lived
# session_token via /initSession.
response=$(curl -sk -w "\n%{http_code}" \
  -H "App-Token: $GLPI_APP_TOKEN" \
  -H "Authorization: user_token $GLPI_USER_TOKEN" \
  "${GLPI_URL}/initSession")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to initialize GLPI session (HTTP $http_code)" >&2
  # Pretty-print the API error body when it is JSON; fall back to raw text.
  echo "$body" | jq -r '.' 2>/dev/null >&2 || echo "$body" >&2
  exit 1
fi
session_token=$(echo "$body" | jq -r '.session_token // empty')
if [[ -z "$session_token" ]]; then
  echo "Error: No session_token in response" >&2
  exit 1
fi
# Cache the session
mkdir -p "$CACHE_DIR"
echo "$session_token" > "$CACHE_FILE"
chmod 600 "$CACHE_FILE"   # token grants API access — keep it owner-only
[[ "$QUIET" == "false" ]] && echo "Session initialized and cached" >&2
echo "$session_token"

View File

@@ -0,0 +1,77 @@
#!/usr/bin/env bash
#
# ticket-create.sh — Create a GLPI ticket
#
# Usage: ticket-create.sh -t <title> -c <content> [-p priority] [-y type]
#
# Options:
#   -t title     Ticket title (required)
#   -c content   Ticket description (required)
#   -p priority  1=VeryLow, 2=Low, 3=Medium (default), 4=High, 5=VeryHigh, 6=Major
#   -y type      1=Incident (default), 2=Request
#   -f format    Output format: table (default), json
#   -h           Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials glpi

TITLE=""
CONTENT=""
PRIORITY=3
TYPE=1
FORMAT="table"
while getopts "t:c:p:y:f:h" opt; do
  case $opt in
    t) TITLE="$OPTARG" ;;
    c) CONTENT="$OPTARG" ;;
    p) PRIORITY="$OPTARG" ;;
    y) TYPE="$OPTARG" ;;
    f) FORMAT="$OPTARG" ;;
    h) head -13 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 -t <title> -c <content> [-p priority] [-y type]" >&2; exit 1 ;;
  esac
done

if [[ -z "$TITLE" || -z "$CONTENT" ]]; then
  echo "Error: -t title and -c content are required" >&2
  exit 1
fi

# -p and -y are injected via `jq --argjson`, which requires valid JSON
# numbers; validate up front so the user gets a clear message instead of
# an opaque jq parse error.
if ! [[ "$PRIORITY" =~ ^[0-9]+$ && "$TYPE" =~ ^[0-9]+$ ]]; then
  echo "Error: -p priority and -y type must be integers" >&2
  exit 1
fi

# Reuse (or create) a cached GLPI session via the shared helper.
SESSION_TOKEN=$("$SCRIPT_DIR/session-init.sh" -q)

# GLPI create-item payload: fields are wrapped in an "input" object.
payload=$(jq -n \
  --arg name "$TITLE" \
  --arg content "$CONTENT" \
  --argjson priority "$PRIORITY" \
  --argjson type "$TYPE" \
  '{input: {name: $name, content: $content, priority: $priority, type: $type}}')

response=$(curl -sk -w "\n%{http_code}" -X POST \
  -H "App-Token: $GLPI_APP_TOKEN" \
  -H "Session-Token: $SESSION_TOKEN" \
  -H "Content-Type: application/json" \
  -d "$payload" \
  "${GLPI_URL}/Ticket")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

if [[ "$http_code" != "201" && "$http_code" != "200" ]]; then
  echo "Error: Failed to create ticket (HTTP $http_code)" >&2
  echo "$body" | jq -r '.' 2>/dev/null >&2 || echo "$body" >&2
  exit 1
fi

if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
else
  ticket_id=$(echo "$body" | jq -r '.id // .message // .')
  echo "Ticket created: #$ticket_id"
  echo "  Title:    $TITLE"
  echo "  Priority: $PRIORITY"
  echo "  Type:     $([ "$TYPE" = "1" ] && echo "Incident" || echo "Request")"
fi

View File

@@ -0,0 +1,88 @@
#!/usr/bin/env bash
#
# ticket-list.sh — List GLPI tickets
#
# Usage: ticket-list.sh [-f format] [-l limit] [-s status]
#
# Options:
#   -f format   Output format: table (default), json
#   -l limit    Number of results (default: 50)
#   -s status   Filter: new, processing, pending, solved, closed
#   -h          Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials glpi

FORMAT="table"
LIMIT=50
STATUS=""
while getopts "f:l:s:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    l) LIMIT="$OPTARG" ;;
    s) STATUS="$OPTARG" ;;
    h) head -13 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format] [-l limit] [-s status]" >&2; exit 1 ;;
  esac
done

SESSION_TOKEN=$("$SCRIPT_DIR/session-init.sh" -q)
ENDPOINT="${GLPI_URL}/Ticket?range=0-${LIMIT}&order=DESC&sort=date_mod"
# Map status names to GLPI status IDs
if [[ -n "$STATUS" ]]; then
  case "$STATUS" in
    new) STATUS_ID=1 ;;
    processing|assigned) STATUS_ID=2 ;;
    pending|planned) STATUS_ID=3 ;;
    solved) STATUS_ID=5 ;;
    closed) STATUS_ID=6 ;;
    *) echo "Error: Unknown status '$STATUS'. Use: new, processing, pending, solved, closed" >&2; exit 1 ;;
  esac
  ENDPOINT="${ENDPOINT}&searchText[status]=${STATUS_ID}"
fi

# -g (--globoff) is required: the literal brackets in searchText[status]
# would otherwise be interpreted by curl's URL globbing and abort the
# request with "bad range in URL".
response=$(curl -gsk -w "\n%{http_code}" \
  -H "App-Token: $GLPI_APP_TOKEN" \
  -H "Session-Token: $SESSION_TOKEN" \
  "$ENDPOINT")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

# GLPI returns 200 for a complete list and 206 (Partial Content) when the
# range covers only part of the matching tickets — both are success.
if [[ "$http_code" != "200" && "$http_code" != "206" ]]; then
  echo "Error: Failed to list tickets (HTTP $http_code)" >&2
  exit 1
fi

if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
  exit 0
fi

echo "ID     PRIORITY STATUS TITLE                                    DATE"
echo "------ -------- ------ ---------------------------------------- ----------"
echo "$body" | jq -r '.[] | [
  (.id | tostring),
  (.priority | tostring),
  (.status | tostring),
  .name,
  (.date_mod | split(" ")[0])
] | @tsv' | while IFS=$'\t' read -r id priority status name date; do
  # Map priority numbers
  case "$priority" in
    1) pri="VLow" ;; 2) pri="Low" ;; 3) pri="Med" ;;
    4) pri="High" ;; 5) pri="VHigh" ;; 6) pri="Major" ;; *) pri="$priority" ;;
  esac
  # Map status numbers
  case "$status" in
    1) stat="New" ;; 2) stat="Proc" ;; 3) stat="Pend" ;;
    4) stat="Plan" ;; 5) stat="Solv" ;; 6) stat="Clos" ;; *) stat="$status" ;;
  esac
  printf "%-6s %-8s %-6s %-40s %s\n" "$id" "$pri" "$stat" "${name:0:40}" "$date"
done

View File

@@ -0,0 +1,61 @@
#!/usr/bin/env bash
#
# user-list.sh — List GLPI users
#
# Usage: user-list.sh [-f format] [-l limit]
#
# Options:
#   -f format   Output format: table (default), json
#   -l limit    Number of results (default: 50)
#   -h          Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials glpi

FORMAT="table"
LIMIT=50
while getopts "f:l:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    l) LIMIT="$OPTARG" ;;
    h) head -11 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format] [-l limit]" >&2; exit 1 ;;
  esac
done

# Reuse (or create) a cached GLPI session via the shared helper.
SESSION_TOKEN=$("$SCRIPT_DIR/session-init.sh" -q)

response=$(curl -sk -w "\n%{http_code}" \
  -H "App-Token: $GLPI_APP_TOKEN" \
  -H "Session-Token: $SESSION_TOKEN" \
  "${GLPI_URL}/User?range=0-${LIMIT}")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

# GLPI answers 200 for a complete result set and 206 (Partial Content)
# when more users exist beyond the requested range — both are success.
if [[ "$http_code" != "200" && "$http_code" != "206" ]]; then
  echo "Error: Failed to list users (HTTP $http_code)" >&2
  exit 1
fi

if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
  exit 0
fi

echo "ID     USERNAME             REALNAME             FIRSTNAME            ACTIVE"
echo "------ -------------------- -------------------- -------------------- ------"
echo "$body" | jq -r '.[] | [
  (.id | tostring),
  (.name // "—"),
  (.realname // "—"),
  (.firstname // "—"),
  (if .is_active == 1 then "yes" else "no" end)
] | @tsv' | while IFS=$'\t' read -r id name realname firstname active; do
  printf "%-6s %-20s %-20s %-20s %s\n" \
    "$id" "${name:0:20}" "${realname:0:20}" "${firstname:0:20}" "$active"
done

View File

@@ -0,0 +1,194 @@
#!/usr/bin/env bash
#
# stack-health.sh — Check health of all configured Mosaic stack services
#
# Usage: stack-health.sh [-f format] [-s service] [-q]
#
# Checks connectivity to all services configured in credentials.json.
# For each service, makes a lightweight API call and reports status.
#
# Options:
#   -f format    Output format: table (default), json
#   -s service   Check only a specific service
#   -q           Quiet — exit code only (0=all healthy, 1=any unhealthy)
#   -h           Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
FORMAT="table"
SINGLE_SERVICE=""
QUIET=false
CRED_FILE="${MOSAIC_CREDENTIALS_FILE:-$HOME/src/jarvis-brain/credentials.json}"
while getopts "f:s:qh" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    s) SINGLE_SERVICE="$OPTARG" ;;
    q) QUIET=true ;;
    h) head -15 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format] [-s service] [-q]" >&2; exit 1 ;;
  esac
done
if [[ ! -f "$CRED_FILE" ]]; then
  echo "Error: Credentials file not found: $CRED_FILE" >&2
  exit 1
fi
# Colors (disabled if not a terminal or quiet mode)
if [[ -t 1 ]] && [[ "$QUIET" == "false" ]]; then
  GREEN='\033[0;32m' RED='\033[0;31m' YELLOW='\033[0;33m' RESET='\033[0m'
else
  GREEN='' RED='' YELLOW='' RESET=''
fi
# Global accumulators, mutated by check_service (which runs in this shell,
# never in a subshell — otherwise the counters would be lost).
TOTAL=0
HEALTHY=0
RESULTS="[]"   # JSON array of per-service results, grown via jq

#######################################
# Probe one service endpoint and record the result.
# Globals:   TOTAL, HEALTHY, RESULTS (read/write); QUIET, FORMAT, colors (read)
# Arguments: $1 machine name, $2 display name, $3 base URL, $4 endpoint path,
#            $5 auth header ("" for none), $6 "true" to allow self-signed TLS
# Outputs:   one table row to stdout (table mode only)
#######################################
check_service() {
  local name="$1"
  local display_name="$2"
  local url="$3"
  local endpoint="$4"
  local auth_header="$5"
  local insecure="${6:-false}"
  TOTAL=$((TOTAL + 1))
  local curl_args=(-s -o /dev/null -w "%{http_code} %{time_total}" --connect-timeout 5 --max-time 10)
  [[ -n "$auth_header" ]] && curl_args+=(-H "$auth_header")
  [[ "$insecure" == "true" ]] && curl_args+=(-k)
  local result
  # "000 0.000" is the sentinel for connection failure/timeout.
  result=$(curl "${curl_args[@]}" "${url}${endpoint}" 2>/dev/null) || result="000 0.000"
  local http_code response_time status_text
  http_code=$(echo "$result" | awk '{print $1}')
  response_time=$(echo "$result" | awk '{print $2}')
  if [[ "$http_code" -ge 200 && "$http_code" -lt 400 ]]; then
    status_text="UP"
    HEALTHY=$((HEALTHY + 1))
  elif [[ "$http_code" == "000" ]]; then
    status_text="DOWN"
  elif [[ "$http_code" == "401" || "$http_code" == "403" ]]; then
    # Auth error but service is reachable
    status_text="AUTH_ERR"
    HEALTHY=$((HEALTHY + 1))  # Service is up, just auth issue
  else
    status_text="ERROR"
  fi
  # Append to JSON results
  RESULTS=$(echo "$RESULTS" | jq --arg n "$name" --arg d "$display_name" \
    --arg u "$url" --arg s "$status_text" --arg c "$http_code" --arg t "$response_time" \
    '. + [{name: $n, display_name: $d, url: $u, status: $s, http_code: ($c | tonumber), response_time: $t}]')
  if [[ "$QUIET" == "false" && "$FORMAT" == "table" ]]; then
    local color="$GREEN"
    [[ "$status_text" == "DOWN" || "$status_text" == "ERROR" ]] && color="$RED"
    [[ "$status_text" == "AUTH_ERR" ]] && color="$YELLOW"
    # Color vars are deliberately in the printf format string so the ANSI
    # escapes are expanded; the data fields stay in the argument list.
    printf "  %-22s %-35s ${color}%-8s${RESET} %ss\n" \
      "$display_name" "$url" "$status_text" "$response_time"
  fi
}

# Discover and check services
[[ "$QUIET" == "false" && "$FORMAT" == "table" ]] && {
  echo ""
  echo "  SERVICE                URL                                 STATUS   RESPONSE"
  echo "  ---------------------- ----------------------------------- -------- --------"
}
# Portainer
if [[ -z "$SINGLE_SERVICE" || "$SINGLE_SERVICE" == "portainer" ]]; then
  portainer_url=$(jq -r '.portainer.url // empty' "$CRED_FILE")
  portainer_key=$(jq -r '.portainer.api_key // empty' "$CRED_FILE")
  if [[ -n "$portainer_url" ]]; then
    check_service "portainer" "Portainer" "$portainer_url" "/api/system/status" \
      "X-API-Key: $portainer_key" "true"
  fi
fi
# Coolify
if [[ -z "$SINGLE_SERVICE" || "$SINGLE_SERVICE" == "coolify" ]]; then
  coolify_url=$(jq -r '.coolify.url // empty' "$CRED_FILE")
  coolify_token=$(jq -r '.coolify.app_token // empty' "$CRED_FILE")
  if [[ -n "$coolify_url" ]]; then
    check_service "coolify" "Coolify" "$coolify_url" "/api/v1/teams" \
      "Authorization: Bearer $coolify_token" "false"
  fi
fi
# Authentik
if [[ -z "$SINGLE_SERVICE" || "$SINGLE_SERVICE" == "authentik" ]]; then
  authentik_url=$(jq -r '.authentik.url // empty' "$CRED_FILE")
  if [[ -n "$authentik_url" ]]; then
    check_service "authentik" "Authentik" "$authentik_url" "/-/health/ready/" "" "true"
  fi
fi
# GLPI
if [[ -z "$SINGLE_SERVICE" || "$SINGLE_SERVICE" == "glpi" ]]; then
  glpi_url=$(jq -r '.glpi.url // empty' "$CRED_FILE")
  if [[ -n "$glpi_url" ]]; then
    check_service "glpi" "GLPI" "$glpi_url" "/" "" "true"
  fi
fi
# Gitea instances
if [[ -z "$SINGLE_SERVICE" || "$SINGLE_SERVICE" == "gitea" ]]; then
  for instance in mosaicstack usc; do
    gitea_url=$(jq -r ".gitea.${instance}.url // empty" "$CRED_FILE")
    if [[ -n "$gitea_url" ]]; then
      display="Gitea (${instance})"
      check_service "gitea-${instance}" "$display" "$gitea_url" "/api/v1/version" "" "true"
    fi
  done
fi
# GitHub
if [[ -z "$SINGLE_SERVICE" || "$SINGLE_SERVICE" == "github" ]]; then
  github_token=$(jq -r '.github.token // empty' "$CRED_FILE")
  if [[ -n "$github_token" ]]; then
    check_service "github" "GitHub" "https://api.github.com" "/rate_limit" \
      "Authorization: Bearer $github_token" "false"
  fi
fi
# Woodpecker
if [[ -z "$SINGLE_SERVICE" || "$SINGLE_SERVICE" == "woodpecker" ]]; then
  woodpecker_url=$(jq -r '.woodpecker.url // empty' "$CRED_FILE")
  woodpecker_token=$(jq -r '.woodpecker.token // empty' "$CRED_FILE")
  if [[ -n "$woodpecker_url" && -n "$woodpecker_token" ]]; then
    check_service "woodpecker" "Woodpecker CI" "$woodpecker_url" "/api/user" \
      "Authorization: Bearer $woodpecker_token" "true"
  elif [[ "$QUIET" == "false" && "$FORMAT" == "table" ]]; then
    # Configured URL without a token: report as NOTOKEN but don't count it.
    printf "  %-22s %-35s ${YELLOW}%-8s${RESET} %s\n" \
      "Woodpecker CI" "—" "NOTOKEN" "—"
  fi
fi
# Output
if [[ "$FORMAT" == "json" ]]; then
  jq -n --argjson results "$RESULTS" --argjson total "$TOTAL" --argjson healthy "$HEALTHY" \
    '{total: $total, healthy: $healthy, results: $results}'
  exit 0
fi
if [[ "$QUIET" == "false" && "$FORMAT" == "table" ]]; then
  echo ""
  UNHEALTHY=$((TOTAL - HEALTHY))
  if [[ "$UNHEALTHY" -eq 0 ]]; then
    echo -e "  ${GREEN}All $TOTAL services healthy${RESET}"
  else
    echo -e "  ${RED}$UNHEALTHY/$TOTAL services unhealthy${RESET}"
  fi
  echo ""
fi
# Exit code: 0 if all healthy, 1 if any unhealthy
[[ "$HEALTHY" -eq "$TOTAL" ]]

View File

@@ -0,0 +1,85 @@
# Mosaic Matrix Orchestrator Rail
Runtime-agnostic orchestration rail for delegating work to worker agents and enforcing
mechanical quality gates.
## Purpose
- Decouple orchestration from any single agent runtime feature set
- Persist state in repo-local `.mosaic/orchestrator/` files
- Emit structured events for Matrix transport and audit trails
- Enforce rails before marking tasks complete
## Components
- `protocol/` - JSON schemas for task/event payloads
- `controller/mosaic_orchestrator.py` - deterministic controller loop
- `adapters/` - runtime adapter guidance
## Repo Contract
The controller expects this layout in each bootstrapped repo:
```text
.mosaic/orchestrator/
config.json
tasks.json
state.json
events.ndjson
logs/
results/
```
## Quick Start
From a bootstrapped repo:
```bash
~/.config/mosaic/bin/mosaic-orchestrator-matrix-cycle
~/.config/mosaic/bin/mosaic-orchestrator-run --once
~/.config/mosaic/bin/mosaic-orchestrator-drain
```
Continuous loop:
```bash
~/.config/mosaic/bin/mosaic-orchestrator-run --poll-sec 10
```
Sync from `docs/TASKS.md` to queue:
```bash
~/.config/mosaic/bin/mosaic-orchestrator-sync-tasks --apply
```
Set worker command when needed:
```bash
export MOSAIC_WORKER_EXEC="codex -p"
# or
export MOSAIC_WORKER_EXEC="opencode -p"
```
Publish new orchestrator events to Matrix:
```bash
~/.config/mosaic/bin/mosaic-orchestrator-matrix-publish
```
Consume Matrix task messages into `tasks.json`:
```bash
~/.config/mosaic/bin/mosaic-orchestrator-matrix-consume
```
## Matrix Note
This rail writes canonical events to `.mosaic/orchestrator/events.ndjson`.
The Matrix transport bridge publishes those events into the configured control room
and can consume task commands from that room.
Task injection message format (room text):
```text
!mosaic-task {"id":"TASK-123","title":"Fix bug","command":"echo run","quality_gates":["pnpm lint"]}
```

View File

@@ -0,0 +1,52 @@
# Adapter Contract
Runtime adapters translate task commands into concrete worker invocations.
## Minimal Contract
Each task should define either:
1. `command` directly in `tasks.json`, or
2. controller-level `worker.command_template` in `.mosaic/orchestrator/config.json`
`command_template` may use:
- `{task_id}`
- `{task_title}`
- `{task_file}`
## Examples
Codex:
```json
{
"worker": {
"command_template": "codex \"run task {task_id}: {task_title}\""
}
}
```
Claude:
```json
{
"worker": {
"command_template": "claude -p \"Execute task {task_id}: {task_title}\""
}
}
```
OpenCode:
```json
{
"worker": {
"command_template": "opencode \"execute task {task_id}: {task_title}\""
}
}
```
## Recommendation
Prefer explicit per-task `command` for deterministic execution and auditability.

View File

@@ -0,0 +1,2 @@
__pycache__/
*.pyc

View File

@@ -0,0 +1,346 @@
#!/usr/bin/env python3
"""Deterministic orchestrator controller for Mosaic task delegation."""
from __future__ import annotations
import argparse
import datetime as dt
import json
import os
import pathlib
import subprocess
import sys
import time
import uuid
from typing import Any
def now_iso() -> str:
    """Return the current UTC time as a timezone-aware ISO-8601 string."""
    return dt.datetime.now(tz=dt.timezone.utc).isoformat()
def load_json(path: pathlib.Path, default: Any) -> Any:
    """Read JSON from *path*, returning *default* when the file is absent."""
    if path.exists():
        return json.loads(path.read_text(encoding="utf-8"))
    return default
def save_json(path: pathlib.Path, data: Any) -> None:
    """Atomically write *data* as pretty-printed JSON (temp file + rename)."""
    path.parent.mkdir(parents=True, exist_ok=True)
    staging = path.with_suffix(path.suffix + ".tmp")
    staging.write_text(json.dumps(data, indent=2) + "\n", encoding="utf-8")
    # rename is atomic on POSIX, so readers never see a half-written file
    staging.replace(path)
def append_event(events_path: pathlib.Path, event: dict[str, Any]) -> None:
    """Append one event as a single NDJSON line, creating parent dirs as needed."""
    events_path.parent.mkdir(parents=True, exist_ok=True)
    line = json.dumps(event, ensure_ascii=True)
    with events_path.open("a", encoding="utf-8") as handle:
        handle.write(line + "\n")
def emit_event(
    events_path: pathlib.Path,
    event_type: str,
    task_id: str,
    status: str,
    source: str,
    message: str,
    metadata: dict[str, Any] | None = None,
) -> None:
    """Wrap the arguments in the canonical event envelope and append it.

    Each event gets a fresh UUID and a UTC timestamp; *metadata* defaults
    to an empty dict.
    """
    envelope = {
        "event_id": str(uuid.uuid4()),
        "event_type": event_type,
        "task_id": task_id,
        "status": status,
        "timestamp": now_iso(),
        "source": source,
        "message": message,
        "metadata": metadata or {},
    }
    append_event(events_path, envelope)
def run_shell(command: str, cwd: pathlib.Path, log_path: pathlib.Path, timeout_sec: int) -> tuple[int, str, bool]:
    """Run *command* via ``bash -lc``, appending combined output to *log_path*.

    Returns ``(exit_code, output, timed_out)``. On timeout the child is
    killed, the exit code is forced to 124 (conventional timeout status),
    and ``timed_out`` is True. stderr is merged into stdout so the log
    captures a single interleaved stream.
    """
    log_path.parent.mkdir(parents=True, exist_ok=True)
    with log_path.open("a", encoding="utf-8") as log:
        log.write(f"\n[{now_iso()}] COMMAND: {command}\n")
        log.flush()  # make the COMMAND header visible before the child writes
        proc = subprocess.Popen(
            ["bash", "-lc", command],
            cwd=str(cwd),
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,  # merge stderr into captured stdout
            text=True,
            encoding="utf-8",
        )
        timed_out = False
        try:
            output, _ = proc.communicate(timeout=max(1, timeout_sec))
            code = proc.returncode
        except subprocess.TimeoutExpired:
            timed_out = True
            proc.kill()
            # Second communicate() reaps the killed child and drains its output.
            output, _ = proc.communicate()
            code = 124
            log.write(f"[{now_iso()}] TIMEOUT: exceeded {timeout_sec}s\n")
        if output:
            log.write(output)
        log.write(f"[{now_iso()}] EXIT: {code}\n")
    return code, output or "", timed_out
def render_command_template(template: str, task: dict[str, Any], task_file: pathlib.Path) -> str:
    """Substitute {task_id}, {task_title}, and {task_file} placeholders in *template*."""
    substitutions = {
        "{task_id}": str(task.get("id", "")),
        "{task_title}": str(task.get("title", "")),
        "{task_file}": str(task_file),
    }
    rendered = template
    for placeholder, value in substitutions.items():
        rendered = rendered.replace(placeholder, value)
    return rendered
def parse_dep_list(raw: Any) -> list[str]:
    """Normalize a dependency field (list or comma-separated string) to task IDs.

    Blank entries are dropped; any other input type yields an empty list.
    """
    if isinstance(raw, list):
        candidates = (str(entry).strip() for entry in raw)
    elif isinstance(raw, str):
        candidates = (piece.strip() for piece in raw.split(","))
    else:
        return []
    return [item for item in candidates if item]
def is_completed_status(status: str) -> bool:
    """Return True when *status* marks a finished task ('completed' or 'done')."""
    return status == "completed" or status == "done"
def pick_next_task(tasks: list[dict[str, Any]]) -> dict[str, Any] | None:
    """Return the first pending task whose dependencies are all completed.

    Tasks are scanned in list order (FIFO); unknown dependency IDs count
    as unmet, so such a task never becomes runnable.
    """
    statuses = {str(item.get("id", "")): str(item.get("status", "")) for item in tasks}
    for candidate in tasks:
        if candidate.get("status", "pending") != "pending":
            continue
        unmet = [
            dep
            for dep in parse_dep_list(candidate.get("depends_on"))
            if not is_completed_status(statuses.get(dep, ""))
        ]
        if not unmet:
            return candidate
    return None
def run_single_task(repo_root: pathlib.Path, orch_dir: pathlib.Path, config: dict[str, Any]) -> bool:
    """Claim and execute at most one runnable task from the queue.

    Returns True when a task was processed (regardless of outcome) and
    False when no runnable task exists. All state transitions, events,
    logs, and the per-task result file are persisted under *orch_dir*
    as the run progresses, so a crash leaves an auditable trail.
    """
    tasks_path = orch_dir / "tasks.json"
    state_path = orch_dir / "state.json"
    events_path = orch_dir / "events.ndjson"
    logs_dir = orch_dir / "logs"
    results_dir = orch_dir / "results"
    tasks = load_json(tasks_path, {"tasks": []})
    task_items = tasks.get("tasks", [])
    if not isinstance(task_items, list):
        raise ValueError("tasks.json must contain {'tasks': [...]} structure")
    task = pick_next_task(task_items)
    if not task:
        return False
    task_id = str(task.get("id", "unknown-task"))
    # Per-task settings fall back to controller config, then hard defaults.
    max_attempts = int(task.get("max_attempts") or config.get("worker", {}).get("max_attempts") or 1)
    attempt = int(task.get("attempts", 0)) + 1
    task["attempts"] = attempt
    task["max_attempts"] = max_attempts
    task["status"] = "running"
    task["started_at"] = now_iso()
    # Persist the claim immediately; `task` aliases an element of task_items.
    save_json(tasks_path, {"tasks": task_items})
    state = load_json(state_path, {"running_task_id": None, "updated_at": None})
    state["running_task_id"] = task_id
    state["updated_at"] = now_iso()
    save_json(state_path, state)
    emit_event(events_path, "task.assigned", task_id, "running", "controller", "Task assigned")
    emit_event(events_path, "task.started", task_id, "running", "worker", "Worker execution started")
    log_path = logs_dir / f"{task_id}.log"
    # Snapshot the task so command templates can reference it via {task_file}.
    task_file = orch_dir / f"task-{task_id}.json"
    save_json(task_file, task)
    # Explicit per-task command wins; otherwise render the configured template.
    cmd = str(task.get("command", "")).strip()
    if not cmd:
        template = str(config.get("worker", {}).get("command_template", "")).strip()
        if template:
            cmd = render_command_template(template, task, task_file)
    if not cmd:
        # Nothing to run — fail terminally without consuming retries.
        task["status"] = "failed"
        task["failed_at"] = now_iso()
        task["error"] = "No task command or worker command_template configured."
        save_json(tasks_path, {"tasks": task_items})
        emit_event(events_path, "task.failed", task_id, "failed", "controller", task["error"])
        state["running_task_id"] = None
        state["updated_at"] = now_iso()
        save_json(state_path, state)
        return True
    timeout_sec = int(task.get("timeout_seconds") or config.get("worker", {}).get("timeout_seconds") or 7200)
    rc, _, timed_out = run_shell(cmd, repo_root, log_path, timeout_sec)
    if rc != 0:
        # Worker failed: requeue for a retry or mark terminally failed.
        task["error"] = f"Worker command timed out after {timeout_sec}s" if timed_out else f"Worker command failed with exit code {rc}"
        if attempt < max_attempts:
            task["status"] = "pending"
            task["last_failed_at"] = now_iso()
            emit_event(
                events_path,
                "task.retry.scheduled",
                task_id,
                "pending",
                "worker",
                f"{task['error']}; retry {attempt + 1}/{max_attempts}",
            )
        else:
            task["status"] = "failed"
            task["failed_at"] = now_iso()
            emit_event(events_path, "task.failed", task_id, "failed", "worker", task["error"])
        save_json(tasks_path, {"tasks": task_items})
        state["running_task_id"] = None
        state["updated_at"] = now_iso()
        save_json(state_path, state)
        save_json(
            results_dir / f"{task_id}.json",
            {"task_id": task_id, "status": task["status"], "exit_code": rc, "attempt": attempt, "max_attempts": max_attempts},
        )
        return True
    # Worker succeeded — enforce quality gates before marking complete.
    # Every gate runs even after a failure so gate_results is complete.
    gates = task.get("quality_gates") or config.get("quality_gates") or []
    all_passed = True
    gate_results: list[dict[str, Any]] = []
    for gate in gates:
        gate_cmd = str(gate).strip()
        if not gate_cmd:
            continue
        emit_event(events_path, "rail.check.started", task_id, "running", "quality-gate", f"Running gate: {gate_cmd}")
        gate_rc, _, gate_timed_out = run_shell(gate_cmd, repo_root, log_path, timeout_sec)
        if gate_rc == 0:
            emit_event(events_path, "rail.check.passed", task_id, "running", "quality-gate", f"Gate passed: {gate_cmd}")
        else:
            all_passed = False
            emit_event(
                events_path,
                "rail.check.failed",
                task_id,
                "failed",
                "quality-gate",
                f"Gate timed out after {timeout_sec}s: {gate_cmd}" if gate_timed_out else f"Gate failed ({gate_rc}): {gate_cmd}",
            )
        gate_results.append({"command": gate_cmd, "exit_code": gate_rc})
    if all_passed:
        task["status"] = "completed"
        task["completed_at"] = now_iso()
        emit_event(events_path, "task.completed", task_id, "completed", "controller", "Task completed")
    else:
        # Gate failure is retryable, just like a worker failure.
        task["error"] = "One or more quality gates failed"
        if attempt < max_attempts:
            task["status"] = "pending"
            task["last_failed_at"] = now_iso()
            emit_event(
                events_path,
                "task.retry.scheduled",
                task_id,
                "pending",
                "controller",
                f"{task['error']}; retry {attempt + 1}/{max_attempts}",
            )
        else:
            task["status"] = "failed"
            task["failed_at"] = now_iso()
            emit_event(events_path, "task.failed", task_id, "failed", "controller", task["error"])
    save_json(tasks_path, {"tasks": task_items})
    state["running_task_id"] = None
    state["updated_at"] = now_iso()
    save_json(state_path, state)
    save_json(
        results_dir / f"{task_id}.json",
        {
            "task_id": task_id,
            "status": task["status"],
            "completed_at": task.get("completed_at"),
            "failed_at": task.get("failed_at"),
            "gate_results": gate_results,
        },
    )
    return True
def queue_state(orch_dir: pathlib.Path) -> dict[str, int]:
    """Summarize the queue: counts of pending, running, and runnable tasks.

    "runnable" means pending with every dependency completed. A malformed
    tasks.json yields all zeros.
    """
    payload = load_json(orch_dir / "tasks.json", {"tasks": []})
    items = payload.get("tasks", [])
    if not isinstance(items, list):
        return {"pending": 0, "running": 0, "runnable": 0}
    statuses = {str(entry.get("id", "")): str(entry.get("status", "")) for entry in items}
    counts = {"pending": 0, "running": 0, "runnable": 0}
    for entry in items:
        status = str(entry.get("status", "pending"))
        if status == "running":
            counts["running"] += 1
        elif status == "pending":
            counts["pending"] += 1
            deps = parse_dep_list(entry.get("depends_on"))
            if all(is_completed_status(statuses.get(dep, "")) for dep in deps):
                counts["runnable"] += 1
    return counts
def main() -> int:
    """CLI entry point.

    Exit codes: 0 on success/disabled/drained, 1 on missing config,
    2 when --until-drained detects a dependency deadlock.
    """
    parser = argparse.ArgumentParser(description="Mosaic deterministic orchestrator controller")
    parser.add_argument("--repo", default=os.getcwd(), help="Repository root (default: cwd)")
    parser.add_argument("--once", action="store_true", help="Process at most one pending task and exit")
    parser.add_argument("--until-drained", action="store_true", help="Run until no pending tasks remain (or blocked)")
    parser.add_argument("--poll-sec", type=int, default=10, help="Polling interval for continuous mode")
    args = parser.parse_args()
    repo_root = pathlib.Path(args.repo).resolve()
    orch_dir = repo_root / ".mosaic" / "orchestrator"
    config_path = orch_dir / "config.json"
    if not config_path.exists():
        print(f"[mosaic-orchestrator] missing config: {config_path}", file=sys.stderr)
        return 1
    config = load_json(config_path, {})
    # Opt-in kill switch: a repo must explicitly set enabled=true.
    if not config.get("enabled", False):
        print("[mosaic-orchestrator] disabled in .mosaic/orchestrator/config.json (enabled=false)")
        return 0
    if args.once:
        processed = run_single_task(repo_root, orch_dir, config)
        if not processed:
            print("[mosaic-orchestrator] no pending tasks")
        return 0
    # Continuous mode: keep claiming tasks, sleeping between empty polls.
    print(f"[mosaic-orchestrator] loop start repo={repo_root} poll={args.poll_sec}s")
    while True:
        try:
            processed = run_single_task(repo_root, orch_dir, config)
            if not processed:
                qs = queue_state(orch_dir)
                if args.until_drained:
                    if qs["pending"] == 0 and qs["running"] == 0:
                        print("[mosaic-orchestrator] drained: no pending tasks")
                        return 0
                    if qs["pending"] > 0 and qs["runnable"] == 0 and qs["running"] == 0:
                        # Deadlock: work remains but nothing can ever start.
                        print("[mosaic-orchestrator] blocked: pending tasks remain but dependencies are unmet", file=sys.stderr)
                        return 2
                time.sleep(max(1, args.poll_sec))
        except KeyboardInterrupt:
            print("\n[mosaic-orchestrator] stopping")
            return 0
        except Exception as exc:  # pragma: no cover
            # Keep the loop alive on unexpected errors; log and retry.
            print(f"[mosaic-orchestrator] error: {exc}", file=sys.stderr)
            time.sleep(max(1, args.poll_sec))
if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,195 @@
#!/usr/bin/env python3
"""Sync docs/TASKS.md rows into .mosaic/orchestrator/tasks.json."""
from __future__ import annotations
import argparse
import json
import os
import pathlib
from typing import Any
def load_json(path: pathlib.Path, default: Any) -> Any:
    """Parse JSON from *path*; fall back to *default* for a missing file."""
    if not path.exists():
        return default
    return json.loads(path.read_text(encoding="utf-8"))
def save_json(path: pathlib.Path, data: Any) -> None:
    """Write *data* as indented JSON via a sibling temp file, then rename.

    The rename makes the update atomic, so concurrent readers never see
    a partially written file.
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    scratch = path.with_suffix(path.suffix + ".tmp")
    scratch.write_text(json.dumps(data, indent=2) + "\n", encoding="utf-8")
    scratch.replace(path)
def split_pipe_row(line: str) -> list[str]:
    """Split a markdown table row into trimmed cells.

    At most one leading and one trailing pipe are removed before splitting,
    matching standard markdown table syntax.
    """
    row = line.strip()
    start = 1 if row.startswith("|") else 0
    end = -1 if row.endswith("|") else len(row)
    return [cell.strip() for cell in row[start:end].split("|")]
def parse_tasks_markdown(path: pathlib.Path) -> list[dict[str, str]]:
    """Parse the first markdown table in *path* with id/status/description columns.

    Returns one dict per data row, keyed by lowercased header names; short
    rows are padded with empty strings. Returns [] when the file or a
    matching table header is absent.
    """
    if not path.exists():
        return []
    lines = path.read_text(encoding="utf-8").splitlines()
    header_idx = -1
    headers: list[str] = []
    for i, line in enumerate(lines):
        if "|" not in line:
            continue
        cells = [x.lower() for x in split_pipe_row(line)]
        # Header row must contain at least the id, status, and description columns.
        if "id" in cells and "status" in cells and "description" in cells:
            header_idx = i
            headers = cells
            break
    if header_idx < 0:
        return []
    rows: list[dict[str, str]] = []
    # Start at header_idx + 2 to skip the |---|---| separator row.
    for line in lines[header_idx + 2 :]:
        if not line.strip().startswith("|"):
            if rows:
                break  # first non-table line after data rows ends the table
            continue
        cells = split_pipe_row(line)
        if len(cells) < len(headers):
            cells += [""] * (len(headers) - len(cells))
        row = {headers[i]: cells[i] for i in range(len(headers))}
        task_id = row.get("id", "").strip()
        # Skip blank IDs and a repeated header row.
        if not task_id or task_id.lower() == "id":
            continue
        rows.append(row)
    return rows
def map_status(raw: str) -> str:
    """Translate a docs/TASKS.md status label into an orchestrator status.

    'done'/'completed' map to completed and 'failed' stays failed; every
    other label (not-started, todo, in-progress, needs-qa, unknown) is
    treated as pending so the controller can pick the task up again.
    """
    normalized = raw.strip().lower()
    if normalized in ("done", "completed"):
        return "completed"
    if normalized == "failed":
        return "failed"
    return "pending"
def parse_depends(raw: str) -> list[str]:
    """Split a comma-separated dependency string into non-empty, trimmed IDs."""
    pieces = (chunk.strip() for chunk in raw.split(","))
    return [piece for piece in pieces if piece]
def build_task(
    row: dict[str, str],
    existing: dict[str, Any],
    runtime_default: str,
    source_path: str,
) -> dict[str, Any]:
    """Merge one TASKS.md row onto an existing task record.

    Row fields win for id/title/description/status/depends_on; runtime,
    command, and quality_gates already present in the JSON are preserved
    (runtime falls back to *runtime_default*, then "codex"). Metadata is
    refreshed with source/issue/repo/branch from the row.
    """
    merged = dict(existing)
    task_id = row.get("id", "").strip()
    description = row.get("description", "").strip()
    merged["id"] = task_id
    merged["title"] = description or task_id
    merged["description"] = description
    merged["status"] = map_status(row.get("status", "pending"))
    merged["depends_on"] = parse_depends(row.get("depends_on", ""))
    merged["runtime"] = str(merged.get("runtime") or runtime_default or "codex")
    merged["command"] = str(merged.get("command") or "")
    merged["quality_gates"] = merged.get("quality_gates") or []
    metadata = dict(merged.get("metadata") or {})
    metadata["source"] = source_path
    metadata["issue"] = row.get("issue", "").strip()
    metadata["repo"] = row.get("repo", "").strip()
    metadata["branch"] = row.get("branch", "").strip()
    merged["metadata"] = metadata
    return merged
def main() -> int:
    """Sync docs/TASKS.md rows into .mosaic/orchestrator/tasks.json.

    Dry-run by default; --apply actually writes. Returns 0 always.
    """
    parser = argparse.ArgumentParser(description="Sync docs/TASKS.md into .mosaic/orchestrator/tasks.json")
    parser.add_argument("--repo", default=os.getcwd(), help="Repository root (default: cwd)")
    parser.add_argument("--docs", default="docs/TASKS.md", help="Path to tasks markdown (repo-relative)")
    parser.add_argument(
        "--tasks-json",
        default=".mosaic/orchestrator/tasks.json",
        help="Path to orchestrator tasks JSON (repo-relative)",
    )
    parser.add_argument("--keep-unlisted", action="store_true", help="Retain tasks already in JSON but missing from docs/TASKS.md")
    parser.add_argument("--apply", action="store_true", help="Write changes (default is dry-run)")
    args = parser.parse_args()
    repo = pathlib.Path(args.repo).resolve()
    docs_path = (repo / args.docs).resolve()
    # Backward compatibility: fall back to legacy lowercase path when default path is absent.
    if args.docs == "docs/TASKS.md" and not docs_path.exists():
        legacy_docs_path = (repo / "docs/tasks.md").resolve()
        if legacy_docs_path.exists():
            docs_path = legacy_docs_path
    tasks_path = (repo / args.tasks_json).resolve()
    config_path = repo / ".mosaic" / "orchestrator" / "config.json"
    config = load_json(config_path, {})
    # Worker runtime configured in config.json wins; otherwise codex.
    runtime_default = str(config.get("worker", {}).get("runtime") or "codex")
    rows = parse_tasks_markdown(docs_path)
    # Record the docs path repo-relative when possible so metadata stays portable.
    try:
        source_path = str(docs_path.relative_to(repo))
    except ValueError:
        source_path = str(docs_path)
    existing_payload = load_json(tasks_path, {"tasks": []})
    existing_tasks = existing_payload.get("tasks", [])
    if not isinstance(existing_tasks, list):
        existing_tasks = []
    # Index existing tasks by id so markdown rows can merge onto them.
    existing_by_id = {str(t.get("id", "")): t for t in existing_tasks}
    out_tasks: list[dict[str, Any]] = []
    seen: set[str] = set()
    for row in rows:
        task_id = row.get("id", "").strip()
        if not task_id:
            continue
        seen.add(task_id)
        out_tasks.append(
            build_task(
                row,
                existing_by_id.get(task_id, {}),
                runtime_default,
                source_path,
            )
        )
    # Tasks present only in JSON are dropped unless --keep-unlisted is given.
    if args.keep_unlisted:
        for task in existing_tasks:
            task_id = str(task.get("id", ""))
            if task_id and task_id not in seen:
                out_tasks.append(task)
    payload = {"tasks": out_tasks}
    if args.apply:
        save_json(tasks_path, payload)
        print(f"[mosaic-orchestrator-sync] wrote {len(out_tasks)} tasks -> {tasks_path}")
    else:
        print(f"[mosaic-orchestrator-sync] dry-run: {len(out_tasks)} tasks would be written -> {tasks_path}")
    return 0
if __name__ == "__main__":
    # Propagate main()'s integer return value as the process exit code.
    raise SystemExit(main())

View File

@@ -0,0 +1,48 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://mosaicstack.dev/schemas/orchestrator/event.schema.json",
"title": "Mosaic Orchestrator Event",
"type": "object",
"required": ["event_id", "event_type", "task_id", "status", "timestamp", "source"],
"properties": {
"event_id": {
"type": "string",
"description": "UUID string"
},
"event_type": {
"type": "string",
"enum": [
"task.assigned",
"task.started",
"task.progress",
"task.completed",
"task.failed",
"rail.check.started",
"rail.check.passed",
"rail.check.failed"
]
},
"task_id": {
"type": "string"
},
"status": {
"type": "string",
"enum": ["pending", "running", "completed", "failed"]
},
"timestamp": {
"type": "string",
"format": "date-time"
},
"source": {
"type": "string",
"enum": ["controller", "worker", "quality-gate"]
},
"message": {
"type": "string"
},
"metadata": {
"type": "object"
}
},
"additionalProperties": true
}

View File

@@ -0,0 +1,40 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://mosaicstack.dev/schemas/orchestrator/task.schema.json",
"title": "Mosaic Orchestrator Task",
"type": "object",
"required": ["id", "title", "status"],
"properties": {
"id": {
"type": "string"
},
"title": {
"type": "string"
},
"description": {
"type": "string"
},
"status": {
"type": "string",
"enum": ["pending", "running", "completed", "failed"]
},
"runtime": {
"type": "string",
"description": "Preferred worker runtime, e.g. codex, claude, opencode"
},
"command": {
"type": "string",
"description": "Worker command to execute for this task"
},
"quality_gates": {
"type": "array",
"items": {
"type": "string"
}
},
"metadata": {
"type": "object"
}
},
"additionalProperties": true
}

View File

@@ -0,0 +1,2 @@
__pycache__/
*.pyc

View File

@@ -0,0 +1,200 @@
#!/usr/bin/env python3
"""Matrix transport bridge for Mosaic orchestrator events/tasks."""
from __future__ import annotations
import argparse
import json
import pathlib
import urllib.parse
import urllib.request
import uuid
from typing import Any
def load_json(path: pathlib.Path, default: Any) -> Any:
    """Read JSON from *path*; return *default* when the file does not exist."""
    if path.exists():
        with path.open("r", encoding="utf-8") as handle:
            return json.load(handle)
    return default
def save_json(path: pathlib.Path, data: Any) -> None:
    """Atomically write *data* as pretty-printed JSON (tmp file + rename)."""
    path.parent.mkdir(parents=True, exist_ok=True)
    staging = path.with_suffix(path.suffix + ".tmp")
    with staging.open("w", encoding="utf-8") as handle:
        json.dump(data, handle, indent=2)
        handle.write("\n")
    # Atomic on POSIX: readers never observe a half-written file.
    staging.replace(path)
def matrix_request(
    homeserver: str,
    access_token: str,
    method: str,
    path: str,
    payload: dict[str, Any] | None = None,
) -> dict[str, Any]:
    """Issue one Matrix client-server API call and decode the JSON reply.

    *path* is appended to the homeserver base URL; a non-None *payload* is
    sent as a JSON body. An empty response body decodes to {}.
    """
    endpoint = homeserver.rstrip("/") + path
    headers = {"Authorization": f"Bearer {access_token}"}
    data = None
    if payload is not None:
        headers["Content-Type"] = "application/json"
        data = json.dumps(payload, ensure_ascii=True).encode("utf-8")
    request = urllib.request.Request(endpoint, method=method, data=data, headers=headers)
    with urllib.request.urlopen(request, timeout=30) as response:
        text = response.read().decode("utf-8")
    return json.loads(text) if text else {}
def matrix_send_message(homeserver: str, access_token: str, room_id: str, message: str) -> None:
    """Post *message* as an m.text event into *room_id*.

    A fresh UUID transaction id makes each call a distinct event.
    """
    room = urllib.parse.quote(room_id, safe="")
    txn_id = str(uuid.uuid4())
    endpoint = f"/_matrix/client/v3/rooms/{room}/send/m.room.message/{txn_id}"
    matrix_request(
        homeserver,
        access_token,
        "PUT",
        endpoint,
        {"msgtype": "m.text", "body": message},
    )
def format_event_message(event: dict[str, Any]) -> str:
    """Render one orchestrator event as a single-line Matrix message."""
    event_type = event.get("event_type", "unknown")
    task_id = event.get("task_id", "unknown")
    status = event.get("status", "unknown")
    detail = event.get("message", "")
    return "[mosaic-orch] {} task={} status={} :: {}".format(event_type, task_id, status, detail)
def publish_events(repo: pathlib.Path, config: dict[str, Any]) -> int:
    """Push not-yet-published orchestrator events to the Matrix control room.

    Reads events.ndjson and resumes after the line cursor recorded in
    matrix_state.json. Returns the number of events published. Raises
    ValueError when the Matrix connection settings are incomplete.
    """
    orch = repo / ".mosaic" / "orchestrator"
    events_path = orch / "events.ndjson"
    bridge_state_path = orch / "matrix_state.json"
    state = load_json(bridge_state_path, {"last_published_line": 0, "since": None})
    homeserver = str(config.get("matrix", {}).get("homeserver_url", "")).strip()
    token = str(config.get("matrix", {}).get("access_token", "")).strip()
    room_id = str(config.get("matrix", {}).get("control_room_id", "")).strip()
    if not homeserver or not token or not room_id:
        raise ValueError("matrix homeserver_url, access_token, and control_room_id are required")
    if not events_path.exists():
        return 0
    lines = events_path.read_text(encoding="utf-8").splitlines()
    start = int(state.get("last_published_line", 0))
    published = 0
    # idx is 1-based so the stored cursor equals the count of lines handled.
    for idx, line in enumerate(lines[start:], start=start + 1):
        if not line.strip():
            continue
        event = json.loads(line)
        matrix_send_message(homeserver, token, room_id, format_event_message(event))
        state["last_published_line"] = idx
        published += 1
    # NOTE(review): the cursor is persisted only after the loop — a mid-loop
    # failure would re-send already-published events on the next run.
    save_json(bridge_state_path, state)
    return published
def parse_task_command(body: str) -> dict[str, Any] | None:
raw = body.strip()
if raw.startswith("!mosaic-task "):
payload = raw[len("!mosaic-task ") :].strip()
elif raw.startswith("@mosaic task "):
payload = raw[len("@mosaic task ") :].strip()
else:
return None
task = json.loads(payload)
if not isinstance(task, dict):
raise ValueError("task payload must be a JSON object")
if "id" not in task or "title" not in task:
raise ValueError("task payload requires id and title")
task.setdefault("status", "pending")
return task
def consume_tasks(repo: pathlib.Path, config: dict[str, Any]) -> int:
    """Poll the Matrix control room for task commands and append new tasks.

    Performs a short /sync against the homeserver (resuming from the stored
    "since" token), parses task commands from room messages, deduplicates by
    task id, and persists both tasks.json and the bridge state. Returns the
    number of tasks added. Raises ValueError on bad config or tasks.json.
    """
    orch = repo / ".mosaic" / "orchestrator"
    tasks_path = orch / "tasks.json"
    bridge_state_path = orch / "matrix_state.json"
    state = load_json(bridge_state_path, {"last_published_line": 0, "since": None})
    tasks = load_json(tasks_path, {"tasks": []})
    task_items = tasks.get("tasks", [])
    if not isinstance(task_items, list):
        raise ValueError("tasks.json must contain {'tasks': [...]} structure")
    homeserver = str(config.get("matrix", {}).get("homeserver_url", "")).strip()
    token = str(config.get("matrix", {}).get("access_token", "")).strip()
    room_id = str(config.get("matrix", {}).get("control_room_id", "")).strip()
    bot_user_id = str(config.get("matrix", {}).get("bot_user_id", "")).strip()
    if not homeserver or not token or not room_id:
        raise ValueError("matrix homeserver_url, access_token, and control_room_id are required")
    since = state.get("since")
    # timeout=1 keeps the long-poll effectively instantaneous.
    path = "/_matrix/client/v3/sync?timeout=1"
    if since:
        path += "&since=" + urllib.parse.quote(str(since), safe="")
    sync = matrix_request(homeserver, token, "GET", path)
    if "next_batch" in sync:
        state["since"] = sync["next_batch"]
    room_timeline = (
        sync.get("rooms", {})
        .get("join", {})
        .get(room_id, {})
        .get("timeline", {})
        .get("events", [])
    )
    added = 0
    existing = {str(t.get("id")) for t in task_items if isinstance(t, dict)}
    for evt in room_timeline:
        if evt.get("type") != "m.room.message":
            continue
        sender = str(evt.get("sender", ""))
        # Ignore the bridge bot's own messages to avoid feedback loops.
        if bot_user_id and sender == bot_user_id:
            continue
        body = str(evt.get("content", {}).get("body", ""))
        task = parse_task_command(body)
        if not task:
            continue
        task_id = str(task.get("id"))
        if task_id in existing:
            continue
        task_items.append(task)
        existing.add(task_id)
        added += 1
    save_json(tasks_path, {"tasks": task_items})
    save_json(bridge_state_path, state)
    return added
def main() -> int:
    """CLI entry point: run the bridge in publish or consume mode.

    Exits cleanly (0) when the orchestrator is disabled or configured with a
    non-matrix transport.
    """
    p = argparse.ArgumentParser(description="Mosaic Matrix transport bridge")
    p.add_argument("--repo", default=".", help="Repository root")
    p.add_argument("--mode", required=True, choices=["publish", "consume"], help="Bridge mode")
    args = p.parse_args()
    repo = pathlib.Path(args.repo).resolve()
    config = load_json(repo / ".mosaic" / "orchestrator" / "config.json", {})
    if not config.get("enabled", False):
        print("[mosaic-orch-matrix] disabled in config (enabled=false)")
        return 0
    if str(config.get("transport", "")).strip() != "matrix":
        print("[mosaic-orch-matrix] config transport != matrix; nothing to do")
        return 0
    if args.mode == "publish":
        count = publish_events(repo, config)
        print(f"[mosaic-orch-matrix] published_events={count}")
        return 0
    count = consume_tasks(repo, config)
    print(f"[mosaic-orch-matrix] consumed_tasks={count}")
    return 0
if __name__ == "__main__":
    # Propagate main()'s integer return value as the process exit code.
    raise SystemExit(main())

View File

@@ -0,0 +1,523 @@
#!/usr/bin/env bash
#
# _lib.sh — Shared functions for r0 coordinator scripts
#
# Usage: source ~/.config/mosaic/tools/orchestrator/_lib.sh
#
# Provides state file access, TASKS.md parsing, session lock management,
# process health checks, and formatting utilities.
# Framework install root; overridable via the environment.
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
# Per-project orchestrator state lives under <project>/$ORCH_SUBDIR.
ORCH_SUBDIR=".mosaic/orchestrator"
MISSION_FILE="mission.json"
SESSION_LOCK_FILE="session.lock"
NEXT_TASK_FILE="next-task.json"
MANIFEST_FILE="docs/MISSION-MANIFEST.md"
TASKS_MD="docs/TASKS.md"
SCRATCHPAD_DIR="docs/scratchpads"
# Thresholds (seconds)
STALE_THRESHOLD=300 # 5 minutes
DEAD_THRESHOLD=1800 # 30 minutes
# ─── Color support ───────────────────────────────────────────────────────────
# Emit ANSI colors only when stdout is a terminal.
if [[ -t 1 ]]; then
  C_GREEN='\033[0;32m'
  C_RED='\033[0;31m'
  C_YELLOW='\033[0;33m'
  C_CYAN='\033[0;36m'
  C_BOLD='\033[1m'
  C_DIM='\033[2m'
  C_RESET='\033[0m'
else
  C_GREEN='' C_RED='' C_YELLOW='' C_CYAN='' C_BOLD='' C_DIM='' C_RESET=''
fi
# ─── Dependency checks ──────────────────────────────────────────────────────
# Fail (return 1) with a message on stderr when jq is not installed.
_require_jq() {
  command -v jq &>/dev/null && return 0
  echo -e "${C_RED}Error: jq is required but not installed${C_RESET}" >&2
  return 1
}
# Resolve the coordinator runtime from MOSAIC_COORD_RUNTIME; anything other
# than "claude" or "codex" falls back to "claude".
coord_runtime() {
  case "${MOSAIC_COORD_RUNTIME:-claude}" in
    claude) echo "claude" ;;
    codex) echo "codex" ;;
    *) echo "claude" ;;
  esac
}
# Print the command used to launch the coordinator runtime directly.
coord_launch_command() {
  printf 'mosaic %s\n' "$(coord_runtime)"
}
# Print the "mosaic coord run" invocation for the resolved runtime; only
# non-claude runtimes need an explicit --<runtime> flag.
coord_run_command() {
  local rt
  rt="$(coord_runtime)"
  case "$rt" in
    claude) echo "mosaic coord run" ;;
    *) echo "mosaic coord run --$rt" ;;
  esac
}
# ─── Project / state file access ────────────────────────────────────────────
# Print the orchestrator state directory for a project (default: CWD).
orch_dir() {
  printf '%s/%s\n' "${1:-.}" "$ORCH_SUBDIR"
}
# Print the mission.json path for a project (default: CWD).
mission_path() {
  printf '%s/%s\n' "$(orch_dir "${1:-.}")" "$MISSION_FILE"
}
# Print the next-task capsule path for a project (default: CWD).
next_task_capsule_path() {
  printf '%s/%s\n' "$(orch_dir "${1:-.}")" "$NEXT_TASK_FILE"
}
# Exit with error if mission.json is missing or inactive
# Returns 1 (with guidance on stderr) when the mission file is absent, jq is
# unavailable, or the mission's status is "inactive".
require_mission() {
  local project="${1:-.}"
  local mp
  mp="$(mission_path "$project")"
  if [[ ! -f "$mp" ]]; then
    echo -e "${C_RED}No mission found at $mp${C_RESET}" >&2
    echo "Initialize one with: mosaic coord init --name \"Mission Name\"" >&2
    return 1
  fi
  _require_jq || return 1
  local status
  # A missing .status key is treated as "inactive".
  status="$(jq -r '.status // "inactive"' "$mp")"
  if [[ "$status" == "inactive" ]]; then
    echo -e "${C_YELLOW}Mission exists but is inactive. Initialize with: mosaic coord init${C_RESET}" >&2
    return 1
  fi
}
# Dump mission.json to stdout (callers typically pipe into jq).
load_mission() {
  local mp
  mp="$(mission_path "${1:-.}")"
  cat "$mp"
}
# ─── Atomic JSON write ──────────────────────────────────────────────────────
# Atomically write $2 (a JSON string) to $1 via a same-directory temp file.
# Uses printf instead of echo: bash echo would swallow content that looks
# like its own options (e.g. a payload of exactly "-n") and may interpret
# backslashes depending on shell settings.
write_json() {
  local path="$1"
  local content="$2"
  local tmp
  tmp="$(mktemp "${path}.tmp.XXXXXX")" || return 1
  printf '%s\n' "$content" > "$tmp"
  # Same-filesystem rename makes the update atomic for readers.
  mv -- "$tmp" "$path"
}
# ─── TASKS.md parsing ───────────────────────────────────────────────────────
# Parse TASKS.md pipe-delimited table and output JSON counts
# Emits a single-line JSON object with total/done/in_progress/pending/
# failed/blocked counts; all zeros when the file is absent.
count_tasks_md() {
  local project="${1:-.}"
  local tasks_file="$project/$TASKS_MD"
  if [[ ! -f "$tasks_file" ]]; then
    echo '{"total":0,"done":0,"in_progress":0,"pending":0,"failed":0,"blocked":0}'
    return
  fi
  # awk state machine: header row -> "---" separator -> data rows. Status is
  # read from the 3rd pipe-delimited field; counting stops at the first
  # non-table line after data rows have started.
  awk -F'|' '
    /^\|.*[Ii][Dd].*[Ss]tatus/ { header=1; next }
    header && /^\|.*---/ { data=1; next }
    data && /^\|/ {
      gsub(/^[ \t]+|[ \t]+$/, "", $3)
      status = tolower($3)
      total++
      if (status == "done" || status == "completed") done++
      else if (status == "in-progress" || status == "in_progress") inprog++
      else if (status == "not-started" || status == "pending" || status == "todo") pending++
      else if (status == "failed") failed++
      else if (status == "blocked") blocked++
    }
    data && !/^\|/ && total > 0 { exit }
    END {
      printf "{\"total\":%d,\"done\":%d,\"in_progress\":%d,\"pending\":%d,\"failed\":%d,\"blocked\":%d}\n",
        total, done, inprog, pending, failed, blocked
    }
  ' "$tasks_file"
}
# Return the ID of the first not-started/pending task
# Prints the task ID from the 2nd table column, or nothing when the file is
# absent or every task is already started.
find_next_task() {
  local project="${1:-.}"
  local tasks_file="$project/$TASKS_MD"
  if [[ ! -f "$tasks_file" ]]; then
    echo ""
    return
  fi
  # Same header/separator state machine as count_tasks_md; emits the first
  # row whose (trimmed, lowercased) status is not-started/pending/todo.
  awk -F'|' '
    /^\|.*[Ii][Dd].*[Ss]tatus/ { header=1; next }
    header && /^\|.*---/ { data=1; next }
    data && /^\|/ {
      gsub(/^[ \t]+|[ \t]+$/, "", $2)
      gsub(/^[ \t]+|[ \t]+$/, "", $3)
      status = tolower($3)
      if (status == "not-started" || status == "pending" || status == "todo") {
        print $2
        exit
      }
    }
  ' "$tasks_file"
}
# ─── Session lock management ────────────────────────────────────────────────
# Print the session lock file path for a project (default: CWD).
session_lock_path() {
  printf '%s/%s\n' "$(orch_dir "${1:-.}")" "$SESSION_LOCK_FILE"
}
# Dump the session lock contents; returns 1 when no lock file exists.
session_lock_read() {
  local lock
  lock="$(session_lock_path "${1:-.}")"
  if [[ ! -f "$lock" ]]; then
    return 1
  fi
  cat "$lock"
}
# Write the session lock atomically as JSON.
# Args: $1 project, $2 session_id, $3 runtime, $4 pid, $5 milestone_id (opt).
session_lock_write() {
  local project="${1:-.}"
  local session_id="$2"
  local runtime="$3"
  local pid="$4"
  local milestone_id="${5:-}"
  local lp
  lp="$(session_lock_path "$project")"
  _require_jq || return 1
  local json
  # jq -n builds the document; pid is stored as a JSON number.
  json=$(jq -n \
    --arg sid "$session_id" \
    --arg rt "$runtime" \
    --arg pid "$pid" \
    --arg ts "$(iso_now)" \
    --arg pp "$(cd "$project" && pwd)" \
    --arg mid "$milestone_id" \
    '{
      session_id: $sid,
      runtime: $rt,
      pid: ($pid | tonumber),
      started_at: $ts,
      project_path: $pp,
      milestone_id: $mid
    }')
  write_json "$lp" "$json"
}
# Remove the session lock file (no error when it is already gone).
session_lock_clear() {
  rm -f "$(session_lock_path "${1:-.}")"
}
# ─── Process health checks ──────────────────────────────────────────────────
# Return 0 when a process with the given PID exists (signal 0 probe).
is_pid_alive() {
  kill -0 "$1" 2>/dev/null
}
# Classify a PID's runtime by substring-matching its /proc cmdline.
# Prints claude/codex/opencode, or "unknown" when /proc is unavailable or
# no known runtime name appears.
detect_agent_runtime() {
  local pid="$1"
  local cmd=""
  if [[ -f "/proc/$pid/cmdline" ]]; then
    # cmdline is NUL-delimited; flatten to a space-separated string.
    cmd="$(tr '\0' ' ' < "/proc/$pid/cmdline")"
  fi
  case "$cmd" in
    *claude*) echo "claude" ;;
    *codex*) echo "codex" ;;
    *opencode*) echo "opencode" ;;
    *) echo "unknown" ;;
  esac
}
# ─── Time / formatting utilities ────────────────────────────────────────────
# Current UTC time in ISO-8601 (e.g. 2026-04-01T12:34:56Z).
iso_now() {
  date -u '+%Y-%m-%dT%H:%M:%SZ'
}
# Current time as seconds since the Unix epoch.
epoch_now() {
  date '+%s'
}
# Convert an ISO timestamp to epoch seconds; prints 0 on parse failure.
# NOTE(review): relies on GNU date's -d flag — BSD/macOS date differs.
iso_to_epoch() {
  date -d "$1" +%s 2>/dev/null || echo 0
}
# Return most recent modification time (epoch) of key project files
# Checks TASKS.md, mission.json, and state.json mtimes plus the last git
# commit time; prints 0 when nothing is found.
last_activity_time() {
  local project="${1:-.}"
  local latest=0
  local ts
  # FIX: orch_dir already prefixes the project path, so mission.json must
  # not be prefixed with "$project/" again (the old code produced the
  # doubled path "$project/$project/.mosaic/..." and never saw the file).
  for f in \
    "$project/$TASKS_MD" \
    "$(orch_dir "$project")/$MISSION_FILE" \
    "$(orch_dir "$project")/state.json"; do
    if [[ -f "$f" ]]; then
      ts="$(stat -c %Y "$f" 2>/dev/null || echo 0)"
      (( ts > latest )) && latest=$ts
    fi
  done
  # Also check git log for last commit time
  if git -C "$project" rev-parse --is-inside-work-tree &>/dev/null; then
    ts="$(git -C "$project" log -1 --format=%ct 2>/dev/null || echo 0)"
    (( ts > latest )) && latest=$ts
  fi
  echo "$latest"
}
# Render an epoch timestamp as a human-readable "N ago" string relative to
# now (s/m/h+m/d granularity).
format_ago() {
  local delta=$(( $(epoch_now) - $1 ))
  if (( delta < 60 )); then
    echo "${delta}s ago"
  elif (( delta < 3600 )); then
    echo "$(( delta / 60 ))m ago"
  elif (( delta < 86400 )); then
    echo "$(( delta / 3600 ))h $(( (delta % 3600) / 60 ))m ago"
  else
    echo "$(( delta / 86400 ))d ago"
  fi
}
# Render a duration in seconds as "Ns", "Nm Ns", or "Nh Nm".
format_duration() {
  local total="$1"
  if (( total >= 3600 )); then
    echo "$(( total / 3600 ))h $(( (total % 3600) / 60 ))m"
  elif (( total >= 60 )); then
    echo "$(( total / 60 ))m $(( total % 60 ))s"
  else
    echo "${total}s"
  fi
}
# ─── Session ID generation ──────────────────────────────────────────────────
# Generate the next sequential session ID (sess-NNN) from the count of
# sessions already recorded in mission.json; defaults to sess-001 when the
# mission file or jq is unavailable.
next_session_id() {
  local project="${1:-.}"
  local mp
  mp="$(mission_path "$project")"
  if [[ ! -f "$mp" ]]; then
    echo "sess-001"
    return
  fi
  _require_jq || { echo "sess-001"; return; }
  local count
  count="$(jq '.sessions | length' "$mp")"
  printf "sess-%03d" "$(( count + 1 ))"
}
# ─── Milestone helpers ───────────────────────────────────────────────────────
# Get current milestone (first in-progress, or first pending)
# Prints the milestone ID, or nothing when no candidate exists.
current_milestone_id() {
  local project="${1:-.}"
  _require_jq || return 1
  local mp
  mp="$(mission_path "$project")"
  [[ -f "$mp" ]] || return 1
  local mid
  mid="$(jq -r '[.milestones[] | select(.status == "in-progress")][0].id // empty' "$mp")"
  if [[ -z "$mid" ]]; then
    # No in-progress milestone: fall back to the first pending one.
    mid="$(jq -r '[.milestones[] | select(.status == "pending")][0].id // empty' "$mp")"
  fi
  echo "$mid"
}
# Get milestone name by ID
# Args: $1 project, $2 milestone ID. Prints nothing when not found.
milestone_name() {
  local project="${1:-.}"
  local mid="$2"
  _require_jq || return 1
  local mp
  mp="$(mission_path "$project")"
  [[ -f "$mp" ]] || return 1
  jq -r --arg id "$mid" '.milestones[] | select(.id == $id) | .name // empty' "$mp"
}
# ─── Next-task capsule helpers ───────────────────────────────────────────────
# Write the machine-readable next-task capsule (next-task.json).
# Positional args: project, runtime, mission_id, mission_name, project_path,
# quality_gates, current milestone id + name, next_task, tasks_done,
# tasks_total, pct, current_branch. Counts are stored as JSON numbers.
write_next_task_capsule() {
  local project="${1:-.}"
  local runtime="${2:-claude}"
  local mission_id="${3:-}"
  local mission_name="${4:-}"
  local project_path="${5:-}"
  local quality_gates="${6:-}"
  local current_ms_id="${7:-}"
  local current_ms_name="${8:-}"
  local next_task="${9:-}"
  local tasks_done="${10:-0}"
  local tasks_total="${11:-0}"
  local pct="${12:-0}"
  local current_branch="${13:-}"
  _require_jq || return 1
  mkdir -p "$(orch_dir "$project")"
  local payload
  payload="$(jq -n \
    --arg generated_at "$(iso_now)" \
    --arg runtime "$runtime" \
    --arg mission_id "$mission_id" \
    --arg mission_name "$mission_name" \
    --arg project_path "$project_path" \
    --arg quality_gates "$quality_gates" \
    --arg current_ms_id "$current_ms_id" \
    --arg current_ms_name "$current_ms_name" \
    --arg next_task "$next_task" \
    --arg current_branch "$current_branch" \
    --arg tasks_done "$tasks_done" \
    --arg tasks_total "$tasks_total" \
    --arg pct "$pct" \
    '{
      generated_at: $generated_at,
      runtime: $runtime,
      mission_id: $mission_id,
      mission_name: $mission_name,
      project_path: $project_path,
      quality_gates: $quality_gates,
      current_milestone: {
        id: $current_ms_id,
        name: $current_ms_name
      },
      next_task: $next_task,
      progress: {
        tasks_done: ($tasks_done | tonumber),
        tasks_total: ($tasks_total | tonumber),
        pct: ($pct | tonumber)
      },
      current_branch: $current_branch
    }')"
  write_json "$(next_task_capsule_path "$project")" "$payload"
}
# Emit the strict Codex kickoff prompt on stdout, embedding the current
# next-task capsule JSON and an optional continuation prompt ($2).
build_codex_strict_kickoff() {
  local project="${1:-.}"
  local continuation_prompt="${2:-}"
  _require_jq || return 1
  local capsule_path
  capsule_path="$(next_task_capsule_path "$project")"
  # Fall back to an empty JSON object when no capsule has been written yet.
  local capsule='{}'
  if [[ -f "$capsule_path" ]]; then
    capsule="$(cat "$capsule_path")"
  fi
  local mission_id next_task project_path quality_gates
  mission_id="$(echo "$capsule" | jq -r '.mission_id // "unknown"')"
  next_task="$(echo "$capsule" | jq -r '.next_task // "none"')"
  project_path="$(echo "$capsule" | jq -r '.project_path // "."')"
  quality_gates="$(echo "$capsule" | jq -r '.quality_gates // "none"')"
  # Unquoted EOF: the heredoc interpolates the variables resolved above.
  cat <<EOF
Now initiating Orchestrator mode...
STRICT EXECUTION PROFILE FOR CODEX (HARD GATE)
- Do NOT ask clarifying questions before your first tool actions unless a Mosaic escalation trigger is hit.
- Your first actions must be reading mission state files in order.
- Treat the next-task capsule as authoritative execution input.
REQUIRED FIRST ACTIONS (IN ORDER)
1. Read ~/.config/mosaic/guides/ORCHESTRATOR-PROTOCOL.md
2. Read docs/MISSION-MANIFEST.md
3. Read docs/scratchpads/${mission_id}.md
4. Read docs/TASKS.md
5. Begin execution on next task: ${next_task}
WORKING CONTEXT
- Project: ${project_path}
- Quality gates: ${quality_gates}
- Capsule file: .mosaic/orchestrator/next-task.json
Task capsule (JSON):
\`\`\`json
${capsule}
\`\`\`
Continuation prompt:
${continuation_prompt}
EOF
}
# Get next milestone after the given one
# Prints the ID of the milestone that follows $2 in mission.json's array
# order, or nothing when $2 is missing or already last.
next_milestone_id() {
  local project="${1:-.}"
  local current_id="$2"
  _require_jq || return 1
  local mp
  mp="$(mission_path "$project")"
  [[ -f "$mp" ]] || return 1
  # Locate the array index of $cid, then emit the following element's id.
  jq -r --arg cid "$current_id" '
    .milestones as $ms |
    ($ms | to_entries | map(select(.value.id == $cid)) | .[0].key // -1) as $idx |
    if $idx >= 0 and ($idx + 1) < ($ms | length) then
      $ms[$idx + 1].id
    else
      empty
    end
  ' "$mp"
}
# ─── Slugify ─────────────────────────────────────────────────────────────────
# Lowercase $1 and collapse every non-alphanumeric run into single hyphens,
# trimming leading/trailing hyphens. printf instead of echo so inputs that
# look like echo options (e.g. exactly "-n") are not swallowed.
slugify() {
  printf '%s\n' "$1" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9]/-/g; s/--*/-/g; s/^-//; s/-$//'
}

View File

@@ -0,0 +1,173 @@
#!/usr/bin/env bash
set -euo pipefail
#
# continue-prompt.sh — Generate continuation prompt for next orchestrator session
#
# Usage:
#   continue-prompt.sh [--project <path>] [--milestone <id>] [--copy]
#
# Reads mission.json + docs/TASKS.md, writes the next-task capsule, and
# prints (or copies) a markdown prompt that resumes the mission.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/_lib.sh"
# ─── Parse arguments ─────────────────────────────────────────────────────────
PROJECT="."
MILESTONE=""
COPY=false
while [[ $# -gt 0 ]]; do
  case "$1" in
    --project) PROJECT="$2"; shift 2 ;;
    --milestone) MILESTONE="$2"; shift 2 ;;
    --copy) COPY=true; shift ;;
    -h|--help)
      echo "Usage: continue-prompt.sh [--project <path>] [--milestone <id>] [--copy]"
      exit 0
      ;;
    *) echo "Unknown option: $1" >&2; exit 1 ;;
  esac
done
_require_jq
require_mission "$PROJECT"
target_runtime="$(coord_runtime)"
launch_cmd="$(coord_launch_command)"
# ─── Load mission data ──────────────────────────────────────────────────────
mission="$(load_mission "$PROJECT")"
mission_name="$(echo "$mission" | jq -r '.name')"
mission_id="$(echo "$mission" | jq -r '.mission_id')"
quality_gates="$(echo "$mission" | jq -r '.quality_gates // "—"')"
project_path="$(echo "$mission" | jq -r '.project_path')"
# Determine current milestone (explicit --milestone wins over mission state).
if [[ -n "$MILESTONE" ]]; then
  current_ms_id="$MILESTONE"
else
  current_ms_id="$(current_milestone_id "$PROJECT")"
fi
current_ms_name=""
if [[ -n "$current_ms_id" ]]; then
  current_ms_name="$(milestone_name "$PROJECT" "$current_ms_id")"
fi
# Task counts
task_counts="$(count_tasks_md "$PROJECT")"
tasks_total="$(echo "$task_counts" | jq '.total')"
tasks_done="$(echo "$task_counts" | jq '.done')"
pct=0
(( tasks_total > 0 )) && pct=$(( (tasks_done * 100) / tasks_total ))
# Next task
next_task="$(find_next_task "$PROJECT")"
# Current branch
current_branch=""
if git -C "$PROJECT" rev-parse --is-inside-work-tree &>/dev/null; then
  current_branch="$(git -C "$PROJECT" branch --show-current 2>/dev/null || echo "—")"
fi
# Previous session info (last entry of mission.json's sessions array).
session_count="$(echo "$mission" | jq '.sessions | length')"
prev_session_id="—"
prev_runtime="—"
prev_duration="—"
prev_ended_reason="—"
prev_last_task="—"
if (( session_count > 0 )); then
  last_idx=$(( session_count - 1 ))
  prev_session_id="$(echo "$mission" | jq -r ".sessions[$last_idx].session_id // \"—\"")"
  prev_runtime="$(echo "$mission" | jq -r ".sessions[$last_idx].runtime // \"—\"")"
  prev_ended_reason="$(echo "$mission" | jq -r ".sessions[$last_idx].ended_reason // \"—\"")"
  prev_last_task="$(echo "$mission" | jq -r ".sessions[$last_idx].last_task_id // \"—\"")"
  s_start="$(echo "$mission" | jq -r ".sessions[$last_idx].started_at // \"\"")"
  s_end="$(echo "$mission" | jq -r ".sessions[$last_idx].ended_at // \"\"")"
  if [[ -n "$s_start" && -n "$s_end" && "$s_end" != "" ]]; then
    s_epoch="$(iso_to_epoch "$s_start")"
    e_epoch="$(iso_to_epoch "$s_end")"
    if (( e_epoch > 0 && s_epoch > 0 )); then
      prev_duration="$(format_duration $(( e_epoch - s_epoch )))"
    fi
  fi
fi
# Write machine-readable next-task capsule for deterministic runtime launches.
write_next_task_capsule \
  "$PROJECT" \
  "$target_runtime" \
  "$mission_id" \
  "$mission_name" \
  "$project_path" \
  "$quality_gates" \
  "$current_ms_id" \
  "$current_ms_name" \
  "$next_task" \
  "$tasks_done" \
  "$tasks_total" \
  "$pct" \
  "$current_branch"
# ─── Generate prompt ────────────────────────────────────────────────────────
prompt="$(cat <<EOF
## Continuation Mission
Continue **$mission_name** from existing state.
## Setup
- **Project:** $project_path
- **State:** docs/TASKS.md (already populated — ${tasks_done}/${tasks_total} tasks complete)
- **Manifest:** docs/MISSION-MANIFEST.md
- **Scratchpad:** docs/scratchpads/${mission_id}.md
- **Protocol:** ~/.config/mosaic/guides/ORCHESTRATOR.md
- **Quality gates:** $quality_gates
- **Target runtime:** $target_runtime
## Resume Point
- **Current milestone:** ${current_ms_name:-—} (${current_ms_id:-—})
- **Next task:** ${next_task:-—}
- **Progress:** ${tasks_done}/${tasks_total} tasks (${pct}%)
- **Branch:** ${current_branch:-—}
## Previous Session Context
- **Session:** $prev_session_id ($prev_runtime, $prev_duration)
- **Ended:** $prev_ended_reason
- **Last completed task:** $prev_last_task
## Instructions
1. Read \`~/.config/mosaic/guides/ORCHESTRATOR.md\` for full protocol
2. Read \`docs/MISSION-MANIFEST.md\` for mission scope and status
3. Read \`docs/scratchpads/${mission_id}.md\` for session history and decisions
4. Read \`docs/TASKS.md\` for current task state
5. \`git pull --rebase\` to sync latest changes
6. Launch runtime with \`$launch_cmd\`
7. Continue execution from task **${next_task:-next-pending}**
8. Follow Two-Phase Completion Protocol
9. You are the SOLE writer of \`docs/TASKS.md\`
EOF
)"
# ─── Output ──────────────────────────────────────────────────────────────────
# --copy prefers wl-copy (Wayland), then xclip (X11), else prints to stdout.
if [[ "$COPY" == true ]]; then
  if command -v wl-copy &>/dev/null; then
    echo "$prompt" | wl-copy
    echo -e "${C_GREEN}Continuation prompt copied to clipboard (wl-copy)${C_RESET}" >&2
  elif command -v xclip &>/dev/null; then
    echo "$prompt" | xclip -selection clipboard
    echo -e "${C_GREEN}Continuation prompt copied to clipboard (xclip)${C_RESET}" >&2
  else
    echo -e "${C_YELLOW}No clipboard tool found (wl-copy or xclip). Printing to stdout.${C_RESET}" >&2
    echo "$prompt"
  fi
else
  echo "$prompt"
fi

View File

@@ -0,0 +1,286 @@
#!/usr/bin/env bash
set -euo pipefail
#
# mission-init.sh — Initialize a new orchestration mission
#
# Usage:
#   mission-init.sh --name <name> [options]
#
# Options:
#   --name <name>              Mission name (required)
#   --project <path>           Project directory (default: CWD)
#   --prefix <prefix>          Task ID prefix (e.g., MS)
#   --milestones <comma-list>  Milestone names, comma-separated
#   --quality-gates <command>  Quality gate command string
#   --version <semver>         Milestone version (default: 0.0.1)
#   --description <text>       Mission description
#   --force                    Overwrite existing active mission
#
# Writes mission.json and scaffolds MISSION-MANIFEST.md, the mission
# scratchpad, and docs/TASKS.md (when absent).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/_lib.sh"
# ─── Parse arguments ─────────────────────────────────────────────────────────
NAME=""
PROJECT="."
PREFIX=""
MILESTONES=""
QUALITY_GATES=""
VERSION="0.0.1"
DESCRIPTION=""
FORCE=false
while [[ $# -gt 0 ]]; do
  case "$1" in
    --name) NAME="$2"; shift 2 ;;
    --project) PROJECT="$2"; shift 2 ;;
    --prefix) PREFIX="$2"; shift 2 ;;
    --milestones) MILESTONES="$2"; shift 2 ;;
    --quality-gates) QUALITY_GATES="$2"; shift 2 ;;
    --version) VERSION="$2"; shift 2 ;;
    --description) DESCRIPTION="$2"; shift 2 ;;
    --force) FORCE=true; shift ;;
    -h|--help)
      cat <<'USAGE'
mission-init.sh — Initialize a new orchestration mission
Usage: mission-init.sh --name <name> [options]
Options:
--name <name> Mission name (required)
--project <path> Project directory (default: CWD)
--prefix <prefix> Task ID prefix (e.g., MS)
--milestones <comma-list> Milestone names, comma-separated
--quality-gates <command> Quality gate command string
--version <semver> Milestone version (default: 0.0.1)
--description <text> Mission description
--force Overwrite existing active mission
Example:
mosaic coord init \
--name "Security Remediation" \
--prefix SEC \
--milestones "Critical Fixes,High Priority,Code Quality" \
--quality-gates "pnpm lint && pnpm typecheck && pnpm test"
USAGE
      exit 0
      ;;
    *) echo "Unknown option: $1" >&2; exit 1 ;;
  esac
done
if [[ -z "$NAME" ]]; then
  echo -e "${C_RED}Error: --name is required${C_RESET}" >&2
  exit 1
fi
_require_jq
# ─── Validate project ───────────────────────────────────────────────────────
od="$(orch_dir "$PROJECT")"
if [[ ! -d "$od" ]]; then
  echo -e "${C_RED}Error: $od not found. Run 'mosaic bootstrap' first.${C_RESET}" >&2
  exit 1
fi
# Check for existing active mission — refuse to clobber unless --force.
mp="$(mission_path "$PROJECT")"
if [[ -f "$mp" ]]; then
  existing_status="$(jq -r '.status // "inactive"' "$mp")"
  if [[ "$existing_status" == "active" || "$existing_status" == "paused" ]] && [[ "$FORCE" != true ]]; then
    existing_name="$(jq -r '.name // "unnamed"' "$mp")"
    echo -e "${C_YELLOW}Active mission exists: $existing_name (status: $existing_status)${C_RESET}" >&2
    echo "Use --force to overwrite." >&2
    exit 1
  fi
fi
# ─── Generate mission ID ────────────────────────────────────────────────────
# Slug of the name plus today's date, e.g. security-remediation-20260401.
MISSION_ID="$(slugify "$NAME")-$(date +%Y%m%d)"
# ─── Build milestones array ─────────────────────────────────────────────────
milestones_json="[]"
if [[ -n "$MILESTONES" ]]; then
  IFS=',' read -ra ms_array <<< "$MILESTONES"
  for i in "${!ms_array[@]}"; do
    # Trim surrounding whitespace from each comma-separated name.
    ms_name="$(echo "${ms_array[$i]}" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
    ms_id="phase-$(( i + 1 ))"
    ms_branch="$(slugify "$ms_name")"
    milestones_json="$(echo "$milestones_json" | jq \
      --arg id "$ms_id" \
      --arg name "$ms_name" \
      --arg branch "$ms_branch" \
      '. + [{
        "id": $id,
        "name": $name,
        "status": "pending",
        "branch": $branch,
        "issue_ref": "",
        "started_at": "",
        "completed_at": ""
      }]')"
  done
fi
MILESTONE_COUNT="$(echo "$milestones_json" | jq 'length')"
# ─── Write mission.json ─────────────────────────────────────────────────────
mission_json="$(jq -n \
  --arg mid "$MISSION_ID" \
  --arg name "$NAME" \
  --arg desc "$DESCRIPTION" \
  --arg pp "$(cd "$PROJECT" && pwd)" \
  --arg ts "$(iso_now)" \
  --arg prefix "$PREFIX" \
  --arg qg "$QUALITY_GATES" \
  --arg ver "$VERSION" \
  --argjson milestones "$milestones_json" \
  '{
    schema_version: 1,
    mission_id: $mid,
    name: $name,
    description: $desc,
    project_path: $pp,
    created_at: $ts,
    status: "active",
    task_prefix: $prefix,
    quality_gates: $qg,
    milestone_version: $ver,
    milestones: $milestones,
    sessions: []
  }')"
write_json "$mp" "$mission_json"
# ─── Scaffold MISSION-MANIFEST.md ───────────────────────────────────────────
manifest_path="$PROJECT/$MANIFEST_FILE"
mkdir -p "$(dirname "$manifest_path")"
if [[ ! -f "$manifest_path" ]] || [[ "$FORCE" == true ]]; then
  # Build milestones table rows
  ms_table=""
  for i in $(seq 0 $(( MILESTONE_COUNT - 1 ))); do
    ms_id="$(echo "$milestones_json" | jq -r ".[$i].id")"
    ms_name="$(echo "$milestones_json" | jq -r ".[$i].name")"
    ms_table+="| $(( i + 1 )) | $ms_id | $ms_name | pending | — | — | — | — |"$'\n'
  done
  cat > "$manifest_path" <<EOF
# Mission Manifest — $NAME
> Persistent document tracking full mission scope, status, and session history.
> Updated by the orchestrator at each phase transition and milestone completion.
## Mission
**ID:** $MISSION_ID
**Statement:** $DESCRIPTION
**Phase:** Intake
**Current Milestone:** —
**Progress:** 0 / $MILESTONE_COUNT milestones
**Status:** active
**Last Updated:** $(date -u +"%Y-%m-%d %H:%M UTC")
## Success Criteria
<!-- Define measurable success criteria here -->
## Milestones
| # | ID | Name | Status | Branch | Issue | Started | Completed |
|---|-----|------|--------|--------|-------|---------|-----------|
$ms_table
## Deployment
| Target | URL | Method |
|--------|-----|--------|
| — | — | — |
## Token Budget
| Metric | Value |
|--------|-------|
| Budget | — |
| Used | 0 |
| Mode | normal |
## Session History
| Session | Runtime | Started | Duration | Ended Reason | Last Task |
|---------|---------|---------|----------|--------------|-----------|
## Scratchpad
Path: \`docs/scratchpads/$MISSION_ID.md\`
EOF
fi
# ─── Scaffold scratchpad ────────────────────────────────────────────────────
sp_dir="$PROJECT/$SCRATCHPAD_DIR"
sp_file="$sp_dir/$MISSION_ID.md"
mkdir -p "$sp_dir"
if [[ ! -f "$sp_file" ]]; then
  cat > "$sp_file" <<EOF
# Mission Scratchpad — $NAME
> Append-only log. NEVER delete entries. NEVER overwrite sections.
> This is the orchestrator's working memory across sessions.
## Original Mission Prompt
\`\`\`
(Paste the mission prompt here on first session)
\`\`\`
## Planning Decisions
## Session Log
| Session | Date | Milestone | Tasks Done | Outcome |
|---------|------|-----------|------------|---------|
## Open Questions
## Corrections
EOF
fi
# ─── Scaffold TASKS.md if absent ────────────────────────────────────────────
tasks_path="$PROJECT/$TASKS_MD"
mkdir -p "$(dirname "$tasks_path")"
if [[ ! -f "$tasks_path" ]]; then
  cat > "$tasks_path" <<EOF
# Tasks — $NAME
> Single-writer: orchestrator only. Workers read but never modify.
| id | status | milestone | description | pr | notes |
|----|--------|-----------|-------------|----|-------|
EOF
fi
# ─── Report ──────────────────────────────────────────────────────────────────
runtime_cmd="$(coord_launch_command)"
run_cmd="$(coord_run_command)"
echo ""
echo -e "${C_GREEN}${C_BOLD}Mission initialized: $NAME${C_RESET}"
echo ""
echo -e " ${C_CYAN}Mission ID:${C_RESET} $MISSION_ID"
echo -e " ${C_CYAN}Milestones:${C_RESET} $MILESTONE_COUNT"
echo -e " ${C_CYAN}State:${C_RESET} $(mission_path "$PROJECT")"
echo -e " ${C_CYAN}Manifest:${C_RESET} $manifest_path"
echo -e " ${C_CYAN}Scratchpad:${C_RESET} $sp_file"
echo -e " ${C_CYAN}Tasks:${C_RESET} $tasks_path"
echo ""
echo "Next: Resume with '$run_cmd' (or launch directly with '$runtime_cmd')."

View File

@@ -0,0 +1,181 @@
#!/usr/bin/env bash
set -euo pipefail
#
# mission-status.sh — Show mission progress dashboard
#
# Reads the mission state (mission.json via _lib.sh helpers) and docs/TASKS.md
# and renders mission/milestone/task/session progress.
#
# Usage:
#   mission-status.sh [--project <path>] [--format table|json|markdown]
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# _lib.sh supplies colors (C_*), the jq guard, and mission/task helpers
# (load_mission, count_tasks_md, find_next_task, session_lock_read, ...).
source "$SCRIPT_DIR/_lib.sh"
# ─── Parse arguments ─────────────────────────────────────────────────────────
PROJECT="."
FORMAT="table"
while [[ $# -gt 0 ]]; do
  case "$1" in
    --project) PROJECT="$2"; shift 2 ;;
    --format) FORMAT="$2"; shift 2 ;;
    -h|--help)
      echo "Usage: mission-status.sh [--project <path>] [--format table|json|markdown]"
      exit 0
      ;;
    *) echo "Unknown option: $1" >&2; exit 1 ;;
  esac
done
_require_jq
require_mission "$PROJECT"
# ─── Load data ───────────────────────────────────────────────────────────────
# Load mission.json once into $mission; each scalar below is a jq extraction.
mission="$(load_mission "$PROJECT")"
mission_name="$(echo "$mission" | jq -r '.name')"
mission_id="$(echo "$mission" | jq -r '.mission_id')"
mission_status="$(echo "$mission" | jq -r '.status')"
version="$(echo "$mission" | jq -r '.milestone_version // "—"')"
created_at="$(echo "$mission" | jq -r '.created_at // "—"')"
session_count="$(echo "$mission" | jq '.sessions | length')"
milestone_count="$(echo "$mission" | jq '.milestones | length')"
completed_milestones="$(echo "$mission" | jq '[.milestones[] | select(.status == "completed")] | length')"
# Task counts — JSON object with total/done/in_progress/pending/blocked/failed
# derived from the TASKS.md table by count_tasks_md.
task_counts="$(count_tasks_md "$PROJECT")"
tasks_total="$(echo "$task_counts" | jq '.total')"
tasks_done="$(echo "$task_counts" | jq '.done')"
tasks_inprog="$(echo "$task_counts" | jq '.in_progress')"
tasks_pending="$(echo "$task_counts" | jq '.pending')"
tasks_blocked="$(echo "$task_counts" | jq '.blocked')"
tasks_failed="$(echo "$task_counts" | jq '.failed')"
# Next unblocked task id (empty string when none).
next_task="$(find_next_task "$PROJECT")"
# ─── JSON output ─────────────────────────────────────────────────────────────
# JSON format short-circuits: emit mission.json augmented with the task
# counters and the next unblocked task, then exit.
if [[ "$FORMAT" == "json" ]]; then
  echo "$mission" | jq \
    --argjson tasks "$task_counts" \
    --arg next "$next_task" \
    '. + {task_counts: $tasks, next_task: $next}'
  exit 0
fi
# ─── Progress bar ────────────────────────────────────────────────────────────
# Render a fixed-width ASCII progress bar, e.g. [=========>............].
# $1 = completed count, $2 = total count; a zero total yields an empty frame.
progress_bar() {
  local completed=$1
  local total=$2
  local width=30
  if (( total == 0 )); then
    # Nothing to measure — print the empty frame and bail.
    printf "[%${width}s]" ""
    return
  fi
  local fill=$(( (completed * width) / total ))
  local gap=$(( width - fill ))
  local bar
  bar="$(printf '%*s' "$fill" '' | tr ' ' '=')"
  if (( gap > 0 && fill > 0 )); then
    # Arrow head marks the leading edge of a partially filled bar.
    bar+=">"
    gap=$(( gap - 1 ))
  fi
  bar+="$(printf '%*s' "$gap" '' | tr ' ' '.')"
  printf "[%s]" "$bar"
}
# ─── Table / Markdown output ────────────────────────────────────────────────
# NOTE(review): usage advertises a "markdown" format, but there is no markdown
# branch — anything other than "json" renders the table below. Confirm intent.
# Header
echo ""
echo "=================================================="
echo -e " ${C_BOLD}Mission: $mission_name${C_RESET}"
echo -e " Status: ${C_CYAN}$mission_status${C_RESET}   Version: $version"
echo -e " Started: ${created_at:0:10}   Sessions: $session_count"
echo "=================================================="
echo ""
# Milestones — one checkbox-style line per milestone.
echo -e "${C_BOLD}Milestones:${C_RESET}"
for i in $(seq 0 $(( milestone_count - 1 ))); do
  # NOTE(review): ms_id is extracted but unused below — kept for a future
  # column? Confirm before removing.
  ms_id="$(echo "$mission" | jq -r ".milestones[$i].id")"
  ms_name="$(echo "$mission" | jq -r ".milestones[$i].name")"
  ms_status="$(echo "$mission" | jq -r ".milestones[$i].status")"
  ms_issue="$(echo "$mission" | jq -r ".milestones[$i].issue_ref // \"\"")"
  # Map milestone status to an icon: [x] done, [>] active, [!] blocked.
  case "$ms_status" in
    completed) icon="${C_GREEN}[x]${C_RESET}" ;;
    in-progress) icon="${C_YELLOW}[>]${C_RESET}" ;;
    blocked) icon="${C_RED}[!]${C_RESET}" ;;
    *) icon="${C_DIM}[ ]${C_RESET}" ;;
  esac
  issue_str=""
  [[ -n "$ms_issue" ]] && issue_str="$ms_issue"
  printf "  %b %-40s %s\n" "$icon" "$ms_name" "$issue_str"
done
echo ""
# Tasks progress — bar plus per-status breakdown.
pct=0
(( tasks_total > 0 )) && pct=$(( (tasks_done * 100) / tasks_total ))
echo -e "${C_BOLD}Tasks:${C_RESET} $(progress_bar "$tasks_done" "$tasks_total") ${tasks_done}/${tasks_total} (${pct}%)"
echo -e " done: ${C_GREEN}$tasks_done${C_RESET}  in-progress: ${C_YELLOW}$tasks_inprog${C_RESET}  pending: $tasks_pending  blocked: ${C_RED}$tasks_blocked${C_RESET}  failed: ${C_RED}$tasks_failed${C_RESET}"
echo ""
# Session history (last 5) — window the tail of the sessions array.
if (( session_count > 0 )); then
  echo -e "${C_BOLD}Recent Sessions:${C_RESET}"
  start_idx=$(( session_count > 5 ? session_count - 5 : 0 ))
  for i in $(seq "$start_idx" $(( session_count - 1 ))); do
    s_id="$(echo "$mission" | jq -r ".sessions[$i].session_id")"
    s_rt="$(echo "$mission" | jq -r ".sessions[$i].runtime // \"—\"")"
    s_start="$(echo "$mission" | jq -r ".sessions[$i].started_at // \"\"")"
    s_end="$(echo "$mission" | jq -r ".sessions[$i].ended_at // \"\"")"
    s_reason="$(echo "$mission" | jq -r ".sessions[$i].ended_reason // \"—\"")"
    s_last="$(echo "$mission" | jq -r ".sessions[$i].last_task_id // \"—\"")"
    # Duration is only computable when both timestamps parsed successfully.
    duration_str="—"
    if [[ -n "$s_start" && -n "$s_end" && "$s_end" != "" ]]; then
      s_epoch="$(iso_to_epoch "$s_start")"
      e_epoch="$(iso_to_epoch "$s_end")"
      if (( e_epoch > 0 && s_epoch > 0 )); then
        duration_str="$(format_duration $(( e_epoch - s_epoch )))"
      fi
    fi
    printf " %-10s %-8s %-10s %-18s → %s\n" "$s_id" "$s_rt" "$duration_str" "$s_reason" "$s_last"
  done
  echo ""
fi
# Current session check — read the lock and verify its PID is still alive;
# a lock whose PID is dead is reported as stale with a recovery hint.
lock_data=""
if lock_data="$(session_lock_read "$PROJECT" 2>/dev/null)"; then
  lock_pid="$(echo "$lock_data" | jq -r '.pid // 0')"
  lock_rt="$(echo "$lock_data" | jq -r '.runtime // "unknown"')"
  lock_start="$(echo "$lock_data" | jq -r '.started_at // ""')"
  if is_pid_alive "$lock_pid"; then
    dur=0
    if [[ -n "$lock_start" ]]; then
      dur=$(( $(epoch_now) - $(iso_to_epoch "$lock_start") ))
    fi
    echo -e "${C_GREEN}Current: running ($lock_rt, PID $lock_pid, $(format_duration "$dur"))${C_RESET}"
  else
    echo -e "${C_RED}Stale session lock: $lock_rt (PID $lock_pid, not running)${C_RESET}"
    echo " Run: mosaic coord resume --clean-lock"
  fi
else
  echo -e "${C_DIM}No active session.${C_RESET}"
fi
[[ -n "$next_task" ]] && echo -e "Next unblocked task: ${C_CYAN}$next_task${C_RESET}"
echo ""

View File

@@ -0,0 +1,208 @@
#!/usr/bin/env bash
set -euo pipefail
#
# session-resume.sh — Crash recovery for dead orchestrator sessions
#
# Inspects the session lock: if its PID is dead, reports any dirty git state
# the session left behind and either clears the lock (--clean-lock, recording
# the crash in mission.json) or prints a copy-paste resume prompt.
#
# Usage:
#   session-resume.sh [--project <path>] [--clean-lock]
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/_lib.sh"
# ─── Parse arguments ─────────────────────────────────────────────────────────
PROJECT="."
CLEAN_LOCK=false
while [[ $# -gt 0 ]]; do
  case "$1" in
    --project) PROJECT="$2"; shift 2 ;;
    --clean-lock) CLEAN_LOCK=true; shift ;;
    -h|--help)
      echo "Usage: session-resume.sh [--project <path>] [--clean-lock]"
      exit 0
      ;;
    *) echo "Unknown option: $1" >&2; exit 1 ;;
  esac
done
_require_jq
# ─── Check session lock ─────────────────────────────────────────────────────
lock_data=""
has_lock=false
if lock_data="$(session_lock_read "$PROJECT" 2>/dev/null)"; then
  has_lock=true
fi
if [[ "$has_lock" == true ]]; then
  lock_pid="$(echo "$lock_data" | jq -r '.pid // 0')"
  lock_sid="$(echo "$lock_data" | jq -r '.session_id // "unknown"')"
  lock_rt="$(echo "$lock_data" | jq -r '.runtime // "unknown"')"
  lock_start="$(echo "$lock_data" | jq -r '.started_at // ""')"
  lock_milestone="$(echo "$lock_data" | jq -r '.milestone_id // ""')"
  # A live PID means the session is healthy — nothing to recover.
  if is_pid_alive "$lock_pid"; then
    echo -e "${C_YELLOW}Session $lock_sid is still running (PID $lock_pid).${C_RESET}"
    echo "Use 'mosaic coord status' to check session health."
    exit 0
  fi
  # Session is dead
  echo ""
  echo -e "${C_RED}${C_BOLD}CRASH RECOVERY — Session $lock_sid ($lock_rt)${C_RESET}"
  echo "==========================================="
  echo ""
  if [[ -n "$lock_start" ]]; then
    echo -e " ${C_CYAN}Session started:${C_RESET} $lock_start"
  fi
  echo -e " ${C_CYAN}Session died:${C_RESET} PID $lock_pid is not running"
  [[ -n "$lock_milestone" ]] && echo -e " ${C_CYAN}Active milestone:${C_RESET} $lock_milestone"
  echo ""
else
  # No lock — check mission.json for last session info
  if [[ -f "$(mission_path "$PROJECT")" ]]; then
    mission="$(load_mission "$PROJECT")"
    session_count="$(echo "$mission" | jq '.sessions | length')"
    if (( session_count > 0 )); then
      last_idx=$(( session_count - 1 ))
      last_sid="$(echo "$mission" | jq -r ".sessions[$last_idx].session_id")"
      last_reason="$(echo "$mission" | jq -r ".sessions[$last_idx].ended_reason // \"unknown\"")"
      echo -e "${C_DIM}No session lock found. Last session: $last_sid (ended: $last_reason)${C_RESET}"
      echo "Use 'mosaic coord continue' to generate a continuation prompt."
      exit 0
    fi
  fi
  # Exit 4 mirrors session-status.sh's "no session" code.
  echo -e "${C_DIM}No session state found.${C_RESET}"
  exit 4
fi
# ─── Detect dirty state ─────────────────────────────────────────────────────
# Show up to 20 uncommitted files so the operator can decide commit vs discard.
echo -e "${C_BOLD}Dirty State:${C_RESET}"
dirty_files=""
if git -C "$PROJECT" rev-parse --is-inside-work-tree &>/dev/null; then
  dirty_files="$(git -C "$PROJECT" status --porcelain 2>/dev/null || true)"
fi
if [[ -n "$dirty_files" ]]; then
  echo " Modified files:"
  echo "$dirty_files" | head -20 | while IFS= read -r line; do
    echo "   $line"
  done
  file_count="$(echo "$dirty_files" | wc -l)"
  if (( file_count > 20 )); then
    echo "   ... and $(( file_count - 20 )) more"
  fi
else
  echo -e " ${C_GREEN}Working tree is clean.${C_RESET}"
fi
# Check for in-progress tasks
inprog_count=0
task_counts="$(count_tasks_md "$PROJECT")"
inprog_count="$(echo "$task_counts" | jq '.in_progress')"
if (( inprog_count > 0 )); then
  echo -e " ${C_YELLOW}$inprog_count task(s) still marked in-progress in TASKS.md${C_RESET}"
fi
echo ""
# ─── Recovery actions ────────────────────────────────────────────────────────
# NOTE(review): when the tree is clean, steps 1-3 are skipped and numbering
# starts at 4 — confirm that is intentional.
echo -e "${C_BOLD}Recovery Actions:${C_RESET}"
if [[ -n "$dirty_files" ]]; then
  echo " 1. Review changes: git diff"
  echo " 2. If good: git add -A && git commit -m \"wip: partial work from crashed session\""
  echo " 3. If bad: git checkout ."
fi
echo " 4. Clean lock: mosaic coord resume --clean-lock"
echo " 5. Generate prompt: mosaic coord continue"
echo ""
# ─── Clean lock if requested ─────────────────────────────────────────────────
if [[ "$CLEAN_LOCK" == true ]]; then
  echo -e "${C_CYAN}Cleaning session lock...${C_RESET}"
  # Update mission.json with crash info
  mp="$(mission_path "$PROJECT")"
  if [[ -f "$mp" && "$has_lock" == true ]]; then
    # Mark the dead session as crashed; update-assignment targets only the
    # session whose id matches the stale lock.
    updated="$(jq \
      --arg sid "$lock_sid" \
      --arg ts "$(iso_now)" \
      '(.sessions[] | select(.session_id == $sid)) |= . + {
        ended_at: $ts,
        ended_reason: "crashed"
      }' "$mp")"
    write_json "$mp" "$updated"
    echo " Updated mission.json: session $lock_sid marked as crashed"
  fi
  session_lock_clear "$PROJECT"
  echo " Cleared session.lock"
  echo ""
  echo -e "${C_GREEN}Lock cleared. Generate continuation prompt with: mosaic coord continue${C_RESET}"
fi
# ─── Generate resume prompt ─────────────────────────────────────────────────
# Only printed when the lock was NOT cleared in this invocation.
if [[ "$CLEAN_LOCK" != true ]]; then
  echo "---"
  echo ""
  echo -e "${C_BOLD}Resume Prompt (paste to new session):${C_RESET}"
  echo ""
  mission_name=""
  mission_id=""
  if [[ -f "$(mission_path "$PROJECT")" ]]; then
    mission="$(load_mission "$PROJECT")"
    mission_name="$(echo "$mission" | jq -r '.name')"
    mission_id="$(echo "$mission" | jq -r '.mission_id')"
    quality_gates="$(echo "$mission" | jq -r '.quality_gates // "—"')"
    project_path="$(echo "$mission" | jq -r '.project_path')"
  fi
  task_counts="$(count_tasks_md "$PROJECT")"
  tasks_done="$(echo "$task_counts" | jq '.done')"
  tasks_total="$(echo "$task_counts" | jq '.total')"
  next_task="$(find_next_task "$PROJECT")"
  # Unquoted EOF: variables below expand now, producing a concrete prompt.
  cat <<EOF
## Crash Recovery Mission
Recovering **${mission_name:-Unknown Mission}** from crashed session ${lock_sid:-unknown}.
### WARNING: Dirty State Detected
The previous session left uncommitted changes. Before continuing:
1. Run \`git diff\` to review uncommitted changes
2. Decide: commit (if good) or discard (if broken)
3. Then proceed with the mission
## Setup
- **Project:** ${project_path:-$PROJECT}
- **State:** docs/TASKS.md (${tasks_done}/${tasks_total} tasks complete)
- **Manifest:** docs/MISSION-MANIFEST.md
- **Scratchpad:** docs/scratchpads/${mission_id:-mission}.md
- **Protocol:** ~/.config/mosaic/guides/ORCHESTRATOR.md
- **Quality gates:** ${quality_gates:-—}
## Resume Point
- **Next task:** ${next_task:-check TASKS.md}
## Instructions
1. Read \`docs/MISSION-MANIFEST.md\` for mission scope
2. Read \`docs/scratchpads/${mission_id:-mission}.md\` for session history
3. Review and resolve any uncommitted changes first
4. Read \`docs/TASKS.md\` for current task state
5. Continue execution from the next pending task
6. You are the SOLE writer of \`docs/TASKS.md\`
EOF
fi

View File

@@ -0,0 +1,91 @@
#!/usr/bin/env bash
set -euo pipefail
#
# session-run.sh — Generate continuation context and launch target runtime.
#
# Usage:
#   session-run.sh [--project <path>] [--milestone <id>] [--print] [--yolo]
#
# Behavior:
#   - Builds continuation prompt + next-task capsule.
#   - Launches selected runtime (default: claude, override via MOSAIC_COORD_RUNTIME).
#   - For codex, injects strict orchestration kickoff to reduce clarification loops.
#   - --yolo launches the runtime in dangerous/skip-permissions mode.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/_lib.sh"
PROJECT="."
MILESTONE=""
PRINT=false
YOLO=false
while [[ $# -gt 0 ]]; do
  case "$1" in
    --project) PROJECT="$2"; shift 2 ;;
    --milestone) MILESTONE="$2"; shift 2 ;;
    --print) PRINT=true; shift ;;
    --yolo) YOLO=true; shift ;;
    -h|--help)
      cat <<'USAGE'
Usage: session-run.sh [--project <path>] [--milestone <id>] [--print] [--yolo]
Options:
  --project <path>   Project directory (default: CWD)
  --milestone <id>   Force specific milestone context
  --print            Print launch prompt only (no runtime launch)
  --yolo             Launch runtime in dangerous/skip-permissions mode
USAGE
      exit 0
      ;;
    *) echo "Unknown option: $1" >&2; exit 1 ;;
  esac
done
# Expand a leading ~ then canonicalize to an absolute path.
PROJECT="${PROJECT/#\~/$HOME}"
PROJECT="$(cd "$PROJECT" && pwd)"
_require_jq
require_mission "$PROJECT"
runtime="$(coord_runtime)"
launch_cmd="$(coord_launch_command)"
# continue-prompt.sh builds the continuation prompt and writes the
# next-task capsule as a side effect.
continue_cmd=(bash "$SCRIPT_DIR/continue-prompt.sh" --project "$PROJECT")
if [[ -n "$MILESTONE" ]]; then
  continue_cmd+=(--milestone "$MILESTONE")
fi
continuation_prompt="$(MOSAIC_COORD_RUNTIME="$runtime" "${continue_cmd[@]}")"
# Codex gets a strict kickoff wrapper to suppress clarification loops.
if [[ "$runtime" == "codex" ]]; then
  launch_prompt="$(build_codex_strict_kickoff "$PROJECT" "$continuation_prompt")"
else
  launch_prompt="$continuation_prompt"
fi
if [[ "$PRINT" == true ]]; then
  echo "$launch_prompt"
  exit 0
fi
# launch_cmd is display-only from here on; the actual launch uses exec below.
if [[ "$YOLO" == true ]]; then
  launch_cmd="mosaic yolo $runtime"
fi
echo -e "${C_CYAN}Launching orchestration runtime: ${launch_cmd}${C_RESET}"
echo -e "${C_CYAN}Project:${C_RESET} $PROJECT"
echo -e "${C_CYAN}Capsule:${C_RESET} $(next_task_capsule_path "$PROJECT")"
[[ "$YOLO" == true ]] && echo -e "${C_YELLOW}[YOLO] Dangerous permissions mode enabled.${C_RESET}"
cd "$PROJECT"
# exec replaces this process with the runtime; nothing after a matching
# branch runs.
if [[ "$YOLO" == true ]]; then
  exec "$MOSAIC_HOME/bin/mosaic" yolo "$runtime" "$launch_prompt"
elif [[ "$runtime" == "claude" ]]; then
  exec "$MOSAIC_HOME/bin/mosaic" claude "$launch_prompt"
elif [[ "$runtime" == "codex" ]]; then
  exec "$MOSAIC_HOME/bin/mosaic" codex "$launch_prompt"
fi
# Reached only when runtime is neither claude nor codex and --yolo is off.
echo -e "${C_RED}Unsupported coord runtime: $runtime${C_RESET}" >&2
exit 1

View File

@@ -0,0 +1,241 @@
#!/usr/bin/env bash
set -euo pipefail
#
# session-status.sh — Check agent session health
#
# Reads the session lock and mission.json to classify the current session,
# then prints a human table or JSON summary.
#
# Usage:
#   session-status.sh [--project <path>] [--format table|json]
#
# Exit codes:
#   0 = running
#   2 = stale (recently died)
#   3 = dead (no longer running)
#   4 = no session
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/_lib.sh"
# ─── Parse arguments ─────────────────────────────────────────────────────────
PROJECT="."
FORMAT="table"
while [[ $# -gt 0 ]]; do
  case "$1" in
    --project) PROJECT="$2"; shift 2 ;;
    --format) FORMAT="$2"; shift 2 ;;
    -h|--help)
      echo "Usage: session-status.sh [--project <path>] [--format table|json]"
      exit 0
      ;;
    *) echo "Unknown option: $1" >&2; exit 1 ;;
  esac
done
_require_jq
runtime_cmd="$(coord_launch_command)"
run_cmd="$(coord_run_command)"
# ─── Check session lock ─────────────────────────────────────────────────────
lock_data=""
if ! lock_data="$(session_lock_read "$PROJECT")"; then
  # No active session — but check if a mission exists
  mp="$(mission_path "$PROJECT")"
  if [[ -f "$mp" ]]; then
    m_status="$(jq -r '.status // "inactive"' "$mp")"
    m_name="$(jq -r '.name // "unnamed"' "$mp")"
    m_id="$(jq -r '.mission_id // ""' "$mp")"
    m_total="$(jq '.milestones | length' "$mp")"
    m_done="$(jq '[.milestones[] | select(.status == "completed")] | length' "$mp")"
    m_current="$(jq -r '[.milestones[] | select(.status == "active" or .status == "pending")][0].name // "none"' "$mp")"
    # Task counts if TASKS.md exists
    task_json="$(count_tasks_md "$PROJECT")"
    t_total="$(echo "$task_json" | jq '.total')"
    t_done="$(echo "$task_json" | jq '.done')"
    t_pending="$(echo "$task_json" | jq '.pending')"
    t_inprog="$(echo "$task_json" | jq '.in_progress')"
    if [[ "$FORMAT" == "json" ]]; then
      # Machine-readable "no-session" summary with mission context attached.
      jq -n \
        --arg status "no-session" \
        --arg mission_status "$m_status" \
        --arg mission_name "$m_name" \
        --arg mission_id "$m_id" \
        --argjson milestones_total "$m_total" \
        --argjson milestones_done "$m_done" \
        --argjson tasks_total "$t_total" \
        --argjson tasks_done "$t_done" \
        '{
          status: $status,
          mission: {
            status: $mission_status,
            name: $mission_name,
            id: $mission_id,
            milestones_total: $milestones_total,
            milestones_done: $milestones_done,
            tasks_total: $tasks_total,
            tasks_done: $tasks_done
          }
        }'
    else
      echo ""
      echo -e " ${C_DIM}No active agent session.${C_RESET}"
      echo ""
      # Mission info
      case "$m_status" in
        active) ms_color="${C_GREEN}ACTIVE${C_RESET}" ;;
        paused) ms_color="${C_YELLOW}PAUSED${C_RESET}" ;;
        completed) ms_color="${C_CYAN}COMPLETED${C_RESET}" ;;
        *) ms_color="${C_DIM}${m_status}${C_RESET}" ;;
      esac
      echo -e " ${C_BOLD}Mission:${C_RESET} $m_name"
      echo -e " ${C_CYAN}Status:${C_RESET} $ms_color"
      echo -e " ${C_CYAN}ID:${C_RESET} $m_id"
      echo -e " ${C_CYAN}Milestones:${C_RESET} $m_done / $m_total completed"
      [[ "$m_current" != "none" ]] && echo -e " ${C_CYAN}Current:${C_RESET} $m_current"
      if (( t_total > 0 )); then
        echo -e " ${C_CYAN}Tasks:${C_RESET} $t_done / $t_total done ($t_pending pending, $t_inprog in-progress)"
      fi
      echo ""
      # Suggest the next command based on mission status.
      if [[ "$m_status" == "active" || "$m_status" == "paused" ]]; then
        echo -e " ${C_BOLD}Next steps:${C_RESET}"
        echo "   $run_cmd            Auto-generate context and launch"
        echo "   mosaic coord continue   Generate continuation prompt"
        echo "   $runtime_cmd            Launch agent session"
      elif [[ "$m_status" == "completed" ]]; then
        echo -e " ${C_DIM}Mission completed. Start a new one with: mosaic coord init${C_RESET}"
      else
        echo -e " ${C_DIM}Initialize with: mosaic coord init --name \"Mission Name\"${C_RESET}"
      fi
      echo ""
    fi
  else
    # No lock AND no mission file at all.
    if [[ "$FORMAT" == "json" ]]; then
      echo '{"status":"no-session","mission":null}'
    else
      echo ""
      echo -e " ${C_DIM}No active session.${C_RESET}"
      echo -e " ${C_DIM}No mission found.${C_RESET}"
      echo ""
      echo " Initialize with: mosaic coord init --name \"Mission Name\""
      echo ""
    fi
  fi
  exit 4
fi
# Parse lock
session_id="$(echo "$lock_data" | jq -r '.session_id // "unknown"')"
runtime="$(echo "$lock_data" | jq -r '.runtime // "unknown"')"
pid="$(echo "$lock_data" | jq -r '.pid // 0')"
started_at="$(echo "$lock_data" | jq -r '.started_at // ""')"
milestone_id="$(echo "$lock_data" | jq -r '.milestone_id // ""')"
# ─── Determine status ───────────────────────────────────────────────────────
status="unknown"
exit_code=1
if is_pid_alive "$pid"; then
  status="running"
  exit_code=0
else
  # PID is dead — check how recently
  last_act="$(last_activity_time "$PROJECT")"
  now="$(epoch_now)"
  age=$(( now - last_act ))
  # NOTE(review): both age brackets below yield "stale"; only beyond
  # DEAD_THRESHOLD is it "dead". The first branch looks redundant — confirm.
  if (( age < STALE_THRESHOLD )); then
    status="stale"
    exit_code=2
  elif (( age < DEAD_THRESHOLD )); then
    status="stale"
    exit_code=2
  else
    status="dead"
    exit_code=3
  fi
fi
# ─── Gather supplementary info ──────────────────────────────────────────────
duration_secs=0
if [[ -n "$started_at" ]]; then
  start_epoch="$(iso_to_epoch "$started_at")"
  now="$(epoch_now)"
  duration_secs=$(( now - start_epoch ))
fi
last_act="$(last_activity_time "$PROJECT")"
# Current milestone from mission.json
current_ms=""
if [[ -f "$(mission_path "$PROJECT")" ]]; then
  current_ms="$(current_milestone_id "$PROJECT")"
  if [[ -n "$current_ms" ]]; then
    ms_name="$(milestone_name "$PROJECT" "$current_ms")"
    [[ -n "$ms_name" ]] && current_ms="$current_ms ($ms_name)"
  fi
fi
# Next task from TASKS.md
next_task="$(find_next_task "$PROJECT")"
# ─── Output ──────────────────────────────────────────────────────────────────
if [[ "$FORMAT" == "json" ]]; then
  jq -n \
    --arg status "$status" \
    --arg session_id "$session_id" \
    --arg runtime "$runtime" \
    --arg pid "$pid" \
    --arg started_at "$started_at" \
    --arg duration "$duration_secs" \
    --arg milestone "$current_ms" \
    --arg next_task "$next_task" \
    --arg last_activity "$last_act" \
    '{
      status: $status,
      session_id: $session_id,
      runtime: $runtime,
      pid: ($pid | tonumber),
      started_at: $started_at,
      duration_seconds: ($duration | tonumber),
      milestone: $milestone,
      next_task: $next_task,
      last_activity_epoch: ($last_activity | tonumber)
    }'
else
  # Color the status
  case "$status" in
    running) status_color="${C_GREEN}RUNNING${C_RESET}" ;;
    stale) status_color="${C_YELLOW}STALE${C_RESET}" ;;
    dead) status_color="${C_RED}DEAD${C_RESET}" ;;
    *) status_color="$status" ;;
  esac
  echo ""
  echo -e " Session Status: $status_color ($runtime)"
  echo -e " ${C_CYAN}Session ID:${C_RESET} $session_id"
  echo -e " ${C_CYAN}Started:${C_RESET} $started_at ($(format_duration "$duration_secs"))"
  echo -e " ${C_CYAN}PID:${C_RESET} $pid"
  [[ -n "$current_ms" ]] && echo -e " ${C_CYAN}Milestone:${C_RESET} $current_ms"
  [[ -n "$next_task" ]] && echo -e " ${C_CYAN}Next task:${C_RESET} $next_task"
  echo -e " ${C_CYAN}Last activity:${C_RESET} $(format_ago "$last_act")"
  echo ""
  if [[ "$status" == "stale" || "$status" == "dead" ]]; then
    echo -e " ${C_YELLOW}Session is no longer running.${C_RESET}"
    echo " Recovery: mosaic coord resume"
    echo " Continue: mosaic coord continue"
    echo ""
  fi
fi
# Propagate the classification as the documented exit code (see header).
exit "$exit_code"

View File

@@ -0,0 +1,78 @@
#!/usr/bin/env bash
set -euo pipefail
#
# smoke-test.sh — Behavior smoke checks for coord continue/run workflows.
#
# Builds a throwaway project fixture in a temp dir, runs continue-prompt.sh
# and session-run.sh against it, and asserts on their outputs.
#
# Usage:
#   smoke-test.sh
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/_lib.sh"
# Running tallies incremented by pass_case / fail_case below.
PASS=0
FAIL=0
# Record a passing check: announce it on stdout and bump the pass tally.
pass_case() {
  local label=$1
  printf 'PASS: %s\n' "$label"
  PASS=$(( PASS + 1 ))
}
# Record a failing check: announce it on stderr and bump the fail tally.
fail_case() {
  local label=$1
  printf 'FAIL: %s\n' "$label" >&2
  FAIL=$(( FAIL + 1 ))
}
# Build a minimal project fixture: mission.json, manifest, scratchpad, and a
# one-task TASKS.md. The temp dir is removed on exit regardless of outcome.
tmp_project="$(mktemp -d)"
trap 'rm -rf "$tmp_project"' EXIT
mkdir -p "$tmp_project/.mosaic/orchestrator" "$tmp_project/docs/scratchpads"
cat > "$tmp_project/.mosaic/orchestrator/mission.json" <<'JSON'
{
  "mission_id": "smoke-mission-20260223",
  "name": "Smoke Mission",
  "status": "active",
  "project_path": "SMOKE_PROJECT",
  "quality_gates": "pnpm lint && pnpm test",
  "milestones": [
    { "id": "M1", "name": "Milestone One", "status": "pending" }
  ],
  "sessions": []
}
JSON
cat > "$tmp_project/docs/MISSION-MANIFEST.md" <<'MD'
# Mission Manifest
MD
cat > "$tmp_project/docs/scratchpads/smoke-mission-20260223.md" <<'MD'
# Scratchpad
MD
cat > "$tmp_project/docs/TASKS.md" <<'MD'
| id | status | milestone | description | pr | notes |
|----|--------|-----------|-------------|----|-------|
| T-001 | pending | M1 | Smoke task | | |
MD
# Case 1: "continue" under the codex runtime must write a next-task capsule
# with the right runtime and task, and mention the runtime in the prompt.
codex_continue_output="$(MOSAIC_COORD_RUNTIME=codex bash "$SCRIPT_DIR/continue-prompt.sh" --project "$tmp_project")"
capsule_file="$tmp_project/.mosaic/orchestrator/next-task.json"
if [[ -f "$capsule_file" ]]; then pass_case "continue writes next-task capsule"; else fail_case "continue writes next-task capsule"; fi
if jq -e '.runtime == "codex"' "$capsule_file" >/dev/null 2>&1; then pass_case "capsule runtime is codex"; else fail_case "capsule runtime is codex"; fi
if jq -e '.next_task == "T-001"' "$capsule_file" >/dev/null 2>&1; then pass_case "capsule next_task is T-001"; else fail_case "capsule next_task is T-001"; fi
if grep -Fq 'Target runtime:** codex' <<< "$codex_continue_output"; then pass_case "continue prompt contains target runtime codex"; else fail_case "continue prompt contains target runtime codex"; fi
# Case 2: session-run --print under codex must use the strict kickoff wrapper
# (mode declaration first, no-questions gate, embedded capsule JSON).
codex_run_prompt="$(MOSAIC_COORD_RUNTIME=codex bash "$SCRIPT_DIR/session-run.sh" --project "$tmp_project" --print)"
if [[ "$(printf '%s\n' "$codex_run_prompt" | head -n1)" == "Now initiating Orchestrator mode..." ]]; then pass_case "codex run prompt first line is mode declaration"; else fail_case "codex run prompt first line is mode declaration"; fi
if grep -Fq 'Do NOT ask clarifying questions before your first tool actions' <<< "$codex_run_prompt"; then pass_case "codex run prompt includes no-questions hard gate"; else fail_case "codex run prompt includes no-questions hard gate"; fi
if grep -Fq '"next_task": "T-001"' <<< "$codex_run_prompt"; then pass_case "codex run prompt embeds capsule json"; else fail_case "codex run prompt embeds capsule json"; fi
# Case 3: the claude runtime must keep the plain continuation prompt format.
claude_run_prompt="$(MOSAIC_COORD_RUNTIME=claude bash "$SCRIPT_DIR/session-run.sh" --project "$tmp_project" --print)"
if [[ "$(printf '%s\n' "$claude_run_prompt" | head -n1)" == "## Continuation Mission" ]]; then pass_case "claude run prompt remains continuation prompt format"; else fail_case "claude run prompt remains continuation prompt format"; fi
echo ""
echo "Smoke test summary: pass=$PASS fail=$FAIL"
# Non-zero exit when any case failed, so CI can gate on this script.
if (( FAIL > 0 )); then
  exit 1
fi

View File

@@ -0,0 +1,212 @@
# Portainer CLI Scripts
CLI tools for managing Portainer stacks via the API.
## Setup
### Environment Variables
Set these environment variables before using the scripts:
```bash
export PORTAINER_URL="https://portainer.example.com:9443"
export PORTAINER_API_KEY="your-api-key-here"
```
You can add these to your shell profile (`~/.bashrc`, `~/.zshrc`) or use a `.env` file.
### Creating an API Key
1. Log in to Portainer
2. Click your username in the top right corner > "My account"
3. Scroll to "Access tokens" section
4. Click "Add access token"
5. Enter a descriptive name (e.g., "CLI scripts")
6. Copy the token immediately (you cannot view it again)
### Dependencies
- `curl` - HTTP client
- `jq` - JSON processor
`curl` is typically pre-installed on most Linux distributions; `jq` usually needs to be installed via your package manager (e.g. `apt install jq` or `dnf install jq`).
## Scripts
### stack-list.sh
List all Portainer stacks.
```bash
# List all stacks in table format
stack-list.sh
# List stacks in JSON format
stack-list.sh -f json
# List only stack names
stack-list.sh -f names
stack-list.sh -q
# Filter by endpoint ID
stack-list.sh -e 1
```
### stack-status.sh
Show status and containers for a stack.
```bash
# Show stack status
stack-status.sh -n mystack
# Show status in JSON format
stack-status.sh -n mystack -f json
# Use stack ID instead of name
stack-status.sh -i 5
```
### stack-redeploy.sh
Redeploy a stack. For git-based stacks, this pulls the latest from the repository.
```bash
# Redeploy a stack by name
stack-redeploy.sh -n mystack
# Redeploy and pull latest images
stack-redeploy.sh -n mystack -p
# Redeploy by stack ID
stack-redeploy.sh -i 5 -p
```
### stack-logs.sh
View logs for stack services/containers.
```bash
# List available services in a stack
stack-logs.sh -n mystack
# View logs for a specific service
stack-logs.sh -n mystack -s webapp
# Show last 200 lines
stack-logs.sh -n mystack -s webapp -t 200
# Follow logs (stream)
stack-logs.sh -n mystack -s webapp -f
# Include timestamps
stack-logs.sh -n mystack -s webapp --timestamps
```
### stack-start.sh
Start an inactive stack.
```bash
stack-start.sh -n mystack
stack-start.sh -i 5
```
### stack-stop.sh
Stop a running stack.
```bash
stack-stop.sh -n mystack
stack-stop.sh -i 5
```
### endpoint-list.sh
List all Portainer endpoints/environments.
```bash
# List in table format
endpoint-list.sh
# List in JSON format
endpoint-list.sh -f json
```
## Common Workflows
### CI/CD Redeploy
After pushing changes to a git-based stack's repository:
```bash
# Redeploy with latest images
stack-redeploy.sh -n myapp -p
# Check status
stack-status.sh -n myapp
# View logs to verify startup
stack-logs.sh -n myapp -s api -t 50
```
### Debugging a Failing Stack
```bash
# Check overall status
stack-status.sh -n myapp
# List all services
stack-logs.sh -n myapp
# View logs for failing service
stack-logs.sh -n myapp -s worker -t 200
# Follow logs in real-time
stack-logs.sh -n myapp -s worker -f
```
### Restart a Stack
```bash
# Stop the stack
stack-stop.sh -n myapp
# Start it again
stack-start.sh -n myapp
# Or just redeploy (pulls latest images)
stack-redeploy.sh -n myapp -p
```
## Error Handling
All scripts:
- Exit with code 0 on success
- Exit with code 1 on error
- Print errors to stderr
- Validate required environment variables before making API calls
## API Reference
These scripts use the Portainer CE API. Key endpoints:
| Endpoint | Method | Description |
| ------------------------------------------------- | ------ | ------------------------ |
| `/api/stacks` | GET | List all stacks |
| `/api/stacks/{id}` | GET | Get stack details |
| `/api/stacks/{id}/file` | GET | Get stack compose file |
| `/api/stacks/{id}` | PUT | Update/redeploy stack |
| `/api/stacks/{id}/git/redeploy` | PUT | Redeploy git-based stack |
| `/api/stacks/{id}/start` | POST | Start inactive stack |
| `/api/stacks/{id}/stop` | POST | Stop running stack |
| `/api/endpoints` | GET | List all environments |
| `/api/endpoints/{id}/docker/containers/json` | GET | List containers |
| `/api/endpoints/{id}/docker/containers/{id}/logs` | GET | Get container logs |
For full API documentation, see:
- [Portainer API Access](https://docs.portainer.io/api/access)
- [Portainer API Examples](https://docs.portainer.io/api/examples)
- [Portainer API Docs](https://docs.portainer.io/api/docs)

View File

@@ -0,0 +1,85 @@
#!/usr/bin/env bash
#
# endpoint-list.sh - List all Portainer endpoints/environments
#
# Usage: endpoint-list.sh [-f format]
#
# Environment variables:
#   PORTAINER_URL     - Portainer instance URL (e.g., https://portainer.example.com:9443)
#   PORTAINER_API_KEY - API access token
#
# Options:
#   -f format   Output format: table (default), json
#   -h          Show this help
set -euo pipefail
# Default values
FORMAT="table"
# Parse arguments
while getopts "f:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    h)
      # Show the header comment as help. Skip the shebang line: previously
      # 'grep "^#"' matched it too and the help began with "!/usr/bin/env bash".
      head -16 "$0" | grep "^#" | grep -v "^#!" | sed 's/^# \?//'
      exit 0
      ;;
    *)
      echo "Usage: $0 [-f format]" >&2
      exit 1
      ;;
  esac
done
# Validate dependencies up front: both curl and jq are required, and a missing
# jq would otherwise only surface as a confusing error after the API call.
for dep in curl jq; do
  if ! command -v "$dep" >/dev/null 2>&1; then
    echo "Error: required dependency '$dep' not found in PATH" >&2
    exit 1
  fi
done
# Validate environment
if [[ -z "${PORTAINER_URL:-}" ]]; then
  echo "Error: PORTAINER_URL environment variable not set" >&2
  exit 1
fi
if [[ -z "${PORTAINER_API_KEY:-}" ]]; then
  echo "Error: PORTAINER_API_KEY environment variable not set" >&2
  exit 1
fi
# Remove trailing slash from URL
PORTAINER_URL="${PORTAINER_URL%/}"
# Fetch endpoints. -w appends the HTTP status code on its own line so the
# body and status can be split below (tail -n1 / sed '$d').
response=$(curl -s -w "\n%{http_code}" \
  -H "X-API-Key: ${PORTAINER_API_KEY}" \
  "${PORTAINER_URL}/api/endpoints")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
  echo "Error: API request failed with status $http_code" >&2
  echo "$body" >&2
  exit 1
fi
# Output based on format
case "$FORMAT" in
  json)
    echo "$body" | jq '.'
    ;;
  table)
    # Header/separator use the same field widths as the data rows so the
    # columns always line up.
    printf "%-4s %-28s %-10s %-8s %s\n" "ID" "NAME" "TYPE" "STATUS" "URL"
    printf "%-4s %-28s %-10s %-8s %s\n" "----" "----------------------------" "----------" "--------" "---"
    # Type/Status integers follow the Portainer API enums — TODO confirm the
    # mapping against the deployed Portainer version.
    echo "$body" | jq -r '.[] | [
      .Id,
      .Name,
      (if .Type == 1 then "docker" elif .Type == 2 then "agent" elif .Type == 3 then "azure" elif .Type == 4 then "edge" elif .Type == 5 then "kubernetes" else "unknown" end),
      (if .Status == 1 then "up" elif .Status == 2 then "down" else "unknown" end),
      .URL
    ] | @tsv' | while IFS=$'\t' read -r id name type status url; do
      printf "%-4s %-28s %-10s %-8s %s\n" "$id" "$name" "$type" "$status" "$url"
    done
    ;;
  *)
    echo "Error: Unknown format '$FORMAT'. Use: table, json" >&2
    exit 1
    ;;
esac

View File

@@ -0,0 +1,100 @@
#!/usr/bin/env bash
#
# stack-list.sh - List all Portainer stacks
#
# Usage: stack-list.sh [-e endpoint_id] [-f format] [-q]
#
# Environment variables:
#   PORTAINER_URL     - Portainer instance URL (e.g., https://portainer.example.com:9443)
#   PORTAINER_API_KEY - API access token
#
# Options:
#   -e endpoint_id  Filter by endpoint/environment ID
#   -f format       Output format: table (default), json, names
#   -q              Quiet mode - only output stack names (shortcut for -f names)
#   -h              Show this help
set -euo pipefail

# Default values
ENDPOINT_FILTER=""
FORMAT="table"

# Parse arguments
while getopts "e:f:qh" opt; do
  case $opt in
    e) ENDPOINT_FILTER="$OPTARG" ;;
    f) FORMAT="$OPTARG" ;;
    q) FORMAT="names" ;;  # -q is sugar for -f names; no separate flag needed
    h)
      # tail -n +2 skips the shebang so only the comment header is printed.
      head -20 "$0" | tail -n +2 | grep "^#" | sed 's/^# \?//'
      exit 0
      ;;
    *)
      echo "Usage: $0 [-e endpoint_id] [-f format] [-q]" >&2
      exit 1
      ;;
  esac
done

# Validate required tooling, environment, and options up front.
if ! command -v jq >/dev/null 2>&1; then
  echo "Error: jq is required but not installed" >&2
  exit 1
fi
if [[ -z "${PORTAINER_URL:-}" ]]; then
  echo "Error: PORTAINER_URL environment variable not set" >&2
  exit 1
fi
if [[ -z "${PORTAINER_API_KEY:-}" ]]; then
  echo "Error: PORTAINER_API_KEY environment variable not set" >&2
  exit 1
fi
# jq's `tonumber` aborts on non-numeric input, so reject it here with a
# readable message instead of a mid-pipeline jq error.
if [[ -n "$ENDPOINT_FILTER" && ! "$ENDPOINT_FILTER" =~ ^[0-9]+$ ]]; then
  echo "Error: endpoint_id must be numeric, got '$ENDPOINT_FILTER'" >&2
  exit 1
fi

# Remove trailing slash from URL so path concatenation stays unambiguous.
PORTAINER_URL="${PORTAINER_URL%/}"

# Fetch stacks. The trailing "\n%{http_code}" lets us split the HTTP
# status from the response body without a second request.
response=$(curl -s -w "\n%{http_code}" \
  -H "X-API-Key: ${PORTAINER_API_KEY}" \
  "${PORTAINER_URL}/api/stacks")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

if [[ "$http_code" != "200" ]]; then
  echo "Error: API request failed with status $http_code" >&2
  echo "$body" >&2
  exit 1
fi

# Filter by endpoint if specified
if [[ -n "$ENDPOINT_FILTER" ]]; then
  body=$(echo "$body" | jq --arg eid "$ENDPOINT_FILTER" '[.[] | select(.EndpointId == ($eid | tonumber))]')
fi

# Output based on format
case "$FORMAT" in
  json)
    echo "$body" | jq '.'
    ;;
  names)
    echo "$body" | jq -r '.[].Name'
    ;;
  table)
    # Numeric Status/Type codes follow the Portainer stack schema
    # (Status: 1=active, 2=inactive; Type: 1=swarm, 2=compose, 3=k8s).
    #
    # CreationDate is a Unix-epoch integer in the Portainer API; jq's
    # split() errors on numbers (and `//` does not catch errors), so
    # dispatch on type and convert epochs with todate.
    printf "%-4s %-28s %-8s %-8s %-8s %s\n" "ID" "NAME" "STATUS" "TYPE" "ENDPOINT" "CREATED"
    printf "%-4s %-28s %-8s %-8s %-8s %s\n" "----" "----------------------------" "--------" "--------" "--------" "-------"
    echo "$body" | jq -r '.[] | [
      .Id,
      .Name,
      (if .Status == 1 then "active" elif .Status == 2 then "inactive" else "unknown" end),
      (if .Type == 1 then "swarm" elif .Type == 2 then "compose" elif .Type == 3 then "k8s" else "unknown" end),
      .EndpointId,
      (.CreationDate
        | if type == "number" then (todate | split("T")[0])
          elif type == "string" then (split("T")[0])
          else "N/A" end)
    ] | @tsv' | while IFS=$'\t' read -r id name status type endpoint created; do
      printf "%-4s %-28s %-8s %-8s %-8s %s\n" "$id" "$name" "$status" "$type" "$endpoint" "$created"
    done
    ;;
  *)
    echo "Error: Unknown format '$FORMAT'. Use: table, json, names" >&2
    exit 1
    ;;
esac

Some files were not shown because too many files have changed in this diff Show More