feat: rename rails/ to tools/ and add service tool suites (#4)

Co-authored-by: Jason Woltje <jason@diversecanvas.com>
Co-committed-by: Jason Woltje <jason@diversecanvas.com>
This commit was merged in pull request #4.
This commit is contained in:
2026-02-22 17:52:23 +00:00
committed by jason.woltje
parent 248db8935c
commit a8e580e1a3
158 changed files with 2481 additions and 213 deletions

175
tools/_lib/credentials.sh Executable file
View File

@@ -0,0 +1,175 @@
#!/usr/bin/env bash
#
# credentials.sh — Shared credential loader for Mosaic tool suites
#
# Usage: source ~/.config/mosaic/tools/_lib/credentials.sh
# load_credentials <service-name>
#
# Loads credentials from environment variables first, then falls back
# to ~/src/jarvis-brain/credentials.json (or MOSAIC_CREDENTIALS_FILE).
#
# Supported services:
# portainer, coolify, authentik, glpi, github,
# gitea-mosaicstack, gitea-usc, woodpecker
#
# After loading, service-specific env vars are exported.
# Run `load_credentials --help` for details.
# Path to the shared credentials JSON store; override with MOSAIC_CREDENTIALS_FILE.
MOSAIC_CREDENTIALS_FILE="${MOSAIC_CREDENTIALS_FILE:-$HOME/src/jarvis-brain/credentials.json}"
# Verify jq is on PATH; emit an error on stderr and fail otherwise.
_mosaic_require_jq() {
  command -v jq &>/dev/null && return 0
  echo "Error: jq is required but not installed" >&2
  return 1
}
# Look up a single value in the credentials file.
# $1 is a jq path expression (e.g. '.portainer.url'); prints nothing when
# the key is absent, and fails when the credentials file does not exist.
_mosaic_read_cred() {
  local query="$1"
  [[ -f "$MOSAIC_CREDENTIALS_FILE" ]] || {
    echo "Error: Credentials file not found: $MOSAIC_CREDENTIALS_FILE" >&2
    return 1
  }
  jq -r "$query // empty" "$MOSAIC_CREDENTIALS_FILE"
}
# load_credentials <service> — resolve and export credentials for one service.
# Precedence per variable: an existing environment value always wins; otherwise
# the value is read from $MOSAIC_CREDENTIALS_FILE via jq. Trailing slashes are
# stripped from *_URL values after export. Returns 1 with a stderr message when
# jq is unavailable, a required value is missing, or the service is unknown.
load_credentials() {
local service="$1"
# No argument or --help: print usage to stdout and succeed.
if [[ -z "$service" || "$service" == "--help" ]]; then
cat <<'EOF'
Usage: load_credentials <service>
Services and exported variables:
portainer → PORTAINER_URL, PORTAINER_API_KEY
coolify → COOLIFY_URL, COOLIFY_TOKEN
authentik → AUTHENTIK_URL, AUTHENTIK_TOKEN, AUTHENTIK_USERNAME, AUTHENTIK_PASSWORD
glpi → GLPI_URL, GLPI_APP_TOKEN, GLPI_USER_TOKEN
github → GITHUB_TOKEN
gitea-mosaicstack → GITEA_URL, GITEA_TOKEN
gitea-usc → GITEA_URL, GITEA_TOKEN
woodpecker → WOODPECKER_URL, WOODPECKER_TOKEN
EOF
return 0
fi
# The jq fallback path below needs jq even when all values come from env.
_mosaic_require_jq || return 1
case "$service" in
portainer)
# Pattern used by every arm: keep env value if set, else read from the
# credentials file; then normalize the URL and verify required values.
export PORTAINER_URL="${PORTAINER_URL:-$(_mosaic_read_cred '.portainer.url')}"
export PORTAINER_API_KEY="${PORTAINER_API_KEY:-$(_mosaic_read_cred '.portainer.api_key')}"
PORTAINER_URL="${PORTAINER_URL%/}"
[[ -n "$PORTAINER_URL" ]] || { echo "Error: portainer.url not found" >&2; return 1; }
[[ -n "$PORTAINER_API_KEY" ]] || { echo "Error: portainer.api_key not found" >&2; return 1; }
;;
coolify)
export COOLIFY_URL="${COOLIFY_URL:-$(_mosaic_read_cred '.coolify.url')}"
export COOLIFY_TOKEN="${COOLIFY_TOKEN:-$(_mosaic_read_cred '.coolify.app_token')}"
COOLIFY_URL="${COOLIFY_URL%/}"
[[ -n "$COOLIFY_URL" ]] || { echo "Error: coolify.url not found" >&2; return 1; }
[[ -n "$COOLIFY_TOKEN" ]] || { echo "Error: coolify.app_token not found" >&2; return 1; }
;;
authentik)
# Only the URL is mandatory here: token OR username/password may be used
# downstream (see tools/authentik/auth-token.sh), so none is enforced.
export AUTHENTIK_URL="${AUTHENTIK_URL:-$(_mosaic_read_cred '.authentik.url')}"
export AUTHENTIK_TOKEN="${AUTHENTIK_TOKEN:-$(_mosaic_read_cred '.authentik.token')}"
export AUTHENTIK_USERNAME="${AUTHENTIK_USERNAME:-$(_mosaic_read_cred '.authentik.username')}"
export AUTHENTIK_PASSWORD="${AUTHENTIK_PASSWORD:-$(_mosaic_read_cred '.authentik.password')}"
AUTHENTIK_URL="${AUTHENTIK_URL%/}"
[[ -n "$AUTHENTIK_URL" ]] || { echo "Error: authentik.url not found" >&2; return 1; }
;;
glpi)
export GLPI_URL="${GLPI_URL:-$(_mosaic_read_cred '.glpi.url')}"
export GLPI_APP_TOKEN="${GLPI_APP_TOKEN:-$(_mosaic_read_cred '.glpi.app_token')}"
export GLPI_USER_TOKEN="${GLPI_USER_TOKEN:-$(_mosaic_read_cred '.glpi.user_token')}"
GLPI_URL="${GLPI_URL%/}"
[[ -n "$GLPI_URL" ]] || { echo "Error: glpi.url not found" >&2; return 1; }
;;
github)
export GITHUB_TOKEN="${GITHUB_TOKEN:-$(_mosaic_read_cred '.github.token')}"
[[ -n "$GITHUB_TOKEN" ]] || { echo "Error: github.token not found" >&2; return 1; }
;;
gitea-mosaicstack)
# Both gitea-* arms export the same GITEA_URL/GITEA_TOKEN names, so loading
# one instance overwrites the other — callers load exactly one per run.
export GITEA_URL="${GITEA_URL:-$(_mosaic_read_cred '.gitea.mosaicstack.url')}"
export GITEA_TOKEN="${GITEA_TOKEN:-$(_mosaic_read_cred '.gitea.mosaicstack.token')}"
GITEA_URL="${GITEA_URL%/}"
[[ -n "$GITEA_URL" ]] || { echo "Error: gitea.mosaicstack.url not found" >&2; return 1; }
[[ -n "$GITEA_TOKEN" ]] || { echo "Error: gitea.mosaicstack.token not found" >&2; return 1; }
;;
gitea-usc)
export GITEA_URL="${GITEA_URL:-$(_mosaic_read_cred '.gitea.usc.url')}"
export GITEA_TOKEN="${GITEA_TOKEN:-$(_mosaic_read_cred '.gitea.usc.token')}"
GITEA_URL="${GITEA_URL%/}"
[[ -n "$GITEA_URL" ]] || { echo "Error: gitea.usc.url not found" >&2; return 1; }
[[ -n "$GITEA_TOKEN" ]] || { echo "Error: gitea.usc.token not found" >&2; return 1; }
;;
woodpecker)
export WOODPECKER_URL="${WOODPECKER_URL:-$(_mosaic_read_cred '.woodpecker.url')}"
export WOODPECKER_TOKEN="${WOODPECKER_TOKEN:-$(_mosaic_read_cred '.woodpecker.token')}"
WOODPECKER_URL="${WOODPECKER_URL%/}"
[[ -n "$WOODPECKER_URL" ]] || { echo "Error: woodpecker.url not found" >&2; return 1; }
[[ -n "$WOODPECKER_TOKEN" ]] || { echo "Error: woodpecker.token not found" >&2; return 1; }
;;
*)
echo "Error: Unknown service '$service'" >&2
echo "Supported: portainer, coolify, authentik, glpi, github, gitea-mosaicstack, gitea-usc, woodpecker" >&2
return 1
;;
esac
}
# Common HTTP helper — makes a curl request and separates body from status code
# Usage: mosaic_http GET "/api/v1/endpoint" "Authorization: Bearer $TOKEN" [base_url]
# Returns: body on stdout, sets global MOSAIC_HTTP_CODE (the trailing line
# appended by curl's -w "\n%{http_code}"). On transport failure (DNS error,
# timeout, refused connection) MOSAIC_HTTP_CODE is set to "000" and the
# function returns 1.
mosaic_http() {
  local method="$1"
  local endpoint="$2"
  local auth_header="$3"
  local base_url="${4:-}"
  local response
  if ! response=$(curl -sk -w "\n%{http_code}" -X "$method" \
    -H "$auth_header" \
    -H "Content-Type: application/json" \
    "${base_url}${endpoint}"); then
    MOSAIC_HTTP_CODE="000"
    return 1
  fi
  # printf instead of echo: a body whose last/first line starts with "-n"
  # or "-e" would be mangled by echo's option parsing.
  MOSAIC_HTTP_CODE=$(printf '%s\n' "$response" | tail -n1)
  printf '%s\n' "$response" | sed '$d'
}
# POST variant with body
# Usage: mosaic_http_post "/api/v1/endpoint" "Authorization: Bearer $TOKEN" '{"key":"val"}' [base_url]
# Prints the response body on stdout and records the HTTP status code in
# the global MOSAIC_HTTP_CODE.
mosaic_http_post() {
  local path="$1"
  local header="$2"
  local payload="$3"
  local base="${4:-}"
  local raw
  raw=$(curl -sk -w "\n%{http_code}" -X POST \
    -H "$header" \
    -H "Content-Type: application/json" \
    -d "$payload" \
    "${base}${path}")
  # The last line of $raw is the status code appended by -w; the rest is the body.
  MOSAIC_HTTP_CODE=$(echo "$raw" | tail -n1)
  echo "$raw" | sed '$d'
}
# PATCH variant with body
# Same contract as mosaic_http_post: body on stdout, status code in the
# global MOSAIC_HTTP_CODE.
mosaic_http_patch() {
  local path="$1"
  local header="$2"
  local payload="$3"
  local base="${4:-}"
  local raw
  raw=$(curl -sk -w "\n%{http_code}" -X PATCH \
    -H "$header" \
    -H "Content-Type: application/json" \
    -d "$payload" \
    "${base}${path}")
  # Split curl's "-w" status-code line off the end of the response.
  MOSAIC_HTTP_CODE=$(echo "$raw" | tail -n1)
  echo "$raw" | sed '$d'
}

59
tools/authentik/README.md Normal file
View File

@@ -0,0 +1,59 @@
# Authentik Tool Suite
Manage Authentik identity provider (SSO, users, groups, applications, flows) via CLI.
## Prerequisites
- `jq` installed
- Authentik credentials in `~/src/jarvis-brain/credentials.json` (or `$MOSAIC_CREDENTIALS_FILE`)
- Required fields: `authentik.url`, `authentik.username`, `authentik.password`
## Authentication
Scripts use `auth-token.sh` to auto-authenticate via username/password and cache the API token at `~/.cache/mosaic/authentik-token`. The token is validated on each use and refreshed automatically when expired.
For better security, create a long-lived API token in Authentik admin (Directory > Tokens) and set `$AUTHENTIK_TOKEN` in your environment — the scripts will use it directly.
## Scripts
| Script | Purpose |
|--------|---------|
| `auth-token.sh` | Authenticate and cache API token |
| `user-list.sh` | List users (search, filter by group) |
| `user-create.sh` | Create user with optional group assignment |
| `group-list.sh` | List groups |
| `app-list.sh` | List OAuth/SAML applications |
| `flow-list.sh` | List authentication flows |
| `admin-status.sh` | System health and version info |
## Common Options
All scripts support:
- `-f json` — JSON output (default: table)
- `-h` — Show help
## API Reference
- Base URL: `https://auth.diversecanvas.com`
- API prefix: `/api/v3/`
- OpenAPI schema: `/api/v3/schema/`
- Auth: Bearer token in `Authorization` header
## Examples
```bash
# List all users
~/.config/mosaic/tools/authentik/user-list.sh
# Search for a user
~/.config/mosaic/tools/authentik/user-list.sh -s "jason"
# Create a user in the admins group
~/.config/mosaic/tools/authentik/user-create.sh -u newuser -n "New User" -e new@example.com -g admins
# List OAuth applications as JSON
~/.config/mosaic/tools/authentik/app-list.sh -f json
# Check system health
~/.config/mosaic/tools/authentik/admin-status.sh
```

55
tools/authentik/admin-status.sh Executable file
View File

@@ -0,0 +1,55 @@
#!/usr/bin/env bash
#
# admin-status.sh — Authentik system health and version info
#
# Usage: admin-status.sh [-f format]
#
# Options:
# -f format Output format: table (default), json
# -h Show this help
set -euo pipefail
# Load shared credential helpers; exports AUTHENTIK_URL for this script.
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials authentik
FORMAT="table"
while getopts "f:h" opt; do
case $opt in
f) FORMAT="$OPTARG" ;;
# Help = this file's leading comment block.
# NOTE(review): grep "^#" also matches the shebang, so the first help
# line prints as "!/usr/bin/env bash" — consider skipping line 1.
h) head -11 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
*) echo "Usage: $0 [-f format]" >&2; exit 1 ;;
esac
done
# auth-token.sh -q prints only a validated API token (cached when possible).
TOKEN=$("$SCRIPT_DIR/auth-token.sh" -q)
# -w appends the HTTP status code on its own line after the body.
response=$(curl -sk -w "\n%{http_code}" \
-H "Authorization: Bearer $TOKEN" \
"${AUTHENTIK_URL}/api/v3/admin/system/")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to get system status (HTTP $http_code)" >&2
exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
echo "$body" | jq '.'
exit 0
fi
# Table format: render selected fields from the /admin/system/ response.
echo "Authentik System Status"
echo "======================="
echo "$body" | jq -r '
" URL: \(.http_host // "unknown")\n" +
" Version: \(.runtime.authentik_version // "unknown")\n" +
" Python: \(.runtime.python_version // "unknown")\n" +
" Workers: \(.runtime.gunicorn_workers // "unknown")\n" +
" Build Hash: \(.runtime.build_hash // "unknown")\n" +
" Embedded Outpost: \(.embedded_outpost_host // "unknown")"
'

62
tools/authentik/app-list.sh Executable file
View File

@@ -0,0 +1,62 @@
#!/usr/bin/env bash
#
# app-list.sh — List Authentik applications
#
# Usage: app-list.sh [-f format] [-s search]
#
# Options:
#   -f format   Output format: table (default), json
#   -s search   Search by application name
#   -h          Show this help
set -euo pipefail

# Shared loader exports AUTHENTIK_URL and guarantees jq is available.
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials authentik

FORMAT="table"
SEARCH=""
while getopts "f:s:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    s) SEARCH="$OPTARG" ;;
    h) head -12 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format] [-s search]" >&2; exit 1 ;;
  esac
done

TOKEN=$("$SCRIPT_DIR/auth-token.sh" -q)

PARAMS="ordering=name"
# URL-encode the user-supplied search term — a raw space, '&' or '#'
# would otherwise corrupt the query string.
[[ -n "$SEARCH" ]] && PARAMS="${PARAMS}&search=$(jq -rn --arg v "$SEARCH" '$v|@uri')"

response=$(curl -sk -w "\n%{http_code}" \
  -H "Authorization: Bearer $TOKEN" \
  "${AUTHENTIK_URL}/api/v3/core/applications/?${PARAMS}")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to list applications (HTTP $http_code)" >&2
  exit 1
fi

if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.results'
  exit 0
fi

# Header/separator share the row format so the columns always line up.
fmt='%-28s %-28s %-17s %s\n'
printf "$fmt" "NAME" "SLUG" "PROVIDER" "LAUNCH URL"
printf "$fmt" "----------------------------" "----------------------------" "-----------------" "----------------------------------------"
echo "$body" | jq -r '.results[] | [
  .name,
  .slug,
  (.provider_obj.name // "none"),
  (.launch_url // "—")
] | @tsv' | while IFS=$'\t' read -r name slug provider launch_url; do
  printf "$fmt" "${name:0:28}" "${slug:0:28}" "${provider:0:17}" "$launch_url"
done

86
tools/authentik/auth-token.sh Executable file
View File

@@ -0,0 +1,86 @@
#!/usr/bin/env bash
#
# auth-token.sh — Obtain and cache Authentik API token
#
# Usage: auth-token.sh [-f] [-q]
#
# Returns a valid Authentik API token. Checks in order:
# 1. Cached token at ~/.cache/mosaic/authentik-token (if valid)
# 2. Pre-configured token from credentials.json (authentik.token)
# 3. Fails with instructions to create a token in the admin UI
#
# Options:
# -f Force re-validation (ignore cached token)
# -q Quiet mode — only output the token
# -h Show this help
#
# Environment variables (or credentials.json):
# AUTHENTIK_URL — Authentik instance URL
# AUTHENTIK_TOKEN — Pre-configured API token (recommended)
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials authentik
CACHE_DIR="$HOME/.cache/mosaic"
CACHE_FILE="$CACHE_DIR/authentik-token"
FORCE=false
QUIET=false
while getopts "fqh" opt; do
case $opt in
f) FORCE=true ;;
q) QUIET=true ;;
h) head -20 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
*) echo "Usage: $0 [-f] [-q]" >&2; exit 1 ;;
esac
done
# _validate_token <token> — probe the authenticated /core/users/me/ endpoint;
# succeeds only on HTTP 200. Short timeouts keep callers responsive when the
# server is unreachable.
_validate_token() {
local token="$1"
local http_code
http_code=$(curl -sk -o /dev/null -w "%{http_code}" \
--connect-timeout 5 --max-time 10 \
-H "Authorization: Bearer $token" \
"${AUTHENTIK_URL}/api/v3/core/users/me/")
[[ "$http_code" == "200" ]]
}
# 1. Check cached token
# Progress messages go to stderr so -q consumers can capture stdout cleanly.
if [[ "$FORCE" == "false" ]] && [[ -f "$CACHE_FILE" ]]; then
cached_token=$(cat "$CACHE_FILE")
if [[ -n "$cached_token" ]] && _validate_token "$cached_token"; then
[[ "$QUIET" == "false" ]] && echo "Using cached token (valid)" >&2
echo "$cached_token"
exit 0
fi
[[ "$QUIET" == "false" ]] && echo "Cached token invalid, checking credentials..." >&2
fi
# 2. Use pre-configured token from credentials.json
# (AUTHENTIK_TOKEN was exported by load_credentials if present anywhere.)
if [[ -n "${AUTHENTIK_TOKEN:-}" ]]; then
if _validate_token "$AUTHENTIK_TOKEN"; then
# Cache it for faster future access
mkdir -p "$CACHE_DIR"
echo "$AUTHENTIK_TOKEN" > "$CACHE_FILE"
chmod 600 "$CACHE_FILE"
[[ "$QUIET" == "false" ]] && echo "Token validated and cached at $CACHE_FILE" >&2
echo "$AUTHENTIK_TOKEN"
exit 0
else
echo "Error: Pre-configured AUTHENTIK_TOKEN is invalid (API returned non-200)" >&2
exit 1
fi
fi
# 3. No token available
echo "Error: No Authentik API token configured" >&2
echo "" >&2
echo "To create one:" >&2
echo " 1. Log into Authentik admin: ${AUTHENTIK_URL}/if/admin/#/core/tokens" >&2
echo " 2. Click 'Create' → set identifier (e.g., 'mosaic-agent')" >&2
echo " 3. Select 'API Token' intent, uncheck 'Expiring'" >&2
echo " 4. Copy the key and add to credentials.json:" >&2
echo " jq '.authentik.token = \"<your-token>\"' credentials.json > tmp && mv tmp credentials.json" >&2
exit 1

62
tools/authentik/flow-list.sh Executable file
View File

@@ -0,0 +1,62 @@
#!/usr/bin/env bash
#
# flow-list.sh — List Authentik flows
#
# Usage: flow-list.sh [-f format] [-d designation]
#
# Options:
#   -f format       Output format: table (default), json
#   -d designation  Filter by designation (authentication, authorization, enrollment, etc.)
#   -h              Show this help
set -euo pipefail

# Shared loader exports AUTHENTIK_URL and guarantees jq is available.
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials authentik

FORMAT="table"
DESIGNATION=""
while getopts "f:d:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    d) DESIGNATION="$OPTARG" ;;
    h) head -13 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format] [-d designation]" >&2; exit 1 ;;
  esac
done

TOKEN=$("$SCRIPT_DIR/auth-token.sh" -q)

PARAMS="ordering=slug"
# URL-encode the filter value so unusual input cannot corrupt the query string.
[[ -n "$DESIGNATION" ]] && PARAMS="${PARAMS}&designation=$(jq -rn --arg v "$DESIGNATION" '$v|@uri')"

response=$(curl -sk -w "\n%{http_code}" \
  -H "Authorization: Bearer $TOKEN" \
  "${AUTHENTIK_URL}/api/v3/flows/instances/?${PARAMS}")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to list flows (HTTP $http_code)" >&2
  exit 1
fi

if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.results'
  exit 0
fi

# Header/separator share the row format so the columns always line up.
fmt='%-28s %-28s %-16s %s\n'
printf "$fmt" "NAME" "SLUG" "DESIGNATION" "TITLE"
printf "$fmt" "----------------------------" "----------------------------" "----------------" "----------------------------"
echo "$body" | jq -r '.results[] | [
  .name,
  .slug,
  .designation,
  (.title // "—")
] | @tsv' | while IFS=$'\t' read -r name slug designation title; do
  printf "$fmt" "${name:0:28}" "${slug:0:28}" "$designation" "${title:0:28}"
done

61
tools/authentik/group-list.sh Executable file
View File

@@ -0,0 +1,61 @@
#!/usr/bin/env bash
#
# group-list.sh — List Authentik groups
#
# Usage: group-list.sh [-f format] [-s search]
#
# Options:
#   -f format   Output format: table (default), json
#   -s search   Search by group name
#   -h          Show this help
set -euo pipefail

# Shared loader exports AUTHENTIK_URL and guarantees jq is available.
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials authentik

FORMAT="table"
SEARCH=""
while getopts "f:s:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    s) SEARCH="$OPTARG" ;;
    h) head -12 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format] [-s search]" >&2; exit 1 ;;
  esac
done

TOKEN=$("$SCRIPT_DIR/auth-token.sh" -q)

PARAMS="ordering=name"
# URL-encode the user-supplied search term — a raw space or '&' would
# otherwise corrupt the query string.
[[ -n "$SEARCH" ]] && PARAMS="${PARAMS}&search=$(jq -rn --arg v "$SEARCH" '$v|@uri')"

response=$(curl -sk -w "\n%{http_code}" \
  -H "Authorization: Bearer $TOKEN" \
  "${AUTHENTIK_URL}/api/v3/core/groups/?${PARAMS}")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to list groups (HTTP $http_code)" >&2
  exit 1
fi

if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.results'
  exit 0
fi

# Header/separator share the row format so the columns always line up.
fmt='%-28s %-36s %-7s %s\n'
printf "$fmt" "NAME" "PK" "MEMBERS" "SUPERUSER"
printf "$fmt" "----------------------------" "------------------------------------" "-------" "---------"
echo "$body" | jq -r '.results[] | [
  .name,
  .pk,
  (.users | length | tostring),
  (if .is_superuser then "yes" else "no" end)
] | @tsv' | while IFS=$'\t' read -r name pk members superuser; do
  printf "$fmt" "${name:0:28}" "$pk" "$members" "$superuser"
done

93
tools/authentik/user-create.sh Executable file
View File

@@ -0,0 +1,93 @@
#!/usr/bin/env bash
#
# user-create.sh — Create an Authentik user
#
# Usage: user-create.sh -u <username> -n <name> -e <email> [-p password] [-g group]
#
# Options:
#   -u username   Username (required)
#   -n name       Display name (required)
#   -e email      Email address (required)
#   -p password   Initial password (optional — user gets set-password flow if omitted)
#   -g group      Group name to add user to (optional)
#   -f format     Output format: table (default), json
#   -h            Show this help
#
# Environment variables (or credentials.json):
#   AUTHENTIK_URL — Authentik instance URL
set -euo pipefail

MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials authentik

USERNAME="" NAME="" EMAIL="" PASSWORD="" GROUP="" FORMAT="table"
while getopts "u:n:e:p:g:f:h" opt; do
  case $opt in
    u) USERNAME="$OPTARG" ;;
    n) NAME="$OPTARG" ;;
    e) EMAIL="$OPTARG" ;;
    p) PASSWORD="$OPTARG" ;;
    g) GROUP="$OPTARG" ;;
    f) FORMAT="$OPTARG" ;;
    h) head -18 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 -u <username> -n <name> -e <email> [-p password] [-g group]" >&2; exit 1 ;;
  esac
done

if [[ -z "$USERNAME" || -z "$NAME" || -z "$EMAIL" ]]; then
  echo "Error: -u username, -n name, and -e email are required" >&2
  exit 1
fi

TOKEN=$("$SCRIPT_DIR/auth-token.sh" -q)

# Build the user payload with jq so every value is safely JSON-escaped.
payload=$(jq -n \
  --arg username "$USERNAME" \
  --arg name "$NAME" \
  --arg email "$EMAIL" \
  '{username: $username, name: $name, email: $email, is_active: true}')

# Add password if provided
if [[ -n "$PASSWORD" ]]; then
  payload=$(echo "$payload" | jq --arg pw "$PASSWORD" '. + {password: $pw}')
fi

# Optionally resolve the target group name to its PK and attach it.
if [[ -n "$GROUP" ]]; then
  # URL-encode the group name — spaces or '&' would otherwise break the query.
  group_query=$(jq -rn --arg v "$GROUP" '$v|@uri')
  group_response=$(curl -sk \
    -H "Authorization: Bearer $TOKEN" \
    "${AUTHENTIK_URL}/api/v3/core/groups/?search=${group_query}")
  # Bind the name with --arg instead of interpolating it into the jq program:
  # a name containing quotes or backslashes previously broke (or injected
  # arbitrary jq into) the filter expression.
  group_pk=$(echo "$group_response" | jq -r --arg g "$GROUP" \
    '.results[] | select(.name == $g) | .pk' | head -1)
  if [[ -n "$group_pk" ]]; then
    payload=$(echo "$payload" | jq --arg gk "$group_pk" '. + {groups: [$gk]}')
  else
    echo "Warning: Group '$GROUP' not found — creating user without group" >&2
  fi
fi

response=$(curl -sk -w "\n%{http_code}" -X POST \
  -H "Authorization: Bearer $TOKEN" \
  -H "Content-Type: application/json" \
  -d "$payload" \
  "${AUTHENTIK_URL}/api/v3/core/users/")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

if [[ "$http_code" != "201" ]]; then
  echo "Error: Failed to create user (HTTP $http_code)" >&2
  # Pretty-print the API error when it is JSON; fall back to the raw body
  # (previously a non-JSON error body was silently discarded).
  echo "$body" | jq -r '.' >&2 2>/dev/null || echo "$body" >&2
  exit 1
fi

if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
else
  echo "User created successfully:"
  echo "$body" | jq -r '" Username: \(.username)\n Name: \(.name)\n Email: \(.email)\n PK: \(.pk)"'
fi

72
tools/authentik/user-list.sh Executable file
View File

@@ -0,0 +1,72 @@
#!/usr/bin/env bash
#
# user-list.sh — List Authentik users
#
# Usage: user-list.sh [-f format] [-s search] [-g group]
#
# Options:
#   -f format   Output format: table (default), json
#   -s search   Search term (matches username, name, email)
#   -g group    Filter by group name
#   -h          Show this help
#
# Environment variables (or credentials.json):
#   AUTHENTIK_URL — Authentik instance URL
set -euo pipefail

MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials authentik

FORMAT="table"
SEARCH=""
GROUP=""
while getopts "f:s:g:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    s) SEARCH="$OPTARG" ;;
    g) GROUP="$OPTARG" ;;
    h) head -14 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format] [-s search] [-g group]" >&2; exit 1 ;;
  esac
done

TOKEN=$("$SCRIPT_DIR/auth-token.sh" -q)

# Percent-encode one query value (jq is guaranteed by load_credentials).
urlencode() { jq -rn --arg v "$1" '$v|@uri'; }

# Build query params; user-supplied values are URL-encoded so spaces,
# '&' or '#' cannot corrupt the query string.
PARAMS="ordering=username"
[[ -n "$SEARCH" ]] && PARAMS="${PARAMS}&search=$(urlencode "$SEARCH")"
[[ -n "$GROUP" ]] && PARAMS="${PARAMS}&groups_by_name=$(urlencode "$GROUP")"

response=$(curl -sk -w "\n%{http_code}" \
  -H "Authorization: Bearer $TOKEN" \
  "${AUTHENTIK_URL}/api/v3/core/users/?${PARAMS}")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to list users (HTTP $http_code)" >&2
  exit 1
fi

if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.results'
  exit 0
fi

# Table output — header/separator share the row format so columns line up.
fmt='%-20s %-28s %-28s %-6s %s\n'
printf "$fmt" "USERNAME" "NAME" "EMAIL" "ACTIVE" "LAST LOGIN"
printf "$fmt" "--------------------" "----------------------------" "----------------------------" "------" "----------"
echo "$body" | jq -r '.results[] | [
  .username,
  .name,
  .email,
  (if .is_active then "yes" else "no" end),
  (.last_login // "never" | split("T")[0])
] | @tsv' | while IFS=$'\t' read -r username name email active last_login; do
  printf "$fmt" "${username:0:20}" "${name:0:28}" "${email:0:28}" "$active" "$last_login"
done

305
tools/bootstrap/agent-lint.sh Executable file
View File

@@ -0,0 +1,305 @@
#!/bin/bash
# agent-lint.sh — Audit agent configuration across all coding projects
#
# Usage:
# agent-lint.sh # Scan all projects in ~/src/
# agent-lint.sh --project <path> # Scan single project
# agent-lint.sh --json # Output JSON for jarvis-brain
# agent-lint.sh --verbose # Show per-check details
# agent-lint.sh --fix-hint # Show fix commands for failures
#
# Checks per project:
# 1. Has runtime context file (CLAUDE.md or RUNTIME.md)?
# 2. Has AGENTS.md?
# 3. Runtime context file references conditional context/guides?
# 4. Runtime context file has quality gates?
# 5. For monorepos: sub-directories have AGENTS.md?
set -euo pipefail
# Defaults
SRC_DIR="$HOME/src"
SINGLE_PROJECT=""
JSON_OUTPUT=false
VERBOSE=false
FIX_HINT=false
# Exclusion patterns (not coding projects)
# Matched as substrings of the directory basename (see is_excluded).
EXCLUDE_PATTERNS=(
"_worktrees"
".backup"
"_old"
"_bak"
"junk"
"traefik"
"infrastructure"
)
# Parse args
while [[ $# -gt 0 ]]; do
case "$1" in
--project) SINGLE_PROJECT="$2"; shift 2 ;;
--json) JSON_OUTPUT=true; shift ;;
--verbose) VERBOSE=true; shift ;;
--fix-hint) FIX_HINT=true; shift ;;
--src-dir) SRC_DIR="$2"; shift 2 ;;
-h|--help)
echo "Usage: agent-lint.sh [--project <path>] [--json] [--verbose] [--fix-hint] [--src-dir <dir>]"
exit 0
;;
# NOTE(review): the unknown-option error goes to stdout; consider >&2.
*) echo "Unknown option: $1"; exit 1 ;;
esac
done
# Colors (disabled for JSON mode)
if $JSON_OUTPUT; then
GREEN="" RED="" YELLOW="" NC="" BOLD="" DIM=""
else
GREEN='\033[0;32m' RED='\033[0;31m' YELLOW='\033[0;33m'
NC='\033[0m' BOLD='\033[1m' DIM='\033[2m'
fi
# A directory counts as a coding project when it carries a known build or
# package manifest (or a Makefile next to a Rust entry point).
is_coding_project() {
  local d="$1"
  local manifest
  for manifest in package.json pyproject.toml Cargo.toml go.mod pom.xml build.gradle; do
    [[ -f "$d/$manifest" ]] && return 0
  done
  [[ -f "$d/Makefile" && -f "$d/src/main.rs" ]]
}
# True when the directory's basename contains any EXCLUDE_PATTERNS entry
# as a substring.
is_excluded() {
  local base pat
  base=$(basename "$1")
  for pat in "${EXCLUDE_PATTERNS[@]}"; do
    [[ "$base" == *"$pat"* ]] && return 0
  done
  return 1
}
# Monorepo detection: pnpm/turbo/lerna config file, or an npm
# "workspaces" field in package.json.
is_monorepo() {
  local d="$1"
  local marker
  for marker in pnpm-workspace.yaml turbo.json lerna.json; do
    [[ -f "$d/$marker" ]] && return 0
  done
  grep -q '"workspaces"' "$d/package.json" 2>/dev/null
}
# Print the project's runtime context file path — CLAUDE.md wins over
# RUNTIME.md — or an empty string when neither exists.
runtime_context_file() {
  local d="$1" candidate
  for candidate in CLAUDE.md RUNTIME.md; do
    if [[ -f "$d/$candidate" ]]; then
      echo "$d/$candidate"
      return
    fi
  done
  echo ""
}
# Pass when the project has a runtime context file (CLAUDE.md or RUNTIME.md).
check_runtime_context() {
  local ctx
  ctx=$(runtime_context_file "$1")
  [[ -n "$ctx" ]]
}
# Pass when an AGENTS.md file exists at the project root.
check_agents_md() {
  local agents="$1/AGENTS.md"
  [[ -f "$agents" ]]
}
# Pass when the runtime context file references conditional context/guides
# (agent-guides, the mosaic guides path, or a "conditional ..." section).
check_conditional_loading() {
  local ctx_file
  ctx_file="$(runtime_context_file "$1")"
  if [[ -z "$ctx_file" ]]; then
    return 1
  fi
  grep -qi "agent-guides\|~/.config/mosaic/guides\|conditional.*loading\|conditional.*documentation\|conditional.*context" "$ctx_file" 2>/dev/null
}
# Pass when the runtime context file mentions quality gates or common
# gate keywords (lint, typecheck, test, "must pass before").
check_quality_gates() {
  local ctx_file
  ctx_file="$(runtime_context_file "$1")"
  if [[ -z "$ctx_file" ]]; then
    return 1
  fi
  grep -qi "quality.gates\|must pass before\|lint\|typecheck\|test" "$ctx_file" 2>/dev/null
}
# For monorepos, verify every workspace member (a subdirectory of apps/,
# packages/, services/ or plugins/ that has its own package.json or
# pyproject.toml) carries an AGENTS.md.
# Prints: "N/A" (not a monorepo), "OK", or "MISS:<space-joined names>".
check_monorepo_sub_agents() {
  local root="$1"
  local lacking=()
  is_monorepo "$root" || { echo "N/A"; return; }
  local bucket member
  for bucket in apps packages services plugins; do
    [[ -d "$root/$bucket" ]] || continue
    for member in "$root/$bucket"/*/; do
      [[ -d "$member" ]] || continue
      # Only workspace members with their own manifest are expected to
      # have agent docs.
      if [[ -f "$member/package.json" || -f "$member/pyproject.toml" ]]; then
        [[ -f "$member/AGENTS.md" ]] || lacking+=("$(basename "$member")")
      fi
    done
  done
  if (( ${#lacking[@]} == 0 )); then
    echo "OK"
  else
    echo "MISS:${lacking[*]}"
  fi
}
# Lint a single project: run the four checks plus the monorepo scan,
# print one report row (or one JSON object), and hand the numeric score
# to main() via /tmp/agent-lint-score-$$ (same process, so $$ matches).
# Reads globals: JSON_OUTPUT, VERBOSE, FIX_HINT and the color variables.
lint_project() {
local dir="$1"
local name
name=$(basename "$dir")
local has_runtime has_agents has_guides has_quality mono_status
local score=0 max_score=4
check_runtime_context "$dir" && has_runtime="OK" || has_runtime="MISS"
check_agents_md "$dir" && has_agents="OK" || has_agents="MISS"
check_conditional_loading "$dir" && has_guides="OK" || has_guides="MISS"
check_quality_gates "$dir" && has_quality="OK" || has_quality="MISS"
mono_status=$(check_monorepo_sub_agents "$dir")
# "|| true" keeps set -e happy when ((score++)) evaluates to 0.
[[ "$has_runtime" == "OK" ]] && ((score++)) || true
[[ "$has_agents" == "OK" ]] && ((score++)) || true
[[ "$has_guides" == "OK" ]] && ((score++)) || true
[[ "$has_quality" == "OK" ]] && ((score++)) || true
if $JSON_OUTPUT; then
cat <<JSONEOF
{
"project": "$name",
"path": "$dir",
"runtime_context": "$has_runtime",
"agents_md": "$has_agents",
"conditional_loading": "$has_guides",
"quality_gates": "$has_quality",
"monorepo_sub_agents": "$mono_status",
"score": $score,
"max_score": $max_score
}
JSONEOF
else
# Color-code the status
local c_runtime c_agents c_guides c_quality
[[ "$has_runtime" == "OK" ]] && c_runtime="${GREEN} OK ${NC}" || c_runtime="${RED} MISS ${NC}"
[[ "$has_agents" == "OK" ]] && c_agents="${GREEN} OK ${NC}" || c_agents="${RED} MISS ${NC}"
[[ "$has_guides" == "OK" ]] && c_guides="${GREEN} OK ${NC}" || c_guides="${RED} MISS ${NC}"
[[ "$has_quality" == "OK" ]] && c_quality="${GREEN} OK ${NC}" || c_quality="${RED} MISS ${NC}"
local score_color="$RED"
[[ $score -ge 3 ]] && score_color="$YELLOW"
[[ $score -eq 4 ]] && score_color="$GREEN"
printf " %-35s %b %b %b %b ${score_color}%d/%d${NC}" \
"$name" "$c_runtime" "$c_agents" "$c_guides" "$c_quality" "$score" "$max_score"
# Show monorepo status if applicable
if [[ "$mono_status" != "N/A" && "$mono_status" != "OK" ]]; then
printf " ${YELLOW}(mono: %s)${NC}" "$mono_status"
fi
echo ""
fi
# echo -e is required: the color variables hold literal "\033[..." text,
# and plain echo printed them verbatim instead of as ANSI escapes.
if $VERBOSE && ! $JSON_OUTPUT; then
[[ "$has_runtime" == "MISS" ]] && echo -e " ${DIM} Runtime context file missing (CLAUDE.md or RUNTIME.md)${NC}"
[[ "$has_agents" == "MISS" ]] && echo -e " ${DIM} AGENTS.md missing${NC}"
[[ "$has_guides" == "MISS" ]] && echo -e " ${DIM} No conditional context/loading section detected${NC}"
[[ "$has_quality" == "MISS" ]] && echo -e " ${DIM} No quality gates section${NC}"
if [[ "$mono_status" == MISS:* ]]; then
echo -e " ${DIM} Monorepo sub-AGENTS.md missing: ${mono_status#MISS:}${NC}"
fi
fi
if $FIX_HINT && ! $JSON_OUTPUT; then
if [[ "$has_runtime" == "MISS" || "$has_agents" == "MISS" ]]; then
echo -e " ${DIM}Fix: ~/.config/mosaic/tools/bootstrap/init-project.sh --name \"$name\" --type auto${NC}"
elif [[ "$has_guides" == "MISS" ]]; then
echo -e " ${DIM}Fix: ~/.config/mosaic/tools/bootstrap/agent-upgrade.sh $dir --section conditional-loading${NC}"
fi
fi
# Return score for summary
# NOTE(review): /tmp/agent-lint-score-$$ is a predictable path; a mktemp
# file created once in main() would be safer on shared hosts.
echo "$score" > /tmp/agent-lint-score-$$
}
# Main
# Collects the project list (a single --project path, or every non-excluded
# coding project under SRC_DIR), lints each, and prints either a JSON
# document or a colored summary table.
main() {
local projects=()
local total=0 passing=0 total_score=0
if [[ -n "$SINGLE_PROJECT" ]]; then
projects=("$SINGLE_PROJECT")
else
for dir in "$SRC_DIR"/*/; do
[[ -d "$dir" ]] || continue
is_excluded "$dir" && continue
is_coding_project "$dir" && projects+=("${dir%/}")
done
fi
if [[ ${#projects[@]} -eq 0 ]]; then
echo "No coding projects found."
exit 0
fi
if $JSON_OUTPUT; then
# Hand-built JSON envelope; lint_project emits one object per project,
# with commas emitted before every element except the first.
echo '{ "audit_date": "'$(date -I)'", "projects": ['
local first=true
for dir in "${projects[@]}"; do
$first || echo ","
first=false
lint_project "$dir"
done
echo '] }'
# NOTE(review): the /tmp/agent-lint-score-$$ file written by lint_project
# is only cleaned up in the table branch below.
else
echo ""
echo -e "${BOLD}Agent Configuration Audit — $(date +%Y-%m-%d)${NC}"
echo "========================================================"
printf " %-35s %s %s %s %s %s\n" \
"Project" "RUNTIME" "AGENTS" "Guides" "Quality" "Score"
echo " -----------------------------------------------------------------------"
for dir in "${projects[@]}"; do
lint_project "$dir"
# lint_project runs in this shell, so $$ matches the file it wrote.
local score
score=$(cat /tmp/agent-lint-score-$$ 2>/dev/null || echo 0)
((total++)) || true
((total_score += score)) || true
[[ $score -eq 4 ]] && ((passing++)) || true
done
rm -f /tmp/agent-lint-score-$$
echo " -----------------------------------------------------------------------"
local need_attention=$((total - passing))
echo ""
echo -e " ${BOLD}Summary:${NC} $total projects | ${GREEN}$passing pass${NC} | ${RED}$need_attention need attention${NC}"
echo ""
if [[ $need_attention -gt 0 ]] && ! $FIX_HINT; then
echo -e " ${DIM}Run with --fix-hint for suggested fixes${NC}"
echo -e " ${DIM}Run with --verbose for per-check details${NC}"
echo ""
fi
fi
}
main

332
tools/bootstrap/agent-upgrade.sh Executable file
View File

@@ -0,0 +1,332 @@
#!/bin/bash
# agent-upgrade.sh — Non-destructively upgrade agent configuration in projects
#
# Usage:
#   agent-upgrade.sh <project-path>                        # Upgrade one project
#   agent-upgrade.sh --all                                 # Upgrade all projects in ~/src/
#   agent-upgrade.sh --all --dry-run                       # Preview what would change
#   agent-upgrade.sh <path> --section conditional-loading  # Inject specific section
#   agent-upgrade.sh <path> --create-agents                # Create AGENTS.md if missing
#   agent-upgrade.sh <path> --monorepo-scan                # Create sub-AGENTS.md for monorepo dirs
#
# Safety:
#   - Creates .bak backup before any modification
#   - Append-only — never modifies existing sections
#   - --dry-run shows what would change without writing
set -euo pipefail
# Defaults — SRC_DIR is overridable via --src-dir below.
SRC_DIR="$HOME/src"
FRAGMENTS_DIR="$HOME/.config/mosaic/templates/agent/fragments"
TEMPLATES_DIR="$HOME/.config/mosaic/templates/agent"
DRY_RUN=false
ALL_PROJECTS=false
TARGET_PATH=""          # single-project mode target directory
SECTION_ONLY=""         # when non-empty, inject only this fragment
CREATE_AGENTS=false
MONOREPO_SCAN=false
# Exclusion patterns (same as agent-lint.sh).
# Matched as substrings of each directory's basename by is_excluded().
EXCLUDE_PATTERNS=(
    "_worktrees"
    ".backup"
    "_old"
    "_bak"
    "junk"
    "traefik"
    "infrastructure"
)
# Colors
GREEN='\033[0;32m' RED='\033[0;31m' YELLOW='\033[0;33m'
NC='\033[0m' BOLD='\033[1m' DIM='\033[2m'
# Parse args
while [[ $# -gt 0 ]]; do
    case "$1" in
        --all) ALL_PROJECTS=true; shift ;;
        --dry-run) DRY_RUN=true; shift ;;
        # NOTE(review): "$2" is consumed unchecked here and for --src-dir;
        # under `set -u` a missing value aborts with an "unbound variable"
        # error rather than a usage message — confirm desired UX.
        --section) SECTION_ONLY="$2"; shift 2 ;;
        --create-agents) CREATE_AGENTS=true; shift ;;
        --monorepo-scan) MONOREPO_SCAN=true; shift ;;
        --src-dir) SRC_DIR="$2"; shift 2 ;;
        -h|--help)
            echo "Usage: agent-upgrade.sh [<project-path>|--all] [--dry-run] [--section <name>] [--create-agents] [--monorepo-scan]"
            echo ""
            echo "Options:"
            echo "  --all               Upgrade all projects in ~/src/"
            echo "  --dry-run           Preview changes without writing"
            echo "  --section <name>    Inject only a specific fragment (conditional-loading, commit-format, secrets, multi-agent, code-review, campsite-rule)"
            echo "  --create-agents     Create AGENTS.md if missing"
            echo "  --monorepo-scan     Create sub-AGENTS.md for monorepo directories"
            exit 0
            ;;
        *)
            # Any other argument must be an existing directory: the target project.
            if [[ -d "$1" ]]; then
                TARGET_PATH="$1"
            else
                echo "Unknown option or invalid path: $1"
                exit 1
            fi
            shift
            ;;
    esac
done
# A target project is mandatory unless --all was given.
if ! $ALL_PROJECTS && [[ -z "$TARGET_PATH" ]]; then
    echo "Error: Specify a project path or use --all"
    exit 1
fi
# Helpers
# True (exit 0) when the directory holds a recognizable build manifest for
# any supported ecosystem: Node, Python, Rust, Go, Maven, or Gradle.
is_coding_project() {
    local candidate="$1"
    local manifest
    for manifest in package.json pyproject.toml Cargo.toml go.mod pom.xml build.gradle; do
        [[ -f "$candidate/$manifest" ]] && return 0
    done
    return 1
}
# Exit 0 when the basename of the given path contains any EXCLUDE_PATTERNS
# entry as a substring; exit 1 otherwise.
is_excluded() {
    local base pat
    base=$(basename "$1")
    for pat in "${EXCLUDE_PATTERNS[@]}"; do
        if [[ "$base" == *"$pat"* ]]; then
            return 0
        fi
    done
    return 1
}
# Detect a JS monorepo: a pnpm/turbo/lerna workspace file, or a package.json
# that declares npm "workspaces". Exit status is the answer.
is_monorepo() {
    local root="$1"
    local marker
    for marker in pnpm-workspace.yaml turbo.json lerna.json; do
        [[ -f "$root/$marker" ]] && return 0
    done
    grep -q '"workspaces"' "$root/package.json" 2>/dev/null
}
# Case-insensitive pattern probe: exit 0 when $2 (a grep BRE) matches
# anywhere in file $1; exit non-zero when the file is missing or no match.
has_section() {
    local target="$1" needle="$2"
    [[ -f "$target" ]] || return 1
    grep -qi "$needle" "$target" 2>/dev/null
}
# Print the project's runtime-context file path: CLAUDE.md when present,
# else RUNTIME.md when present, else the (not-yet-existing) CLAUDE.md path
# so callers have a sensible default target to create.
runtime_context_file() {
    local root="$1"
    local candidate
    for candidate in CLAUDE.md RUNTIME.md; do
        if [[ -f "$root/$candidate" ]]; then
            echo "$root/$candidate"
            return
        fi
    done
    echo "$root/CLAUDE.md"
}
# Copy an existing file to <file>.bak before modification.
# A no-op (still exit 0) during --dry-run or when the file does not exist.
backup_file() {
    local target="$1"
    if ! $DRY_RUN && [[ -f "$target" ]]; then
        cp "$target" "${target}.bak"
    fi
}
# Inject a fragment into CLAUDE.md if the section doesn't exist
#
# Appends $FRAGMENTS_DIR/<fragment>.md to the project's runtime-context file
# (CLAUDE.md or RUNTIME.md) unless a detection pattern for that fragment
# already matches the file. Append-only; a .bak backup is taken first.
# Arguments: $1 - project directory, $2 - fragment name
# Returns: 0 on inject/skip, 1 on unknown fragment or missing fragment file.
inject_fragment() {
    local project_dir="$1"
    local fragment_name="$2"
    local ctx_file
    ctx_file="$(runtime_context_file "$project_dir")"
    local fragment_file="$FRAGMENTS_DIR/$fragment_name.md"
    if [[ ! -f "$fragment_file" ]]; then
        echo -e "  ${RED}Fragment not found: $fragment_file${NC}"
        return 1
    fi
    # Determine detection pattern for this fragment.
    # Patterns are grep BREs; '\|' is BRE alternation. Matching is
    # case-insensitive (grep -qi in has_section).
    local detect_pattern
    case "$fragment_name" in
        conditional-loading) detect_pattern="agent-guides\|~/.config/mosaic/guides\|Conditional.*Loading\|Conditional.*Documentation\|Conditional.*Context" ;;
        commit-format) detect_pattern="<type>.*#issue\|Types:.*feat.*fix" ;;
        secrets) detect_pattern="NEVER hardcode secrets\|\.env.example.*committed" ;;
        multi-agent) detect_pattern="Multi-Agent Coordination\|pull --rebase.*before" ;;
        code-review) detect_pattern="codex-code-review\|codex-security-review\|Code Review" ;;
        campsite-rule) detect_pattern="Campsite Rule\|Touching it makes it yours\|was already there.*NEVER" ;;
        *) echo "Unknown fragment: $fragment_name"; return 1 ;;
    esac
    # Nothing to inject into: a missing context file is a skip, not an error.
    if [[ ! -f "$ctx_file" ]]; then
        echo -e "  ${YELLOW}No runtime context file (CLAUDE.md/RUNTIME.md) — skipping fragment injection${NC}"
        return 0
    fi
    # Idempotence guard: never duplicate a section that already matches.
    if has_section "$ctx_file" "$detect_pattern"; then
        echo -e "  ${DIM}$fragment_name already present${NC}"
        return 0
    fi
    if $DRY_RUN; then
        echo -e "  ${GREEN}Would inject: $fragment_name${NC}"
    else
        backup_file "$ctx_file"
        # Blank lines around the fragment keep markdown sections separated.
        echo "" >> "$ctx_file"
        cat "$fragment_file" >> "$ctx_file"
        echo "" >> "$ctx_file"
        echo -e "  ${GREEN}Injected: $fragment_name${NC}"
    fi
}
# Create AGENTS.md from the generic template, with per-stack quality gates.
# Arguments: $1 - project directory
# Globals (read): DRY_RUN, TEMPLATES_DIR, GREEN/DIM/NC
# Skips silently (exit 0) when AGENTS.md already exists.
create_agents_md() {
    local project_dir="$1"
    local agents_md="$project_dir/AGENTS.md"
    if [[ -f "$agents_md" ]]; then
        echo -e "  ${DIM}AGENTS.md already exists${NC}"
        return 0
    fi
    local project_name
    project_name=$(basename "$project_dir")
    # Detect project type for quality gates (pnpm > npm for Node; uv for Python).
    local quality_gates="# Add quality gate commands here"
    if [[ -f "$project_dir/package.json" ]]; then
        quality_gates="npm run lint && npm run typecheck && npm test"
        if grep -q '"pnpm"' "$project_dir/package.json" 2>/dev/null || [[ -f "$project_dir/pnpm-lock.yaml" ]]; then
            quality_gates="pnpm lint && pnpm typecheck && pnpm test"
        fi
    elif [[ -f "$project_dir/pyproject.toml" ]]; then
        quality_gates="uv run ruff check src/ tests/ && uv run mypy src/ && uv run pytest --cov"
    fi
    if $DRY_RUN; then
        echo -e "  ${GREEN}Would create: AGENTS.md${NC}"
    else
        # Escape sed-replacement metacharacters (\, &, /) in the dynamic values.
        # An unescaped '&' in a sed replacement re-inserts the matched pattern,
        # which corrupted gates like "a && b" into literal ${QUALITY_GATES} text.
        local esc_name esc_gates esc_prefix
        esc_name=$(printf '%s' "$project_name" | sed 's/[&/\]/\\&/g')
        esc_gates=$(printf '%s' "$quality_gates" | sed 's/[&/\]/\\&/g')
        esc_prefix=$(printf '%s' "${project_name^^}" | sed 's/[&/\]/\\&/g')
        # Use generic AGENTS.md template with substitutions
        sed -e "s/\${PROJECT_NAME}/$esc_name/g" \
            -e "s/\${QUALITY_GATES}/$esc_gates/g" \
            -e "s/\${TASK_PREFIX}/$esc_prefix/g" \
            -e "s|\${SOURCE_DIR}|src|g" \
            "$TEMPLATES_DIR/AGENTS.md.template" > "$agents_md"
        echo -e "  ${GREEN}Created: AGENTS.md${NC}"
    fi
}
# Create sub-AGENTS.md for monorepo directories
#
# For each apps/packages/services/plugins subdirectory that has its own
# manifest (package.json or pyproject.toml), create an AGENTS.md from the
# sub-agents template unless one already exists. No-op for non-monorepos.
# Arguments: $1 - project directory
# Globals (read): DRY_RUN, TEMPLATES_DIR, GREEN/DIM/NC
create_sub_agents() {
    local project_dir="$1"
    if ! is_monorepo "$project_dir"; then
        echo -e "  ${DIM}Not a monorepo — skipping sub-AGENTS scan${NC}"
        return 0
    fi
    local created=0
    for subdir_type in apps packages services plugins; do
        if [[ -d "$project_dir/$subdir_type" ]]; then
            for subdir in "$project_dir/$subdir_type"/*/; do
                [[ -d "$subdir" ]] || continue
                # Only if it has its own manifest
                if [[ -f "$subdir/package.json" ]] || [[ -f "$subdir/pyproject.toml" ]]; then
                    if [[ ! -f "$subdir/AGENTS.md" ]]; then
                        local dir_name
                        dir_name=$(basename "$subdir")
                        if $DRY_RUN; then
                            echo -e "  ${GREEN}Would create: $subdir_type/$dir_name/AGENTS.md${NC}"
                        else
                            # NOTE(review): sed replacements are unescaped; a
                            # directory name containing '&', '/' or '\' would
                            # corrupt the output — confirm names are plain.
                            sed -e "s/\${DIRECTORY_NAME}/$dir_name/g" \
                                -e "s/\${DIRECTORY_PURPOSE}/Part of the $subdir_type layer./g" \
                                "$TEMPLATES_DIR/sub-agents.md.template" > "${subdir}AGENTS.md"
                            echo -e "  ${GREEN}Created: $subdir_type/$dir_name/AGENTS.md${NC}"
                        fi
                        # '|| true': under set -e, (( )) fails when result is 0.
                        ((created++)) || true
                    fi
                fi
            done
        fi
    done
    if [[ $created -eq 0 ]]; then
        echo -e "  ${DIM}All monorepo sub-AGENTS.md present${NC}"
    fi
}
# Upgrade a single project
#
# Runs the full upgrade pass for one project directory: fragment injection,
# AGENTS.md creation, and monorepo sub-AGENTS.md creation. With --section,
# injects only that fragment and returns early.
# Arguments: $1 - project directory
upgrade_project() {
    local dir="$1"
    local name
    name=$(basename "$dir")
    echo -e "\n${BOLD}$name${NC} ${DIM}($dir)${NC}"
    if [[ -n "$SECTION_ONLY" ]]; then
        inject_fragment "$dir" "$SECTION_ONLY"
        return
    fi
    # Always try conditional-loading (highest impact)
    inject_fragment "$dir" "conditional-loading"
    # Try other fragments if runtime context exists
    if [[ -f "$dir/CLAUDE.md" || -f "$dir/RUNTIME.md" ]]; then
        inject_fragment "$dir" "commit-format"
        inject_fragment "$dir" "secrets"
        inject_fragment "$dir" "multi-agent"
        inject_fragment "$dir" "code-review"
        inject_fragment "$dir" "campsite-rule"
    fi
    # Create AGENTS.md if missing (always unless --section was used)
    # NOTE(review): the early return above means SECTION_ONLY is always empty
    # here, so both [[ -z "$SECTION_ONLY" ]] checks below are always true.
    if $CREATE_AGENTS || [[ -z "$SECTION_ONLY" ]]; then
        create_agents_md "$dir"
    fi
    # Monorepo sub-AGENTS.md
    if $MONOREPO_SCAN || [[ -z "$SECTION_ONLY" ]]; then
        create_sub_agents "$dir"
    fi
}
# Main entry point: build the project list (either discovery under SRC_DIR
# with --all, or the single TARGET_PATH), announce the run mode, then apply
# upgrade_project to each candidate.
main() {
    local projects=()
    if $ALL_PROJECTS; then
        for candidate in "$SRC_DIR"/*/; do
            [[ -d "$candidate" ]] || continue
            is_excluded "$candidate" && continue
            is_coding_project "$candidate" && projects+=("${candidate%/}")
        done
    else
        projects=("$TARGET_PATH")
    fi
    if [[ ${#projects[@]} -eq 0 ]]; then
        echo "No coding projects found."
        exit 0
    fi
    local mode
    if $DRY_RUN; then
        mode="DRY RUN"
    else
        mode="LIVE"
    fi
    echo -e "${BOLD}Agent Configuration Upgrade — $(date +%Y-%m-%d) [$mode]${NC}"
    echo "========================================================"
    local project
    for project in "${projects[@]}"; do
        upgrade_project "$project"
    done
    echo ""
    echo -e "${BOLD}Done.${NC}"
    if $DRY_RUN; then
        echo -e "${DIM}Run without --dry-run to apply changes.${NC}"
    else
        echo -e "${DIM}Backups saved as .bak files. Run agent-lint.sh to verify.${NC}"
    fi
}
main

493
tools/bootstrap/init-project.sh Executable file
View File

@@ -0,0 +1,493 @@
#!/bin/bash
# init-project.sh - Bootstrap a project for AI-assisted development
# Usage: init-project.sh [OPTIONS]
#
# Creates CLAUDE.md, AGENTS.md, and standard directories using templates.
# Optionally initializes git labels and milestones.
# NOTE(review): only `set -e` — no -u/-o pipefail; unset vars expand empty
# and pipeline failures are masked. Confirm whether stricter mode is safe.
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
TEMPLATE_DIR="$HOME/.config/mosaic/templates/agent"
# NOTE(review): GIT_SCRIPT_DIR is set here but not referenced in this script's
# visible body (labels use SCRIPT_DIR) — confirm it is still needed.
GIT_SCRIPT_DIR="$HOME/.config/mosaic/tools/git"
SEQUENTIAL_MCP_SCRIPT="$HOME/.config/mosaic/bin/mosaic-ensure-sequential-thinking"
# Defaults — all overridable through the CLI options parsed below.
PROJECT_NAME=""
PROJECT_TYPE=""
REPO_URL=""
TASK_PREFIX=""
PROJECT_DESCRIPTION=""
SKIP_LABELS=false
SKIP_CI=false
CICD_DOCKER=false
DRY_RUN=false
declare -a CICD_SERVICES=()     # "name:dockerfile" entries from --cicd-service
CICD_BRANCHES="main,develop"
# Print usage text (quoted heredoc: no expansion) and exit 0.
show_help() {
    cat <<'EOF'
Usage: init-project.sh [OPTIONS]
Bootstrap a project for AI-assisted development.
Options:
  -n, --name <name>          Project name (required)
  -t, --type <type>          Project type: nestjs-nextjs, django, generic (default: auto-detect)
  -r, --repo <url>           Git remote URL
  -p, --prefix <prefix>      Orchestrator task prefix (e.g., MS, UC)
  -d, --description <desc>   One-line project description
  --skip-labels              Skip creating git labels and milestones
  --skip-ci                  Skip copying CI pipeline files
  --cicd-docker              Generate Docker build/push/link pipeline steps
  --cicd-service <name:path> Service for Docker CI (repeatable, requires --cicd-docker)
  --cicd-branches <list>     Branches for Docker builds (default: main,develop)
  --dry-run                  Show what would be created without creating anything
  -h, --help                 Show this help
Examples:
  # Full bootstrap with auto-detection
  init-project.sh --name "My App" --description "A web application"
  # Specific type
  init-project.sh --name "My API" --type django --prefix MA
  # Dry run
  init-project.sh --name "Test" --type generic --dry-run
  # With Docker CI/CD pipeline
  init-project.sh --name "My App" --cicd-docker \
    --cicd-service "my-api:src/api/Dockerfile" \
    --cicd-service "my-web:src/web/Dockerfile"
Project Types:
  nestjs-nextjs    NestJS + Next.js monorepo (pnpm + TurboRepo)
  django           Django project (pytest + ruff + mypy)
  typescript       Standalone TypeScript/Next.js project
  python-fastapi   Python FastAPI project (pytest + ruff + mypy + uv)
  python-library   Python library/SDK (pytest + ruff + mypy + uv)
  generic          Generic project (uses base templates)
  auto             Auto-detect from project files (default)
EOF
    exit 0
}
# Parse arguments
# NOTE(review): value-taking options consume "$2" unchecked; a trailing bare
# flag makes the value empty (no set -u here) — confirm acceptable.
while [[ $# -gt 0 ]]; do
    case $1 in
        -n|--name)
            PROJECT_NAME="$2"
            shift 2
            ;;
        -t|--type)
            PROJECT_TYPE="$2"
            shift 2
            ;;
        -r|--repo)
            REPO_URL="$2"
            shift 2
            ;;
        -p|--prefix)
            TASK_PREFIX="$2"
            shift 2
            ;;
        -d|--description)
            PROJECT_DESCRIPTION="$2"
            shift 2
            ;;
        --skip-labels)
            SKIP_LABELS=true
            shift
            ;;
        --skip-ci)
            SKIP_CI=true
            shift
            ;;
        --cicd-docker)
            CICD_DOCKER=true
            shift
            ;;
        --cicd-service)
            # Repeatable: accumulates "name:dockerfile" pairs.
            CICD_SERVICES+=("$2")
            shift 2
            ;;
        --cicd-branches)
            CICD_BRANCHES="$2"
            shift 2
            ;;
        --dry-run)
            DRY_RUN=true
            shift
            ;;
        -h|--help)
            show_help
            ;;
        *)
            echo "Unknown option: $1" >&2
            echo "Run with --help for usage" >&2
            exit 1
            ;;
    esac
done
# Validate required args
if [[ -z "$PROJECT_NAME" ]]; then
    echo "Error: --name is required" >&2
    exit 1
fi
# Auto-detect project type if not specified.
# Inspects marker files in the current working directory and prints the
# inferred type name. Checks run from most to least specific; first hit wins.
detect_project_type() {
    # Monorepo markers (pnpm/turbo, or npm workspaces) imply NestJS + Next.js.
    if [[ -f "pnpm-workspace.yaml" || -f "turbo.json" ]]; then
        echo "nestjs-nextjs"
    elif [[ -f "package.json" ]] && grep -q '"workspaces"' package.json 2>/dev/null; then
        echo "nestjs-nextjs"
    # Django: manage.py alongside pyproject.toml.
    elif [[ -f "manage.py" && -f "pyproject.toml" ]]; then
        echo "django"
    # FastAPI: declared anywhere in pyproject.toml.
    elif [[ -f "pyproject.toml" ]] && grep -q "fastapi" pyproject.toml 2>/dev/null; then
        echo "python-fastapi"
    # Standalone TypeScript project.
    elif [[ -f "tsconfig.json" && -f "package.json" ]]; then
        echo "typescript"
    # Any remaining pyproject.toml is treated as a Python library/tool.
    elif [[ -f "pyproject.toml" ]]; then
        echo "python-library"
    else
        echo "generic"
    fi
}
# Resolve "auto" (or unset) type via marker-file detection in the cwd.
if [[ -z "$PROJECT_TYPE" || "$PROJECT_TYPE" == "auto" ]]; then
    PROJECT_TYPE=$(detect_project_type)
    echo "Auto-detected project type: $PROJECT_TYPE"
fi
# Derive defaults
# Repo URL falls back to the current git remote (empty when none).
if [[ -z "$REPO_URL" ]]; then
    REPO_URL=$(git remote get-url origin 2>/dev/null || echo "")
fi
if [[ -z "$TASK_PREFIX" ]]; then
    # Generate prefix from project name initials
    # (strip non-letters, then uppercase the first letter of each word).
    TASK_PREFIX=$(echo "$PROJECT_NAME" | sed 's/[^A-Za-z ]//g' | awk '{for(i=1;i<=NF;i++) printf toupper(substr($i,1,1))}')
    if [[ -z "$TASK_PREFIX" ]]; then
        TASK_PREFIX="PRJ"
    fi
fi
if [[ -z "$PROJECT_DESCRIPTION" ]]; then
    PROJECT_DESCRIPTION="$PROJECT_NAME"
fi
# Directory name of the cwd; consumed by the templates via envsubst.
PROJECT_DIR=$(basename "$(pwd)")
# Detect quality gates, source dir, and stack info based on type.
# Every variable is exported because the templates are rendered with
# envsubst, which reads only the environment.
# NOTE(review): `export VAR=$(cmd)` masks the command's exit status (SC2155);
# harmless here since the pipelines are pure text transforms.
case "$PROJECT_TYPE" in
    nestjs-nextjs)
        export QUALITY_GATES="pnpm typecheck && pnpm lint && pnpm test"
        export SOURCE_DIR="apps"
        export BUILD_COMMAND="pnpm build"
        export TEST_COMMAND="pnpm test"
        export LINT_COMMAND="pnpm lint"
        export TYPECHECK_COMMAND="pnpm typecheck"
        export FRONTEND_STACK="Next.js + React + TailwindCSS + Shadcn/ui"
        export BACKEND_STACK="NestJS + Prisma ORM"
        export DATABASE_STACK="PostgreSQL"
        export TESTING_STACK="Vitest + Playwright"
        export DEPLOYMENT_STACK="Docker + docker-compose"
        export CONFIG_FILES="turbo.json, pnpm-workspace.yaml, tsconfig.json"
        ;;
    django)
        export QUALITY_GATES="ruff check . && mypy . && pytest tests/"
        export SOURCE_DIR="src"
        export BUILD_COMMAND="pip install -e ."
        export TEST_COMMAND="pytest tests/"
        export LINT_COMMAND="ruff check ."
        export TYPECHECK_COMMAND="mypy ."
        export FRONTEND_STACK="N/A"
        export BACKEND_STACK="Django / Django REST Framework"
        export DATABASE_STACK="PostgreSQL"
        export TESTING_STACK="pytest + pytest-django"
        export DEPLOYMENT_STACK="Docker + docker-compose"
        export CONFIG_FILES="pyproject.toml"
        # Slug: lowercase, spaces → underscores, strip anything non [a-z0-9_].
        export PROJECT_SLUG=$(echo "$PROJECT_NAME" | tr '[:upper:]' '[:lower:]' | tr ' ' '_' | sed 's/[^a-z0-9_]//g')
        ;;
    typescript)
        # Prefer the lockfile's package manager; default to npm.
        PKG_MGR="npm"
        [[ -f "pnpm-lock.yaml" ]] && PKG_MGR="pnpm"
        [[ -f "yarn.lock" ]] && PKG_MGR="yarn"
        export QUALITY_GATES="$PKG_MGR run lint && $PKG_MGR run typecheck && $PKG_MGR test"
        export SOURCE_DIR="src"
        export BUILD_COMMAND="$PKG_MGR run build"
        export TEST_COMMAND="$PKG_MGR test"
        export LINT_COMMAND="$PKG_MGR run lint"
        export TYPECHECK_COMMAND="npx tsc --noEmit"
        export FRAMEWORK="TypeScript"
        export PACKAGE_MANAGER="$PKG_MGR"
        export FRONTEND_STACK="N/A"
        export BACKEND_STACK="N/A"
        export DATABASE_STACK="N/A"
        export TESTING_STACK="Vitest or Jest"
        export DEPLOYMENT_STACK="TBD"
        export CONFIG_FILES="tsconfig.json, package.json"
        # Detect Next.js
        if grep -q '"next"' package.json 2>/dev/null; then
            export FRAMEWORK="Next.js"
            export FRONTEND_STACK="Next.js + React"
        fi
        ;;
    python-fastapi)
        export PROJECT_SLUG=$(echo "$PROJECT_NAME" | tr '[:upper:]' '[:lower:]' | tr ' ' '_' | sed 's/[^a-z0-9_]//g')
        export QUALITY_GATES="uv run ruff check src/ tests/ && uv run ruff format --check src/ && uv run mypy src/ && uv run pytest --cov"
        export SOURCE_DIR="src"
        export BUILD_COMMAND="uv sync --all-extras"
        export TEST_COMMAND="uv run pytest --cov"
        export LINT_COMMAND="uv run ruff check src/ tests/"
        export TYPECHECK_COMMAND="uv run mypy src/"
        export FRONTEND_STACK="N/A"
        export BACKEND_STACK="FastAPI"
        export DATABASE_STACK="TBD"
        export TESTING_STACK="pytest + httpx"
        export DEPLOYMENT_STACK="Docker"
        export CONFIG_FILES="pyproject.toml"
        ;;
    python-library)
        export PROJECT_SLUG=$(echo "$PROJECT_NAME" | tr '[:upper:]' '[:lower:]' | tr ' ' '_' | sed 's/[^a-z0-9_]//g')
        export QUALITY_GATES="uv run ruff check src/ tests/ && uv run ruff format --check src/ && uv run mypy src/ && uv run pytest --cov"
        export SOURCE_DIR="src"
        export BUILD_COMMAND="uv sync --all-extras"
        export TEST_COMMAND="uv run pytest --cov"
        export LINT_COMMAND="uv run ruff check src/ tests/"
        export TYPECHECK_COMMAND="uv run mypy src/"
        export BUILD_SYSTEM="hatchling"
        export FRONTEND_STACK="N/A"
        export BACKEND_STACK="N/A"
        export DATABASE_STACK="N/A"
        export TESTING_STACK="pytest"
        export DEPLOYMENT_STACK="PyPI / Gitea Packages"
        export CONFIG_FILES="pyproject.toml"
        ;;
    *)
        # Generic fallback: placeholder commands the user must customize.
        export QUALITY_GATES="echo 'No quality gates configured — update CLAUDE.md'"
        export SOURCE_DIR="src"
        export BUILD_COMMAND="echo 'No build command configured'"
        export TEST_COMMAND="echo 'No test command configured'"
        export LINT_COMMAND="echo 'No lint command configured'"
        export TYPECHECK_COMMAND="echo 'No typecheck command configured'"
        export FRONTEND_STACK="TBD"
        export BACKEND_STACK="TBD"
        export DATABASE_STACK="TBD"
        export TESTING_STACK="TBD"
        export DEPLOYMENT_STACK="TBD"
        export CONFIG_FILES="TBD"
        ;;
esac
# Export common variables (consumed by envsubst when rendering templates).
export PROJECT_NAME
export PROJECT_DESCRIPTION
export PROJECT_DIR
export REPO_URL
export TASK_PREFIX
echo "=== Project Bootstrap ==="
echo "  Name:        $PROJECT_NAME"
echo "  Type:        $PROJECT_TYPE"
echo "  Prefix:      $TASK_PREFIX"
echo "  Description: $PROJECT_DESCRIPTION"
echo "  Repo:        ${REPO_URL:-'(not set)'}"
echo "  Directory:   $(pwd)"
echo ""
# Select template directory: stack-specific when available, else generic.
STACK_TEMPLATE_DIR="$TEMPLATE_DIR/projects/$PROJECT_TYPE"
if [[ ! -d "$STACK_TEMPLATE_DIR" ]]; then
    STACK_TEMPLATE_DIR="$TEMPLATE_DIR"
    echo "No stack-specific templates found for '$PROJECT_TYPE', using generic templates."
fi
# Dry run: describe everything that would be created, then stop.
if [[ "$DRY_RUN" == true ]]; then
    echo "[DRY RUN] Would create:"
    echo "  - Validate sequential-thinking MCP hard requirement"
    echo "  - CLAUDE.md (from $STACK_TEMPLATE_DIR/CLAUDE.md.template)"
    echo "  - AGENTS.md (from $STACK_TEMPLATE_DIR/AGENTS.md.template)"
    echo "  - docs/scratchpads/"
    echo "  - docs/reports/qa-automation/{pending,in-progress,done,escalated}"
    echo "  - docs/reports/deferred/"
    echo "  - docs/tasks/"
    echo "  - docs/releases/"
    echo "  - docs/templates/"
    if [[ "$SKIP_CI" != true ]]; then
        echo "  - .woodpecker/codex-review.yml"
        echo "  - .woodpecker/schemas/*.json"
    fi
    if [[ "$SKIP_LABELS" != true ]]; then
        echo "  - Standard git labels (epic, feature, bug, task, documentation, security, breaking)"
        echo "  - Milestone: 0.0.1 - Pre-MVP Foundation"
        echo "  - Milestone policy: 0.0.x pre-MVP, 0.1.0 for MVP release"
    fi
    if [[ "$CICD_DOCKER" == true ]]; then
        echo "  - Docker build/push/link steps appended to .woodpecker.yml"
        for svc in "${CICD_SERVICES[@]}"; do
            echo "    - docker-build-${svc%%:*}"
        done
        echo "    - link-packages"
    fi
    exit 0
fi
# Enforce sequential-thinking MCP hard requirement.
if [[ ! -x "$SEQUENTIAL_MCP_SCRIPT" ]]; then
    echo "Error: Missing sequential-thinking setup helper: $SEQUENTIAL_MCP_SCRIPT" >&2
    echo "Install/repair Mosaic at ~/.config/mosaic before bootstrapping projects." >&2
    exit 1
fi
# The helper both verifies and repairs the MCP config; its exit status gates
# the whole bootstrap.
if "$SEQUENTIAL_MCP_SCRIPT" >/dev/null 2>&1; then
    echo "Verified sequential-thinking MCP configuration"
else
    echo "Error: sequential-thinking MCP setup failed (hard requirement)." >&2
    echo "Run: $SEQUENTIAL_MCP_SCRIPT" >&2
    exit 1
fi
# Create CLAUDE.md
# envsubst substitutes every exported ${VAR} in the template, so the exports
# above must have run first. Existing files are never overwritten.
if [[ -f "CLAUDE.md" ]]; then
    echo "CLAUDE.md already exists — skipping (rename or delete to recreate)"
else
    if [[ -f "$STACK_TEMPLATE_DIR/CLAUDE.md.template" ]]; then
        envsubst < "$STACK_TEMPLATE_DIR/CLAUDE.md.template" > CLAUDE.md
        echo "Created CLAUDE.md"
    else
        echo "Warning: No CLAUDE.md template found at $STACK_TEMPLATE_DIR" >&2
    fi
fi
# Create AGENTS.md
if [[ -f "AGENTS.md" ]]; then
    echo "AGENTS.md already exists — skipping (rename or delete to recreate)"
else
    if [[ -f "$STACK_TEMPLATE_DIR/AGENTS.md.template" ]]; then
        envsubst < "$STACK_TEMPLATE_DIR/AGENTS.md.template" > AGENTS.md
        echo "Created AGENTS.md"
    else
        echo "Warning: No AGENTS.md template found at $STACK_TEMPLATE_DIR" >&2
    fi
fi
# Create directories (mkdir -p is idempotent).
mkdir -p \
    docs/scratchpads \
    docs/reports/qa-automation/pending \
    docs/reports/qa-automation/in-progress \
    docs/reports/qa-automation/done \
    docs/reports/qa-automation/escalated \
    docs/reports/deferred \
    docs/tasks \
    docs/releases \
    docs/templates
echo "Created docs/scratchpads/, docs/reports/*, docs/tasks/, docs/releases/, docs/templates/"
# Set up CI/CD pipeline: copy the Codex review pipeline + JSON schemas.
if [[ "$SKIP_CI" != true ]]; then
    CODEX_DIR="$HOME/.config/mosaic/tools/codex"
    if [[ -d "$CODEX_DIR/woodpecker" ]]; then
        mkdir -p .woodpecker/schemas
        cp "$CODEX_DIR/woodpecker/codex-review.yml" .woodpecker/
        cp "$CODEX_DIR/schemas/"*.json .woodpecker/schemas/
        echo "Created .woodpecker/ with Codex review pipeline"
    else
        echo "Codex pipeline templates not found — skipping CI setup"
    fi
fi
# Generate Docker build/push/link pipeline steps
if [[ "$CICD_DOCKER" == true ]]; then
    CICD_SCRIPT="$HOME/.config/mosaic/tools/cicd/generate-docker-steps.sh"
    if [[ -x "$CICD_SCRIPT" ]]; then
        # Parse registry host, org, and repo name from the git remote URL.
        CICD_REGISTRY=""
        CICD_ORG=""
        CICD_REPO_NAME=""
        if [[ -n "$REPO_URL" ]]; then
            # Extract host/org from https://host/org/repo.git or git@host:org/repo.git
            CICD_REGISTRY=$(echo "$REPO_URL" | sed -E 's|https?://([^/]+)/.*|\1|; s|git@([^:]+):.*|\1|')
            CICD_ORG=$(echo "$REPO_URL" | sed -E 's|https?://[^/]+/([^/]+)/.*|\1|; s|git@[^:]+:([^/]+)/.*|\1|')
            # Repo name: last path component minus any ".git" suffix.
            # Fix: the original used the non-greedy ERE '[^/]+?', which POSIX
            # ERE does not support — greedy matching left ".git" attached.
            CICD_REPO_NAME=${REPO_URL##*/}
            CICD_REPO_NAME=${CICD_REPO_NAME%.git}
        fi
        if [[ -n "$CICD_REGISTRY" && -n "$CICD_ORG" && -n "$CICD_REPO_NAME" && ${#CICD_SERVICES[@]} -gt 0 ]]; then
            # Build service args as an array so each "name:path" value is
            # passed as its own word even if it contains spaces.
            SVC_ARGS=()
            for svc in "${CICD_SERVICES[@]}"; do
                SVC_ARGS+=(--service "$svc")
            done
            echo ""
            echo "Generating Docker CI/CD pipeline steps..."
            if [[ -f ".woodpecker.yml" ]]; then
                # Append Docker steps to existing pipeline
                "$CICD_SCRIPT" \
                    --registry "$CICD_REGISTRY" \
                    --org "$CICD_ORG" \
                    --repo "$CICD_REPO_NAME" \
                    "${SVC_ARGS[@]}" \
                    --branches "$CICD_BRANCHES" >> .woodpecker.yml
                echo "Appended Docker build/push/link steps to .woodpecker.yml"
            else
                echo "Warning: No .woodpecker.yml found — generate quality gates first, then re-run with --cicd-docker" >&2
            fi
        else
            if [[ ${#CICD_SERVICES[@]} -eq 0 ]]; then
                echo "Warning: --cicd-docker requires at least one --cicd-service" >&2
            else
                echo "Warning: Could not parse registry/org/repo from git remote — specify --repo" >&2
            fi
        fi
    else
        echo "Docker CI/CD generator not found at $CICD_SCRIPT — skipping" >&2
    fi
fi
# Initialize labels and milestones via the sibling script (best-effort:
# a missing script is reported, not fatal).
if [[ "$SKIP_LABELS" != true ]]; then
    LABEL_SCRIPT="$SCRIPT_DIR/init-repo-labels.sh"
    if [[ -x "$LABEL_SCRIPT" ]]; then
        echo ""
        echo "Initializing git labels and milestones..."
        "$LABEL_SCRIPT"
    else
        echo "Label init script not found — skipping label setup"
    fi
fi
echo ""
echo "=== Bootstrap Complete ==="
echo ""
echo "Next steps:"
echo "  1. Review and customize CLAUDE.md"
echo "  2. Review and customize AGENTS.md"
echo "  3. Update quality gate commands if needed"
echo "  4. Commit: git add CLAUDE.md AGENTS.md docs/ .woodpecker/ && git commit -m 'feat: Bootstrap project for AI development'"
if [[ "$SKIP_CI" != true ]]; then
    echo "  5. Add 'codex_api_key' secret to Woodpecker CI"
fi
if [[ "$CICD_DOCKER" == true ]]; then
    echo "  6. Add 'gitea_username' and 'gitea_token' secrets to Woodpecker CI"
    echo "     (token needs package:write scope)"
fi
View File

@@ -0,0 +1,123 @@
#!/bin/bash
# init-repo-labels.sh - Create standard labels and initial milestone for a repository
# Usage: init-repo-labels.sh [--skip-milestone]
#
# Works with both Gitea (tea) and GitHub (gh).
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
GIT_SCRIPT_DIR="$HOME/.config/mosaic/tools/git"
# Provides detect_platform, get_repo_owner, get_repo_name (external helper).
source "$GIT_SCRIPT_DIR/detect-platform.sh"
SKIP_MILESTONE=false
while [[ $# -gt 0 ]]; do
    case $1 in
        --skip-milestone)
            SKIP_MILESTONE=true
            shift
            ;;
        -h|--help)
            echo "Usage: $(basename "$0") [--skip-milestone]"
            echo ""
            echo "Create standard labels and initial milestone for the current repository."
            echo ""
            echo "Options:"
            echo "  --skip-milestone   Skip creating the 0.0.1 pre-MVP milestone"
            echo "  -h, --help         Show this help"
            exit 0
            ;;
        *)
            echo "Unknown option: $1" >&2
            exit 1
            ;;
    esac
done
# Platform/owner/repo come from the sourced detect-platform.sh helpers.
PLATFORM=$(detect_platform)
OWNER=$(get_repo_owner)
REPO=$(get_repo_name)
echo "Platform: $PLATFORM"
echo "Repository: $OWNER/$REPO"
echo ""
# Standard labels with colors
# Format: "name|color|description" (color is hex without the leading '#').
LABELS=(
    "epic|3E4B9E|Large feature spanning multiple issues"
    "feature|0E8A16|New functionality"
    "bug|D73A4A|Defect fix"
    "task|0075CA|General work item"
    "documentation|0075CA|Documentation updates"
    "security|B60205|Security-related"
    "breaking|D93F0B|Breaking change"
)
# Create one GitHub label via `gh`, skipping when it already exists.
# Arguments: $1 - name, $2 - hex color (no '#'), $3 - description
# Always exits 0: creation failures are reported, not fatal (set -e safe).
create_label_github() {
    local name="$1" color="$2" description="$3"
    # Check if label already exists (grep -qx: exact whole-line match).
    if gh label list --repo "$OWNER/$REPO" --json name -q ".[].name" 2>/dev/null | grep -qx "$name"; then
        echo "  [skip] '$name' already exists"
        return 0
    fi
    gh label create "$name" \
        --repo "$OWNER/$REPO" \
        --color "$color" \
        --description "$description" 2>/dev/null && \
        echo "  [created] '$name'" || \
        echo "  [error] Failed to create '$name'"
}
# Create one Gitea label via `tea`, skipping when it already exists.
# Arguments: $1 - name, $2 - hex color (no '#'), $3 - description
# Always exits 0: creation failures are reported, not fatal (set -e safe).
create_label_gitea() {
    local name="$1" color="$2" description="$3"
    # Check if label already exists. Fix: match as a whole word (-w) instead
    # of a bare substring, so an existing "debug" label no longer suppresses
    # creation of "bug" (the GitHub variant uses exact matching likewise).
    # '--' protects names that start with a dash.
    if tea labels list 2>/dev/null | grep -qw -- "$name"; then
        echo "  [skip] '$name' already exists"
        return 0
    fi
    tea labels create --name "$name" --color "#$color" --description "$description" 2>/dev/null && \
        echo "  [created] '$name'" || \
        echo "  [error] Failed to create '$name'"
}
echo "Creating labels..."
# Split each "name|color|description" triple and dispatch per platform.
for label_def in "${LABELS[@]}"; do
    IFS='|' read -r name color description <<< "$label_def"
    case "$PLATFORM" in
        github)
            create_label_github "$name" "$color" "$description"
            ;;
        gitea)
            create_label_gitea "$name" "$color" "$description"
            ;;
        *)
            echo "Error: Unsupported platform '$PLATFORM'" >&2
            exit 1
            ;;
    esac
done
echo ""
# Create initial pre-MVP milestone (best-effort: duplicates are tolerated).
if [[ "$SKIP_MILESTONE" != true ]]; then
    echo "Creating initial pre-MVP milestone..."
    "$GIT_SCRIPT_DIR/milestone-create.sh" -t "0.0.1" -d "Pre-MVP - Foundation Sprint" 2>/dev/null && \
        echo "  [created] Milestone '0.0.1 - Pre-MVP'" || \
        echo "  [skip] Milestone may already exist or creation failed"
    echo "  [note] Reserve 0.1.0 for MVP release milestone"
    echo ""
fi
echo "Label initialization complete."
View File

@@ -0,0 +1,379 @@
#!/bin/bash
# generate-docker-steps.sh - Generate Woodpecker CI pipeline steps for Docker build/push/link
#
# Emits valid Woodpecker YAML covering:
#   - one Kaniko Docker build & push step per --service entry
#   - a Gitea package-linking step
#   - an optional npm package publish step
#
# Run with --help for full flag descriptions and worked examples.
set -e

# Defaults; every one of these can be overridden by a CLI flag below.
REGISTRY=""
ORG=""
REPO=""
BRANCHES="main,develop"
DEPENDS_ON="build"
declare -a SERVICES=()
declare -a BUILD_ARGS=()
declare -a NPM_PACKAGES=()
NPM_REGISTRY=""
# Print CLI usage (quoted heredoc: no expansion happens) and exit successfully.
show_help() {
cat <<'EOF'
Usage: generate-docker-steps.sh [OPTIONS]
Generate Woodpecker CI YAML for Docker build/push/link via Kaniko.
Required:
--registry <host> Gitea hostname (e.g., git.uscllc.com)
--org <name> Gitea organization (e.g., usc)
--repo <name> Repository name (e.g., uconnect)
--service <name:dockerfile> Service to build (repeatable)
Optional:
--branches <list> Comma-separated branches (default: main,develop)
--depends-on <step> Step name Docker builds depend on (default: build)
--build-arg <service:KEY=VAL> Build arg for a service (repeatable)
--npm-package <pkg:path> npm package to publish (repeatable)
--npm-registry <url> npm registry URL for publishing
--kaniko-setup-only Output just the kaniko_setup YAML anchor
-h, --help Show this help
Examples:
# Mosaic Stack pattern
generate-docker-steps.sh \
--registry git.mosaicstack.dev --org mosaic --repo stack \
--service stack-api:apps/api/Dockerfile \
--service stack-web:apps/web/Dockerfile \
--build-arg stack-web:NEXT_PUBLIC_API_URL=https://api.mosaicstack.dev
# U-Connect pattern
generate-docker-steps.sh \
--registry git.uscllc.com --org usc --repo uconnect \
--service uconnect-backend-api:src/backend-api/Dockerfile \
--service uconnect-web-portal:src/web-portal/Dockerfile \
--service uconnect-ingest-api:src/ingest-api/Dockerfile \
--branches main,develop
EOF
exit 0
}
KANIKO_SETUP_ONLY=false

# Parse CLI flags; every value-taking flag consumes two positional arguments.
while (( $# > 0 )); do
  case $1 in
    --registry)          REGISTRY="$2";          shift 2 ;;
    --org)               ORG="$2";               shift 2 ;;
    --repo)              REPO="$2";              shift 2 ;;
    --branches)          BRANCHES="$2";          shift 2 ;;
    --depends-on)        DEPENDS_ON="$2";        shift 2 ;;
    --npm-registry)      NPM_REGISTRY="$2";      shift 2 ;;
    --service)           SERVICES+=("$2");       shift 2 ;;
    --build-arg)         BUILD_ARGS+=("$2");     shift 2 ;;
    --npm-package)       NPM_PACKAGES+=("$2");   shift 2 ;;
    --kaniko-setup-only) KANIKO_SETUP_ONLY=true; shift ;;
    -h|--help)           show_help ;;
    *)                   echo "Unknown option: $1" >&2; exit 1 ;;
  esac
done
# Validate required args
# --registry/--org/--repo are always mandatory; services are optional only
# when just emitting the kaniko_setup anchor.
if [[ -z "$REGISTRY" ]]; then echo "Error: --registry is required" >&2; exit 1; fi
if [[ -z "$ORG" ]]; then echo "Error: --org is required" >&2; exit 1; fi
if [[ -z "$REPO" ]]; then echo "Error: --repo is required" >&2; exit 1; fi
if [[ ${#SERVICES[@]} -eq 0 && "$KANIKO_SETUP_ONLY" != true ]]; then
echo "Error: at least one --service is required" >&2; exit 1
fi
# Parse branches into YAML list
# e.g. "main,develop" -> "[main, develop]" (flow-style YAML sequence).
IFS=',' read -ra BRANCH_LIST <<< "$BRANCHES"
BRANCH_YAML="["
for i in "${!BRANCH_LIST[@]}"; do
# Comma-separate every element after the first.
if [[ $i -gt 0 ]]; then BRANCH_YAML="$BRANCH_YAML, "; fi
BRANCH_YAML="$BRANCH_YAML${BRANCH_LIST[$i]}"
done
BRANCH_YAML="$BRANCH_YAML]"
# Print the KEY=VAL build args registered (via --build-arg svc:KEY=VAL) for
# the service named $1, space-separated on a single line.
get_build_args_for_service() {
  local target="$1"
  local spec
  local matched=()
  for spec in "${BUILD_ARGS[@]}"; do
    # Everything before the first ':' is the service, the rest is KEY=VAL.
    if [[ "${spec%%:*}" == "$target" ]]; then
      matched+=("${spec#*:}")
    fi
  done
  echo "${matched[@]}"
}
# Map a Dockerfile path to its Kaniko build context directory.
# Monorepo-style locations (repo root, apps/, src/, packages/) build from the
# repository root "."; anything else builds from the Dockerfile's own directory.
#   e.g. apps/api/Dockerfile      -> .
#        docker/postgres/Dockerfile -> docker/postgres
get_context() {
  local parent
  parent=$(dirname "$1")
  case "$parent" in
    .|apps/*|src/*|packages/*) echo "." ;;
    *)                         echo "$parent" ;;
  esac
}
# ============================================================
# Output: YAML anchor for kaniko setup
# ============================================================
# Emits a reusable YAML anchor that writes Kaniko's registry auth file.
# The \" escapes survive into the generated YAML as literal quotes, and the
# escaped \$GITEA_USER / \$GITEA_TOKEN stay unexpanded here so they resolve
# from secrets at CI runtime. Only ${REGISTRY} is expanded at generation time.
emit_kaniko_anchor() {
cat <<EOF
# Kaniko base command setup
- &kaniko_setup |
mkdir -p /kaniko/.docker
echo "{\\"auths\\":{\\"${REGISTRY}\\":{\\"username\\":\\"\$GITEA_USER\\",\\"password\\":\\"\$GITEA_TOKEN\\"}}}" > /kaniko/.docker/config.json
EOF
}
# With --kaniko-setup-only, print just the anchor and stop here.
if [[ "$KANIKO_SETUP_ONLY" == true ]]; then
emit_kaniko_anchor
exit 0
fi
# ============================================================
# Output: Header comment
# ============================================================
# Banner documenting the target registry, required secrets, and tag strategy.
cat <<EOF
# ======================
# Docker Build & Push (${BRANCHES} only)
# ======================
# Generated by: generate-docker-steps.sh
# Registry: ${REGISTRY}/${ORG}
# Requires secrets: gitea_username, gitea_token
#
# Tagging Strategy:
# - Always: commit SHA (first 8 chars)
EOF
# Document the image tag each configured branch maps to
# (main -> 'latest', develop -> 'dev', anything else -> the branch name).
for b in "${BRANCH_LIST[@]}"; do
case "$b" in
main) echo " # - main branch: 'latest'" ;;
develop) echo " # - develop branch: 'dev'" ;;
*) echo " # - ${b} branch: '${b}'" ;;
esac
done
echo " # - git tags: version tag (e.g., v1.0.0)"
echo ""
# ============================================================
# Output: Kaniko build step for each service
# ============================================================
# One docker-build-<service> step per --service entry. Escaped \$ variables
# are resolved by Woodpecker/the step shell at CI time; unescaped ${...} are
# expanded now, at generation time.
for svc in "${SERVICES[@]}"; do
# "name:path/to/Dockerfile" -> name + dockerfile path.
SVC_NAME="${svc%%:*}"
DOCKERFILE="${svc#*:}"
CONTEXT=$(get_context "$DOCKERFILE")
SVC_BUILD_ARGS=$(get_build_args_for_service "$SVC_NAME")
# Build the kaniko command with build args
# NOTE(review): the unquoted $SVC_BUILD_ARGS relies on word-splitting, so a
# build-arg VALUE containing spaces would break apart here — confirm values
# are always space-free.
KANIKO_EXTRA=""
if [[ -n "$SVC_BUILD_ARGS" ]]; then
for arg in $SVC_BUILD_ARGS; do
KANIKO_EXTRA="$KANIKO_EXTRA --build-arg ${arg}"
done
fi
cat <<EOF
# Build and push ${SVC_NAME}
docker-build-${SVC_NAME}:
image: gcr.io/kaniko-project/executor:debug
environment:
GITEA_USER:
from_secret: gitea_username
GITEA_TOKEN:
from_secret: gitea_token
CI_COMMIT_BRANCH: \${CI_COMMIT_BRANCH}
CI_COMMIT_TAG: \${CI_COMMIT_TAG}
CI_COMMIT_SHA: \${CI_COMMIT_SHA}
commands:
- *kaniko_setup
- |
DESTINATIONS="--destination ${REGISTRY}/${ORG}/${SVC_NAME}:\${CI_COMMIT_SHA:0:8}"
EOF
# Branch-specific tags
# main -> 'latest', develop -> 'dev', other branches -> their own name.
for b in "${BRANCH_LIST[@]}"; do
case "$b" in
main)
cat <<EOF
if [ "\$CI_COMMIT_BRANCH" = "main" ]; then
DESTINATIONS="\$DESTINATIONS --destination ${REGISTRY}/${ORG}/${SVC_NAME}:latest"
fi
EOF
;;
develop)
cat <<EOF
if [ "\$CI_COMMIT_BRANCH" = "develop" ]; then
DESTINATIONS="\$DESTINATIONS --destination ${REGISTRY}/${ORG}/${SVC_NAME}:dev"
fi
EOF
;;
*)
cat <<EOF
if [ "\$CI_COMMIT_BRANCH" = "${b}" ]; then
DESTINATIONS="\$DESTINATIONS --destination ${REGISTRY}/${ORG}/${SVC_NAME}:${b}"
fi
EOF
;;
esac
done
# Version tag
# Git tags additionally push the version tag, then the executor runs once
# with every accumulated --destination.
cat <<EOF
if [ -n "\$CI_COMMIT_TAG" ]; then
DESTINATIONS="\$DESTINATIONS --destination ${REGISTRY}/${ORG}/${SVC_NAME}:\$CI_COMMIT_TAG"
fi
/kaniko/executor --context ${CONTEXT} --dockerfile ${DOCKERFILE}${KANIKO_EXTRA} \$DESTINATIONS
when:
- branch: ${BRANCH_YAML}
event: [push, manual, tag]
depends_on:
- ${DEPENDS_ON}
EOF
done
# ============================================================
# Output: Package linking step
# ============================================================
# Single step that links every pushed container package to the repository.
# The \$\$ sequences render as Woodpecker's $$ escape, so the inner script's
# variables expand inside the CI container, not at generation time.
cat <<EOF
# ======================
# Link Packages to Repository
# ======================
link-packages:
image: alpine:3
environment:
GITEA_TOKEN:
from_secret: gitea_token
commands:
- apk add --no-cache curl
- echo "Waiting 10 seconds for packages to be indexed in registry..."
- sleep 10
- |
set -e
link_package() {
PKG="\$\$1"
echo "Linking \$\$PKG..."
for attempt in 1 2 3; do
STATUS=\$\$(curl -s -o /tmp/link-response.txt -w "%{http_code}" -X POST \\
-H "Authorization: token \$\$GITEA_TOKEN" \\
"https://${REGISTRY}/api/v1/packages/${ORG}/container/\$\$PKG/-/link/${REPO}")
if [ "\$\$STATUS" = "201" ] || [ "\$\$STATUS" = "204" ]; then
echo " Linked \$\$PKG"
return 0
elif [ "\$\$STATUS" = "400" ]; then
echo " \$\$PKG already linked"
return 0
elif [ "\$\$STATUS" = "404" ] && [ \$\$attempt -lt 3 ]; then
echo " \$\$PKG not found yet, waiting 5s (attempt \$\$attempt/3)..."
sleep 5
else
echo " FAILED: \$\$PKG status \$\$STATUS"
cat /tmp/link-response.txt
return 1
fi
done
}
EOF
# List all services to link
for svc in "${SERVICES[@]}"; do
SVC_NAME="${svc%%:*}"
echo " link_package \"${SVC_NAME}\""
done
# Close the link step
# The step waits for every docker-build-<service> step emitted above.
cat <<EOF
when:
- branch: ${BRANCH_YAML}
event: [push, manual, tag]
depends_on:
EOF
for svc in "${SERVICES[@]}"; do
SVC_NAME="${svc%%:*}"
echo " - docker-build-${SVC_NAME}"
done
echo ""
# ============================================================
# Output: npm publish step (if requested)
# ============================================================
# Only emitted when both --npm-package and --npm-registry were given.
if [[ ${#NPM_PACKAGES[@]} -gt 0 && -n "$NPM_REGISTRY" ]]; then
cat <<EOF
# ======================
# Publish npm Packages
# ======================
publish-packages:
image: node:20-alpine
environment:
GITEA_TOKEN:
from_secret: gitea_token
commands:
- |
echo "//${NPM_REGISTRY#https://}:_authToken=\$\$GITEA_TOKEN" > .npmrc
EOF
# NOTE(review): the line above assumes NPM_REGISTRY starts with https:// —
# an http:// URL would leave the scheme in the .npmrc auth line; confirm.
# Detect scope from first package
# e.g. "@uconnect/schemas" -> "@uconnect"; scoped packages get a registry
# mapping line so npm publishes them to the Gitea registry.
FIRST_PKG="${NPM_PACKAGES[0]}"
PKG_NAME="${FIRST_PKG%%:*}"
SCOPE="${PKG_NAME%%/*}"
if [[ "$SCOPE" == @* ]]; then
echo " echo \"${SCOPE}:registry=${NPM_REGISTRY}\" >> .npmrc"
fi
# One publish command per package: skip when the workspace version is
# already published, otherwise publish via the workspace flag.
for pkg in "${NPM_PACKAGES[@]}"; do
PKG_NAME="${pkg%%:*}"
PKG_PATH="${pkg#*:}"
cat <<EOF
- |
CURRENT=\$\$(node -p "require('./${PKG_PATH}/package.json').version")
PUBLISHED=\$\$(npm view ${PKG_NAME} version 2>/dev/null || echo "0.0.0")
if [ "\$\$CURRENT" = "\$\$PUBLISHED" ]; then
echo "${PKG_NAME}@\$\$CURRENT already published, skipping"
else
echo "Publishing ${PKG_NAME}@\$\$CURRENT (was \$\$PUBLISHED)"
npm publish -w ${PKG_NAME}
fi
EOF
done
# Packages publish from main only (plus manual runs and tags).
cat <<EOF
when:
- branch: [main]
event: [push, manual, tag]
depends_on:
- ${DEPENDS_ON}
EOF
fi

265
tools/codex/README.md Normal file
View File

@@ -0,0 +1,265 @@
# Codex CLI Review Scripts
AI-powered code review and security review scripts using OpenAI's Codex CLI.
These scripts provide **independent** code analysis separate from Claude sessions, giving you a second AI perspective on code changes to catch issues that might be missed.
## Prerequisites
```bash
# Install Codex CLI
npm i -g @openai/codex
# Verify installation
codex --version
# Authenticate (first run)
codex # Will prompt for ChatGPT account or API key
# Verify jq is installed (for JSON processing)
jq --version
```
## Scripts
### `codex-code-review.sh`
General code quality review focusing on:
- **Correctness** — logic errors, edge cases, error handling
- **Code Quality** — complexity, duplication, naming, dead code
- **Testing** — coverage, test quality
- **Performance** — N+1 queries, blocking operations, resource cleanup
- **Dependencies** — deprecated packages
- **Documentation** — comments, public API docs
**Output:** Structured JSON with findings categorized as `blocker`, `should-fix`, or `suggestion`.
### `codex-security-review.sh`
Security vulnerability review focusing on:
- **OWASP Top 10** — injection, broken auth, XSS, CSRF, SSRF, etc.
- **Secrets Detection** — hardcoded credentials, API keys, tokens
- **Injection Flaws** — SQL, NoSQL, OS command, LDAP
- **Auth/Authz Gaps** — missing checks, privilege escalation, IDOR
- **Data Exposure** — logging sensitive data, information disclosure
- **Supply Chain** — vulnerable dependencies, typosquatting
**Output:** Structured JSON with findings categorized as `critical`, `high`, `medium`, or `low` with CWE IDs and OWASP categories.
## Usage
### Review Uncommitted Changes
```bash
# Code review
~/.config/mosaic/tools/codex/codex-code-review.sh --uncommitted
# Security review
~/.config/mosaic/tools/codex/codex-security-review.sh --uncommitted
```
### Review a Pull Request
```bash
# Review and post findings as a PR comment
~/.config/mosaic/tools/codex/codex-code-review.sh -n 42
# Security review and post to PR
~/.config/mosaic/tools/codex/codex-security-review.sh -n 42
```
### Review Against Base Branch
```bash
# Code review changes vs main
~/.config/mosaic/tools/codex/codex-code-review.sh -b main
# Security review changes vs develop
~/.config/mosaic/tools/codex/codex-security-review.sh -b develop
```
### Review a Specific Commit
```bash
~/.config/mosaic/tools/codex/codex-code-review.sh -c abc123f
~/.config/mosaic/tools/codex/codex-security-review.sh -c abc123f
```
### Save Results to File
```bash
# Save JSON output
~/.config/mosaic/tools/codex/codex-code-review.sh --uncommitted -o review-results.json
~/.config/mosaic/tools/codex/codex-security-review.sh --uncommitted -o security-results.json
```
## Options
Both scripts support the same options:
| Option | Description |
|--------|-------------|
| `-n, --pr <number>` | PR number (auto-enables posting to PR) |
| `-b, --base <branch>` | Base branch to diff against (default: main) |
| `-c, --commit <sha>` | Review a specific commit |
| `-o, --output <path>` | Write JSON results to file |
| `--post-to-pr` | Post findings as PR comment (requires -n) |
| `--uncommitted` | Review uncommitted changes (staged + unstaged + untracked) |
| `-h, --help` | Show help |
## Woodpecker CI Integration
Automated PR reviews in CI pipelines.
### Setup
1. **Copy the pipeline template to your repo:**
```bash
cp ~/.config/mosaic/tools/codex/woodpecker/codex-review.yml your-repo/.woodpecker/
```
2. **Copy the schemas directory:**
```bash
cp -r ~/.config/mosaic/tools/codex/schemas your-repo/.woodpecker/
```
3. **Add Codex API key to Woodpecker:**
- Go to your repo in Woodpecker CI
- Settings → Secrets
- Add secret: `codex_api_key` with your OpenAI API key
4. **Commit and push:**
```bash
cd your-repo
git add .woodpecker/
git commit -m "feat: Add Codex AI review pipeline"
git push
```
### Pipeline Behavior
- **Triggers on:** Pull requests
- **Runs:** Code review + Security review in parallel
- **Fails if:**
- Code review finds blockers
- Security review finds critical or high severity issues
- **Outputs:** Structured JSON results in CI logs
## Output Format
### Code Review JSON
```json
{
"summary": "Overall assessment...",
"verdict": "approve|request-changes|comment",
"confidence": 0.85,
"findings": [
{
"severity": "blocker",
"title": "SQL injection vulnerability",
"file": "src/api/users.ts",
"line_start": 42,
"line_end": 45,
"description": "User input directly interpolated into SQL query",
"suggestion": "Use parameterized queries"
}
],
"stats": {
"files_reviewed": 5,
"blockers": 1,
"should_fix": 3,
"suggestions": 8
}
}
```
### Security Review JSON
```json
{
"summary": "Security assessment...",
"risk_level": "high",
"confidence": 0.90,
"findings": [
{
"severity": "high",
"title": "Hardcoded API key",
"file": "src/config.ts",
"line_start": 10,
"description": "API key hardcoded in source",
"cwe_id": "CWE-798",
"owasp_category": "A02:2021-Cryptographic Failures",
"remediation": "Move to environment variables or secrets manager"
}
],
"stats": {
"files_reviewed": 5,
"critical": 0,
"high": 1,
"medium": 2,
"low": 3
}
}
```
## Platform Support
Works with both **GitHub** and **Gitea** via the shared `~/.config/mosaic/tools/git/` infrastructure:
- Auto-detects platform from git remote
- Posts PR comments using `gh` (GitHub) or `tea` (Gitea)
- Unified interface across both platforms
## Architecture
```
codex-code-review.sh
codex-security-review.sh
common.sh
↓ sources
../git/detect-platform.sh (platform detection)
../git/pr-review.sh (post PR comments)
↓ uses
gh (GitHub) or tea (Gitea)
```
## Troubleshooting
### "codex: command not found"
```bash
npm i -g @openai/codex
```
### "jq: command not found"
```bash
# Arch Linux
sudo pacman -S jq
# Debian/Ubuntu
sudo apt install jq
```
### "Error: Not inside a git repository"
Run the script from inside a git repository.
### "No changes found to review"
The specified mode (--uncommitted, --base, etc.) found no changes to review.
### "Codex produced no output"
Check your Codex API key and authentication:
```bash
codex # Re-authenticate if needed
```
## Model Configuration
By default, scripts use the model configured in `~/.codex/config.toml`:
- **Model:** `gpt-5.3-codex` (recommended for code review)
- **Reasoning effort:** `high`
For best results, use `gpt-5.3-codex` (or newer) — larger reasoning models give the strongest review accuracy.
## See Also
- `~/.config/mosaic/guides/CODE-REVIEW.md` — Manual code review checklist
- `~/.config/mosaic/tools/git/` — Git helper scripts (issue/PR management)
- OpenAI Codex CLI docs: https://developers.openai.com/codex/cli/

238
tools/codex/codex-code-review.sh Executable file
View File

@@ -0,0 +1,238 @@
#!/bin/bash
# codex-code-review.sh - Run an AI-powered code quality review using Codex CLI
# Usage: codex-code-review.sh [OPTIONS]
#
# Runs codex exec in read-only sandbox mode with a structured code review prompt.
# Outputs findings as JSON and optionally posts them to a PR.
set -e
# Resolve this script's own directory so sibling helpers load by path.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Shared helpers: check_codex/check_jq, build_diff_context, post_to_pr, print_results.
source "$SCRIPT_DIR/common.sh"
# Defaults
# REVIEW_MODE stays empty until a mode flag is parsed; validation below
# rejects an empty mode.
PR_NUMBER=""
BASE_BRANCH="main"
COMMIT_SHA=""
OUTPUT_FILE=""
POST_TO_PR=false
UNCOMMITTED=false
REVIEW_MODE=""
# Print CLI usage (quoted heredoc: no expansion happens) and exit successfully.
show_help() {
cat <<'EOF'
Usage: codex-code-review.sh [OPTIONS]
Run an AI-powered code quality review using OpenAI Codex CLI.
Options:
-n, --pr <number> PR number (auto-enables posting findings to PR)
-b, --base <branch> Base branch to diff against (default: main)
-c, --commit <sha> Review a specific commit
-o, --output <path> Write JSON results to file
--post-to-pr Post findings as PR comment (requires -n)
--uncommitted Review uncommitted changes (staged + unstaged + untracked)
-h, --help Show this help
Examples:
# Review uncommitted changes
codex-code-review.sh --uncommitted
# Review a PR and post findings as a comment
codex-code-review.sh -n 42
# Review changes against main, save JSON
codex-code-review.sh -b main -o review.json
# Review a specific commit
codex-code-review.sh -c abc123f
EOF
exit 0
}
# Parse CLI flags; each mode flag records which diff source to review.
while (( $# > 0 )); do
  case $1 in
    -n|--pr)        PR_NUMBER="$2"; POST_TO_PR=true; REVIEW_MODE="pr"; shift 2 ;;
    -b|--base)      BASE_BRANCH="$2"; REVIEW_MODE="base"; shift 2 ;;
    -c|--commit)    COMMIT_SHA="$2"; REVIEW_MODE="commit"; shift 2 ;;
    -o|--output)    OUTPUT_FILE="$2"; shift 2 ;;
    --post-to-pr)   POST_TO_PR=true; shift ;;
    --uncommitted)  UNCOMMITTED=true; REVIEW_MODE="uncommitted"; shift ;;
    -h|--help)      show_help ;;
    *)
      echo "Unknown option: $1" >&2
      echo "Run with --help for usage" >&2
      exit 1
      ;;
  esac
done
# Validate
# A review mode is mandatory; --post-to-pr only makes sense with a PR number.
if [[ -z "$REVIEW_MODE" ]]; then
echo "Error: Specify a review mode: --uncommitted, --base <branch>, --commit <sha>, or --pr <number>" >&2
exit 1
fi
if [[ "$POST_TO_PR" == true && -z "$PR_NUMBER" ]]; then
echo "Error: --post-to-pr requires -n <pr_number>" >&2
exit 1
fi
# Fail fast if required tooling is missing (helpers from common.sh).
check_codex
check_jq
# Verify we're in a git repo
if ! git rev-parse --is-inside-work-tree &>/dev/null; then
echo "Error: Not inside a git repository" >&2
exit 1
fi
# Get the diff context
# build_diff_context (common.sh) prints the diff text for the chosen mode.
echo "Gathering diff context..." >&2
case "$REVIEW_MODE" in
uncommitted) DIFF_CONTEXT=$(build_diff_context "uncommitted" "") ;;
base) DIFF_CONTEXT=$(build_diff_context "base" "$BASE_BRANCH") ;;
commit) DIFF_CONTEXT=$(build_diff_context "commit" "$COMMIT_SHA") ;;
pr) DIFF_CONTEXT=$(build_diff_context "pr" "$PR_NUMBER") ;;
esac
# Nothing to review is treated as a clean no-op, not an error.
if [[ -z "$DIFF_CONTEXT" ]]; then
echo "No changes found to review." >&2
exit 0
fi
# Build the review prompt
# Quoted 'PROMPT' delimiter: the text is literal, sent verbatim to Codex.
REVIEW_PROMPT=$(cat <<'PROMPT'
You are an expert code reviewer. Review the following code changes thoroughly.
Focus on issues that are ACTIONABLE and IMPORTANT. Do not flag trivial style issues.
## Review Checklist
### Correctness
- Code does what it claims to do
- Edge cases are handled
- Error conditions are managed properly
- No obvious bugs or logic errors
### Code Quality
- Functions are focused and reasonably sized
- No unnecessary complexity
- DRY - no significant duplication
- Clear naming for variables and functions
- No dead code or commented-out code
### Testing
- Tests exist for new functionality
- Tests cover happy path AND error cases
- No flaky tests introduced
### Performance
- No obvious N+1 queries
- No blocking operations in hot paths
- Resource cleanup (connections, file handles)
### Dependencies
- No deprecated packages
- No unnecessary new dependencies
### Documentation
- Complex logic has explanatory comments
- Public APIs are documented
## Severity Guide
- **blocker**: Must fix before merge (bugs, correctness issues, missing error handling)
- **should-fix**: Important but not blocking (code quality, minor issues)
- **suggestion**: Optional improvements (nice-to-haves)
Only report findings you are confident about (confidence > 0.7).
If the code looks good, say so — don't manufacture issues.
PROMPT
)
# Set up temp files for output and diff
TEMP_OUTPUT=$(mktemp /tmp/codex-review-XXXXXX.json)
TEMP_DIFF=$(mktemp /tmp/codex-diff-XXXXXX.txt)
# Remove both temp files on any exit path.
trap 'rm -f "$TEMP_OUTPUT" "$TEMP_DIFF"' EXIT
SCHEMA_FILE="$SCRIPT_DIR/schemas/code-review-schema.json"
# Write diff to temp file
echo "$DIFF_CONTEXT" > "$TEMP_DIFF"
echo "Running Codex code review..." >&2
echo " Diff size: $(wc -l < "$TEMP_DIFF") lines" >&2
# Build full prompt with diff reference
FULL_PROMPT="${REVIEW_PROMPT}
Here are the code changes to review:
\`\`\`diff
$(cat "$TEMP_DIFF")
\`\`\`"
# Run codex exec with prompt from stdin to avoid arg length limits
# The trailing '-' argument makes codex read the prompt from stdin; its
# combined output is prefixed and forwarded to stderr as progress.
# NOTE(review): no pipefail is set, so a codex failure is masked by the
# while-loop pipeline — the empty-output check below is the only failure
# signal. Confirm this is intended.
echo "$FULL_PROMPT" | codex exec \
--sandbox read-only \
--output-schema "$SCHEMA_FILE" \
-o "$TEMP_OUTPUT" \
- 2>&1 | while IFS= read -r line; do
echo " [codex] $line" >&2
done
# Check output was produced
if [[ ! -s "$TEMP_OUTPUT" ]]; then
echo "Error: Codex produced no output" >&2
exit 1
fi
# Validate JSON
# Reject structured output that is not parseable JSON before using it.
if ! jq empty "$TEMP_OUTPUT" 2>/dev/null; then
echo "Error: Codex output is not valid JSON" >&2
cat "$TEMP_OUTPUT" >&2
exit 1
fi
# Save output if requested
if [[ -n "$OUTPUT_FILE" ]]; then
cp "$TEMP_OUTPUT" "$OUTPUT_FILE"
echo "Results saved to: $OUTPUT_FILE" >&2
fi
# Post to PR if requested
# post_to_pr (common.sh) renders the JSON as markdown and submits a review.
if [[ "$POST_TO_PR" == true && -n "$PR_NUMBER" ]]; then
echo "Posting findings to PR #$PR_NUMBER..." >&2
post_to_pr "$PR_NUMBER" "$TEMP_OUTPUT" "code"
echo "Posted review to PR #$PR_NUMBER" >&2
fi
# Always print results to stdout
print_results "$TEMP_OUTPUT" "code"
View File

@@ -0,0 +1,235 @@
#!/bin/bash
# codex-security-review.sh - Run an AI-powered security vulnerability review using Codex CLI
# Usage: codex-security-review.sh [OPTIONS]
#
# Runs codex exec in read-only sandbox mode with a security-focused review prompt.
# Outputs findings as JSON and optionally posts them to a PR.
set -e
# Resolve this script's own directory so sibling helpers load by path.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Shared helpers: check_codex/check_jq, build_diff_context, post_to_pr, print_results.
source "$SCRIPT_DIR/common.sh"
# Defaults
# REVIEW_MODE stays empty until a mode flag is parsed; validation below
# rejects an empty mode.
PR_NUMBER=""
BASE_BRANCH="main"
COMMIT_SHA=""
OUTPUT_FILE=""
POST_TO_PR=false
UNCOMMITTED=false
REVIEW_MODE=""
# Print CLI usage (quoted heredoc: no expansion happens) and exit successfully.
show_help() {
cat <<'EOF'
Usage: codex-security-review.sh [OPTIONS]
Run an AI-powered security vulnerability review using OpenAI Codex CLI.
Options:
-n, --pr <number> PR number (auto-enables posting findings to PR)
-b, --base <branch> Base branch to diff against (default: main)
-c, --commit <sha> Review a specific commit
-o, --output <path> Write JSON results to file
--post-to-pr Post findings as PR comment (requires -n)
--uncommitted Review uncommitted changes (staged + unstaged + untracked)
-h, --help Show this help
Examples:
# Security review uncommitted changes
codex-security-review.sh --uncommitted
# Security review a PR and post findings
codex-security-review.sh -n 42
# Security review against main, save JSON
codex-security-review.sh -b main -o security.json
# Security review a specific commit
codex-security-review.sh -c abc123f
EOF
exit 0
}
# Parse CLI flags; each mode flag records which diff source to review.
while (( $# > 0 )); do
  case $1 in
    -n|--pr)        PR_NUMBER="$2"; POST_TO_PR=true; REVIEW_MODE="pr"; shift 2 ;;
    -b|--base)      BASE_BRANCH="$2"; REVIEW_MODE="base"; shift 2 ;;
    -c|--commit)    COMMIT_SHA="$2"; REVIEW_MODE="commit"; shift 2 ;;
    -o|--output)    OUTPUT_FILE="$2"; shift 2 ;;
    --post-to-pr)   POST_TO_PR=true; shift ;;
    --uncommitted)  UNCOMMITTED=true; REVIEW_MODE="uncommitted"; shift ;;
    -h|--help)      show_help ;;
    *)
      echo "Unknown option: $1" >&2
      echo "Run with --help for usage" >&2
      exit 1
      ;;
  esac
done
# Validate
# A review mode is mandatory; --post-to-pr only makes sense with a PR number.
if [[ -z "$REVIEW_MODE" ]]; then
echo "Error: Specify a review mode: --uncommitted, --base <branch>, --commit <sha>, or --pr <number>" >&2
exit 1
fi
if [[ "$POST_TO_PR" == true && -z "$PR_NUMBER" ]]; then
echo "Error: --post-to-pr requires -n <pr_number>" >&2
exit 1
fi
# Fail fast if required tooling is missing (helpers from common.sh).
check_codex
check_jq
# Verify we're in a git repo
if ! git rev-parse --is-inside-work-tree &>/dev/null; then
echo "Error: Not inside a git repository" >&2
exit 1
fi
# Get the diff context
# build_diff_context (common.sh) prints the diff text for the chosen mode.
echo "Gathering diff context..." >&2
case "$REVIEW_MODE" in
uncommitted) DIFF_CONTEXT=$(build_diff_context "uncommitted" "") ;;
base) DIFF_CONTEXT=$(build_diff_context "base" "$BASE_BRANCH") ;;
commit) DIFF_CONTEXT=$(build_diff_context "commit" "$COMMIT_SHA") ;;
pr) DIFF_CONTEXT=$(build_diff_context "pr" "$PR_NUMBER") ;;
esac
# Nothing to review is treated as a clean no-op, not an error.
if [[ -z "$DIFF_CONTEXT" ]]; then
echo "No changes found to review." >&2
exit 0
fi
# Build the security review prompt
# Quoted 'PROMPT' delimiter: the text is literal, sent verbatim to Codex.
REVIEW_PROMPT=$(cat <<'PROMPT'
You are an expert application security engineer performing a security-focused code review.
Your goal is to identify vulnerabilities, security anti-patterns, and data exposure risks.
## Security Review Scope
### OWASP Top 10 (2021)
- A01: Broken Access Control — missing authorization checks, IDOR, privilege escalation
- A02: Cryptographic Failures — weak algorithms, plaintext secrets, missing encryption
- A03: Injection — SQL, NoSQL, OS command, LDAP, XPath injection
- A04: Insecure Design — missing threat modeling, unsafe business logic
- A05: Security Misconfiguration — debug mode, default credentials, unnecessary features
- A06: Vulnerable Components — known CVEs in dependencies
- A07: Authentication Failures — weak auth, missing MFA, session issues
- A08: Data Integrity Failures — deserialization, unsigned updates
- A09: Logging Failures — sensitive data in logs, missing audit trails
- A10: SSRF — unvalidated URLs, internal service access
### Additional Checks
- Hardcoded secrets, API keys, tokens, passwords
- Insecure direct object references
- Missing input validation at trust boundaries
- Cross-Site Scripting (XSS) — reflected, stored, DOM-based
- Cross-Site Request Forgery (CSRF) protection
- Insecure file handling (path traversal, unrestricted upload)
- Race conditions and TOCTOU vulnerabilities
- Information disclosure (stack traces, verbose errors)
- Supply chain risks (typosquatting, dependency confusion)
## Severity Guide
- **critical**: Exploitable vulnerability with immediate impact (RCE, auth bypass, data breach)
- **high**: Significant vulnerability requiring prompt fix (injection, XSS, secrets exposure)
- **medium**: Vulnerability with limited exploitability or impact (missing headers, weak config)
- **low**: Minor security concern or hardening opportunity (informational, defense-in-depth)
## Rules
- Include CWE IDs when applicable
- Include OWASP category when applicable
- Provide specific remediation steps for every finding
- Only report findings you are confident about
- Do NOT flag non-security code quality issues
- If no security issues found, say so clearly
PROMPT
)
# Set up temp files for output and diff
TEMP_OUTPUT=$(mktemp /tmp/codex-security-XXXXXX.json)
TEMP_DIFF=$(mktemp /tmp/codex-diff-XXXXXX.txt)
# Remove both temp files on any exit path.
trap 'rm -f "$TEMP_OUTPUT" "$TEMP_DIFF"' EXIT
SCHEMA_FILE="$SCRIPT_DIR/schemas/security-review-schema.json"
# Write diff to temp file
echo "$DIFF_CONTEXT" > "$TEMP_DIFF"
echo "Running Codex security review..." >&2
echo " Diff size: $(wc -l < "$TEMP_DIFF") lines" >&2
# Build full prompt with diff reference
FULL_PROMPT="${REVIEW_PROMPT}
Here are the code changes to security review:
\`\`\`diff
$(cat "$TEMP_DIFF")
\`\`\`"
# Run codex exec with prompt from stdin to avoid arg length limits
# The trailing '-' argument makes codex read the prompt from stdin; its
# combined output is prefixed and forwarded to stderr as progress.
# NOTE(review): no pipefail is set, so a codex failure is masked by the
# while-loop pipeline — the empty-output check below is the only failure
# signal. Confirm this is intended.
echo "$FULL_PROMPT" | codex exec \
--sandbox read-only \
--output-schema "$SCHEMA_FILE" \
-o "$TEMP_OUTPUT" \
- 2>&1 | while IFS= read -r line; do
echo " [codex] $line" >&2
done
# Check output was produced
if [[ ! -s "$TEMP_OUTPUT" ]]; then
echo "Error: Codex produced no output" >&2
exit 1
fi
# Validate JSON
# Reject structured output that is not parseable JSON before using it.
if ! jq empty "$TEMP_OUTPUT" 2>/dev/null; then
echo "Error: Codex output is not valid JSON" >&2
cat "$TEMP_OUTPUT" >&2
exit 1
fi
# Save output if requested
if [[ -n "$OUTPUT_FILE" ]]; then
cp "$TEMP_OUTPUT" "$OUTPUT_FILE"
echo "Results saved to: $OUTPUT_FILE" >&2
fi
# Post to PR if requested
# post_to_pr (common.sh) renders the JSON as markdown and submits a review.
if [[ "$POST_TO_PR" == true && -n "$PR_NUMBER" ]]; then
echo "Posting findings to PR #$PR_NUMBER..." >&2
post_to_pr "$PR_NUMBER" "$TEMP_OUTPUT" "security"
echo "Posted security review to PR #$PR_NUMBER" >&2
fi
# Always print results to stdout
print_results "$TEMP_OUTPUT" "security"

191
tools/codex/common.sh Executable file
View File

@@ -0,0 +1,191 @@
#!/bin/bash
# common.sh - Shared utilities for Codex review scripts
# Source this file from review scripts: source "$(dirname "${BASH_SOURCE[0]}")/common.sh"
set -e
# Directory containing this file; the git helper suite lives in ../git.
CODEX_SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
GIT_SCRIPT_DIR="$CODEX_SCRIPT_DIR/../git"
# Source platform detection
# Expected to provide detect_platform (sets PLATFORM) and get_repo_info,
# both used by build_diff_context/post_to_pr below — confirm in detect-platform.sh.
source "$GIT_SCRIPT_DIR/detect-platform.sh"
# Ensure the Codex CLI is on PATH; abort the script with an install hint otherwise.
check_codex() {
  command -v codex &>/dev/null && return 0
  echo "Error: codex CLI not found. Install with: npm i -g @openai/codex" >&2
  exit 1
}
# Ensure jq is on PATH (needed for all JSON processing); abort otherwise.
check_jq() {
  command -v jq &>/dev/null && return 0
  echo "Error: jq not found. Install with your package manager." >&2
  exit 1
}
# Build the diff text for the requested review mode.
# Arguments: $1=mode (uncommitted|base|commit|pr), $2=value (branch/sha/PR number)
# Prints the diff to stdout (empty when there is nothing to review).
build_diff_context() {
local mode="$1"
local value="$2"
local diff_text=""
case "$mode" in
uncommitted)
# Unstaged + staged changes, plus full contents of untracked files.
diff_text=$(git diff HEAD 2>/dev/null; git diff --cached 2>/dev/null; git ls-files --others --exclude-standard 2>/dev/null | while read -r f; do echo "=== NEW FILE: $f ==="; cat "$f" 2>/dev/null; done)
;;
base)
# Three-dot diff: changes on HEAD since it diverged from $value.
diff_text=$(git diff "${value}...HEAD" 2>/dev/null)
;;
commit)
diff_text=$(git show "$value" 2>/dev/null)
;;
pr)
# For PRs, we need to fetch the PR diff
detect_platform
if [[ "$PLATFORM" == "github" ]]; then
diff_text=$(gh pr diff "$value" 2>/dev/null)
elif [[ "$PLATFORM" == "gitea" ]]; then
# tea doesn't have a direct pr diff command, use git
local pr_base
pr_base=$(tea pr list --fields index,base --output simple 2>/dev/null | grep "^${value}" | awk '{print $2}')
if [[ -n "$pr_base" ]]; then
diff_text=$(git diff "${pr_base}...HEAD" 2>/dev/null)
else
# Fallback: fetch PR info via API
# NOTE(review): diff_text briefly holds the PR's diff_url, which is then
# replaced by the fetched diff; if unavailable, falls back to main...HEAD.
local repo_info
repo_info=$(get_repo_info)
local remote_url
remote_url=$(git remote get-url origin 2>/dev/null)
local host
# Strip scheme/user prefixes from the remote URL to get the bare host.
host=$(echo "$remote_url" | sed -E 's|.*://([^/]+).*|\1|; s|.*@([^:]+).*|\1|')
diff_text=$(curl -s "https://${host}/api/v1/repos/${repo_info}/pulls/${value}" \
-H "Authorization: token $(tea login list --output simple 2>/dev/null | head -1 | awk '{print $2}')" \
2>/dev/null | jq -r '.diff_url // empty')
if [[ -n "$diff_text" && "$diff_text" != "null" ]]; then
diff_text=$(curl -s "$diff_text" 2>/dev/null)
else
diff_text=$(git diff "main...HEAD" 2>/dev/null)
fi
fi
fi
;;
esac
echo "$diff_text"
}
# Render a review-results JSON file as a markdown comment body on stdout.
#   $1 = path to the JSON results file
#   $2 = review type: "code" (verdict + blocker counts); anything else is
#        rendered with the security header (risk level + severity counts)
format_findings_as_markdown() {
  local results="$1" kind="$2"

  if [[ ! -f "$results" ]]; then
    echo "Error: JSON file not found: $results" >&2
    return 1
  fi

  local overview conf
  overview=$(jq -r '.summary' "$results")
  conf=$(jq -r '.confidence' "$results")

  if [[ "$kind" == "code" ]]; then
    # Code-review header: verdict plus per-severity counts.
    local verdict n_blockers n_should n_suggest n_files
    verdict=$(jq -r '.verdict' "$results")
    n_blockers=$(jq -r '.stats.blockers' "$results")
    n_should=$(jq -r '.stats.should_fix' "$results")
    n_suggest=$(jq -r '.stats.suggestions' "$results")
    n_files=$(jq -r '.stats.files_reviewed' "$results")
    cat <<EOF
## Codex Code Review
**Verdict:** ${verdict} | **Confidence:** ${conf} | **Files reviewed:** ${n_files}
**Findings:** ${n_blockers} blockers, ${n_should} should-fix, ${n_suggest} suggestions
### Summary
${overview}
EOF
  else
    # Security-review header: risk level plus per-severity counts.
    local risk n_crit n_high n_med n_low n_files
    risk=$(jq -r '.risk_level' "$results")
    n_crit=$(jq -r '.stats.critical' "$results")
    n_high=$(jq -r '.stats.high' "$results")
    n_med=$(jq -r '.stats.medium' "$results")
    n_low=$(jq -r '.stats.low' "$results")
    n_files=$(jq -r '.stats.files_reviewed' "$results")
    cat <<EOF
## Codex Security Review
**Risk Level:** ${risk} | **Confidence:** ${conf} | **Files reviewed:** ${n_files}
**Findings:** ${n_crit} critical, ${n_high} high, ${n_med} medium, ${n_low} low
### Summary
${overview}
EOF
  fi

  # Detailed findings list, or an explicit all-clear.
  local total
  total=$(jq '.findings | length' "$results")
  if (( total > 0 )); then
    echo "### Findings"
    echo ""
    jq -r '.findings[] | "#### [\(.severity | ascii_upcase)] \(.title)\n- **File:** `\(.file)`\(if .line_start then " (L\(.line_start)\(if .line_end and .line_end != .line_start then "-L\(.line_end)" else "" end))" else "" end)\n- \(.description)\(if .suggestion then "\n- **Suggestion:** \(.suggestion)" else "" end)\(if .cwe_id then "\n- **CWE:** \(.cwe_id)" else "" end)\(if .owasp_category then "\n- **OWASP:** \(.owasp_category)" else "" end)\(if .remediation then "\n- **Remediation:** \(.remediation)" else "" end)\n"' "$results"
  else
    echo "*No issues found.*"
  fi

  echo "---"
  echo "*Reviewed by Codex ($(codex --version 2>/dev/null || echo "unknown"))*"
}
# Post review findings to a PR.
# Arguments: $1=pr_number, $2=json_file, $3=review_type (code|security)
# Renders the findings as markdown, maps them to a review action, and
# delegates the actual posting to the shared pr-review.sh helper.
post_to_pr() {
local pr_number="$1"
local json_file="$2"
local review_type="$3"
local markdown
markdown=$(format_findings_as_markdown "$json_file" "$review_type")
detect_platform
# Determine review action based on findings
# Code reviews use the model's verdict directly; security reviews map
# risk level to an action (critical/high request changes, else comment).
local action="comment"
if [[ "$review_type" == "code" ]]; then
local verdict
verdict=$(jq -r '.verdict' "$json_file")
action="$verdict"
else
local risk_level
risk_level=$(jq -r '.risk_level' "$json_file")
case "$risk_level" in
critical|high) action="request-changes" ;;
medium) action="comment" ;;
low|none) action="comment" ;;
esac
fi
# Post the review
"$GIT_SCRIPT_DIR/pr-review.sh" -n "$pr_number" -a "$action" -c "$markdown"
}
# Print the formatted review results to stdout.
#   $1 = JSON results file, $2 = review type (code|security)
# Uses the same markdown rendering as PR comments.
print_results() {
  local results_file="$1" review_kind="$2"
  format_findings_as_markdown "$results_file" "$review_kind"
}

View File

@@ -0,0 +1,84 @@
{
"type": "object",
"additionalProperties": false,
"properties": {
"summary": {
"type": "string",
"description": "Brief overall assessment of the code changes"
},
"verdict": {
"type": "string",
"enum": ["approve", "request-changes", "comment"],
"description": "Overall review verdict"
},
"confidence": {
"type": "number",
"minimum": 0,
"maximum": 1,
"description": "Confidence score for the review (0-1)"
},
"findings": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": false,
"properties": {
"severity": {
"type": "string",
"enum": ["blocker", "should-fix", "suggestion"],
"description": "Finding severity: blocker (must fix), should-fix (important), suggestion (optional)"
},
"title": {
"type": "string",
"description": "Short title describing the issue"
},
"file": {
"type": "string",
"description": "File path where the issue was found"
},
"line_start": {
"type": "integer",
"description": "Starting line number"
},
"line_end": {
"type": "integer",
"description": "Ending line number"
},
"description": {
"type": "string",
"description": "Detailed explanation of the issue"
},
"suggestion": {
"type": "string",
"description": "Suggested fix or improvement"
}
},
"required": ["severity", "title", "file", "line_start", "line_end", "description", "suggestion"]
}
},
"stats": {
"type": "object",
"additionalProperties": false,
"properties": {
"files_reviewed": {
"type": "integer",
"description": "Number of files reviewed"
},
"blockers": {
"type": "integer",
"description": "Count of blocker findings"
},
"should_fix": {
"type": "integer",
"description": "Count of should-fix findings"
},
"suggestions": {
"type": "integer",
"description": "Count of suggestion findings"
}
},
"required": ["files_reviewed", "blockers", "should_fix", "suggestions"]
}
},
"required": ["summary", "verdict", "confidence", "findings", "stats"]
}

View File

@@ -0,0 +1,96 @@
{
"type": "object",
"additionalProperties": false,
"properties": {
"summary": {
"type": "string",
"description": "Brief overall security assessment of the code changes"
},
"risk_level": {
"type": "string",
"enum": ["critical", "high", "medium", "low", "none"],
"description": "Overall security risk level"
},
"confidence": {
"type": "number",
"minimum": 0,
"maximum": 1,
"description": "Confidence score for the review (0-1)"
},
"findings": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": false,
"properties": {
"severity": {
"type": "string",
"enum": ["critical", "high", "medium", "low"],
"description": "Vulnerability severity level"
},
"title": {
"type": "string",
"description": "Short title describing the vulnerability"
},
"file": {
"type": "string",
"description": "File path where the vulnerability was found"
},
"line_start": {
"type": "integer",
"description": "Starting line number"
},
"line_end": {
"type": "integer",
"description": "Ending line number"
},
"description": {
"type": "string",
"description": "Detailed explanation of the vulnerability"
},
"cwe_id": {
"type": "string",
"description": "CWE identifier if applicable (e.g., CWE-79)"
},
"owasp_category": {
"type": "string",
"description": "OWASP Top 10 category if applicable (e.g., A03:2021-Injection)"
},
"remediation": {
"type": "string",
"description": "Specific remediation steps to fix the vulnerability"
}
},
"required": ["severity", "title", "file", "line_start", "line_end", "description", "cwe_id", "owasp_category", "remediation"]
}
},
"stats": {
"type": "object",
"additionalProperties": false,
"properties": {
"files_reviewed": {
"type": "integer",
"description": "Number of files reviewed"
},
"critical": {
"type": "integer",
"description": "Count of critical findings"
},
"high": {
"type": "integer",
"description": "Count of high findings"
},
"medium": {
"type": "integer",
"description": "Count of medium findings"
},
"low": {
"type": "integer",
"description": "Count of low findings"
}
},
"required": ["files_reviewed", "critical", "high", "medium", "low"]
}
},
"required": ["summary", "risk_level", "confidence", "findings", "stats"]
}

View File

@@ -0,0 +1,90 @@
# Codex AI Review Pipeline for Woodpecker CI
# Drop this into your repo's .woodpecker/ directory to enable automated
# code and security reviews on every pull request.
#
# Required secrets:
# - codex_api_key: OpenAI API key or Codex-compatible key
#
# Optional secrets:
# - gitea_token: Gitea API token for posting PR comments (if not using tea CLI auth)
when:
event: pull_request
variables:
- &node_image "node:22-slim"
- &install_codex "npm i -g @openai/codex"
steps:
# --- Code Quality Review ---
code-review:
image: *node_image
environment:
CODEX_API_KEY:
from_secret: codex_api_key
commands:
- *install_codex
- apt-get update -qq && apt-get install -y -qq jq git > /dev/null 2>&1
# Generate the diff
- git fetch origin ${CI_COMMIT_TARGET_BRANCH:-main}
- DIFF=$(git diff origin/${CI_COMMIT_TARGET_BRANCH:-main}...HEAD)
# Run code review with structured output
- |
codex exec \
--sandbox read-only \
--output-schema .woodpecker/schemas/code-review-schema.json \
-o /tmp/code-review.json \
"You are an expert code reviewer. Review the following code changes for correctness, code quality, testing, performance, and documentation issues. Only flag actionable, important issues. Categorize as blocker/should-fix/suggestion. If code looks good, say so.
Changes:
$DIFF"
# Output summary
- echo "=== Code Review Results ==="
- jq '.' /tmp/code-review.json
- |
BLOCKERS=$(jq '.stats.blockers // 0' /tmp/code-review.json)
if [ "$BLOCKERS" -gt 0 ]; then
echo "FAIL: $BLOCKERS blocker(s) found"
exit 1
fi
echo "PASS: No blockers found"
# --- Security Review ---
security-review:
image: *node_image
environment:
CODEX_API_KEY:
from_secret: codex_api_key
commands:
- *install_codex
- apt-get update -qq && apt-get install -y -qq jq git > /dev/null 2>&1
# Generate the diff
- git fetch origin ${CI_COMMIT_TARGET_BRANCH:-main}
- DIFF=$(git diff origin/${CI_COMMIT_TARGET_BRANCH:-main}...HEAD)
# Run security review with structured output
- |
codex exec \
--sandbox read-only \
--output-schema .woodpecker/schemas/security-review-schema.json \
-o /tmp/security-review.json \
"You are an expert application security engineer. Review the following code changes for security vulnerabilities including OWASP Top 10, hardcoded secrets, injection flaws, auth/authz gaps, XSS, CSRF, SSRF, path traversal, and supply chain risks. Include CWE IDs and remediation steps. Only flag real security issues, not code quality.
Changes:
$DIFF"
# Output summary
- echo "=== Security Review Results ==="
- jq '.' /tmp/security-review.json
- |
CRITICAL=$(jq '.stats.critical // 0' /tmp/security-review.json)
HIGH=$(jq '.stats.high // 0' /tmp/security-review.json)
if [ "$CRITICAL" -gt 0 ] || [ "$HIGH" -gt 0 ]; then
echo "FAIL: $CRITICAL critical, $HIGH high severity finding(s)"
exit 1
fi
echo "PASS: No critical or high severity findings"

View File

@@ -0,0 +1,65 @@
#!/usr/bin/env bash
# mosaic-context-loader.sh — SessionStart hook for Claude Code
# Injects mandatory Mosaic config files into agent context at session init.
# Stdout from this script is added to Claude's context before processing.
set -euo pipefail

MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"

# Print one file wrapped in a MOSAIC banner; silently skip missing files.
# Arguments: $1 - file path, $2 - optional display label (default: basename)
emit_file() {
  local filepath="$1"
  local label="${2:-$(basename "$filepath")}"
  [[ -f "$filepath" ]] || return 0
  echo "=== MOSAIC: $label ==="
  cat "$filepath"
  echo ""
}

echo "=== MOSAIC CONTEXT INJECTION (SessionStart) ==="
echo ""

# Mandatory load order (per AGENTS.md contract).
for doc in SOUL.md USER.md STANDARDS.md AGENTS.md TOOLS.md; do
  emit_file "$MOSAIC_HOME/$doc"
done

# E2E delivery guide — first match wins (case-insensitive filename lookup).
for guide in \
  "$MOSAIC_HOME/guides/E2E-DELIVERY.md" \
  "$MOSAIC_HOME/guides/e2e-delivery.md"; do
  if [[ -f "$guide" ]]; then
    emit_file "$guide" "E2E-DELIVERY.md"
    break
  fi
done

# Project-local AGENTS.md (cwd at session start), if present.
if [[ -f "./AGENTS.md" ]]; then
  emit_file "./AGENTS.md" "Project AGENTS.md ($(pwd))"
fi

# Runtime-specific reference.
emit_file "$MOSAIC_HOME/runtime/claude/RUNTIME.md" "Claude RUNTIME.md"

echo "=== END MOSAIC CONTEXT INJECTION ==="

65
tools/coolify/README.md Normal file
View File

@@ -0,0 +1,65 @@
# Coolify Tool Suite
Manage Coolify container deployment platform (projects, services, deployments, environment variables).
## Prerequisites
- `jq` and `curl` installed
- Coolify credentials in `~/src/jarvis-brain/credentials.json` (or `$MOSAIC_CREDENTIALS_FILE`)
- Required fields: `coolify.url`, `coolify.app_token`
## Scripts
| Script | Purpose |
|--------|---------|
| `team-list.sh` | List teams |
| `project-list.sh` | List projects |
| `service-list.sh` | List all services |
| `service-status.sh` | Get service details and status |
| `deploy.sh` | Trigger service deployment |
| `env-set.sh` | Set environment variable on a service |
## Common Options
- `-f json` — JSON output (default: table)
- `-u uuid` — Service UUID (for service-specific operations)
- `-h` — Show help
## API Reference
- Base URL: `http://10.1.1.44:8000`
- API prefix: `/api/v1/`
- Auth: Bearer token in `Authorization` header
- Rate limit: 200 requests per interval
## Known Limitations
- **FQDN updates on compose sub-apps not supported via API.** Workaround: update directly in Coolify's PostgreSQL DB (`coolify-db` container, `service_applications` table).
- **Compose must be base64-encoded** in `docker_compose_raw` field when creating services via API.
- **Don't send `type` with `docker_compose_raw`** — API rejects payloads with both fields.
## Coolify Magic Variables
Coolify reads special env vars from compose files:
- `SERVICE_FQDN_{NAME}_{PORT}` — assigns a domain to a compose service
- `SERVICE_URL_{NAME}_{PORT}` — internal URL reference
- Must use list-style env syntax (`- SERVICE_FQDN_API_3001`), NOT dict-style.
## Examples
```bash
# List all projects
~/.config/mosaic/tools/coolify/project-list.sh
# List services as JSON
~/.config/mosaic/tools/coolify/service-list.sh -f json
# Check service status
~/.config/mosaic/tools/coolify/service-status.sh -u <uuid>
# Set an env var
~/.config/mosaic/tools/coolify/env-set.sh -u <uuid> -k DATABASE_URL -v "postgres://..."
# Deploy a service
~/.config/mosaic/tools/coolify/deploy.sh -u <uuid>
```

61
tools/coolify/deploy.sh Executable file
View File

@@ -0,0 +1,61 @@
#!/usr/bin/env bash
#
# deploy.sh — Trigger Coolify service deployment
#
# Usage: deploy.sh -u <uuid> [-f]
#
# Options:
# -u uuid Service UUID (required)
# -f Force restart (stop then start)
# -h Show this help
set -euo pipefail

MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials coolify

# POST to a service sub-endpoint; prints response body with the HTTP
# status code appended on its own final line.
coolify_service_post() {
  curl -s -w "\n%{http_code}" \
    -X POST \
    -H "Authorization: Bearer $COOLIFY_TOKEN" \
    -H "Content-Type: application/json" \
    "${COOLIFY_URL}/api/v1/services/${1}"
}

service_uuid=""
force_restart=false
while getopts "u:fh" opt; do
  case $opt in
    u) service_uuid="$OPTARG" ;;
    f) force_restart=true ;;
    h) head -11 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 -u <uuid> [-f]" >&2; exit 1 ;;
  esac
done

if [[ -z "$service_uuid" ]]; then
  echo "Error: -u uuid is required" >&2
  exit 1
fi

if [[ "$force_restart" == "true" ]]; then
  # Best-effort stop; response discarded, brief pause before restart.
  echo "Stopping service $service_uuid..."
  coolify_service_post "${service_uuid}/stop" >/dev/null
  sleep 2
fi

echo "Starting service $service_uuid..."
raw=$(coolify_service_post "${service_uuid}/start")
status_code=${raw##*$'\n'}   # last line = HTTP status
reply=${raw%$'\n'*}          # everything above it = JSON body

case "$status_code" in
  200|201|202) ;;
  *)
    echo "Error: Deployment failed (HTTP $status_code)" >&2
    echo "$reply" | jq -r '.' 2>/dev/null >&2 || echo "$reply" >&2
    exit 1
    ;;
esac
echo "Deployment triggered successfully for service $service_uuid"
echo "$reply" | jq -r '.message // empty' 2>/dev/null || true

65
tools/coolify/env-set.sh Executable file
View File

@@ -0,0 +1,65 @@
#!/usr/bin/env bash
#
# env-set.sh — Set environment variable on a Coolify service
#
# Usage: env-set.sh -u <uuid> -k <key> -v <value> [--preview]
#
# Options:
# -u uuid Service UUID (required)
# -k key Environment variable name (required)
# -v value Environment variable value (required)
# --preview Set as preview-only variable
# -h Show this help
#
# Note: Changes take effect on next deploy/restart.
set -euo pipefail

MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials coolify

# Manual parsing: getopts cannot handle the long --preview flag.
service_uuid=""
env_key=""
env_value=""
preview_flag="false"
while (( $# > 0 )); do
  case "$1" in
    -u) service_uuid="$2"; shift 2 ;;
    -k) env_key="$2"; shift 2 ;;
    -v) env_value="$2"; shift 2 ;;
    --preview) preview_flag="true"; shift ;;
    -h) head -15 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 -u <uuid> -k <key> -v <value> [--preview]" >&2; exit 1 ;;
  esac
done

if [[ -z "$service_uuid" || -z "$env_key" || -z "$env_value" ]]; then
  echo "Error: -u uuid, -k key, and -v value are required" >&2
  exit 1
fi

# Build the JSON body with jq so key/value are safely encoded.
request_body=$(jq -n \
  --arg key "$env_key" \
  --arg value "$env_value" \
  --argjson preview "$preview_flag" \
  '{key: $key, value: $value, is_preview: $preview}')

# PATCH /services/{uuid}/envs; HTTP status is appended as the final line.
raw=$(curl -s -w "\n%{http_code}" \
  -X PATCH \
  -H "Authorization: Bearer $COOLIFY_TOKEN" \
  -H "Content-Type: application/json" \
  -d "$request_body" \
  "${COOLIFY_URL}/api/v1/services/${service_uuid}/envs")
status_code=${raw##*$'\n'}
reply=${raw%$'\n'*}

case "$status_code" in
  200|201) ;;
  *)
    echo "Error: Failed to set environment variable (HTTP $status_code)" >&2
    echo "$reply" | jq -r '.' 2>/dev/null >&2 || echo "$reply" >&2
    exit 1
    ;;
esac
echo "Set $env_key on service $service_uuid"
echo "Note: Redeploy the service to apply the change"

52
tools/coolify/project-list.sh Executable file
View File

@@ -0,0 +1,52 @@
#!/usr/bin/env bash
#
# project-list.sh — List Coolify projects
#
# Usage: project-list.sh [-f format]
#
# Options:
# -f format Output format: table (default), json
# -h Show this help
set -euo pipefail
# Loads COOLIFY_URL and COOLIFY_TOKEN (see tools/_lib/credentials.sh).
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials coolify
FORMAT="table"
while getopts "f:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    # Help = the comment header above (first 10 lines of this file).
    h) head -10 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format]" >&2; exit 1 ;;
  esac
done
# GET /projects; -w appends the HTTP status code as the final line so body
# and status come back from a single request.
response=$(curl -s -w "\n%{http_code}" \
  -H "Authorization: Bearer $COOLIFY_TOKEN" \
  -H "Content-Type: application/json" \
  "${COOLIFY_URL}/api/v1/projects")
http_code=$(echo "$response" | tail -n1)  # last line = status code
body=$(echo "$response" | sed '$d')       # everything above = JSON body
if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to list projects (HTTP $http_code)" >&2
  exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
  exit 0
fi
# Table output: fixed-width columns; name/description truncated to fit.
echo "UUID NAME DESCRIPTION"
echo "------------------------------------ ---------------------------- ----------------------------------------"
echo "$body" | jq -r '.[] | [
  .uuid,
  .name,
  (.description // "—")
] | @tsv' | while IFS=$'\t' read -r uuid name desc; do
  printf "%-36s %-28s %s\n" "$uuid" "${name:0:28}" "${desc:0:40}"
done

53
tools/coolify/service-list.sh Executable file
View File

@@ -0,0 +1,53 @@
#!/usr/bin/env bash
#
# service-list.sh — List Coolify services
#
# Usage: service-list.sh [-f format]
#
# Options:
# -f format Output format: table (default), json
# -h Show this help
set -euo pipefail
# Loads COOLIFY_URL and COOLIFY_TOKEN (see tools/_lib/credentials.sh).
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials coolify
FORMAT="table"
while getopts "f:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    # Help = the comment header above (first 10 lines of this file).
    h) head -10 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format]" >&2; exit 1 ;;
  esac
done
# GET /services; -w appends the HTTP status code as the final line.
response=$(curl -s -w "\n%{http_code}" \
  -H "Authorization: Bearer $COOLIFY_TOKEN" \
  -H "Content-Type: application/json" \
  "${COOLIFY_URL}/api/v1/services")
http_code=$(echo "$response" | tail -n1)  # last line = status code
body=$(echo "$response" | sed '$d')       # everything above = JSON body
if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to list services (HTTP $http_code)" >&2
  exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
  exit 0
fi
# Table output: fixed-width columns; name/type truncated to fit.
echo "UUID NAME TYPE STATUS"
echo "------------------------------------ ---------------------------- ------------ ----------"
echo "$body" | jq -r '.[] | [
  .uuid,
  .name,
  (.type // "unknown"),
  (.status // "unknown")
] | @tsv' | while IFS=$'\t' read -r uuid name type status; do
  printf "%-36s %-28s %-12s %s\n" "$uuid" "${name:0:28}" "${type:0:12}" "$status"
done

62
tools/coolify/service-status.sh Executable file
View File

@@ -0,0 +1,62 @@
#!/usr/bin/env bash
#
# service-status.sh — Get Coolify service status and details
#
# Usage: service-status.sh -u <uuid> [-f format]
#
# Options:
# -u uuid Service UUID (required)
# -f format Output format: table (default), json
# -h Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
# Loads COOLIFY_URL and COOLIFY_TOKEN (see tools/_lib/credentials.sh).
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials coolify
UUID=""
FORMAT="table"
while getopts "u:f:h" opt; do
  case $opt in
    u) UUID="$OPTARG" ;;
    f) FORMAT="$OPTARG" ;;
    # Help = the comment header above (first 12 lines of this file).
    h) head -12 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 -u <uuid> [-f format]" >&2; exit 1 ;;
  esac
done
if [[ -z "$UUID" ]]; then
  echo "Error: -u uuid is required" >&2
  exit 1
fi
# GET /services/{uuid}; -w appends the HTTP status code as the final line.
response=$(curl -s -w "\n%{http_code}" \
  -H "Authorization: Bearer $COOLIFY_TOKEN" \
  -H "Content-Type: application/json" \
  "${COOLIFY_URL}/api/v1/services/${UUID}")
http_code=$(echo "$response" | tail -n1)  # last line = status code
body=$(echo "$response" | sed '$d')       # everything above = JSON body
if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to get service status (HTTP $http_code)" >&2
  exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
  exit 0
fi
# Human-readable summary of the key service fields.
echo "Service Details"
echo "==============="
echo "$body" | jq -r '
  " UUID: \(.uuid)\n" +
  " Name: \(.name)\n" +
  " Type: \(.type // "unknown")\n" +
  " Status: \(.status // "unknown")\n" +
  " FQDN: \(.fqdn // "none")\n" +
  " Created: \(.created_at // "unknown")\n" +
  " Updated: \(.updated_at // "unknown")"
'

52
tools/coolify/team-list.sh Executable file
View File

@@ -0,0 +1,52 @@
#!/usr/bin/env bash
#
# team-list.sh — List Coolify teams
#
# Usage: team-list.sh [-f format]
#
# Options:
# -f format Output format: table (default), json
# -h Show this help
set -euo pipefail
# Loads COOLIFY_URL and COOLIFY_TOKEN (see tools/_lib/credentials.sh).
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials coolify
FORMAT="table"
while getopts "f:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    # Help = the comment header above (first 10 lines of this file).
    h) head -10 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format]" >&2; exit 1 ;;
  esac
done
# GET /teams; -w appends the HTTP status code as the final line.
response=$(curl -s -w "\n%{http_code}" \
  -H "Authorization: Bearer $COOLIFY_TOKEN" \
  -H "Content-Type: application/json" \
  "${COOLIFY_URL}/api/v1/teams")
http_code=$(echo "$response" | tail -n1)  # last line = status code
body=$(echo "$response" | sed '$d')       # everything above = JSON body
if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to list teams (HTTP $http_code)" >&2
  exit 1
fi
if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
  exit 0
fi
# Table output: numeric team id plus name/description, truncated to fit.
echo "ID NAME DESCRIPTION"
echo "---- ---------------------------- ----------------------------------------"
echo "$body" | jq -r '.[] | [
  (.id | tostring),
  .name,
  (.description // "—")
] | @tsv' | while IFS=$'\t' read -r id name desc; do
  printf "%-4s %-28s %s\n" "$id" "${name:0:28}" "${desc:0:40}"
done

247
tools/git/ci-queue-wait.ps1 Normal file
View File

@@ -0,0 +1,247 @@
# ci-queue-wait.ps1 - Wait until project CI queue is clear (no running/queued pipeline on branch head)
# Usage: .\ci-queue-wait.ps1 [-Branch main] [-TimeoutSeconds 900] [-IntervalSeconds 15] [-Purpose merge] [-RequireStatus]
[CmdletBinding()]
param(
    # Branch whose head commit's CI status is inspected.
    [Alias("B")]
    [string]$Branch = "main",
    # Maximum total wait before giving up (script exits 124 on timeout).
    [Alias("t")]
    [int]$TimeoutSeconds = 900,
    # Delay between status polls.
    [Alias("i")]
    [int]$IntervalSeconds = 15,
    # Log-context label only; appears in [ci-queue-wait] output lines.
    [ValidateSet("push", "merge")]
    [string]$Purpose = "merge",
    # When set, the absence of any CI status context is treated as failure.
    [switch]$RequireStatus,
    [Alias("h")]
    [switch]$Help
)
# Dot-source platform helpers (Get-GitPlatform, Get-GitRepoOwner, Get-GitRepoName).
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$ScriptDir\detect-platform.ps1"
function Show-Usage {
    # Emit the CLI usage text. The here-string content below is the exact
    # help output; it references no variables so expansion is inert.
@"
Usage: ci-queue-wait.ps1 [-Branch main] [-TimeoutSeconds 900] [-IntervalSeconds 15] [-Purpose push|merge] [-RequireStatus]
Options:
-Branch, -B BRANCH Branch head to inspect (default: main)
-TimeoutSeconds, -t SECONDS Max wait time (default: 900)
-IntervalSeconds, -i SECONDS Poll interval (default: 15)
-Purpose VALUE push or merge (default: merge)
-RequireStatus Fail if no CI status contexts are present
-Help, -h Show help
"@
}
# Early exit for -Help before any validation or network work.
if ($Help) {
    Show-Usage
    exit 0
}
# Guard against zero/negative polling values (would busy-spin or never wait).
if ($TimeoutSeconds -lt 1 -or $IntervalSeconds -lt 1) {
    Write-Error "TimeoutSeconds and IntervalSeconds must be positive integers."
    exit 1
}
function Get-RemoteHost {
    # Return the host component of the 'origin' remote URL, or $null
    # when there is no remote or the URL shape is unrecognized.
    $originUrl = git remote get-url origin 2>$null
    if ([string]::IsNullOrEmpty($originUrl)) { return $null }
    foreach ($pattern in @("^https?://([^/]+)/", "^git@([^:]+):")) {
        if ($originUrl -match $pattern) { return $Matches[1] }
    }
    return $null
}
function Get-GiteaToken {
    # Resolve a Gitea API token for the given remote host.
    # Precedence: $env:GITEA_TOKEN, then the matching ~/.git-credentials line.
    # Returns the token string, or $null when none is found.
    #
    # FIX: the parameter was previously named $Host, which collides with
    # PowerShell's read-only automatic $Host variable and throws
    # "Cannot overwrite variable Host" when the function is invoked.
    # Renamed to $RemoteHost; the Alias keeps existing `-Host` call sites working.
    param(
        [Alias("Host")]
        [string]$RemoteHost
    )
    if ($env:GITEA_TOKEN) { return $env:GITEA_TOKEN }
    $credPath = Join-Path $HOME ".git-credentials"
    if (-not (Test-Path $credPath)) { return $null }
    $line = Get-Content $credPath | Where-Object { $_ -like "*$RemoteHost*" } | Select-Object -First 1
    if (-not $line) { return $null }
    # Credential lines look like https://user:token@host — capture the token.
    if ($line -match 'https?://[^@]*:([^@/]+)@') {
        return $Matches[1]
    }
    return $null
}
function Get-QueueState {
    # Classify a commit-status payload into one of:
    #   pending | terminal-failure | terminal-success | no-status | unknown
    # The payload's aggregate .state wins; otherwise the individual
    # .statuses entries are inspected (pending > failure > all-success).
    param([object]$Payload)
    $pending = @("pending", "queued", "running", "waiting")
    $failure = @("failure", "error", "failed")
    $success = @("success")
    $state = ""
    if ($null -ne $Payload.state) {
        $state = "$($Payload.state)".ToLowerInvariant()
    }
    # Aggregate state takes precedence when it maps to a known bucket.
    if ($pending -contains $state) { return "pending" }
    if ($failure -contains $state) { return "terminal-failure" }
    if ($success -contains $state) { return "terminal-success" }
    $values = @()
    $statuses = @()
    if ($null -ne $Payload.statuses) { $statuses = @($Payload.statuses) }
    foreach ($s in $statuses) {
        if ($null -eq $s) { continue }
        # Gitea uses .status; GitHub-style payloads use .state.
        $v = ""
        if ($null -ne $s.status) { $v = "$($s.status)".ToLowerInvariant() }
        elseif ($null -ne $s.state) { $v = "$($s.state)".ToLowerInvariant() }
        if (-not [string]::IsNullOrEmpty($v)) { $values += $v }
    }
    if ($values.Count -eq 0 -and [string]::IsNullOrEmpty($state)) { return "no-status" }
    if (($values | Where-Object { $pending -contains $_ }).Count -gt 0) { return "pending" }
    if (($values | Where-Object { $failure -contains $_ }).Count -gt 0) { return "terminal-failure" }
    # terminal-success only when every reported context is "success".
    if ($values.Count -gt 0 -and ($values | Where-Object { -not ($success -contains $_) }).Count -eq 0) { return "terminal-success" }
    return "unknown"
}
function Print-PendingContexts {
    # Log each status context that is still pending/queued/running/waiting,
    # with its target URL when one is available. Purely informational.
    param([object]$Payload)
    $pending = @("pending", "queued", "running", "waiting")
    $statuses = @()
    if ($null -ne $Payload.statuses) { $statuses = @($Payload.statuses) }
    if ($statuses.Count -eq 0) {
        Write-Host "[ci-queue-wait] no status contexts reported"
        return
    }
    $found = $false
    foreach ($s in $statuses) {
        if ($null -eq $s) { continue }
        # Field names differ between platforms; fall back gracefully.
        $name = if ($s.context) { $s.context } elseif ($s.name) { $s.name } else { "unknown-context" }
        $value = if ($s.status) { "$($s.status)".ToLowerInvariant() } elseif ($s.state) { "$($s.state)".ToLowerInvariant() } else { "unknown" }
        $target = if ($s.target_url) { $s.target_url } elseif ($s.url) { $s.url } else { "" }
        if ($pending -contains $value) {
            $found = $true
            if ($target) {
                Write-Host "[ci-queue-wait] pending: $name=$value ($target)"
            }
            else {
                Write-Host "[ci-queue-wait] pending: $name=$value"
            }
        }
    }
    if (-not $found) {
        Write-Host "[ci-queue-wait] no pending contexts"
    }
}
# --- Main: resolve platform/repo/branch head, then poll commit status ------
$platform = Get-GitPlatform
$owner = Get-GitRepoOwner
$repo = Get-GitRepoName
if ([string]::IsNullOrEmpty($owner) -or [string]::IsNullOrEmpty($repo)) {
    Write-Error "Could not determine repository owner/name from git remote."
    exit 1
}
$headSha = $null
# FIX: this variable was previously named $host. $Host is a read-only
# automatic variable in PowerShell, so assigning to it throws
# "Cannot overwrite variable Host" at runtime. Renamed to $remoteHost.
$remoteHost = $null
$giteaToken = $null
switch ($platform) {
    "github" {
        if (-not (Get-Command gh -ErrorAction SilentlyContinue)) {
            Write-Error "gh CLI is required for GitHub CI queue guard."
            exit 1
        }
        # "$(...)" guards against gh returning $null (.Trim() on $null throws).
        $headSha = "$(& gh api "repos/$owner/$repo/branches/$Branch" --jq ".commit.sha")".Trim()
        if ([string]::IsNullOrEmpty($headSha)) {
            Write-Error "Could not resolve $Branch head SHA."
            exit 1
        }
        Write-Host "[ci-queue-wait] platform=github purpose=$Purpose branch=$Branch sha=$headSha"
    }
    "gitea" {
        $remoteHost = Get-RemoteHost
        if ([string]::IsNullOrEmpty($remoteHost)) {
            Write-Error "Could not determine remote host."
            exit 1
        }
        $giteaToken = Get-GiteaToken -Host $remoteHost
        if ([string]::IsNullOrEmpty($giteaToken)) {
            Write-Error "Gitea token not found. Set GITEA_TOKEN or configure ~/.git-credentials."
            exit 1
        }
        try {
            $branchUrl = "https://$remoteHost/api/v1/repos/$owner/$repo/branches/$Branch"
            $branchPayload = Invoke-RestMethod -Method Get -Uri $branchUrl -Headers @{ Authorization = "token $giteaToken" }
            $headSha = ($branchPayload.commit.id | Out-String).Trim()
        }
        catch {
            Write-Error "Could not resolve $Branch head SHA from Gitea API."
            exit 1
        }
        if ([string]::IsNullOrEmpty($headSha)) {
            Write-Error "Could not resolve $Branch head SHA."
            exit 1
        }
        Write-Host "[ci-queue-wait] platform=gitea purpose=$Purpose branch=$Branch sha=$headSha"
    }
    default {
        Write-Error "Unsupported platform '$platform'."
        exit 1
    }
}
# Poll until the deadline; only a "pending" classification keeps us waiting.
$deadline = (Get-Date).AddSeconds($TimeoutSeconds)
while ($true) {
    if ((Get-Date) -gt $deadline) {
        Write-Error "Timed out waiting for CI queue to clear on $Branch after ${TimeoutSeconds}s."
        exit 124
    }
    try {
        if ($platform -eq "github") {
            $statusJson = & gh api "repos/$owner/$repo/commits/$headSha/status"
            $payload = $statusJson | ConvertFrom-Json
        }
        else {
            $statusUrl = "https://$remoteHost/api/v1/repos/$owner/$repo/commits/$headSha/status"
            $payload = Invoke-RestMethod -Method Get -Uri $statusUrl -Headers @{ Authorization = "token $giteaToken" }
        }
    }
    catch {
        Write-Error "Failed to query commit status for queue guard."
        exit 1
    }
    $state = Get-QueueState -Payload $payload
    Write-Host "[ci-queue-wait] state=$state purpose=$Purpose branch=$Branch"
    switch ($state) {
        "pending" {
            Print-PendingContexts -Payload $payload
            Start-Sleep -Seconds $IntervalSeconds
        }
        "no-status" {
            if ($RequireStatus) {
                Write-Error "No CI status contexts found while -RequireStatus is set."
                exit 1
            }
            Write-Host "[ci-queue-wait] no status contexts present; proceeding."
            exit 0
        }
        # The queue guard only blocks on pending states; any terminal or
        # unknown result lets the caller proceed.
        "terminal-success" { exit 0 }
        "terminal-failure" { exit 0 }
        "unknown" { exit 0 }
        default { exit 0 }
    }
}

307
tools/git/ci-queue-wait.sh Executable file
View File

@@ -0,0 +1,307 @@
#!/bin/bash
# ci-queue-wait.sh - Wait until project CI queue is clear (no running/queued pipeline on branch head)
# Usage: ci-queue-wait.sh [-B branch] [-t timeout_sec] [-i interval_sec] [--purpose push|merge] [--require-status]
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform, get_repo_owner, get_repo_name (used in main flow).
source "$SCRIPT_DIR/detect-platform.sh"
# Defaults; all overridable via the CLI flags parsed below.
BRANCH="main"
TIMEOUT_SEC=900
INTERVAL_SEC=15
PURPOSE="merge"
REQUIRE_STATUS=0
usage() {
  # Print CLI usage and examples to stdout.
  local prog
  prog=$(basename "$0")
  cat <<EOF
Usage: ${prog} [-B branch] [-t timeout_sec] [-i interval_sec] [--purpose push|merge] [--require-status]
Options:
-B, --branch BRANCH Branch head to inspect (default: main)
-t, --timeout SECONDS Max wait time in seconds (default: 900)
-i, --interval SECONDS Poll interval in seconds (default: 15)
--purpose VALUE Log context: push|merge (default: merge)
--require-status Fail if no CI status contexts are present
-h, --help Show this help
Examples:
${prog}
${prog} --purpose push -B main -t 600 -i 10
EOF
}
get_remote_host() {
  # Print the host component of the 'origin' remote URL.
  # Returns 1 when there is no remote or the URL shape is unrecognized.
  local origin_url
  origin_url=$(git remote get-url origin 2>/dev/null || true)
  [[ -n "$origin_url" ]] || return 1
  if [[ "$origin_url" =~ ^https?://([^/]+)/ ]] || [[ "$origin_url" =~ ^git@([^:]+): ]]; then
    echo "${BASH_REMATCH[1]}"
    return 0
  fi
  return 1
}
get_gitea_token() {
  # Resolve a Gitea API token for the given host.
  # Precedence: $GITEA_TOKEN env var, then ~/.git-credentials.
  # Arguments: $1 - remote host name (e.g. git.example.com)
  # Outputs:   token on stdout
  # Returns:   0 if a token was found, 1 otherwise
  local host="$1"
  if [[ -n "${GITEA_TOKEN:-}" ]]; then
    echo "$GITEA_TOKEN"
    return 0
  fi
  local creds="$HOME/.git-credentials"
  if [[ -f "$creds" ]]; then
    local token
    # FIX: anchor the match on "@host" — a bare substring match could pick
    # the wrong line when a username or token happens to contain the host.
    token=$(grep -F "@$host" "$creds" 2>/dev/null | sed -n 's#https\?://[^@]*:\([^@/]*\)@.*#\1#p' | head -n 1)
    if [[ -n "$token" ]]; then
      echo "$token"
      return 0
    fi
  fi
  return 1
}
get_state_from_status_json() {
python3 - <<'PY'
import json
import sys
try:
payload = json.load(sys.stdin)
except Exception:
print("unknown")
raise SystemExit(0)
statuses = payload.get("statuses") or []
state = (payload.get("state") or "").lower()
pending_values = {"pending", "queued", "running", "waiting"}
failure_values = {"failure", "error", "failed"}
success_values = {"success"}
if state in pending_values:
print("pending")
raise SystemExit(0)
if state in failure_values:
print("terminal-failure")
raise SystemExit(0)
if state in success_values:
print("terminal-success")
raise SystemExit(0)
values = []
for item in statuses:
if not isinstance(item, dict):
continue
value = (item.get("status") or item.get("state") or "").lower()
if value:
values.append(value)
if not values and not state:
print("no-status")
elif any(v in pending_values for v in values):
print("pending")
elif any(v in failure_values for v in values):
print("terminal-failure")
elif values and all(v in success_values for v in values):
print("terminal-success")
else:
print("unknown")
PY
}
print_pending_contexts() {
python3 - <<'PY'
import json
import sys
try:
payload = json.load(sys.stdin)
except Exception:
print("[ci-queue-wait] unable to decode status payload")
raise SystemExit(0)
statuses = payload.get("statuses") or []
if not statuses:
print("[ci-queue-wait] no status contexts reported")
raise SystemExit(0)
pending_values = {"pending", "queued", "running", "waiting"}
found = False
for item in statuses:
if not isinstance(item, dict):
continue
name = item.get("context") or item.get("name") or "unknown-context"
value = (item.get("status") or item.get("state") or "unknown").lower()
target = item.get("target_url") or item.get("url") or ""
if value in pending_values:
found = True
if target:
print(f"[ci-queue-wait] pending: {name}={value} ({target})")
else:
print(f"[ci-queue-wait] pending: {name}={value}")
if not found:
print("[ci-queue-wait] no pending contexts")
PY
}
# Resolve a branch's head commit SHA via the GitHub API (gh CLI).
# Arguments: $1=owner $2=repo $3=branch. Outputs: SHA on stdout.
github_get_branch_head_sha() {
  local owner="$1"
  local repo="$2"
  local branch="$3"
  gh api "repos/${owner}/${repo}/branches/${branch}" --jq '.commit.sha'
}
# Fetch the combined commit status JSON for a SHA via the GitHub API.
# Arguments: $1=owner $2=repo $3=sha. Outputs: JSON on stdout.
github_get_commit_status_json() {
  local owner="$1"
  local repo="$2"
  local sha="$3"
  gh api "repos/${owner}/${repo}/commits/${sha}/status"
}
# Resolve a branch's head commit SHA via the Gitea API.
# Arguments: $1=host $2=owner/repo $3=branch $4=token. Outputs: SHA on stdout.
gitea_get_branch_head_sha() {
  local host="$1"
  local repo="$2"
  local branch="$3"
  local token="$4"
  local url="https://${host}/api/v1/repos/${repo}/branches/${branch}"
  # Extract .commit.id with an inline python3 snippet; prints "" when absent.
  curl -fsS -H "Authorization: token ${token}" "$url" | python3 -c '
import json, sys
data = json.load(sys.stdin)
commit = data.get("commit") or {}
print((commit.get("id") or "").strip())
'
}
# Fetch the combined commit status JSON for a SHA via the Gitea API.
# Arguments: $1=host $2=owner/repo $3=sha $4=token. Outputs: JSON on stdout.
gitea_get_commit_status_json() {
  local host="$1"
  local repo="$2"
  local sha="$3"
  local token="$4"
  local url="https://${host}/api/v1/repos/${repo}/commits/${sha}/status"
  curl -fsS -H "Authorization: token ${token}" "$url"
}
# ---- CLI parsing ----------------------------------------------------------
while [[ $# -gt 0 ]]; do
  case "$1" in
    -B|--branch)
      BRANCH="$2"
      shift 2
      ;;
    -t|--timeout)
      TIMEOUT_SEC="$2"
      shift 2
      ;;
    -i|--interval)
      INTERVAL_SEC="$2"
      shift 2
      ;;
    --purpose)
      PURPOSE="$2"
      shift 2
      ;;
    --require-status)
      REQUIRE_STATUS=1
      shift
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage >&2
      exit 1
      ;;
  esac
done
if ! [[ "$TIMEOUT_SEC" =~ ^[0-9]+$ ]] || ! [[ "$INTERVAL_SEC" =~ ^[0-9]+$ ]]; then
  echo "Error: timeout and interval must be integer seconds." >&2
  exit 1
fi
# ---- Resolve repo + platform (helpers sourced from detect-platform.sh) ----
OWNER=$(get_repo_owner)
REPO=$(get_repo_name)
detect_platform > /dev/null
PLATFORM="${PLATFORM:-unknown}"
if [[ "$PLATFORM" == "github" ]]; then
  if ! command -v gh >/dev/null 2>&1; then
    echo "Error: gh CLI is required for GitHub CI queue guard." >&2
    exit 1
  fi
  HEAD_SHA=$(github_get_branch_head_sha "$OWNER" "$REPO" "$BRANCH")
  if [[ -z "$HEAD_SHA" ]]; then
    echo "Error: Could not resolve ${BRANCH} head SHA." >&2
    exit 1
  fi
  echo "[ci-queue-wait] platform=github purpose=${PURPOSE} branch=${BRANCH} sha=${HEAD_SHA}"
elif [[ "$PLATFORM" == "gitea" ]]; then
  HOST=$(get_remote_host) || {
    echo "Error: Could not determine remote host." >&2
    exit 1
  }
  TOKEN=$(get_gitea_token "$HOST") || {
    echo "Error: Gitea token not found. Set GITEA_TOKEN or configure ~/.git-credentials." >&2
    exit 1
  }
  HEAD_SHA=$(gitea_get_branch_head_sha "$HOST" "$OWNER/$REPO" "$BRANCH" "$TOKEN")
  if [[ -z "$HEAD_SHA" ]]; then
    echo "Error: Could not resolve ${BRANCH} head SHA." >&2
    exit 1
  fi
  echo "[ci-queue-wait] platform=gitea purpose=${PURPOSE} branch=${BRANCH} sha=${HEAD_SHA}"
else
  echo "Error: Unsupported platform '${PLATFORM}'." >&2
  exit 1
fi
# ---- Poll loop: only "pending" blocks; timeout exits with code 124 --------
START_TS=$(date +%s)
DEADLINE_TS=$((START_TS + TIMEOUT_SEC))
while true; do
  NOW_TS=$(date +%s)
  if (( NOW_TS > DEADLINE_TS )); then
    echo "Error: Timed out waiting for CI queue to clear on ${BRANCH} after ${TIMEOUT_SEC}s." >&2
    exit 124
  fi
  if [[ "$PLATFORM" == "github" ]]; then
    STATUS_JSON=$(github_get_commit_status_json "$OWNER" "$REPO" "$HEAD_SHA")
  else
    STATUS_JSON=$(gitea_get_commit_status_json "$HOST" "$OWNER/$REPO" "$HEAD_SHA" "$TOKEN")
  fi
  STATE=$(printf '%s' "$STATUS_JSON" | get_state_from_status_json)
  echo "[ci-queue-wait] state=${STATE} purpose=${PURPOSE} branch=${BRANCH}"
  case "$STATE" in
    pending)
      printf '%s' "$STATUS_JSON" | print_pending_contexts
      sleep "$INTERVAL_SEC"
      ;;
    no-status)
      if [[ "$REQUIRE_STATUS" -eq 1 ]]; then
        echo "Error: No CI status contexts found for ${BRANCH} while --require-status is set." >&2
        exit 1
      fi
      echo "[ci-queue-wait] no status contexts present; proceeding."
      exit 0
      ;;
    terminal-success|terminal-failure|unknown)
      # Queue guard only blocks on pending/running/queued states.
      exit 0
      ;;
    *)
      echo "[ci-queue-wait] unrecognized state '${STATE}', proceeding conservatively."
      exit 0
      ;;
  esac
done

View File

@@ -0,0 +1,83 @@
# detect-platform.ps1 - Detect git platform (Gitea or GitHub) for current repo
# Usage: . .\detect-platform.ps1; Get-GitPlatform
# or: .\detect-platform.ps1 (prints platform name)
function Get-GitPlatform {
    [CmdletBinding()]
    param()
    # Classify the 'origin' remote into a forge name: "github", "gitea"
    # (any self-hosted remote), or "unknown" (gitlab/bitbucket).
    $origin = git remote get-url origin 2>$null
    if ([string]::IsNullOrEmpty($origin)) {
        Write-Error "Not a git repository or no origin remote"
        return $null
    }
    switch -Regex ($origin) {
        "github\.com" { return "github" }
        "gitlab\.com|bitbucket\.org" { return "unknown" }
        # Anything else is assumed to be a self-hosted Gitea instance.
        default { return "gitea" }
    }
}
function Get-GitRepoInfo {
    [CmdletBinding()]
    param()
    # Return "owner/repo" parsed from the origin remote URL. Accepts
    # git@host:owner/repo(.git) and http(s)://host/owner/repo(.git).
    $origin = git remote get-url origin 2>$null
    if ([string]::IsNullOrEmpty($origin)) {
        Write-Error "Not a git repository or no origin remote"
        return $null
    }
    if ($origin -match "^git@") {
        # SSH form: everything after the first colon is the repo path.
        $path = ($origin -split ":")[1]
    } else {
        # HTTP(S) form: drop the scheme and host.
        $path = $origin -replace "^https?://[^/]+/", ""
    }
    # Drop an optional trailing .git.
    return ($path -replace "\.git$", "")
}
function Get-GitRepoOwner {
    [CmdletBinding()]
    param()
    # Owner half of "owner/repo"; $null when repo info is unavailable.
    $info = Get-GitRepoInfo
    if (-not $info) { return $null }
    return ($info -split "/")[0]
}
function Get-GitRepoName {
    [CmdletBinding()]
    param()
    # Repository half of "owner/repo"; $null when repo info is unavailable.
    $info = Get-GitRepoInfo
    if (-not $info) { return $null }
    return ($info -split "/")[-1]
}
# If script is run directly (not dot-sourced), output the platform
# (dot-sourcing sets InvocationName to "."; direct execution does not).
if ($MyInvocation.InvocationName -ne ".") {
    Get-GitPlatform
}

80
tools/git/detect-platform.sh Executable file
View File

@@ -0,0 +1,80 @@
#!/bin/bash
# detect-platform.sh - Detect git platform (Gitea or GitHub) for current repo
# Usage: source detect-platform.sh && detect_platform
# or: ./detect-platform.sh (prints platform name)
detect_platform() {
  # Print and export the forge backing the 'origin' remote:
  # "github", "gitea" (any self-hosted remote), or "unknown" (returns 1).
  local origin
  origin=$(git remote get-url origin 2>/dev/null)
  if [[ -z "$origin" ]]; then
    echo "error: not a git repository or no origin remote" >&2
    return 1
  fi
  case "$origin" in
    *github.com*)
      export PLATFORM="github"
      echo "github"
      ;;
    *gitlab.com*|*bitbucket.org*)
      export PLATFORM="unknown"
      echo "unknown"
      return 1
      ;;
    *)
      # Self-hosted remotes are assumed to be Gitea.
      export PLATFORM="gitea"
      echo "gitea"
      ;;
  esac
}
get_repo_info() {
  # Print "owner/repo" parsed from the origin remote URL.
  # Handles git@host:owner/repo(.git) and http(s)://host/owner/repo(.git).
  local origin path
  origin=$(git remote get-url origin 2>/dev/null)
  if [[ -z "$origin" ]]; then
    echo "error: not a git repository or no origin remote" >&2
    return 1
  fi
  if [[ "$origin" == git@* ]]; then
    path=${origin#*:}     # SSH form: strip "git@host:"
  else
    path=${origin#*://}   # strip scheme
    path=${path#*/}       # strip host
  fi
  printf '%s\n' "${path%.git}"  # drop optional .git suffix
}
get_repo_owner() {
  # Print the owner component of "owner/repo" from the origin remote.
  local info
  info=$(get_repo_info)
  printf '%s\n' "${info%%/*}"
}
get_repo_name() {
  # Print the repository component of "owner/repo" from the origin remote.
  local info
  info=$(get_repo_info)
  printf '%s\n' "${info##*/}"
}
# If script is run directly (not sourced), output the platform
# (BASH_SOURCE[0] equals $0 only when executed, not when sourced).
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  detect_platform
fi

111
tools/git/issue-assign.ps1 Normal file
View File

@@ -0,0 +1,111 @@
# issue-assign.ps1 - Assign issues on Gitea or GitHub
# Usage: .\issue-assign.ps1 -Issue ISSUE_NUMBER [-Assignee assignee] [-Labels labels] [-Milestone milestone]
#
# Dispatches to `gh issue edit` (GitHub) or `tea issue edit` (Gitea) based on
# the origin remote. NOTE(review): Show-Usage exits with code 1 even when
# reached via -Help; confirm whether help should exit 0 instead.
[CmdletBinding()]
param(
    [Parameter(Mandatory=$true)]
    [Alias("i")]
    [int]$Issue,
    [Alias("a")]
    [string]$Assignee,
    [Alias("l")]
    [string]$Labels,
    [Alias("m")]
    [string]$Milestone,
    [Alias("r")]
    [switch]$RemoveAssignee,
    [Alias("h")]
    [switch]$Help
)
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$ScriptDir\detect-platform.ps1"
function Show-Usage {
    @"
Usage: issue-assign.ps1 [OPTIONS]
Assign or update an issue on the current repository (Gitea or GitHub).
Options:
  -Issue, -i NUMBER      Issue number (required)
  -Assignee, -a USER     Assign to user (use @me for self)
  -Labels, -l LABELS     Add comma-separated labels
  -Milestone, -m NAME    Set milestone
  -RemoveAssignee, -r    Remove current assignee
  -Help, -h              Show this help message
Examples:
  .\issue-assign.ps1 -i 42 -a "username"
  .\issue-assign.ps1 -i 42 -l "in-progress" -m "0.2.0"
  .\issue-assign.ps1 -i 42 -a @me
"@
    exit 1
}
if ($Help) {
    Show-Usage
}
$platform = Get-GitPlatform
switch ($platform) {
    "github" {
        if ($Assignee) {
            gh issue edit $Issue --add-assignee $Assignee
        }
        if ($RemoveAssignee) {
            # Collect current assignees (one login per line) and remove them all.
            $current = gh issue view $Issue --json assignees -q '.assignees[].login' 2>$null
            if ($current) {
                $assignees = ($current -split "`n") -join ","
                gh issue edit $Issue --remove-assignee $assignees
            }
        }
        if ($Labels) {
            gh issue edit $Issue --add-label $Labels
        }
        if ($Milestone) {
            gh issue edit $Issue --milestone $Milestone
        }
        Write-Host "Issue #$Issue updated successfully"
    }
    "gitea" {
        $needsEdit = $false
        # Accumulate argv and invoke once; avoids quoting issues.
        $cmd = @("tea", "issue", "edit", $Issue)
        if ($Assignee) {
            $cmd += @("--assignees", $Assignee)
            $needsEdit = $true
        }
        if ($Labels) {
            $cmd += @("--labels", $Labels)
            $needsEdit = $true
        }
        if ($Milestone) {
            # NOTE(review): resolves the milestone ID by regex-scraping
            # `tea milestones list`; a name that is a substring of another
            # milestone may mis-match — verify against tea's output format.
            $milestoneList = tea milestones list 2>$null
            $milestoneId = ($milestoneList | Select-String "^\s*(\d+).*$Milestone" | ForEach-Object { $_.Matches.Groups[1].Value } | Select-Object -First 1)
            if ($milestoneId) {
                $cmd += @("--milestone", $milestoneId)
                $needsEdit = $true
            } else {
                Write-Warning "Could not find milestone '$Milestone'"
            }
        }
        if ($needsEdit) {
            & $cmd[0] $cmd[1..($cmd.Length-1)]
            Write-Host "Issue #$Issue updated successfully"
        } else {
            Write-Host "No changes specified"
        }
    }
    default {
        Write-Error "Could not detect git platform"
        exit 1
    }
}

135
tools/git/issue-assign.sh Executable file
View File

@@ -0,0 +1,135 @@
#!/bin/bash
# issue-assign.sh - Assign issues on Gitea or GitHub
# Usage: issue-assign.sh -i ISSUE_NUMBER [-a assignee] [-l labels] [-m milestone]
#
# Dispatches to `gh issue edit` (GitHub) or `tea issue edit` (Gitea) based on
# the platform detected from the origin remote.
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"

# Default values
ISSUE=""
ASSIGNEE=""
LABELS=""
MILESTONE=""
REMOVE_ASSIGNEE=false

usage() {
  # Print CLI help and exit 1 (also used for argument-error paths).
  cat <<EOF
Usage: $(basename "$0") [OPTIONS]
Assign or update an issue on the current repository (Gitea or GitHub).
Options:
  -i, --issue NUMBER       Issue number (required)
  -a, --assignee USER      Assign to user (use @me for self)
  -l, --labels LABELS      Add comma-separated labels
  -m, --milestone NAME     Set milestone
  -r, --remove-assignee    Remove current assignee
  -h, --help               Show this help message
Examples:
  $(basename "$0") -i 42 -a "username"
  $(basename "$0") -i 42 -l "in-progress" -m "0.2.0"
  $(basename "$0") -i 42 -a @me
EOF
  exit 1
}

# Parse arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    -i|--issue)
      ISSUE="$2"
      shift 2
      ;;
    -a|--assignee)
      ASSIGNEE="$2"
      shift 2
      ;;
    -l|--labels)
      LABELS="$2"
      shift 2
      ;;
    -m|--milestone)
      MILESTONE="$2"
      shift 2
      ;;
    -r|--remove-assignee)
      REMOVE_ASSIGNEE=true
      shift
      ;;
    -h|--help)
      usage
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage
      ;;
  esac
done

if [[ -z "$ISSUE" ]]; then
  echo "Error: Issue number is required (-i)" >&2
  usage
fi

PLATFORM=$(detect_platform)
case "$PLATFORM" in
  github)
    if [[ -n "$ASSIGNEE" ]]; then
      gh issue edit "$ISSUE" --add-assignee "$ASSIGNEE"
    fi
    if [[ "$REMOVE_ASSIGNEE" == true ]]; then
      # Get current assignees (one login per line) and remove them all.
      CURRENT=$(gh issue view "$ISSUE" --json assignees -q '.assignees[].login' 2>/dev/null | tr '\n' ',')
      if [[ -n "$CURRENT" ]]; then
        gh issue edit "$ISSUE" --remove-assignee "${CURRENT%,}"
      fi
    fi
    if [[ -n "$LABELS" ]]; then
      gh issue edit "$ISSUE" --add-label "$LABELS"
    fi
    if [[ -n "$MILESTONE" ]]; then
      gh issue edit "$ISSUE" --milestone "$MILESTONE"
    fi
    echo "Issue #$ISSUE updated successfully"
    ;;
  gitea)
    # Build argv as an array instead of eval'ing a string: values containing
    # spaces, quotes, or shell metacharacters are passed through intact.
    CMD=(tea issue edit "$ISSUE")
    NEEDS_EDIT=false
    if [[ -n "$ASSIGNEE" ]]; then
      # tea uses --assignees flag
      CMD+=(--assignees "$ASSIGNEE")
      NEEDS_EDIT=true
    fi
    if [[ -n "$LABELS" ]]; then
      # tea uses --labels flag (replaces existing)
      CMD+=(--labels "$LABELS")
      NEEDS_EDIT=true
    fi
    if [[ -n "$MILESTONE" ]]; then
      # Match the milestone name literally (-F): the old bare grep treated
      # names like "0.2.0" as regexes, so the dots matched any character.
      # NOTE(review): a name that is a substring of another may still match.
      MILESTONE_ID=$(tea milestones list 2>/dev/null | grep -E "^\s*[0-9]+" | grep -F -- "$MILESTONE" | awk '{print $1}' | head -1)
      if [[ -n "$MILESTONE_ID" ]]; then
        CMD+=(--milestone "$MILESTONE_ID")
        NEEDS_EDIT=true
      else
        echo "Warning: Could not find milestone '$MILESTONE'" >&2
      fi
    fi
    if [[ "$NEEDS_EDIT" == true ]]; then
      "${CMD[@]}"
      echo "Issue #$ISSUE updated successfully"
    else
      echo "No changes specified"
    fi
    ;;
  *)
    echo "Error: Could not detect git platform" >&2
    exit 1
    ;;
esac
64
tools/git/issue-close.sh Executable file
View File

@@ -0,0 +1,64 @@
#!/bin/bash
# issue-close.sh - Close an issue on GitHub or Gitea
# Usage: issue-close.sh -i <issue_number> [-c <comment>]
set -e

# Source platform detection
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"

# Parse arguments
ISSUE_NUMBER=""
COMMENT=""
while [[ $# -gt 0 ]]; do
  case $1 in
    -i|--issue)
      ISSUE_NUMBER="$2"
      shift 2
      ;;
    -c|--comment)
      COMMENT="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: issue-close.sh -i <issue_number> [-c <comment>]"
      echo ""
      echo "Options:"
      echo "  -i, --issue    Issue number (required)"
      echo "  -c, --comment  Comment to add before closing (optional)"
      echo "  -h, --help     Show this help"
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$ISSUE_NUMBER" ]]; then
  echo "Error: Issue number is required (-i)" >&2
  exit 1
fi

# Detect platform and close issue.
# Capture the platform instead of calling detect_platform bare: the bare call
# echoed the platform name to stdout, and under `set -e` its non-zero return
# for an unknown platform killed the script before the error branch below.
PLATFORM=$(detect_platform) || true
if [[ "$PLATFORM" == "github" ]]; then
  if [[ -n "$COMMENT" ]]; then
    gh issue comment "$ISSUE_NUMBER" --body "$COMMENT"
  fi
  gh issue close "$ISSUE_NUMBER"
  echo "Closed GitHub issue #$ISSUE_NUMBER"
elif [[ "$PLATFORM" == "gitea" ]]; then
  if [[ -n "$COMMENT" ]]; then
    tea issue comment "$ISSUE_NUMBER" "$COMMENT"
  fi
  tea issue close "$ISSUE_NUMBER"
  echo "Closed Gitea issue #$ISSUE_NUMBER"
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

61
tools/git/issue-comment.sh Executable file
View File

@@ -0,0 +1,61 @@
#!/bin/bash
# issue-comment.sh - Add a comment to an issue on GitHub or Gitea
# Usage: issue-comment.sh -i <issue_number> -c <comment>
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"

# Parse arguments
ISSUE_NUMBER=""
COMMENT=""
while [[ $# -gt 0 ]]; do
  case $1 in
    -i|--issue)
      ISSUE_NUMBER="$2"
      shift 2
      ;;
    -c|--comment)
      COMMENT="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: issue-comment.sh -i <issue_number> -c <comment>"
      echo ""
      echo "Options:"
      echo "  -i, --issue    Issue number (required)"
      echo "  -c, --comment  Comment text (required)"
      echo "  -h, --help     Show this help"
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$ISSUE_NUMBER" ]]; then
  echo "Error: Issue number is required (-i)" >&2
  exit 1
fi
if [[ -z "$COMMENT" ]]; then
  echo "Error: Comment is required (-c)" >&2
  exit 1
fi

# Capture the platform instead of calling detect_platform bare: the bare call
# echoed the platform name to stdout, and under `set -e` its non-zero return
# for an unknown platform killed the script before the error branch below.
PLATFORM=$(detect_platform) || true
if [[ "$PLATFORM" == "github" ]]; then
  gh issue comment "$ISSUE_NUMBER" --body "$COMMENT"
  echo "Added comment to GitHub issue #$ISSUE_NUMBER"
elif [[ "$PLATFORM" == "gitea" ]]; then
  tea issue comment "$ISSUE_NUMBER" "$COMMENT"
  echo "Added comment to Gitea issue #$ISSUE_NUMBER"
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

View File

@@ -0,0 +1,80 @@
# issue-create.ps1 - Create issues on Gitea or GitHub
# Usage: .\issue-create.ps1 -Title "Title" [-Body "Body"] [-Labels "label1,label2"] [-Milestone "milestone"]
#
# Dispatches to `gh issue create` (GitHub) or `tea issue create` (Gitea)
# based on the origin remote. NOTE(review): Show-Usage exits 1 even when
# reached via -Help; confirm whether help should exit 0.
[CmdletBinding()]
param(
    [Parameter(Mandatory=$true)]
    [Alias("t")]
    [string]$Title,
    [Alias("b")]
    [string]$Body,
    [Alias("l")]
    [string]$Labels,
    [Alias("m")]
    [string]$Milestone,
    [Alias("h")]
    [switch]$Help
)
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$ScriptDir\detect-platform.ps1"
function Show-Usage {
    @"
Usage: issue-create.ps1 [OPTIONS]
Create an issue on the current repository (Gitea or GitHub).
Options:
  -Title, -t TITLE       Issue title (required)
  -Body, -b BODY         Issue body/description
  -Labels, -l LABELS     Comma-separated labels (e.g., "bug,feature")
  -Milestone, -m NAME    Milestone name to assign
  -Help, -h              Show this help message
Examples:
  .\issue-create.ps1 -Title "Fix login bug" -Labels "bug,priority-high"
  .\issue-create.ps1 -t "Add dark mode" -b "Implement theme switching" -m "0.2.0"
"@
    exit 1
}
if ($Help) {
    Show-Usage
}
$platform = Get-GitPlatform
switch ($platform) {
    "github" {
        # Accumulate argv and invoke once; avoids quoting issues.
        $cmd = @("gh", "issue", "create", "--title", $Title)
        if ($Body) { $cmd += @("--body", $Body) }
        if ($Labels) { $cmd += @("--label", $Labels) }
        if ($Milestone) { $cmd += @("--milestone", $Milestone) }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    "gitea" {
        $cmd = @("tea", "issue", "create", "--title", $Title)
        if ($Body) { $cmd += @("--description", $Body) }
        if ($Labels) { $cmd += @("--labels", $Labels) }
        if ($Milestone) {
            # Try to get milestone ID by name
            # NOTE(review): regex-scrapes `tea milestones list`; a name that
            # is a substring of another milestone may mis-match.
            $milestoneList = tea milestones list 2>$null
            $milestoneId = ($milestoneList | Select-String "^\s*(\d+).*$Milestone" | ForEach-Object { $_.Matches.Groups[1].Value } | Select-Object -First 1)
            if ($milestoneId) {
                $cmd += @("--milestone", $milestoneId)
            } else {
                Write-Warning "Could not find milestone '$Milestone', creating without milestone"
            }
        }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    default {
        Write-Error "Could not detect git platform"
        exit 1
    }
}

172
tools/git/issue-create.sh Executable file
View File

@@ -0,0 +1,172 @@
#!/bin/bash
# issue-create.sh - Create issues on Gitea or GitHub
# Usage: issue-create.sh -t "Title" [-b "Body"] [-l "label1,label2"] [-m "milestone"]
#
# Prefers the `tea` CLI for Gitea and falls back to the raw Gitea REST API
# (title/body only) when tea is unavailable or fails.
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"
# Default values
TITLE=""
BODY=""
LABELS=""
MILESTONE=""
get_remote_host() {
  # Print the hostname of the 'origin' remote.
  # Accepts http(s)://host/... and git@host:... forms; returns 1 otherwise.
  local origin
  origin=$(git remote get-url origin 2>/dev/null || true)
  [[ -n "$origin" ]] || return 1
  case "$origin" in
    http://*/*|https://*/*)
      origin=${origin#*://}       # strip scheme
      printf '%s\n' "${origin%%/*}"
      ;;
    git@*:*)
      origin=${origin#git@}       # strip "git@"
      printf '%s\n' "${origin%%:*}"
      ;;
    *)
      return 1
      ;;
  esac
}
get_gitea_token() {
  # Print a Gitea API token for host "$1": prefer $GITEA_TOKEN, then the
  # password field of the first matching ~/.git-credentials entry.
  local host="$1"
  if [[ -n "${GITEA_TOKEN:-}" ]]; then
    echo "$GITEA_TOKEN"
    return 0
  fi
  local cred_file="$HOME/.git-credentials" match
  if [[ -f "$cred_file" ]]; then
    # Entries look like https://user:token@host.
    # NOTE(review): substring match — a host that is a substring of a longer
    # hostname in the file could match the wrong entry.
    match=$(grep -F "$host" "$cred_file" 2>/dev/null | sed -n 's#https\?://[^@]*:\([^@/]*\)@.*#\1#p' | head -n 1)
    if [[ -n "$match" ]]; then
      echo "$match"
      return 0
    fi
  fi
  return 1
}
gitea_issue_create_api() {
  # Fallback: create the issue via Gitea's REST API when the tea CLI is
  # unavailable or failed. Only title/body are applied.
  local host repo token url payload
  host=$(get_remote_host) || {
    echo "Error: could not determine remote host for API fallback" >&2
    return 1
  }
  repo=$(get_repo_info) || {
    echo "Error: could not determine repo owner/name for API fallback" >&2
    return 1
  }
  token=$(get_gitea_token "$host") || {
    echo "Error: Gitea token not found for API fallback (set GITEA_TOKEN or configure ~/.git-credentials)" >&2
    return 1
  }
  if [[ -n "$LABELS" || -n "$MILESTONE" ]]; then
    echo "Warning: API fallback currently applies title/body only; labels/milestone require authenticated tea setup." >&2
  fi
  # Build the JSON payload with python3 so title/body are safely escaped;
  # values are passed via the environment, not interpolated into code.
  payload=$(TITLE="$TITLE" BODY="$BODY" python3 - <<'PY'
import json
import os
payload = {"title": os.environ["TITLE"]}
body = os.environ.get("BODY", "")
if body:
    payload["body"] = body
print(json.dumps(payload))
PY
)
  # NOTE(review): assumes the Gitea instance is served over https.
  url="https://${host}/api/v1/repos/${repo}/issues"
  curl -fsS -X POST \
    -H "Authorization: token ${token}" \
    -H "Content-Type: application/json" \
    -d "$payload" \
    "$url"
}
usage() {
  # Print CLI help and exit 1 (also used for argument-error paths).
  cat <<EOF
Usage: $(basename "$0") [OPTIONS]
Create an issue on the current repository (Gitea or GitHub).
Options:
  -t, --title TITLE       Issue title (required)
  -b, --body BODY         Issue body/description
  -l, --labels LABELS     Comma-separated labels (e.g., "bug,feature")
  -m, --milestone NAME    Milestone name to assign
  -h, --help              Show this help message
Examples:
  $(basename "$0") -t "Fix login bug" -l "bug,priority-high"
  $(basename "$0") -t "Add dark mode" -b "Implement theme switching" -m "0.2.0"
EOF
  exit 1
}
# Parse arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    -t|--title)
      TITLE="$2"
      shift 2
      ;;
    -b|--body)
      BODY="$2"
      shift 2
      ;;
    -l|--labels)
      LABELS="$2"
      shift 2
      ;;
    -m|--milestone)
      MILESTONE="$2"
      shift 2
      ;;
    -h|--help)
      usage
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage
      ;;
  esac
done

if [[ -z "$TITLE" ]]; then
  echo "Error: Title is required (-t)" >&2
  usage
fi

PLATFORM=$(detect_platform)
case "$PLATFORM" in
  github)
    # Build argv as an array instead of eval'ing a string: titles/bodies
    # containing quotes, spaces, or backticks are passed through intact
    # (the old eval would mangle them or execute embedded commands).
    CMD=(gh issue create --title "$TITLE")
    [[ -n "$BODY" ]] && CMD+=(--body "$BODY")
    [[ -n "$LABELS" ]] && CMD+=(--label "$LABELS")
    [[ -n "$MILESTONE" ]] && CMD+=(--milestone "$MILESTONE")
    "${CMD[@]}"
    ;;
  gitea)
    if command -v tea >/dev/null 2>&1; then
      CMD=(tea issue create --title "$TITLE")
      [[ -n "$BODY" ]] && CMD+=(--description "$BODY")
      [[ -n "$LABELS" ]] && CMD+=(--labels "$LABELS")
      # tea accepts milestone by name directly (verified 2026-02-05)
      [[ -n "$MILESTONE" ]] && CMD+=(--milestone "$MILESTONE")
      if "${CMD[@]}"; then
        exit 0
      fi
      echo "Warning: tea issue create failed, trying Gitea API fallback..." >&2
    fi
    gitea_issue_create_api
    ;;
  *)
    echo "Error: Could not detect git platform" >&2
    exit 1
    ;;
esac

84
tools/git/issue-edit.sh Executable file
View File

@@ -0,0 +1,84 @@
#!/bin/bash
# issue-edit.sh - Edit an issue on GitHub or Gitea
# Usage: issue-edit.sh -i <issue_number> [-t <title>] [-b <body>] [-l <labels>] [-m <milestone>]
#
# Dispatches to `gh issue edit` (GitHub) or `tea issue edit` (Gitea).
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"

# Parse arguments
ISSUE_NUMBER=""
TITLE=""
BODY=""
LABELS=""
MILESTONE=""
while [[ $# -gt 0 ]]; do
  case $1 in
    -i|--issue)
      ISSUE_NUMBER="$2"
      shift 2
      ;;
    -t|--title)
      TITLE="$2"
      shift 2
      ;;
    -b|--body)
      BODY="$2"
      shift 2
      ;;
    -l|--labels)
      LABELS="$2"
      shift 2
      ;;
    -m|--milestone)
      MILESTONE="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: issue-edit.sh -i <issue_number> [-t <title>] [-b <body>] [-l <labels>] [-m <milestone>]"
      echo ""
      echo "Options:"
      echo "  -i, --issue      Issue number (required)"
      echo "  -t, --title      New title"
      echo "  -b, --body       New body/description"
      echo "  -l, --labels     Labels (comma-separated, replaces existing)"
      echo "  -m, --milestone  Milestone name"
      echo "  -h, --help       Show this help"
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$ISSUE_NUMBER" ]]; then
  echo "Error: Issue number is required (-i)" >&2
  exit 1
fi

# Capture the platform instead of calling detect_platform bare: the bare call
# echoed the platform name to stdout, and under `set -e` its non-zero return
# for an unknown platform killed the script before the error branch below.
PLATFORM=$(detect_platform) || true
if [[ "$PLATFORM" == "github" ]]; then
  # Build argv as an array instead of `eval $CMD`: titles/bodies containing
  # quotes, spaces, or backticks are passed through intact (the old unquoted
  # eval would mangle them or execute embedded commands).
  CMD=(gh issue edit "$ISSUE_NUMBER")
  [[ -n "$TITLE" ]] && CMD+=(--title "$TITLE")
  [[ -n "$BODY" ]] && CMD+=(--body "$BODY")
  [[ -n "$LABELS" ]] && CMD+=(--add-label "$LABELS")
  [[ -n "$MILESTONE" ]] && CMD+=(--milestone "$MILESTONE")
  "${CMD[@]}"
  echo "Updated GitHub issue #$ISSUE_NUMBER"
elif [[ "$PLATFORM" == "gitea" ]]; then
  CMD=(tea issue edit "$ISSUE_NUMBER")
  [[ -n "$TITLE" ]] && CMD+=(--title "$TITLE")
  [[ -n "$BODY" ]] && CMD+=(--description "$BODY")
  [[ -n "$LABELS" ]] && CMD+=(--add-labels "$LABELS")
  [[ -n "$MILESTONE" ]] && CMD+=(--milestone "$MILESTONE")
  "${CMD[@]}"
  echo "Updated Gitea issue #$ISSUE_NUMBER"
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

78
tools/git/issue-list.ps1 Normal file
View File

@@ -0,0 +1,78 @@
# issue-list.ps1 - List issues on Gitea or GitHub
# Usage: .\issue-list.ps1 [-State state] [-Label label] [-Milestone milestone] [-Assignee assignee]
#
# Dispatches to `gh issue list` (GitHub) or `tea issues list` (Gitea) based
# on the origin remote. NOTE(review): Show-Usage exits 1 even when reached
# via -Help; confirm whether help should exit 0.
[CmdletBinding()]
param(
    [Alias("s")]
    [ValidateSet("open", "closed", "all")]
    [string]$State = "open",
    [Alias("l")]
    [string]$Label,
    [Alias("m")]
    [string]$Milestone,
    [Alias("a")]
    [string]$Assignee,
    [Alias("n")]
    [int]$Limit = 100,
    [Alias("h")]
    [switch]$Help
)
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$ScriptDir\detect-platform.ps1"
function Show-Usage {
    @"
Usage: issue-list.ps1 [OPTIONS]
List issues from the current repository (Gitea or GitHub).
Options:
  -State, -s STATE       Filter by state: open, closed, all (default: open)
  -Label, -l LABEL       Filter by label
  -Milestone, -m NAME    Filter by milestone name
  -Assignee, -a USER     Filter by assignee
  -Limit, -n N           Maximum issues to show (default: 100)
  -Help, -h              Show this help message
Examples:
  .\issue-list.ps1                    # List open issues
  .\issue-list.ps1 -s all -l bug      # All issues with 'bug' label
  .\issue-list.ps1 -m "0.2.0"         # Issues in milestone 0.2.0
"@
    exit 1
}
if ($Help) {
    Show-Usage
}
$platform = Get-GitPlatform
switch ($platform) {
    "github" {
        # Accumulate argv and invoke once; avoids quoting issues with filters.
        $cmd = @("gh", "issue", "list", "--state", $State, "--limit", $Limit)
        if ($Label) { $cmd += @("--label", $Label) }
        if ($Milestone) { $cmd += @("--milestone", $Milestone) }
        if ($Assignee) { $cmd += @("--assignee", $Assignee) }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    "gitea" {
        $cmd = @("tea", "issues", "list", "--state", $State, "--limit", $Limit)
        if ($Label) { $cmd += @("--labels", $Label) }
        if ($Milestone) { $cmd += @("--milestones", $Milestone) }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
        # No assignee flag is passed to tea here; warn so output is reviewed.
        if ($Assignee) {
            Write-Warning "Assignee filtering may require manual review for Gitea"
        }
    }
    default {
        Write-Error "Could not detect git platform"
        exit 1
    }
}

96
tools/git/issue-list.sh Executable file
View File

@@ -0,0 +1,96 @@
#!/bin/bash
# issue-list.sh - List issues on Gitea or GitHub
# Usage: issue-list.sh [-s state] [-l label] [-m milestone] [-a assignee]
#
# Dispatches to `gh issue list` (GitHub) or `tea issues list` (Gitea).
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"

# Default values
STATE="open"
LABEL=""
MILESTONE=""
ASSIGNEE=""
LIMIT=100

usage() {
  # Print CLI help and exit 1 (also used for argument-error paths).
  cat <<EOF
Usage: $(basename "$0") [OPTIONS]
List issues from the current repository (Gitea or GitHub).
Options:
  -s, --state STATE      Filter by state: open, closed, all (default: open)
  -l, --label LABEL      Filter by label
  -m, --milestone NAME   Filter by milestone name
  -a, --assignee USER    Filter by assignee
  -n, --limit N          Maximum issues to show (default: 100)
  -h, --help             Show this help message
Examples:
  $(basename "$0")                    # List open issues
  $(basename "$0") -s all -l bug      # All issues with 'bug' label
  $(basename "$0") -m "0.2.0"         # Issues in milestone 0.2.0
EOF
  exit 1
}

# Parse arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    -s|--state)
      STATE="$2"
      shift 2
      ;;
    -l|--label)
      LABEL="$2"
      shift 2
      ;;
    -m|--milestone)
      MILESTONE="$2"
      shift 2
      ;;
    -a|--assignee)
      ASSIGNEE="$2"
      shift 2
      ;;
    -n|--limit)
      LIMIT="$2"
      shift 2
      ;;
    -h|--help)
      usage
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage
      ;;
  esac
done

PLATFORM=$(detect_platform)
case "$PLATFORM" in
  github)
    # Build argv as an array instead of eval'ing a string: filter values
    # containing spaces or shell metacharacters are passed through intact.
    CMD=(gh issue list --state "$STATE" --limit "$LIMIT")
    [[ -n "$LABEL" ]] && CMD+=(--label "$LABEL")
    [[ -n "$MILESTONE" ]] && CMD+=(--milestone "$MILESTONE")
    [[ -n "$ASSIGNEE" ]] && CMD+=(--assignee "$ASSIGNEE")
    "${CMD[@]}"
    ;;
  gitea)
    CMD=(tea issues list --state "$STATE" --limit "$LIMIT")
    [[ -n "$LABEL" ]] && CMD+=(--labels "$LABEL")
    [[ -n "$MILESTONE" ]] && CMD+=(--milestones "$MILESTONE")
    # Note: tea may not support assignee filter directly
    "${CMD[@]}"
    if [[ -n "$ASSIGNEE" ]]; then
      echo "Note: Assignee filtering may require manual review for Gitea" >&2
    fi
    ;;
  *)
    echo "Error: Could not detect git platform" >&2
    exit 1
    ;;
esac

62
tools/git/issue-reopen.sh Executable file
View File

@@ -0,0 +1,62 @@
#!/bin/bash
# issue-reopen.sh - Reopen a closed issue on GitHub or Gitea
# Usage: issue-reopen.sh -i <issue_number> [-c <comment>]
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"

# Parse arguments
ISSUE_NUMBER=""
COMMENT=""
while [[ $# -gt 0 ]]; do
  case $1 in
    -i|--issue)
      ISSUE_NUMBER="$2"
      shift 2
      ;;
    -c|--comment)
      COMMENT="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: issue-reopen.sh -i <issue_number> [-c <comment>]"
      echo ""
      echo "Options:"
      echo "  -i, --issue    Issue number (required)"
      echo "  -c, --comment  Comment to add when reopening (optional)"
      echo "  -h, --help     Show this help"
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$ISSUE_NUMBER" ]]; then
  echo "Error: Issue number is required (-i)" >&2
  exit 1
fi

# Capture the platform instead of calling detect_platform bare: the bare call
# echoed the platform name to stdout, and under `set -e` its non-zero return
# for an unknown platform killed the script before the error branch below.
PLATFORM=$(detect_platform) || true
if [[ "$PLATFORM" == "github" ]]; then
  if [[ -n "$COMMENT" ]]; then
    gh issue comment "$ISSUE_NUMBER" --body "$COMMENT"
  fi
  gh issue reopen "$ISSUE_NUMBER"
  echo "Reopened GitHub issue #$ISSUE_NUMBER"
elif [[ "$PLATFORM" == "gitea" ]]; then
  if [[ -n "$COMMENT" ]]; then
    tea issue comment "$ISSUE_NUMBER" "$COMMENT"
  fi
  tea issue reopen "$ISSUE_NUMBER"
  echo "Reopened Gitea issue #$ISSUE_NUMBER"
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

112
tools/git/issue-view.sh Executable file
View File

@@ -0,0 +1,112 @@
#!/bin/bash
# issue-view.sh - View issue details on GitHub or Gitea
# Usage: issue-view.sh -i <issue_number>
#
# Prefers the `tea` CLI for Gitea and falls back to the raw Gitea REST API
# when tea is unavailable or fails.
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"
# Parse arguments
ISSUE_NUMBER=""
get_remote_host() {
  # Print the hostname of the 'origin' remote (https or ssh form);
  # returns 1 when there is no remote or the URL form is unrecognized.
  local remote_url
  remote_url=$(git remote get-url origin 2>/dev/null || true)
  if [[ -z "$remote_url" ]]; then
    return 1
  fi
  if [[ "$remote_url" =~ ^https?://([^/]+)/ ]]; then
    echo "${BASH_REMATCH[1]}"
    return 0
  fi
  if [[ "$remote_url" =~ ^git@([^:]+): ]]; then
    echo "${BASH_REMATCH[1]}"
    return 0
  fi
  return 1
}
get_gitea_token() {
  # Print a Gitea API token for host "$1": prefer $GITEA_TOKEN, else the
  # password field of the first matching ~/.git-credentials entry.
  local host="$1"
  if [[ -n "${GITEA_TOKEN:-}" ]]; then
    echo "$GITEA_TOKEN"
    return 0
  fi
  local creds="$HOME/.git-credentials"
  if [[ -f "$creds" ]]; then
    local token
    # Entries look like https://user:token@host; take the first match.
    token=$(grep -F "$host" "$creds" 2>/dev/null | sed -n 's#https\?://[^@]*:\([^@/]*\)@.*#\1#p' | head -n 1)
    if [[ -n "$token" ]]; then
      echo "$token"
      return 0
    fi
  fi
  return 1
}
gitea_issue_view_api() {
  # Fallback: fetch the issue JSON via Gitea's REST API when tea is
  # unavailable or failed; pretty-print with python3 when present.
  local host repo token url
  host=$(get_remote_host) || {
    echo "Error: could not determine remote host for API fallback" >&2
    return 1
  }
  repo=$(get_repo_info) || {
    echo "Error: could not determine repo owner/name for API fallback" >&2
    return 1
  }
  token=$(get_gitea_token "$host") || {
    echo "Error: Gitea token not found for API fallback (set GITEA_TOKEN or configure ~/.git-credentials)" >&2
    return 1
  }
  # NOTE(review): assumes the Gitea instance is served over https.
  url="https://${host}/api/v1/repos/${repo}/issues/${ISSUE_NUMBER}"
  if command -v python3 >/dev/null 2>&1; then
    curl -fsS -H "Authorization: token ${token}" "$url" | python3 -m json.tool
  else
    curl -fsS -H "Authorization: token ${token}" "$url"
  fi
}
while [[ $# -gt 0 ]]; do
  case $1 in
    -i|--issue)
      ISSUE_NUMBER="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: issue-view.sh -i <issue_number>"
      echo ""
      echo "Options:"
      echo "  -i, --issue  Issue number (required)"
      echo "  -h, --help   Show this help"
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$ISSUE_NUMBER" ]]; then
  echo "Error: Issue number is required (-i)" >&2
  exit 1
fi

# Capture the platform instead of calling detect_platform bare: the bare call
# echoed the platform name to stdout (polluting the issue output), and under
# `set -e` its non-zero return for an unknown platform killed the script
# before the error branch below.
PLATFORM=$(detect_platform) || true
if [[ "$PLATFORM" == "github" ]]; then
  gh issue view "$ISSUE_NUMBER"
elif [[ "$PLATFORM" == "gitea" ]]; then
  if command -v tea >/dev/null 2>&1; then
    # `tea issue <n>` prints the issue; fall back to the raw API on failure.
    if tea issue "$ISSUE_NUMBER"; then
      exit 0
    fi
    echo "Warning: tea issue view failed, trying Gitea API fallback..." >&2
  fi
  gitea_issue_view_api
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

50
tools/git/milestone-close.sh Executable file
View File

@@ -0,0 +1,50 @@
#!/bin/bash
# milestone-close.sh - Close a milestone on GitHub or Gitea
# Usage: milestone-close.sh -t <title>
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"

# Parse arguments
TITLE=""
while [[ $# -gt 0 ]]; do
  case $1 in
    -t|--title)
      TITLE="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: milestone-close.sh -t <title>"
      echo ""
      echo "Options:"
      echo "  -t, --title  Milestone title (required)"
      echo "  -h, --help   Show this help"
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$TITLE" ]]; then
  echo "Error: Milestone title is required (-t)" >&2
  exit 1
fi

# Capture the platform instead of calling detect_platform bare: the bare call
# echoed the platform name to stdout, and under `set -e` its non-zero return
# for an unknown platform killed the script before the error branch below.
PLATFORM=$(detect_platform) || true
if [[ "$PLATFORM" == "github" ]]; then
  # Resolve the milestone number first: the old one-liner interpolated an
  # empty number into the PATCH URL when the title did not exist, producing
  # a confusing API error instead of a clear message.
  # NOTE(review): the title is interpolated into the jq filter; titles
  # containing double quotes will break the query.
  NUMBER=$(gh api "/repos/{owner}/{repo}/milestones" --jq ".[] | select(.title==\"$TITLE\") | .number")
  if [[ -z "$NUMBER" ]]; then
    echo "Error: Milestone '$TITLE' not found" >&2
    exit 1
  fi
  gh api -X PATCH "/repos/{owner}/{repo}/milestones/$NUMBER" -f state=closed
  echo "Closed GitHub milestone: $TITLE"
elif [[ "$PLATFORM" == "gitea" ]]; then
  tea milestone close "$TITLE"
  echo "Closed Gitea milestone: $TITLE"
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

View File

@@ -0,0 +1,98 @@
# milestone-create.ps1 - Create milestones on Gitea or GitHub
# Usage: .\milestone-create.ps1 -Title "Title" [-Description "Description"] [-Due "YYYY-MM-DD"]
#
# With -List, lists existing milestones instead of creating one.
# NOTE(review): Show-Usage exits 1 even when reached via -Help; confirm
# whether help should exit 0.
[CmdletBinding()]
param(
    [Alias("t")]
    [string]$Title,
    [Alias("d")]
    [string]$Description,
    [string]$Due,
    [switch]$List,
    [Alias("h")]
    [switch]$Help
)
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. "$ScriptDir\detect-platform.ps1"
function Show-Usage {
    @"
Usage: milestone-create.ps1 [OPTIONS]
Create or list milestones on the current repository (Gitea or GitHub).
Versioning Convention:
  - Features get dedicated milestones
  - Pre-MVP milestones MUST use 0.0.x and MUST start at 0.0.1
  - 0.1.0 is reserved for MVP release
  - After MVP, continue semantic progression (0.1.x, 0.2.x, ...)
Options:
  -Title, -t TITLE        Milestone title/version (e.g., "0.0.1")
  -Description, -d DESC   Milestone description
  -Due DATE               Due date (YYYY-MM-DD format)
  -List                   List existing milestones
  -Help, -h               Show this help message
Examples:
  .\milestone-create.ps1 -List
  .\milestone-create.ps1 -t "0.0.1" -d "Pre-MVP Foundation Sprint"
  .\milestone-create.ps1 -t "0.1.0" -d "MVP Release" -Due "2025-03-01"
"@
    exit 1
}
if ($Help) {
    Show-Usage
}
$platform = Get-GitPlatform
if ($List) {
    switch ($platform) {
        "github" {
            # Tab-separated: number, title, state, open/closed issue counts.
            gh api repos/:owner/:repo/milestones --jq '.[] | "\(.number)`t\(.title)`t\(.state)`t\(.open_issues)/\(.closed_issues) issues"'
        }
        "gitea" {
            tea milestones list
        }
        default {
            Write-Error "Could not detect git platform"
            exit 1
        }
    }
    exit 0
}
if (-not $Title) {
    Write-Error "Title is required (-t) for creating milestones"
    Show-Usage
}
switch ($platform) {
    "github" {
        # GitHub's CLI has no milestone command; POST JSON to the REST API.
        $payload = @{ title = $Title }
        if ($Description) { $payload.description = $Description }
        if ($Due) { $payload.due_on = "${Due}T00:00:00Z" }
        $json = $payload | ConvertTo-Json -Compress
        $json | gh api repos/:owner/:repo/milestones --method POST --input -
        Write-Host "Milestone '$Title' created successfully"
    }
    "gitea" {
        # Accumulate argv and invoke once; avoids quoting issues.
        $cmd = @("tea", "milestones", "create", "--title", $Title)
        if ($Description) { $cmd += @("--description", $Description) }
        if ($Due) { $cmd += @("--deadline", $Due) }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
        Write-Host "Milestone '$Title' created successfully"
    }
    default {
        Write-Error "Could not detect git platform"
        exit 1
    }
}

117
tools/git/milestone-create.sh Executable file
View File

@@ -0,0 +1,117 @@
#!/bin/bash
# milestone-create.sh - Create milestones on Gitea or GitHub
# Usage: milestone-create.sh -t "Title" [-d "Description"] [--due "YYYY-MM-DD"]
#
# With --list, lists existing milestones instead of creating one.
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/detect-platform.sh"
# Default values
TITLE=""
DESCRIPTION=""
DUE_DATE=""
LIST_ONLY=false
usage() {
  # Print CLI help and exit 1 (also used for argument-error paths).
  cat <<EOF
Usage: $(basename "$0") [OPTIONS]
Create or list milestones on the current repository (Gitea or GitHub).
Versioning Convention:
  - Features get dedicated milestones
  - Pre-MVP milestones MUST use 0.0.x and MUST start at 0.0.1
  - 0.1.0 is reserved for MVP release
  - After MVP, continue semantic progression (0.1.x, 0.2.x, ...)
Options:
  -t, --title TITLE        Milestone title/version (e.g., "0.0.1")
  -d, --desc DESCRIPTION   Milestone description
  --due DATE               Due date (YYYY-MM-DD format)
  --list                   List existing milestones
  -h, --help               Show this help message
Examples:
  $(basename "$0") --list
  $(basename "$0") -t "0.0.1" -d "Pre-MVP Foundation Sprint"
  $(basename "$0") -t "0.1.0" -d "MVP Release" --due "2025-03-01"
EOF
  exit 1
}
# Parse arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    -t|--title)
      TITLE="$2"
      shift 2
      ;;
    -d|--desc)
      DESCRIPTION="$2"
      shift 2
      ;;
    --due)
      DUE_DATE="$2"
      shift 2
      ;;
    --list)
      LIST_ONLY=true
      shift
      ;;
    -h|--help)
      usage
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage
      ;;
  esac
done

PLATFORM=$(detect_platform)

if [[ "$LIST_ONLY" == true ]]; then
  case "$PLATFORM" in
    github)
      # Tab-separated: number, title, state, open/closed issue counts.
      gh api repos/:owner/:repo/milestones --jq '.[] | "\(.number)\t\(.title)\t\(.state)\t\(.open_issues)/\(.closed_issues) issues"'
      ;;
    gitea)
      tea milestones list
      ;;
    *)
      echo "Error: Could not detect git platform" >&2
      exit 1
      ;;
  esac
  exit 0
fi

if [[ -z "$TITLE" ]]; then
  echo "Error: Title is required (-t) for creating milestones" >&2
  usage
fi

case "$PLATFORM" in
  github)
    # Pass fields with `gh api -f` instead of hand-building JSON: titles and
    # descriptions containing quotes or backslashes no longer produce an
    # invalid payload.
    ARGS=(repos/:owner/:repo/milestones --method POST -f "title=$TITLE")
    [[ -n "$DESCRIPTION" ]] && ARGS+=(-f "description=$DESCRIPTION")
    [[ -n "$DUE_DATE" ]] && ARGS+=(-f "due_on=${DUE_DATE}T00:00:00Z")
    gh api "${ARGS[@]}"
    echo "Milestone '$TITLE' created successfully"
    ;;
  gitea)
    # Build argv as an array instead of eval'ing a string: values containing
    # quotes or spaces are passed through intact.
    CMD=(tea milestones create --title "$TITLE")
    [[ -n "$DESCRIPTION" ]] && CMD+=(--description "$DESCRIPTION")
    [[ -n "$DUE_DATE" ]] && CMD+=(--deadline "$DUE_DATE")
    "${CMD[@]}"
    echo "Milestone '$TITLE' created successfully"
    ;;
  *)
    echo "Error: Could not detect git platform" >&2
    exit 1
    ;;
esac

43
tools/git/milestone-list.sh Executable file
View File

@@ -0,0 +1,43 @@
#!/bin/bash
# milestone-list.sh - List milestones on GitHub or Gitea
# Usage: milestone-list.sh [-s <state>]
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform (sets and echoes $PLATFORM).
source "$SCRIPT_DIR/detect-platform.sh"

# Parse arguments
STATE="open"
while [[ $# -gt 0 ]]; do
  case "$1" in
    -s|--state)
      STATE="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: milestone-list.sh [-s <state>]"
      echo ""
      echo "Options:"
      echo " -s, --state Filter by state: open, closed, all (default: open)"
      echo " -h, --help Show this help"
      exit 0
      ;;
    *)
      # Diagnostics on stderr so stdout carries only the listing.
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

# detect_platform echoes the platform name; silence it so stdout carries
# only milestone data (matches pr-diff.sh / pr-metadata.sh in this suite).
detect_platform > /dev/null

if [[ "$PLATFORM" == "github" ]]; then
  gh api "/repos/{owner}/{repo}/milestones?state=$STATE" --jq '.[] | "\(.title) (\(.state)) - \(.open_issues) open, \(.closed_issues) closed"'
elif [[ "$PLATFORM" == "gitea" ]]; then
  # Use the plural subcommand (consistent with the other tools in this
  # suite) and honor -s, which was previously ignored on the Gitea path.
  tea milestones list --state "$STATE"
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

273
tools/git/pr-ci-wait.sh Executable file
View File

@@ -0,0 +1,273 @@
#!/bin/bash
# pr-ci-wait.sh - Wait for PR CI status to reach terminal state (GitHub/Gitea)
# Usage: pr-ci-wait.sh -n <pr_number> [-t timeout_sec] [-i interval_sec]
#
# Polls the combined commit-status API for the PR's head commit until it is
# green (exit 0) or red (exit 1); exits 124 when the timeout elapses.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Sourced helper; presumably defines detect_platform, get_repo_owner and
# get_repo_name used below — confirm against detect-platform.sh.
source "$SCRIPT_DIR/detect-platform.sh"
# Defaults: poll for up to 30 minutes, every 15 seconds.
PR_NUMBER=""
TIMEOUT_SEC=1800
INTERVAL_SEC=15
# Print usage/help text to stdout.
usage() {
cat <<EOF
Usage: $(basename "$0") -n <pr_number> [-t timeout_sec] [-i interval_sec]
Options:
-n, --number NUMBER PR number (required)
-t, --timeout SECONDS Max wait time in seconds (default: 1800)
-i, --interval SECONDS Poll interval in seconds (default: 15)
-h, --help Show this help
Examples:
$(basename "$0") -n 643
$(basename "$0") -n 643 -t 900 -i 10
EOF
}
# Print the host portion of the `origin` remote URL (handles both
# https://host/... and git@host:... forms). Returns 1 when there is no
# origin remote or the URL shape is unrecognized.
get_remote_host() {
  local url
  url=$(git remote get-url origin 2>/dev/null || true)
  [[ -n "$url" ]] || return 1
  if [[ "$url" =~ ^https?://([^/]+)/ ]] || [[ "$url" =~ ^git@([^:]+): ]]; then
    echo "${BASH_REMATCH[1]}"
    return 0
  fi
  return 1
}
# Resolve a Gitea API token for host $1: prefer $GITEA_TOKEN, otherwise
# scrape ~/.git-credentials for a matching https URL. Returns 1 when no
# token can be found.
get_gitea_token() {
  local host="$1"
  local creds_file="$HOME/.git-credentials"
  local token
  if [[ -n "${GITEA_TOKEN:-}" ]]; then
    printf '%s\n' "$GITEA_TOKEN"
    return 0
  fi
  [[ -f "$creds_file" ]] || return 1
  token=$(grep -F "$host" "$creds_file" 2>/dev/null \
    | sed -n 's#https\?://[^@]*:\([^@/]*\)@.*#\1#p' \
    | head -n 1)
  if [[ -n "$token" ]]; then
    printf '%s\n' "$token"
    return 0
  fi
  return 1
}
extract_state_from_status_json() {
python3 - <<'PY'
import json
import sys
try:
payload = json.load(sys.stdin)
except Exception:
print("unknown")
raise SystemExit(0)
state = (payload.get("state") or "").lower()
if state in {"success", "pending", "failure", "error"}:
print(state)
raise SystemExit(0)
statuses = payload.get("statuses") or []
values = []
for item in statuses:
if not isinstance(item, dict):
continue
value = (item.get("status") or item.get("state") or "").lower()
if value:
values.append(value)
if any(v in {"failure", "error"} for v in values):
print("failure")
elif values and all(v == "success" for v in values):
print("success")
elif any(v in {"pending", "running", "queued", "waiting"} for v in values):
print("pending")
else:
print("unknown")
PY
}
print_status_summary() {
python3 - <<'PY'
import json
import sys
try:
payload = json.load(sys.stdin)
except Exception:
print("[pr-ci-wait] status payload unavailable")
raise SystemExit(0)
statuses = payload.get("statuses") or []
if not statuses:
print("[pr-ci-wait] no status contexts reported yet")
raise SystemExit(0)
for item in statuses:
if not isinstance(item, dict):
continue
name = item.get("context") or item.get("name") or "unknown-context"
state = item.get("status") or item.get("state") or "unknown-state"
target = item.get("target_url") or item.get("url") or ""
if target:
print(f"[pr-ci-wait] {name}: {state} ({target})")
else:
print(f"[pr-ci-wait] {name}: {state}")
PY
}
# Resolve the head commit SHA of the PR via the gh CLI.
# Reads global: PR_NUMBER. Outputs the SHA on stdout.
github_get_pr_head_sha() {
  gh pr view "$PR_NUMBER" --json headRefOid --jq '.headRefOid'
}
# Fetch the combined commit-status JSON for $1/$2 at commit $3 from the
# GitHub API (payload shape consumed by extract_state_from_status_json).
github_get_commit_status_json() {
  local owner="$1"
  local repo="$2"
  local sha="$3"
  gh api "repos/${owner}/${repo}/commits/${sha}/status"
}
# Resolve the PR head commit SHA from a Gitea instance.
# $1=host, $2=owner/repo, $3=API token.
# NOTE(review): also reads the global PR_NUMBER rather than taking it as a
# parameter — inconsistent with the explicit arguments.
gitea_get_pr_head_sha() {
  local host="$1"
  local repo="$2"
  local token="$3"
  local url="https://${host}/api/v1/repos/${repo}/pulls/${PR_NUMBER}"
  curl -fsS -H "Authorization: token ${token}" "$url" | python3 -c '
import json, sys
data = json.load(sys.stdin)
print((data.get("head") or {}).get("sha", ""))
'
}
# Fetch the combined commit-status JSON for a commit from a Gitea instance.
# $1=host, $2=owner/repo, $3=API token, $4=commit SHA.
gitea_get_commit_status_json() {
  local host="$1"
  local repo="$2"
  local token="$3"
  local sha="$4"
  local url="https://${host}/api/v1/repos/${repo}/commits/${sha}/status"
  curl -fsS -H "Authorization: token ${token}" "$url"
}
# ---- Argument parsing -----------------------------------------------------
while [[ $# -gt 0 ]]; do
  case "$1" in
    -n|--number)
      PR_NUMBER="$2"
      shift 2
      ;;
    -t|--timeout)
      TIMEOUT_SEC="$2"
      shift 2
      ;;
    -i|--interval)
      INTERVAL_SEC="$2"
      shift 2
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage >&2
      exit 1
      ;;
  esac
done

if [[ -z "$PR_NUMBER" ]]; then
  echo "Error: PR number is required (-n)." >&2
  usage >&2
  exit 1
fi
if ! [[ "$TIMEOUT_SEC" =~ ^[0-9]+$ ]] || ! [[ "$INTERVAL_SEC" =~ ^[0-9]+$ ]]; then
  echo "Error: timeout and interval must be integer seconds." >&2
  exit 1
fi
# Reject a zero interval: the integer check above accepts "0", which would
# make the poll loop below busy-spin against the API.
if (( INTERVAL_SEC < 1 )); then
  echo "Error: interval must be at least 1 second." >&2
  exit 1
fi
# Resolve platform: detect_platform sets $PLATFORM; its echoed name is
# discarded so stdout stays clean.
detect_platform > /dev/null
OWNER=$(get_repo_owner)
REPO=$(get_repo_name)
# Absolute wall-clock deadline for the poll loop below.
START_TS=$(date +%s)
DEADLINE_TS=$((START_TS + TIMEOUT_SEC))
if [[ "$PLATFORM" == "github" ]]; then
  # GitHub path uses the gh CLI for both SHA resolution and status polling.
  if ! command -v gh >/dev/null 2>&1; then
    echo "Error: gh CLI is required for GitHub CI status polling." >&2
    exit 1
  fi
  HEAD_SHA=$(github_get_pr_head_sha)
  if [[ -z "$HEAD_SHA" ]]; then
    echo "Error: Could not resolve head SHA for PR #$PR_NUMBER." >&2
    exit 1
  fi
  echo "[pr-ci-wait] Platform=github PR=#${PR_NUMBER} head_sha=${HEAD_SHA}"
elif [[ "$PLATFORM" == "gitea" ]]; then
  # Gitea path talks to the REST API directly, so it needs host + token.
  HOST=$(get_remote_host) || {
    echo "Error: Could not determine remote host." >&2
    exit 1
  }
  TOKEN=$(get_gitea_token "$HOST") || {
    echo "Error: Gitea token not found. Set GITEA_TOKEN or configure ~/.git-credentials." >&2
    exit 1
  }
  HEAD_SHA=$(gitea_get_pr_head_sha "$HOST" "$OWNER/$REPO" "$TOKEN")
  if [[ -z "$HEAD_SHA" ]]; then
    echo "Error: Could not resolve head SHA for PR #$PR_NUMBER." >&2
    exit 1
  fi
  echo "[pr-ci-wait] Platform=gitea host=${HOST} PR=#${PR_NUMBER} head_sha=${HEAD_SHA}"
else
  echo "Error: Unsupported platform '${PLATFORM}'." >&2
  exit 1
fi
while true; do
NOW_TS=$(date +%s)
if (( NOW_TS > DEADLINE_TS )); then
echo "Error: Timed out waiting for CI status on PR #$PR_NUMBER after ${TIMEOUT_SEC}s." >&2
exit 124
fi
if [[ "$PLATFORM" == "github" ]]; then
STATUS_JSON=$(github_get_commit_status_json "$OWNER" "$REPO" "$HEAD_SHA")
else
STATUS_JSON=$(gitea_get_commit_status_json "$HOST" "$OWNER/$REPO" "$TOKEN" "$HEAD_SHA")
fi
STATE=$(printf '%s' "$STATUS_JSON" | extract_state_from_status_json)
echo "[pr-ci-wait] state=${STATE} pr=#${PR_NUMBER} sha=${HEAD_SHA}"
case "$STATE" in
success)
printf '%s' "$STATUS_JSON" | print_status_summary
echo "[pr-ci-wait] CI is green for PR #$PR_NUMBER."
exit 0
;;
failure|error)
printf '%s' "$STATUS_JSON" | print_status_summary
echo "Error: CI reported ${STATE} for PR #$PR_NUMBER." >&2
exit 1
;;
pending|unknown)
sleep "$INTERVAL_SEC"
;;
*)
echo "[pr-ci-wait] Unrecognized state '${STATE}', continuing to poll..."
sleep "$INTERVAL_SEC"
;;
esac
done

62
tools/git/pr-close.sh Executable file
View File

@@ -0,0 +1,62 @@
#!/bin/bash
# pr-close.sh - Close a pull request without merging on GitHub or Gitea
# Usage: pr-close.sh -n <pr_number> [-c <comment>]
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform (sets and echoes $PLATFORM).
source "$SCRIPT_DIR/detect-platform.sh"

# Parse arguments
PR_NUMBER=""
COMMENT=""
while [[ $# -gt 0 ]]; do
  case "$1" in
    -n|--number)
      PR_NUMBER="$2"
      shift 2
      ;;
    -c|--comment)
      COMMENT="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: pr-close.sh -n <pr_number> [-c <comment>]"
      echo ""
      echo "Options:"
      echo " -n, --number PR number (required)"
      echo " -c, --comment Comment before closing (optional)"
      echo " -h, --help Show this help"
      exit 0
      ;;
    *)
      # Diagnostics belong on stderr (previously mixed into stdout).
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$PR_NUMBER" ]]; then
  echo "Error: PR number is required (-n)" >&2
  exit 1
fi

detect_platform

if [[ "$PLATFORM" == "github" ]]; then
  # Leave an optional trail comment before closing.
  if [[ -n "$COMMENT" ]]; then
    gh pr comment "$PR_NUMBER" --body "$COMMENT"
  fi
  gh pr close "$PR_NUMBER"
  echo "Closed GitHub PR #$PR_NUMBER"
elif [[ "$PLATFORM" == "gitea" ]]; then
  if [[ -n "$COMMENT" ]]; then
    tea pr comment "$PR_NUMBER" "$COMMENT"
  fi
  tea pr close "$PR_NUMBER"
  echo "Closed Gitea PR #$PR_NUMBER"
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

130
tools/git/pr-create.ps1 Normal file
View File

@@ -0,0 +1,130 @@
# pr-create.ps1 - Create pull requests on Gitea or GitHub
# Usage: .\pr-create.ps1 -Title "Title" [-Body "Body"] [-Base base] [-Head head] [-Labels "labels"] [-Milestone "milestone"]
[CmdletBinding()]
param(
    [Alias("t")]
    [string]$Title,
    [Alias("b")]
    [string]$Body,
    # No short alias: PowerShell aliases are case-insensitive, so the former
    # [Alias("B")] collided with Body's [Alias("b")] and the script failed
    # to load with a duplicate-parameter error. Use -Base explicitly.
    [string]$Base,
    [Alias("H")]
    [string]$Head,
    [Alias("l")]
    [string]$Labels,
    [Alias("m")]
    [string]$Milestone,
    [Alias("i")]
    [int]$Issue,
    [Alias("d")]
    [switch]$Draft,
    [switch]$Help
)
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
# Provides Get-GitPlatform (returns "github" or "gitea").
. "$ScriptDir\detect-platform.ps1"
# Print usage text and exit non-zero.
function Show-Usage {
    @"
Usage: pr-create.ps1 [OPTIONS]
Create a pull request on the current repository (Gitea or GitHub).
Options:
-Title, -t TITLE PR title (required, or use -Issue)
-Body, -b BODY PR description/body
-Base BRANCH Base branch to merge into (default: main/master)
-Head, -H BRANCH Head branch with changes (default: current branch)
-Labels, -l LABELS Comma-separated labels
-Milestone, -m NAME Milestone name
-Issue, -i NUMBER Link to issue (auto-generates title if not provided)
-Draft, -d Create as draft PR
-Help Show this help message
Examples:
.\pr-create.ps1 -Title "Add login feature" -Body "Implements user authentication"
.\pr-create.ps1 -t "Fix bug" -Base main -H feature/fix-123
.\pr-create.ps1 -i 42 -b "Implements the feature described in #42"
.\pr-create.ps1 -t "WIP: New feature" -Draft
"@
    exit 1
}
if ($Help) {
    Show-Usage
}
# If no title but issue provided, generate title
if (-not $Title -and $Issue) {
    $Title = "Fixes #$Issue"
}
if (-not $Title) {
    Write-Error "Title is required (-t) or provide an issue (-i)"
    Show-Usage
}
# Default head branch to current branch
if (-not $Head) {
    $Head = git branch --show-current
}
# Add issue reference to body if provided ("Fixes #N" auto-closes the
# linked issue on both GitHub and Gitea).
if ($Issue) {
    if ($Body) {
        $Body = "$Body`n`nFixes #$Issue"
    } else {
        $Body = "Fixes #$Issue"
    }
}
$platform = Get-GitPlatform
switch ($platform) {
    "github" {
        # Build argv as an array; element 0 is the program, the rest are args.
        $cmd = @("gh", "pr", "create", "--title", $Title)
        if ($Body) { $cmd += @("--body", $Body) }
        if ($Base) { $cmd += @("--base", $Base) }
        if ($Head) { $cmd += @("--head", $Head) }
        if ($Labels) { $cmd += @("--label", $Labels) }
        if ($Milestone) { $cmd += @("--milestone", $Milestone) }
        if ($Draft) { $cmd += "--draft" }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    "gitea" {
        $cmd = @("tea", "pr", "create", "--title", $Title)
        if ($Body) { $cmd += @("--description", $Body) }
        if ($Base) { $cmd += @("--base", $Base) }
        if ($Head) { $cmd += @("--head", $Head) }
        if ($Labels) { $cmd += @("--labels", $Labels) }
        if ($Milestone) {
            # tea wants a milestone ID, not a name — resolve it by scraping
            # the list output. NOTE(review): format-dependent; verify against
            # the installed tea version.
            $milestoneList = tea milestones list 2>$null
            $milestoneId = ($milestoneList | Select-String "^\s*(\d+).*$Milestone" | ForEach-Object { $_.Matches.Groups[1].Value } | Select-Object -First 1)
            if ($milestoneId) {
                $cmd += @("--milestone", $milestoneId)
            } else {
                Write-Warning "Could not find milestone '$Milestone', creating without milestone"
            }
        }
        if ($Draft) {
            Write-Warning "Draft PR may not be supported by your tea version"
        }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    default {
        Write-Error "Could not detect git platform"
        exit 1
    }
}

164
tools/git/pr-create.sh Executable file
View File

@@ -0,0 +1,164 @@
#!/bin/bash
# pr-create.sh - Create pull requests on Gitea or GitHub
# Usage: pr-create.sh -t "Title" [-b "Body"] [-B base] [-H head] [-l "labels"] [-m "milestone"]
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform (sets and echoes $PLATFORM).
source "$SCRIPT_DIR/detect-platform.sh"
# Default values
TITLE=""
BODY=""
BASE_BRANCH=""
HEAD_BRANCH=""
LABELS=""
MILESTONE=""
DRAFT=false
ISSUE=""
# Print usage/help text and exit non-zero.
usage() {
cat <<EOF
Usage: $(basename "$0") [OPTIONS]
Create a pull request on the current repository (Gitea or GitHub).
Options:
-t, --title TITLE PR title (required, or use --issue)
-b, --body BODY PR description/body
-B, --base BRANCH Base branch to merge into (default: main/master)
-H, --head BRANCH Head branch with changes (default: current branch)
-l, --labels LABELS Comma-separated labels
-m, --milestone NAME Milestone name
-i, --issue NUMBER Link to issue (auto-generates title if not provided)
-d, --draft Create as draft PR
-h, --help Show this help message
Examples:
$(basename "$0") -t "Add login feature" -b "Implements user authentication"
$(basename "$0") -t "Fix bug" -B main -H feature/fix-123
$(basename "$0") -i 42 -b "Implements the feature described in #42"
$(basename "$0") -t "WIP: New feature" --draft
EOF
exit 1
}
# Parse arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    -t|--title)
      TITLE="$2"
      shift 2
      ;;
    -b|--body)
      BODY="$2"
      shift 2
      ;;
    -B|--base)
      BASE_BRANCH="$2"
      shift 2
      ;;
    -H|--head)
      HEAD_BRANCH="$2"
      shift 2
      ;;
    -l|--labels)
      LABELS="$2"
      shift 2
      ;;
    -m|--milestone)
      MILESTONE="$2"
      shift 2
      ;;
    -i|--issue)
      ISSUE="$2"
      shift 2
      ;;
    -d|--draft)
      DRAFT=true
      shift
      ;;
    -h|--help)
      usage
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage
      ;;
  esac
done
# If no title but issue provided, generate title
if [[ -z "$TITLE" ]] && [[ -n "$ISSUE" ]]; then
  TITLE="Fixes #$ISSUE"
fi
if [[ -z "$TITLE" ]]; then
  echo "Error: Title is required (-t) or provide an issue (-i)" >&2
  usage
fi
# Default head branch to current branch
if [[ -z "$HEAD_BRANCH" ]]; then
  HEAD_BRANCH=$(git branch --show-current)
fi
# Add issue reference to body if provided ("Fixes #N" auto-closes the
# linked issue on both platforms).
if [[ -n "$ISSUE" ]]; then
  if [[ -n "$BODY" ]]; then
    BODY="$BODY
Fixes #$ISSUE"
  else
    BODY="Fixes #$ISSUE"
  fi
fi
PLATFORM=$(detect_platform)

case "$PLATFORM" in
  github)
    # Build argv as an array: the previous eval-a-string approach broke on
    # titles/bodies containing quotes and allowed shell injection.
    GH_ARGS=(pr create --title "$TITLE")
    [[ -n "$BODY" ]] && GH_ARGS+=(--body "$BODY")
    [[ -n "$BASE_BRANCH" ]] && GH_ARGS+=(--base "$BASE_BRANCH")
    [[ -n "$HEAD_BRANCH" ]] && GH_ARGS+=(--head "$HEAD_BRANCH")
    [[ -n "$LABELS" ]] && GH_ARGS+=(--label "$LABELS")
    [[ -n "$MILESTONE" ]] && GH_ARGS+=(--milestone "$MILESTONE")
    [[ "$DRAFT" == true ]] && GH_ARGS+=(--draft)
    gh "${GH_ARGS[@]}"
    ;;
  gitea)
    # Same array technique for tea (which uses --description, not --body).
    TEA_ARGS=(pr create --title "$TITLE")
    [[ -n "$BODY" ]] && TEA_ARGS+=(--description "$BODY")
    [[ -n "$BASE_BRANCH" ]] && TEA_ARGS+=(--base "$BASE_BRANCH")
    [[ -n "$HEAD_BRANCH" ]] && TEA_ARGS+=(--head "$HEAD_BRANCH")
    [[ -n "$LABELS" ]] && TEA_ARGS+=(--labels "$LABELS")
    # tea wants a milestone ID, not a name — resolve by scraping the list.
    # NOTE(review): output-format dependent; verify against installed tea.
    if [[ -n "$MILESTONE" ]]; then
      MILESTONE_ID=$(tea milestones list 2>/dev/null | grep -E "^\s*[0-9]+" | grep -- "$MILESTONE" | awk '{print $1}' | head -1)
      if [[ -n "$MILESTONE_ID" ]]; then
        TEA_ARGS+=(--milestone "$MILESTONE_ID")
      else
        echo "Warning: Could not find milestone '$MILESTONE', creating without milestone" >&2
      fi
    fi
    # Note: tea may not support --draft flag in all versions
    if [[ "$DRAFT" == true ]]; then
      echo "Note: Draft PR may not be supported by your tea version" >&2
    fi
    tea "${TEA_ARGS[@]}"
    ;;
  *)
    echo "Error: Could not detect git platform" >&2
    exit 1
    ;;
esac

88
tools/git/pr-diff.sh Executable file
View File

@@ -0,0 +1,88 @@
#!/bin/bash
# pr-diff.sh - Get the diff for a pull request on GitHub or Gitea
# Usage: pr-diff.sh -n <pr_number> [-o <output_file>]
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform, get_repo_owner, get_repo_name.
source "$SCRIPT_DIR/detect-platform.sh"

# Parse arguments
PR_NUMBER=""
OUTPUT_FILE=""
while [[ $# -gt 0 ]]; do
  case "$1" in
    -n|--number)
      PR_NUMBER="$2"
      shift 2
      ;;
    -o|--output)
      OUTPUT_FILE="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: pr-diff.sh -n <pr_number> [-o <output_file>]"
      echo ""
      echo "Options:"
      echo " -n, --number PR number (required)"
      echo " -o, --output Output file (optional, prints to stdout if omitted)"
      echo " -h, --help Show this help"
      exit 0
      ;;
    *)
      # Diagnostics on stderr: stdout may carry the raw diff.
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$PR_NUMBER" ]]; then
  echo "Error: PR number is required (-n)" >&2
  exit 1
fi

detect_platform > /dev/null

if [[ "$PLATFORM" == "github" ]]; then
  if [[ -n "$OUTPUT_FILE" ]]; then
    gh pr diff "$PR_NUMBER" > "$OUTPUT_FILE"
  else
    gh pr diff "$PR_NUMBER"
  fi
elif [[ "$PLATFORM" == "gitea" ]]; then
  # tea doesn't have a direct diff command — use the API
  OWNER=$(get_repo_owner)
  REPO=$(get_repo_name)
  REMOTE_URL=$(git remote get-url origin 2>/dev/null)
  # Extract host from remote URL (https://host/... or git@host:...)
  if [[ "$REMOTE_URL" == https://* ]]; then
    HOST=$(echo "$REMOTE_URL" | sed -E 's|https://([^/]+)/.*|\1|')
  elif [[ "$REMOTE_URL" == git@* ]]; then
    HOST=$(echo "$REMOTE_URL" | sed -E 's|git@([^:]+):.*|\1|')
  else
    echo "Error: Cannot determine host from remote URL" >&2
    exit 1
  fi
  DIFF_URL="https://${HOST}/api/v1/repos/${OWNER}/${REPO}/pulls/${PR_NUMBER}.diff"
  # Use tea's auth token if available.
  # NOTE(review): parsing `tea login list` output is fragile — the token is
  # assumed to be the last column; verify against the installed tea version.
  TEA_TOKEN=$(tea login list 2>/dev/null | grep "$HOST" | awk '{print $NF}' || true)
  if [[ -n "$TEA_TOKEN" ]]; then
    DIFF_CONTENT=$(curl -sS -H "Authorization: token $TEA_TOKEN" "$DIFF_URL")
  else
    DIFF_CONTENT=$(curl -sS "$DIFF_URL")
  fi
  # printf instead of echo: diff bodies are arbitrary text and echo
  # implementations can misinterpret leading "-n"/"-e" or backslashes.
  if [[ -n "$OUTPUT_FILE" ]]; then
    printf '%s\n' "$DIFF_CONTENT" > "$OUTPUT_FILE"
  else
    printf '%s\n' "$DIFF_CONTENT"
  fi
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

76
tools/git/pr-list.ps1 Normal file
View File

@@ -0,0 +1,76 @@
# pr-list.ps1 - List pull requests on Gitea or GitHub
# Usage: .\pr-list.ps1 [-State state] [-Label label] [-Author author]
[CmdletBinding()]
param(
    # Filter by PR state (validated set).
    [Alias("s")]
    [ValidateSet("open", "closed", "merged", "all")]
    [string]$State = "open",
    # Filter by label (GitHub only; Gitea path just warns).
    [Alias("l")]
    [string]$Label,
    # Filter by author (GitHub only; Gitea path just warns).
    [Alias("a")]
    [string]$Author,
    # Maximum number of PRs to show.
    [Alias("n")]
    [int]$Limit = 100,
    [Alias("h")]
    [switch]$Help
)
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
# Provides Get-GitPlatform (returns "github" or "gitea").
. "$ScriptDir\detect-platform.ps1"
# Print usage text and exit non-zero.
function Show-Usage {
    @"
Usage: pr-list.ps1 [OPTIONS]
List pull requests from the current repository (Gitea or GitHub).
Options:
-State, -s STATE Filter by state: open, closed, merged, all (default: open)
-Label, -l LABEL Filter by label
-Author, -a USER Filter by author
-Limit, -n N Maximum PRs to show (default: 100)
-Help, -h Show this help message
Examples:
.\pr-list.ps1 # List open PRs
.\pr-list.ps1 -s all # All PRs
.\pr-list.ps1 -s merged -a username # Merged PRs by user
"@
    exit 1
}
if ($Help) {
    Show-Usage
}
$platform = Get-GitPlatform
switch ($platform) {
    "github" {
        # Build argv as an array; element 0 is the program, the rest are args.
        $cmd = @("gh", "pr", "list", "--state", $State, "--limit", $Limit)
        if ($Label) { $cmd += @("--label", $Label) }
        if ($Author) { $cmd += @("--author", $Author) }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    "gitea" {
        $cmd = @("tea", "pr", "list", "--state", $State, "--limit", $Limit)
        if ($Label) {
            Write-Warning "Label filtering may require manual review for Gitea"
        }
        if ($Author) {
            Write-Warning "Author filtering may require manual review for Gitea"
        }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    default {
        Write-Error "Could not detect git platform"
        exit 1
    }
}

93
tools/git/pr-list.sh Executable file
View File

@@ -0,0 +1,93 @@
#!/bin/bash
# pr-list.sh - List pull requests on Gitea or GitHub
# Usage: pr-list.sh [-s state] [-l label] [-a author]
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform (sets and echoes $PLATFORM).
source "$SCRIPT_DIR/detect-platform.sh"
# Default values
STATE="open"
LABEL=""
AUTHOR=""
LIMIT=100
# Print usage/help text and exit non-zero.
usage() {
cat <<EOF
Usage: $(basename "$0") [OPTIONS]
List pull requests from the current repository (Gitea or GitHub).
Options:
-s, --state STATE Filter by state: open, closed, merged, all (default: open)
-l, --label LABEL Filter by label
-a, --author USER Filter by author
-n, --limit N Maximum PRs to show (default: 100)
-h, --help Show this help message
Examples:
$(basename "$0") # List open PRs
$(basename "$0") -s all # All PRs
$(basename "$0") -s merged -a username # Merged PRs by user
EOF
exit 1
}
# Parse arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    -s|--state)
      STATE="$2"
      shift 2
      ;;
    -l|--label)
      LABEL="$2"
      shift 2
      ;;
    -a|--author)
      AUTHOR="$2"
      shift 2
      ;;
    -n|--limit)
      LIMIT="$2"
      shift 2
      ;;
    -h|--help)
      usage
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage
      ;;
  esac
done
PLATFORM=$(detect_platform)

case "$PLATFORM" in
  github)
    # argv as an array: the old eval-a-string form broke on labels/authors
    # containing spaces or quotes.
    GH_ARGS=(pr list --state "$STATE" --limit "$LIMIT")
    [[ -n "$LABEL" ]] && GH_ARGS+=(--label "$LABEL")
    [[ -n "$AUTHOR" ]] && GH_ARGS+=(--author "$AUTHOR")
    gh "${GH_ARGS[@]}"
    ;;
  gitea)
    # tea filtering may be limited
    if [[ -n "$LABEL" ]]; then
      echo "Note: Label filtering may require manual review for Gitea" >&2
    fi
    if [[ -n "$AUTHOR" ]]; then
      echo "Note: Author filtering may require manual review for Gitea" >&2
    fi
    tea pr list --state "$STATE" --limit "$LIMIT"
    ;;
  *)
    echo "Error: Could not detect git platform" >&2
    exit 1
    ;;
esac

98
tools/git/pr-merge.ps1 Executable file
View File

@@ -0,0 +1,98 @@
# pr-merge.ps1 - Merge pull requests on Gitea or GitHub
# Usage: .\pr-merge.ps1 -Number PR_NUMBER [-Method squash] [-DeleteBranch]
# Policy: squash-only merges, and (on GitHub) only PRs targeting 'main'.
[CmdletBinding()]
param(
    [Parameter(Mandatory=$true)]
    [Alias("n")]
    [int]$Number,
    # Accepted for CLI symmetry, but anything other than "squash" is rejected.
    [Alias("m")]
    [string]$Method = "squash",
    [Alias("d")]
    [switch]$DeleteBranch,
    # Skip the ci-queue-wait.ps1 guard before merging.
    [switch]$SkipQueueGuard,
    [Alias("h")]
    [switch]$Help
)
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
# Provides Get-GitPlatform (returns "github" or "gitea").
. "$ScriptDir\detect-platform.ps1"
# Print usage text and exit non-zero.
function Show-Usage {
    @"
Usage: pr-merge.ps1 [OPTIONS]
Merge a pull request on the current repository (Gitea or GitHub).
Options:
-Number, -n NUMBER PR number to merge (required)
-Method, -m METHOD Merge method: squash only (default: squash)
-DeleteBranch, -d Delete the head branch after merge
-SkipQueueGuard Skip CI queue guard wait before merge
-Help, -h Show this help message
Examples:
.\pr-merge.ps1 -n 42 # Merge PR #42
.\pr-merge.ps1 -n 42 -m squash # Squash merge
.\pr-merge.ps1 -n 42 -d # Squash merge and delete branch
"@
    exit 1
}
if ($Help) {
    Show-Usage
}
# Enforce the squash-only policy up front.
if ($Method -ne "squash") {
    Write-Error "Mosaic policy enforces squash merge only. Received '$Method'."
    exit 1
}
$platform = Get-GitPlatform
switch ($platform) {
    "github" {
        # Only PRs targeting 'main' may be merged.
        $baseRef = (& gh pr view $Number --json baseRefName --jq ".baseRefName").Trim()
        if ($baseRef -ne "main") {
            Write-Error "Mosaic policy allows merges only for PRs targeting 'main' (found '$baseRef')."
            exit 1
        }
        # Wait for the CI queue to drain unless explicitly skipped;
        # timeout/poll interval are overridable via MOSAIC_CI_QUEUE_* env vars.
        if (-not $SkipQueueGuard) {
            $timeout = if ($env:MOSAIC_CI_QUEUE_TIMEOUT_SEC) { [int]$env:MOSAIC_CI_QUEUE_TIMEOUT_SEC } else { 900 }
            $interval = if ($env:MOSAIC_CI_QUEUE_POLL_SEC) { [int]$env:MOSAIC_CI_QUEUE_POLL_SEC } else { 15 }
            & "$ScriptDir\ci-queue-wait.ps1" -Purpose merge -Branch $baseRef -TimeoutSeconds $timeout -IntervalSeconds $interval
            if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE }
        }
        $cmd = @("gh", "pr", "merge", $Number, "--squash")
        if ($DeleteBranch) { $cmd += "--delete-branch" }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    "gitea" {
        # Same queue guard; the Gitea path assumes the base branch is 'main'.
        if (-not $SkipQueueGuard) {
            $timeout = if ($env:MOSAIC_CI_QUEUE_TIMEOUT_SEC) { [int]$env:MOSAIC_CI_QUEUE_TIMEOUT_SEC } else { 900 }
            $interval = if ($env:MOSAIC_CI_QUEUE_POLL_SEC) { [int]$env:MOSAIC_CI_QUEUE_POLL_SEC } else { 15 }
            & "$ScriptDir\ci-queue-wait.ps1" -Purpose merge -Branch "main" -TimeoutSeconds $timeout -IntervalSeconds $interval
            if ($LASTEXITCODE -ne 0) { exit $LASTEXITCODE }
        }
        $cmd = @("tea", "pr", "merge", $Number, "--style", "squash")
        if ($DeleteBranch) {
            Write-Warning "Branch deletion after merge may need to be done separately with tea"
        }
        & $cmd[0] $cmd[1..($cmd.Length-1)]
    }
    default {
        Write-Error "Could not detect git platform"
        exit 1
    }
}
Write-Host "PR #$Number merged successfully"

116
tools/git/pr-merge.sh Executable file
View File

@@ -0,0 +1,116 @@
#!/bin/bash
# pr-merge.sh - Merge pull requests on Gitea or GitHub
# Usage: pr-merge.sh -n PR_NUMBER [-m squash] [-d] [--skip-queue-guard]
# Policy: squash-only merges, and only for PRs targeting 'main'.
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform (sets and echoes $PLATFORM).
source "$SCRIPT_DIR/detect-platform.sh"
# Default values
PR_NUMBER=""
MERGE_METHOD="squash"
DELETE_BRANCH=false
SKIP_QUEUE_GUARD=false
# Print usage/help text and exit non-zero.
usage() {
cat <<EOF
Usage: $(basename "$0") [OPTIONS]
Merge a pull request on the current repository (Gitea or GitHub).
Options:
-n, --number NUMBER PR number to merge (required)
-m, --method METHOD Merge method: squash only (default: squash)
-d, --delete-branch Delete the head branch after merge
--skip-queue-guard Skip CI queue guard wait before merge
-h, --help Show this help message
Examples:
$(basename "$0") -n 42 # Merge PR #42
$(basename "$0") -n 42 -m squash # Squash merge
$(basename "$0") -n 42 -d # Squash merge and delete branch
$(basename "$0") -n 42 --skip-queue-guard # Skip queue guard wait
EOF
exit 1
}
# Parse arguments
while [[ $# -gt 0 ]]; do
  case $1 in
    -n|--number)
      PR_NUMBER="$2"
      shift 2
      ;;
    -m|--method)
      MERGE_METHOD="$2"
      shift 2
      ;;
    -d|--delete-branch)
      DELETE_BRANCH=true
      shift
      ;;
    --skip-queue-guard)
      SKIP_QUEUE_GUARD=true
      shift
      ;;
    -h|--help)
      usage
      ;;
    *)
      echo "Unknown option: $1" >&2
      usage
      ;;
  esac
done
if [[ -z "$PR_NUMBER" ]]; then
  echo "Error: PR number is required (-n)" >&2
  usage
fi
# Enforce the squash-only policy up front.
if [[ "$MERGE_METHOD" != "squash" ]]; then
  echo "Error: Mosaic policy enforces squash merge only. Received '$MERGE_METHOD'." >&2
  exit 1
fi
# Resolve the PR's base branch via pr-metadata.sh (platform-agnostic JSON).
BASE_BRANCH="$("$SCRIPT_DIR/pr-metadata.sh" -n "$PR_NUMBER" | python3 -c 'import json, sys; print((json.load(sys.stdin).get("baseRefName") or "").strip())')"
if [[ "$BASE_BRANCH" != "main" ]]; then
  echo "Error: Mosaic policy allows merges only for PRs targeting 'main' (found '$BASE_BRANCH')." >&2
  exit 1
fi
# Wait for the CI queue to drain before merging unless explicitly skipped;
# timeout/poll interval are overridable via MOSAIC_CI_QUEUE_* env vars.
if [[ "$SKIP_QUEUE_GUARD" != true ]]; then
  "$SCRIPT_DIR/ci-queue-wait.sh" \
    --purpose merge \
    -B "$BASE_BRANCH" \
    -t "${MOSAIC_CI_QUEUE_TIMEOUT_SEC:-900}" \
    -i "${MOSAIC_CI_QUEUE_POLL_SEC:-15}"
fi
PLATFORM=$(detect_platform)

case "$PLATFORM" in
  github)
    # Direct argv array instead of building a string and eval-ing it:
    # eval was unnecessary here and left $PR_NUMBER unquoted.
    GH_ARGS=(pr merge "$PR_NUMBER" --squash)
    [[ "$DELETE_BRANCH" == true ]] && GH_ARGS+=(--delete-branch)
    gh "${GH_ARGS[@]}"
    ;;
  gitea)
    # Delete branch after merge if requested
    if [[ "$DELETE_BRANCH" == true ]]; then
      echo "Note: Branch deletion after merge may need to be done separately with tea" >&2
    fi
    tea pr merge "$PR_NUMBER" --style squash
    ;;
  *)
    echo "Error: Could not detect git platform" >&2
    exit 1
    ;;
esac
echo "PR #$PR_NUMBER merged successfully"

114
tools/git/pr-metadata.sh Executable file
View File

@@ -0,0 +1,114 @@
#!/bin/bash
# pr-metadata.sh - Get PR metadata as JSON on GitHub or Gitea
# Usage: pr-metadata.sh -n <pr_number> [-o <output_file>]
# Emits a single JSON object; the Gitea response is normalized to match the
# gh field names (number, title, headRefName, baseRefName, ...).
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform, get_repo_owner, get_repo_name.
source "$SCRIPT_DIR/detect-platform.sh"
# Parse arguments
PR_NUMBER=""
OUTPUT_FILE=""
while [[ $# -gt 0 ]]; do
  case $1 in
    -n|--number)
      PR_NUMBER="$2"
      shift 2
      ;;
    -o|--output)
      OUTPUT_FILE="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: pr-metadata.sh -n <pr_number> [-o <output_file>]"
      echo ""
      echo "Options:"
      echo " -n, --number PR number (required)"
      echo " -o, --output Output file (optional, prints to stdout if omitted)"
      echo " -h, --help Show this help"
      exit 0
      ;;
    *)
      echo "Unknown option: $1"
      exit 1
      ;;
  esac
done
if [[ -z "$PR_NUMBER" ]]; then
  echo "Error: PR number is required (-n)" >&2
  exit 1
fi
# detect_platform sets $PLATFORM; discard its echo so stdout stays JSON-only.
detect_platform > /dev/null
if [[ "$PLATFORM" == "github" ]]; then
  # gh already returns the schema we want.
  METADATA=$(gh pr view "$PR_NUMBER" --json number,title,body,state,author,headRefName,baseRefName,files,labels,assignees,milestone,createdAt,updatedAt,url,isDraft)
  if [[ -n "$OUTPUT_FILE" ]]; then
    echo "$METADATA" > "$OUTPUT_FILE"
  else
    echo "$METADATA"
  fi
elif [[ "$PLATFORM" == "gitea" ]]; then
  OWNER=$(get_repo_owner)
  REPO=$(get_repo_name)
  REMOTE_URL=$(git remote get-url origin 2>/dev/null)
  # Extract host from remote URL (https://host/... or git@host:...)
  if [[ "$REMOTE_URL" == https://* ]]; then
    HOST=$(echo "$REMOTE_URL" | sed -E 's|https://([^/]+)/.*|\1|')
  elif [[ "$REMOTE_URL" == git@* ]]; then
    HOST=$(echo "$REMOTE_URL" | sed -E 's|git@([^:]+):.*|\1|')
  else
    echo "Error: Cannot determine host from remote URL" >&2
    exit 1
  fi
  API_URL="https://${HOST}/api/v1/repos/${OWNER}/${REPO}/pulls/${PR_NUMBER}"
  # Use tea's auth token if available.
  # NOTE(review): parsing `tea login list` output is fragile — token assumed
  # to be the last column; verify against the installed tea version.
  TEA_TOKEN=$(tea login list 2>/dev/null | grep "$HOST" | awk '{print $NF}' || true)
  if [[ -n "$TEA_TOKEN" ]]; then
    RAW=$(curl -sS -H "Authorization: token $TEA_TOKEN" "$API_URL")
  else
    RAW=$(curl -sS "$API_URL")
  fi
  # Normalize Gitea response to match our expected schema
  METADATA=$(echo "$RAW" | python3 -c "
import json, sys
data = json.load(sys.stdin)
normalized = {
'number': data.get('number'),
'title': data.get('title'),
'body': data.get('body', ''),
'state': data.get('state'),
'author': data.get('user', {}).get('login', ''),
'headRefName': data.get('head', {}).get('ref', ''),
'baseRefName': data.get('base', {}).get('ref', ''),
'labels': [l.get('name', '') for l in data.get('labels', [])],
'assignees': [a.get('login', '') for a in data.get('assignees', [])],
'milestone': data.get('milestone', {}).get('title', '') if data.get('milestone') else '',
'createdAt': data.get('created_at', ''),
'updatedAt': data.get('updated_at', ''),
'url': data.get('html_url', ''),
'isDraft': data.get('draft', False),
'mergeable': data.get('mergeable'),
'diffUrl': data.get('diff_url', ''),
}
json.dump(normalized, sys.stdout, indent=2)
")
  if [[ -n "$OUTPUT_FILE" ]]; then
    echo "$METADATA" > "$OUTPUT_FILE"
  else
    echo "$METADATA"
  fi
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

115
tools/git/pr-review.sh Executable file
View File

@@ -0,0 +1,115 @@
#!/bin/bash
# pr-review.sh - Review a pull request on GitHub or Gitea
# Usage: pr-review.sh -n <pr_number> -a <action> [-c <comment>]
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform (sets and echoes $PLATFORM).
source "$SCRIPT_DIR/detect-platform.sh"

# Parse arguments
PR_NUMBER=""
ACTION=""
COMMENT=""
while [[ $# -gt 0 ]]; do
  case "$1" in
    -n|--number)
      PR_NUMBER="$2"
      shift 2
      ;;
    -a|--action)
      ACTION="$2"
      shift 2
      ;;
    -c|--comment)
      COMMENT="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: pr-review.sh -n <pr_number> -a <action> [-c <comment>]"
      echo ""
      echo "Options:"
      echo " -n, --number PR number (required)"
      echo " -a, --action Review action: approve, request-changes, comment (required)"
      echo " -c, --comment Review comment (required for request-changes)"
      echo " -h, --help Show this help"
      exit 0
      ;;
    *)
      # Diagnostics belong on stderr (previously mixed into stdout).
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$PR_NUMBER" ]]; then
  echo "Error: PR number is required (-n)" >&2
  exit 1
fi
if [[ -z "$ACTION" ]]; then
  echo "Error: Action is required (-a): approve, request-changes, comment" >&2
  exit 1
fi
detect_platform

if [[ "$PLATFORM" == "github" ]]; then
  case "$ACTION" in
    approve)
      # --body only when a comment was supplied (approval comment optional).
      GH_ARGS=(pr review "$PR_NUMBER" --approve)
      [[ -n "$COMMENT" ]] && GH_ARGS+=(--body "$COMMENT")
      gh "${GH_ARGS[@]}"
      echo "Approved GitHub PR #$PR_NUMBER"
      ;;
    request-changes)
      if [[ -z "$COMMENT" ]]; then
        echo "Error: Comment required for request-changes" >&2
        exit 1
      fi
      gh pr review "$PR_NUMBER" --request-changes --body "$COMMENT"
      echo "Requested changes on GitHub PR #$PR_NUMBER"
      ;;
    comment)
      if [[ -z "$COMMENT" ]]; then
        echo "Error: Comment required" >&2
        exit 1
      fi
      gh pr review "$PR_NUMBER" --comment --body "$COMMENT"
      echo "Added review comment to GitHub PR #$PR_NUMBER"
      ;;
    *)
      echo "Error: Unknown action: $ACTION" >&2
      exit 1
      ;;
  esac
elif [[ "$PLATFORM" == "gitea" ]]; then
  case "$ACTION" in
    approve)
      TEA_ARGS=(pr approve "$PR_NUMBER")
      [[ -n "$COMMENT" ]] && TEA_ARGS+=(--comment "$COMMENT")
      tea "${TEA_ARGS[@]}"
      echo "Approved Gitea PR #$PR_NUMBER"
      ;;
    request-changes)
      if [[ -z "$COMMENT" ]]; then
        echo "Error: Comment required for request-changes" >&2
        exit 1
      fi
      tea pr reject "$PR_NUMBER" --comment "$COMMENT"
      echo "Requested changes on Gitea PR #$PR_NUMBER"
      ;;
    comment)
      if [[ -z "$COMMENT" ]]; then
        echo "Error: Comment required" >&2
        exit 1
      fi
      tea pr comment "$PR_NUMBER" "$COMMENT"
      echo "Added comment to Gitea PR #$PR_NUMBER"
      ;;
    *)
      echo "Error: Unknown action: $ACTION" >&2
      exit 1
      ;;
  esac
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

48
tools/git/pr-view.sh Executable file
View File

@@ -0,0 +1,48 @@
#!/bin/bash
# pr-view.sh - View pull request details on GitHub or Gitea
# Usage: pr-view.sh -n <pr_number>
set -e
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides detect_platform (sets and echoes $PLATFORM).
source "$SCRIPT_DIR/detect-platform.sh"

# Parse arguments
PR_NUMBER=""
while [[ $# -gt 0 ]]; do
  case "$1" in
    -n|--number)
      PR_NUMBER="$2"
      shift 2
      ;;
    -h|--help)
      echo "Usage: pr-view.sh -n <pr_number>"
      echo ""
      echo "Options:"
      echo " -n, --number PR number (required)"
      echo " -h, --help Show this help"
      exit 0
      ;;
    *)
      # Diagnostics belong on stderr so stdout stays PR content only.
      echo "Unknown option: $1" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$PR_NUMBER" ]]; then
  echo "Error: PR number is required (-n)" >&2
  exit 1
fi

detect_platform

if [[ "$PLATFORM" == "github" ]]; then
  gh pr view "$PR_NUMBER"
elif [[ "$PLATFORM" == "gitea" ]]; then
  tea pr "$PR_NUMBER"
else
  echo "Error: Unknown platform" >&2
  exit 1
fi

55
tools/glpi/README.md Normal file
View File

@@ -0,0 +1,55 @@
# GLPI Tool Suite
Manage GLPI IT service management (tickets, computers/assets, users).
## Prerequisites
- `jq` and `curl` installed
- GLPI credentials in `~/src/jarvis-brain/credentials.json` (or `$MOSAIC_CREDENTIALS_FILE`)
- Required fields: `glpi.url`, `glpi.app_token`, `glpi.user_token`
## Authentication
GLPI uses a two-step auth flow:
1. `session-init.sh` exchanges app_token + user_token for a session_token
2. All subsequent calls use the session_token + app_token
The session token is cached at `~/.cache/mosaic/glpi-session` and auto-refreshed when expired.
## Scripts
| Script | Purpose |
|--------|---------|
| `session-init.sh` | Initialize and cache API session |
| `computer-list.sh` | List computers/IT assets |
| `ticket-list.sh` | List tickets (filter by status) |
| `ticket-create.sh` | Create a new ticket |
| `user-list.sh` | List users |
## Common Options
- `-f json` — JSON output (default: table)
- `-l limit` — Result count (default: 50)
- `-h` — Show help
## API Reference
- Base URL: `https://help.uscllc.com/apirest.php`
- Auth headers: `App-Token` + `Session-Token`
- Pattern: RESTful item-based (`/ItemType/{id}`)
## Examples
```bash
# List all tickets
~/.config/mosaic/tools/glpi/ticket-list.sh
# List only open tickets
~/.config/mosaic/tools/glpi/ticket-list.sh -s new
# Create a ticket
~/.config/mosaic/tools/glpi/ticket-create.sh -t "Server down" -c "Web server unresponsive" -p 4
# List computers as JSON
~/.config/mosaic/tools/glpi/computer-list.sh -f json
```

59
tools/glpi/computer-list.sh Executable file
View File

@@ -0,0 +1,59 @@
#!/usr/bin/env bash
#
# computer-list.sh — List GLPI computers/assets
#
# Usage: computer-list.sh [-f format] [-l limit]
#
# Options:
#   -f format   Output format: table (default), json
#   -l limit    Number of results (default: 50)
#   -h          Show this help

set -euo pipefail

MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials glpi

FORMAT="table"
LIMIT=50

while getopts "f:l:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    l) LIMIT="$OPTARG" ;;
    h) head -11 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format] [-l limit]" >&2; exit 1 ;;
  esac
done

# Validate -l before it is used in arithmetic and the URL below.
if ! [[ "$LIMIT" =~ ^[0-9]+$ ]] || (( LIMIT < 1 )); then
  echo "Error: -l limit must be a positive integer" >&2
  exit 1
fi

SESSION_TOKEN=$("$SCRIPT_DIR/session-init.sh" -q)

# GLPI ranges are inclusive: 0-(LIMIT-1) returns exactly LIMIT rows
# (range=0-LIMIT would return LIMIT+1).
response=$(curl -sk -w "\n%{http_code}" \
  -H "App-Token: $GLPI_APP_TOKEN" \
  -H "Session-Token: $SESSION_TOKEN" \
  "${GLPI_URL}/Computer?range=0-$((LIMIT - 1))")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

# GLPI answers 200 when the range covers all rows and 206 (Partial
# Content) when more rows exist beyond the requested range — both succeed.
if [[ "$http_code" != "200" && "$http_code" != "206" ]]; then
  echo "Error: Failed to list computers (HTTP $http_code)" >&2
  exit 1
fi

if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
  exit 0
fi

echo "ID     NAME                         SERIAL             STATUS"
echo "------ ---------------------------- ------------------ ----------"
echo "$body" | jq -r '.[] | [
  (.id | tostring),
  .name,
  (.serial // "—"),
  (.states_id | tostring)
] | @tsv' | while IFS=$'\t' read -r id name serial states_id; do
  printf "%-6s %-28s %-18s %s\n" "$id" "${name:0:28}" "${serial:0:18}" "$states_id"
done

85
tools/glpi/session-init.sh Executable file
View File

@@ -0,0 +1,85 @@
#!/usr/bin/env bash
#
# session-init.sh — Initialize GLPI API session
#
# Usage: session-init.sh [-f] [-q]
#
# Authenticates with GLPI and caches the session token at
# ~/.cache/mosaic/glpi-session.
#
# Options:
#   -f    Force re-authentication (ignore cached session)
#   -q    Quiet mode — only output the session token
#   -h    Show this help
#
# Environment variables (or credentials.json):
#   GLPI_URL        — GLPI API base URL
#   GLPI_APP_TOKEN  — GLPI application token
#   GLPI_USER_TOKEN — GLPI user token

set -euo pipefail

MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials glpi

CACHE_DIR="$HOME/.cache/mosaic"
CACHE_FILE="$CACHE_DIR/glpi-session"
FORCE=false
QUIET=false

while getopts "fqh" opt; do
  case $opt in
    f) FORCE=true ;;
    q) QUIET=true ;;
    h) head -18 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f] [-q]" >&2; exit 1 ;;
  esac
done

# Reuse the cached session when present and still accepted by the API.
if [[ "$FORCE" == "false" ]] && [[ -f "$CACHE_FILE" ]]; then
  cached_token=$(<"$CACHE_FILE")
  if [[ -n "$cached_token" ]]; then
    # Validate with a lightweight call
    http_code=$(curl -sk -o /dev/null -w "%{http_code}" \
      -H "App-Token: $GLPI_APP_TOKEN" \
      -H "Session-Token: $cached_token" \
      "${GLPI_URL}/getMyEntities")
    if [[ "$http_code" == "200" ]]; then
      [[ "$QUIET" == "false" ]] && echo "Using cached session (valid)" >&2
      echo "$cached_token"
      exit 0
    fi
    [[ "$QUIET" == "false" ]] && echo "Cached session expired, re-authenticating..." >&2
  fi
fi

# Initialize session
response=$(curl -sk -w "\n%{http_code}" \
  -H "App-Token: $GLPI_APP_TOKEN" \
  -H "Authorization: user_token $GLPI_USER_TOKEN" \
  "${GLPI_URL}/initSession")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to initialize GLPI session (HTTP $http_code)" >&2
  echo "$body" | jq -r '.' 2>/dev/null >&2 || echo "$body" >&2
  exit 1
fi

session_token=$(echo "$body" | jq -r '.session_token // empty')
if [[ -z "$session_token" ]]; then
  echo "Error: No session_token in response" >&2
  exit 1
fi

# Cache the session. Write with a restrictive umask in a subshell so the
# token file is created 0600 from the start — the previous write-then-chmod
# left a window where the token was readable under the default umask.
mkdir -p "$CACHE_DIR"
(umask 077; printf '%s\n' "$session_token" > "$CACHE_FILE")
chmod 600 "$CACHE_FILE"  # also tighten a pre-existing cache file

[[ "$QUIET" == "false" ]] && echo "Session initialized and cached" >&2
echo "$session_token"

77
tools/glpi/ticket-create.sh Executable file
View File

@@ -0,0 +1,77 @@
#!/usr/bin/env bash
#
# ticket-create.sh — Create a GLPI ticket
#
# Usage: ticket-create.sh -t <title> -c <content> [-p priority] [-y type]
#
# Options:
#   -t title     Ticket title (required)
#   -c content   Ticket description (required)
#   -p priority  1=VeryLow, 2=Low, 3=Medium (default), 4=High, 5=VeryHigh, 6=Major
#   -y type      1=Incident (default), 2=Request
#   -f format    Output format: table (default), json
#   -h           Show this help

set -euo pipefail

MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials glpi

TITLE=""
CONTENT=""
PRIORITY=3
TYPE=1
FORMAT="table"

while getopts "t:c:p:y:f:h" opt; do
  case $opt in
    t) TITLE="$OPTARG" ;;
    c) CONTENT="$OPTARG" ;;
    p) PRIORITY="$OPTARG" ;;
    y) TYPE="$OPTARG" ;;
    f) FORMAT="$OPTARG" ;;
    h) head -13 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 -t <title> -c <content> [-p priority] [-y type]" >&2; exit 1 ;;
  esac
done

if [[ -z "$TITLE" || -z "$CONTENT" ]]; then
  echo "Error: -t title and -c content are required" >&2
  exit 1
fi

# -p and -y are spliced into the payload with --argjson, so they MUST be
# plain integers; any other value is raw JSON injection into the request
# body (or at best a cryptic jq parse error).
if ! [[ "$PRIORITY" =~ ^[1-6]$ ]]; then
  echo "Error: -p priority must be an integer 1-6" >&2
  exit 1
fi
if ! [[ "$TYPE" =~ ^[12]$ ]]; then
  echo "Error: -y type must be 1 (Incident) or 2 (Request)" >&2
  exit 1
fi

SESSION_TOKEN=$("$SCRIPT_DIR/session-init.sh" -q)

# jq builds the JSON body so title/content are safely quoted/escaped.
payload=$(jq -n \
  --arg name "$TITLE" \
  --arg content "$CONTENT" \
  --argjson priority "$PRIORITY" \
  --argjson type "$TYPE" \
  '{input: {name: $name, content: $content, priority: $priority, type: $type}}')

response=$(curl -sk -w "\n%{http_code}" -X POST \
  -H "App-Token: $GLPI_APP_TOKEN" \
  -H "Session-Token: $SESSION_TOKEN" \
  -H "Content-Type: application/json" \
  -d "$payload" \
  "${GLPI_URL}/Ticket")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

if [[ "$http_code" != "201" && "$http_code" != "200" ]]; then
  echo "Error: Failed to create ticket (HTTP $http_code)" >&2
  echo "$body" | jq -r '.' 2>/dev/null >&2 || echo "$body" >&2
  exit 1
fi

if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
else
  ticket_id=$(echo "$body" | jq -r '.id // .message // .')
  echo "Ticket created: #$ticket_id"
  echo "  Title:    $TITLE"
  echo "  Priority: $PRIORITY"
  echo "  Type:     $([ "$TYPE" = "1" ] && echo "Incident" || echo "Request")"
fi

88
tools/glpi/ticket-list.sh Executable file
View File

@@ -0,0 +1,88 @@
#!/usr/bin/env bash
#
# ticket-list.sh — List GLPI tickets
#
# Usage: ticket-list.sh [-f format] [-l limit] [-s status]
#
# Options:
#   -f format   Output format: table (default), json
#   -l limit    Number of results (default: 50)
#   -s status   Filter: new, processing, planned, pending, solved, closed
#   -h          Show this help

set -euo pipefail

MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials glpi

FORMAT="table"
LIMIT=50
STATUS=""

while getopts "f:l:s:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    l) LIMIT="$OPTARG" ;;
    s) STATUS="$OPTARG" ;;
    h) head -13 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format] [-l limit] [-s status]" >&2; exit 1 ;;
  esac
done

if ! [[ "$LIMIT" =~ ^[0-9]+$ ]] || (( LIMIT < 1 )); then
  echo "Error: -l limit must be a positive integer" >&2
  exit 1
fi

SESSION_TOKEN=$("$SCRIPT_DIR/session-init.sh" -q)

# GLPI ranges are inclusive, so 0-(LIMIT-1) returns exactly LIMIT rows.
ENDPOINT="${GLPI_URL}/Ticket?range=0-$((LIMIT - 1))&order=DESC&sort=date_mod"

# Map status names to GLPI Ticket status constants:
#   1=INCOMING(new) 2=ASSIGNED(processing) 3=PLANNED 4=WAITING(pending)
#   5=SOLVED 6=CLOSED
# Note: "pending" is WAITING=4 in GLPI; PLANNED is the separate value 3.
if [[ -n "$STATUS" ]]; then
  case "$STATUS" in
    new)                 STATUS_ID=1 ;;
    processing|assigned) STATUS_ID=2 ;;
    planned)             STATUS_ID=3 ;;
    pending|waiting)     STATUS_ID=4 ;;
    solved)              STATUS_ID=5 ;;
    closed)              STATUS_ID=6 ;;
    *) echo "Error: Unknown status '$STATUS'. Use: new, processing, planned, pending, solved, closed" >&2; exit 1 ;;
  esac
  ENDPOINT="${ENDPOINT}&searchText[status]=${STATUS_ID}"
fi

response=$(curl -sk -w "\n%{http_code}" \
  -H "App-Token: $GLPI_APP_TOKEN" \
  -H "Session-Token: $SESSION_TOKEN" \
  "$ENDPOINT")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

# 206 Partial Content is success: more tickets exist beyond the range.
if [[ "$http_code" != "200" && "$http_code" != "206" ]]; then
  echo "Error: Failed to list tickets (HTTP $http_code)" >&2
  exit 1
fi

if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
  exit 0
fi

echo "ID     PRIORITY STATUS TITLE                                    DATE"
echo "------ -------- ------ ---------------------------------------- ----------"
echo "$body" | jq -r '.[] | [
  (.id | tostring),
  (.priority | tostring),
  (.status | tostring),
  .name,
  (.date_mod | split(" ")[0])
] | @tsv' | while IFS=$'\t' read -r id priority status name date; do
  # Map priority numbers
  case "$priority" in
    1) pri="VLow" ;; 2) pri="Low" ;; 3) pri="Med" ;;
    4) pri="High" ;; 5) pri="VHigh" ;; 6) pri="Major" ;; *) pri="$priority" ;;
  esac
  # Map status numbers (3=PLANNED, 4=WAITING/pending per GLPI constants)
  case "$status" in
    1) stat="New" ;; 2) stat="Proc" ;; 3) stat="Plan" ;;
    4) stat="Pend" ;; 5) stat="Solv" ;; 6) stat="Clos" ;; *) stat="$status" ;;
  esac
  printf "%-6s %-8s %-6s %-40s %s\n" "$id" "$pri" "$stat" "${name:0:40}" "$date"
done

61
tools/glpi/user-list.sh Executable file
View File

@@ -0,0 +1,61 @@
#!/usr/bin/env bash
#
# user-list.sh — List GLPI users
#
# Usage: user-list.sh [-f format] [-l limit]
#
# Options:
#   -f format   Output format: table (default), json
#   -l limit    Number of results (default: 50)
#   -h          Show this help

set -euo pipefail

MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
load_credentials glpi

FORMAT="table"
LIMIT=50

while getopts "f:l:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    l) LIMIT="$OPTARG" ;;
    h) head -11 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format] [-l limit]" >&2; exit 1 ;;
  esac
done

# Validate -l before arithmetic/URL interpolation below.
if ! [[ "$LIMIT" =~ ^[0-9]+$ ]] || (( LIMIT < 1 )); then
  echo "Error: -l limit must be a positive integer" >&2
  exit 1
fi

SESSION_TOKEN=$("$SCRIPT_DIR/session-init.sh" -q)

# GLPI ranges are inclusive: 0-(LIMIT-1) yields exactly LIMIT rows.
response=$(curl -sk -w "\n%{http_code}" \
  -H "App-Token: $GLPI_APP_TOKEN" \
  -H "Session-Token: $SESSION_TOKEN" \
  "${GLPI_URL}/User?range=0-$((LIMIT - 1))")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

# 206 Partial Content is a success: more users exist past the range.
if [[ "$http_code" != "200" && "$http_code" != "206" ]]; then
  echo "Error: Failed to list users (HTTP $http_code)" >&2
  exit 1
fi

if [[ "$FORMAT" == "json" ]]; then
  echo "$body" | jq '.'
  exit 0
fi

echo "ID     USERNAME             REALNAME             FIRSTNAME            ACTIVE"
echo "------ -------------------- -------------------- -------------------- ------"
echo "$body" | jq -r '.[] | [
  (.id | tostring),
  (.name // "—"),
  (.realname // "—"),
  (.firstname // "—"),
  (if .is_active == 1 then "yes" else "no" end)
] | @tsv' | while IFS=$'\t' read -r id name realname firstname active; do
  printf "%-6s %-20s %-20s %-20s %s\n" \
    "$id" "${name:0:20}" "${realname:0:20}" "${firstname:0:20}" "$active"
done

194
tools/health/stack-health.sh Executable file
View File

@@ -0,0 +1,194 @@
#!/usr/bin/env bash
#
# stack-health.sh — Check health of all configured Mosaic stack services
#
# Usage: stack-health.sh [-f format] [-s service] [-q]
#
# Checks connectivity to all services configured in credentials.json.
# For each service, makes a lightweight API call and reports status.
#
# Options:
#   -f format   Output format: table (default), json
#   -s service  Check only a specific service
#   -q          Quiet — exit code only (0=all healthy, 1=any unhealthy)
#   -h          Show this help
set -euo pipefail
MOSAIC_HOME="${MOSAIC_HOME:-$HOME/.config/mosaic}"
source "$MOSAIC_HOME/tools/_lib/credentials.sh"
FORMAT="table"
SINGLE_SERVICE=""
QUIET=false
CRED_FILE="${MOSAIC_CREDENTIALS_FILE:-$HOME/src/jarvis-brain/credentials.json}"
while getopts "f:s:qh" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    s) SINGLE_SERVICE="$OPTARG" ;;
    q) QUIET=true ;;
    # -h re-prints the header block above (first 15 lines) as help text,
    # so no comment lines may be added before the `set` line.
    h) head -15 "$0" | grep "^#" | sed 's/^# \?//'; exit 0 ;;
    *) echo "Usage: $0 [-f format] [-s service] [-q]" >&2; exit 1 ;;
  esac
done
if [[ ! -f "$CRED_FILE" ]]; then
  echo "Error: Credentials file not found: $CRED_FILE" >&2
  exit 1
fi
# Colors (disabled if not a terminal or quiet mode)
if [[ -t 1 ]] && [[ "$QUIET" == "false" ]]; then
  GREEN='\033[0;32m' RED='\033[0;31m' YELLOW='\033[0;33m' RESET='\033[0m'
else
  GREEN='' RED='' YELLOW='' RESET=''
fi
# Globals mutated by check_service: check count, healthy count, and the
# accumulated JSON array of per-service results.
TOTAL=0
HEALTHY=0
RESULTS="[]"
#######################################
# Probe one service with a lightweight HTTP request and record the result.
# Globals read:    GREEN RED YELLOW RESET QUIET FORMAT
# Globals written: TOTAL HEALTHY RESULTS
# Arguments: $1 machine name, $2 display name, $3 base URL, $4 endpoint path,
#            $5 auth header ("" for none), $6 "true" to skip TLS verification
# A 401/403 counts as healthy: the service answered, only our credentials
# were rejected.
#######################################
check_service() {
  local svc="$1"
  local label="$2"
  local base_url="$3"
  local probe_path="$4"
  local auth="$5"
  local skip_tls_verify="${6:-false}"

  TOTAL=$((TOTAL + 1))

  local curl_opts=(-s -o /dev/null -w "%{http_code} %{time_total}" --connect-timeout 5 --max-time 10)
  if [[ -n "$auth" ]]; then
    curl_opts+=(-H "$auth")
  fi
  if [[ "$skip_tls_verify" == "true" ]]; then
    curl_opts+=(-k)
  fi

  # "000 0.000" stands in for a connection that never produced a response.
  local probe_result
  if ! probe_result=$(curl "${curl_opts[@]}" "${base_url}${probe_path}" 2>/dev/null); then
    probe_result="000 0.000"
  fi

  local code elapsed verdict
  read -r code elapsed <<<"$probe_result"

  if [[ "$code" -ge 200 && "$code" -lt 400 ]]; then
    verdict="UP"
    HEALTHY=$((HEALTHY + 1))
  elif [[ "$code" == "000" ]]; then
    verdict="DOWN"
  elif [[ "$code" == "401" || "$code" == "403" ]]; then
    # Auth error but service is reachable
    verdict="AUTH_ERR"
    HEALTHY=$((HEALTHY + 1))
  else
    verdict="ERROR"
  fi

  # Append this check to the accumulated JSON results array.
  RESULTS=$(echo "$RESULTS" | jq --arg n "$svc" --arg d "$label" \
    --arg u "$base_url" --arg s "$verdict" --arg c "$code" --arg t "$elapsed" \
    '. + [{name: $n, display_name: $d, url: $u, status: $s, http_code: ($c | tonumber), response_time: $t}]')

  if [[ "$QUIET" == "false" && "$FORMAT" == "table" ]]; then
    local tint="$GREEN"
    if [[ "$verdict" == "DOWN" || "$verdict" == "ERROR" ]]; then
      tint="$RED"
    elif [[ "$verdict" == "AUTH_ERR" ]]; then
      tint="$YELLOW"
    fi
    printf " %-22s %-35s ${tint}%-8s${RESET} %ss\n" \
      "$label" "$base_url" "$verdict" "$elapsed"
  fi
}
# Discover and check services
[[ "$QUIET" == "false" && "$FORMAT" == "table" ]] && {
  echo ""
  echo " SERVICE                URL                                 STATUS   RESPONSE"
  echo " ---------------------- ----------------------------------- -------- --------"
}
# Each service is probed only when its credentials.json entry exists, and
# only when -s was omitted or names that service.
# Portainer
if [[ -z "$SINGLE_SERVICE" || "$SINGLE_SERVICE" == "portainer" ]]; then
  portainer_url=$(jq -r '.portainer.url // empty' "$CRED_FILE")
  portainer_key=$(jq -r '.portainer.api_key // empty' "$CRED_FILE")
  if [[ -n "$portainer_url" ]]; then
    check_service "portainer" "Portainer" "$portainer_url" "/api/system/status" \
      "X-API-Key: $portainer_key" "true"
  fi
fi
# Coolify
if [[ -z "$SINGLE_SERVICE" || "$SINGLE_SERVICE" == "coolify" ]]; then
  coolify_url=$(jq -r '.coolify.url // empty' "$CRED_FILE")
  coolify_token=$(jq -r '.coolify.app_token // empty' "$CRED_FILE")
  if [[ -n "$coolify_url" ]]; then
    check_service "coolify" "Coolify" "$coolify_url" "/api/v1/teams" \
      "Authorization: Bearer $coolify_token" "false"
  fi
fi
# Authentik — unauthenticated readiness endpoint, so no auth header.
if [[ -z "$SINGLE_SERVICE" || "$SINGLE_SERVICE" == "authentik" ]]; then
  authentik_url=$(jq -r '.authentik.url // empty' "$CRED_FILE")
  if [[ -n "$authentik_url" ]]; then
    check_service "authentik" "Authentik" "$authentik_url" "/-/health/ready/" "" "true"
  fi
fi
# GLPI — plain reachability probe against the base URL.
if [[ -z "$SINGLE_SERVICE" || "$SINGLE_SERVICE" == "glpi" ]]; then
  glpi_url=$(jq -r '.glpi.url // empty' "$CRED_FILE")
  if [[ -n "$glpi_url" ]]; then
    check_service "glpi" "GLPI" "$glpi_url" "/" "" "true"
  fi
fi
# Gitea instances — both instances share the "gitea" -s selector.
if [[ -z "$SINGLE_SERVICE" || "$SINGLE_SERVICE" == "gitea" ]]; then
  for instance in mosaicstack usc; do
    gitea_url=$(jq -r ".gitea.${instance}.url // empty" "$CRED_FILE")
    if [[ -n "$gitea_url" ]]; then
      display="Gitea (${instance})"
      check_service "gitea-${instance}" "$display" "$gitea_url" "/api/v1/version" "" "true"
    fi
  done
fi
# GitHub
if [[ -z "$SINGLE_SERVICE" || "$SINGLE_SERVICE" == "github" ]]; then
  github_token=$(jq -r '.github.token // empty' "$CRED_FILE")
  if [[ -n "$github_token" ]]; then
    check_service "github" "GitHub" "https://api.github.com" "/rate_limit" \
      "Authorization: Bearer $github_token" "false"
  fi
fi
# Woodpecker — requires both URL and token; otherwise report NOTOKEN
# without counting it as a failed check.
if [[ -z "$SINGLE_SERVICE" || "$SINGLE_SERVICE" == "woodpecker" ]]; then
  woodpecker_url=$(jq -r '.woodpecker.url // empty' "$CRED_FILE")
  woodpecker_token=$(jq -r '.woodpecker.token // empty' "$CRED_FILE")
  if [[ -n "$woodpecker_url" && -n "$woodpecker_token" ]]; then
    check_service "woodpecker" "Woodpecker CI" "$woodpecker_url" "/api/user" \
      "Authorization: Bearer $woodpecker_token" "true"
  elif [[ "$QUIET" == "false" && "$FORMAT" == "table" ]]; then
    printf " %-22s %-35s ${YELLOW}%-8s${RESET} %s\n" \
      "Woodpecker CI" "—" "NOTOKEN" "—"
  fi
fi
# Output
if [[ "$FORMAT" == "json" ]]; then
  jq -n --argjson results "$RESULTS" --argjson total "$TOTAL" --argjson healthy "$HEALTHY" \
    '{total: $total, healthy: $healthy, results: $results}'
  exit 0
fi
if [[ "$QUIET" == "false" && "$FORMAT" == "table" ]]; then
  echo ""
  UNHEALTHY=$((TOTAL - HEALTHY))
  if [[ "$UNHEALTHY" -eq 0 ]]; then
    echo -e " ${GREEN}All $TOTAL services healthy${RESET}"
  else
    echo -e " ${RED}$UNHEALTHY/$TOTAL services unhealthy${RESET}"
  fi
  echo ""
fi
# Exit code: 0 if all healthy, 1 if any unhealthy
[[ "$HEALTHY" -eq "$TOTAL" ]]

View File

@@ -0,0 +1,85 @@
# Mosaic Matrix Orchestrator Rail
Runtime-agnostic orchestration rail for delegating work to worker agents and enforcing
mechanical quality gates.
## Purpose
- Decouple orchestration from any single agent runtime feature set
- Persist state in repo-local `.mosaic/orchestrator/` files
- Emit structured events for Matrix transport and audit trails
- Enforce rails before marking tasks complete
## Components
- `protocol/` - JSON schemas for task/event payloads
- `controller/mosaic_orchestrator.py` - deterministic controller loop
- `adapters/` - runtime adapter guidance
## Repo Contract
The controller expects this layout in each bootstrapped repo:
```text
.mosaic/orchestrator/
config.json
tasks.json
state.json
events.ndjson
logs/
results/
```
## Quick Start
From a bootstrapped repo:
```bash
~/.config/mosaic/bin/mosaic-orchestrator-matrix-cycle
~/.config/mosaic/bin/mosaic-orchestrator-run --once
~/.config/mosaic/bin/mosaic-orchestrator-drain
```
Continuous loop:
```bash
~/.config/mosaic/bin/mosaic-orchestrator-run --poll-sec 10
```
Sync from `docs/TASKS.md` to queue:
```bash
~/.config/mosaic/bin/mosaic-orchestrator-sync-tasks --apply
```
Set worker command when needed:
```bash
export MOSAIC_WORKER_EXEC="codex -p"
# or
export MOSAIC_WORKER_EXEC="opencode -p"
```
Publish new orchestrator events to Matrix:
```bash
~/.config/mosaic/bin/mosaic-orchestrator-matrix-publish
```
Consume Matrix task messages into `tasks.json`:
```bash
~/.config/mosaic/bin/mosaic-orchestrator-matrix-consume
```
## Matrix Note
This rail writes canonical events to `.mosaic/orchestrator/events.ndjson`.
The Matrix transport bridge publishes those events into the configured control room
and can consume task commands from that room.
Task injection message format (room text):
```text
!mosaic-task {"id":"TASK-123","title":"Fix bug","command":"echo run","quality_gates":["pnpm lint"]}
```

View File

@@ -0,0 +1,52 @@
# Adapter Contract
Runtime adapters translate task commands into concrete worker invocations.
## Minimal Contract
Each task should define either:
1. `command` directly in `tasks.json`, or
2. controller-level `worker.command_template` in `.mosaic/orchestrator/config.json`
`command_template` may use:
- `{task_id}`
- `{task_title}`
- `{task_file}`
## Examples
Codex:
```json
{
"worker": {
"command_template": "codex \"run task {task_id}: {task_title}\""
}
}
```
Claude:
```json
{
"worker": {
"command_template": "claude -p \"Execute task {task_id}: {task_title}\""
}
}
```
OpenCode:
```json
{
"worker": {
"command_template": "opencode \"execute task {task_id}: {task_title}\""
}
}
```
## Recommendation
Prefer explicit per-task `command` for deterministic execution and auditability.

View File

@@ -0,0 +1,2 @@
__pycache__/
*.pyc

View File

@@ -0,0 +1,346 @@
#!/usr/bin/env python3
"""Deterministic orchestrator controller for Mosaic task delegation."""
from __future__ import annotations
import argparse
import datetime as dt
import json
import os
import pathlib
import signal
import subprocess
import sys
import time
import uuid
from typing import Any
def now_iso() -> str:
    """Current UTC time as a timezone-aware ISO-8601 string."""
    return dt.datetime.now(tz=dt.timezone.utc).isoformat()
def load_json(path: pathlib.Path, default: Any) -> Any:
    """Read and parse JSON from *path*, returning *default* when absent."""
    if path.exists():
        return json.loads(path.read_text(encoding="utf-8"))
    return default
def save_json(path: pathlib.Path, data: Any) -> None:
    """Atomically write *data* as indented JSON (temp file, then rename)."""
    path.parent.mkdir(parents=True, exist_ok=True)
    scratch = path.with_suffix(path.suffix + ".tmp")
    with scratch.open("w", encoding="utf-8") as handle:
        json.dump(data, handle, indent=2)
        handle.write("\n")
    scratch.replace(path)
def append_event(events_path: pathlib.Path, event: dict[str, Any]) -> None:
    """Append *event* as one ASCII-escaped JSON line to the NDJSON log."""
    events_path.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(event, ensure_ascii=True)
    with events_path.open("a", encoding="utf-8") as handle:
        handle.write(serialized + "\n")
def emit_event(
    events_path: pathlib.Path,
    event_type: str,
    task_id: str,
    status: str,
    source: str,
    message: str,
    metadata: dict[str, Any] | None = None,
) -> None:
    """Build one canonical orchestrator event and append it as NDJSON.

    Every event gets a fresh UUID and a UTC timestamp; *metadata* defaults
    to an empty dict so callers can omit it.
    """
    record = {
        "event_id": str(uuid.uuid4()),
        "event_type": event_type,
        "task_id": task_id,
        "status": status,
        "timestamp": now_iso(),
        "source": source,
        "message": message,
        "metadata": metadata or {},
    }
    append_event(events_path, record)
def run_shell(command: str, cwd: pathlib.Path, log_path: pathlib.Path, timeout_sec: int) -> tuple[int, str, bool]:
    """Run *command* via ``bash -lc``, appending its output to *log_path*.

    Returns ``(exit_code, combined_output, timed_out)``. On timeout the
    whole process group is killed — killing only the bash process leaves
    grandchildren holding the stdout pipe, which makes ``communicate()``
    block forever. Exit code 124 mirrors coreutils ``timeout``.
    """
    log_path.parent.mkdir(parents=True, exist_ok=True)
    with log_path.open("a", encoding="utf-8") as log:
        log.write(f"\n[{now_iso()}] COMMAND: {command}\n")
        log.flush()
        proc = subprocess.Popen(
            ["bash", "-lc", command],
            cwd=str(cwd),
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
            encoding="utf-8",
            start_new_session=True,  # own process group, killable with children
        )
        timed_out = False
        try:
            output, _ = proc.communicate(timeout=max(1, timeout_sec))
            code = proc.returncode
        except subprocess.TimeoutExpired:
            timed_out = True
            try:
                # start_new_session makes proc.pid the process-group id.
                os.killpg(proc.pid, signal.SIGKILL)
            except (ProcessLookupError, PermissionError):
                proc.kill()  # fall back to killing just the shell
            output, _ = proc.communicate()
            code = 124
            log.write(f"[{now_iso()}] TIMEOUT: exceeded {timeout_sec}s\n")
        if output:
            log.write(output)
        log.write(f"[{now_iso()}] EXIT: {code}\n")
    return code, output or "", timed_out
def render_command_template(template: str, task: dict[str, Any], task_file: pathlib.Path) -> str:
    """Substitute {task_id}, {task_title} and {task_file} into *template*."""
    substitutions = (
        ("{task_id}", str(task.get("id", ""))),
        ("{task_title}", str(task.get("title", ""))),
        ("{task_file}", str(task_file)),
    )
    rendered = template
    for placeholder, value in substitutions:
        rendered = rendered.replace(placeholder, value)
    return rendered
def parse_dep_list(raw: Any) -> list[str]:
    """Normalize a depends_on value (list or comma-string) to clean ids.

    Any other type (None, int, dict, ...) yields an empty list.
    """
    if isinstance(raw, list):
        candidates = (str(entry).strip() for entry in raw)
    elif isinstance(raw, str):
        candidates = (entry.strip() for entry in raw.split(","))
    else:
        return []
    return [entry for entry in candidates if entry]
def is_completed_status(status: str) -> bool:
    """True when *status* marks a finished task ("completed" or "done")."""
    return status in ("completed", "done")
def pick_next_task(tasks: list[dict[str, Any]]) -> dict[str, Any] | None:
    """First pending task whose dependencies are all completed, else None.

    Tasks are considered in list order; a missing status counts as pending.
    """
    statuses = {str(t.get("id", "")): str(t.get("status", "")) for t in tasks}
    for candidate in tasks:
        if candidate.get("status", "pending") != "pending":
            continue
        blockers = parse_dep_list(candidate.get("depends_on"))
        if all(is_completed_status(statuses.get(dep, "")) for dep in blockers):
            return candidate
    return None
def run_single_task(repo_root: pathlib.Path, orch_dir: pathlib.Path, config: dict[str, Any]) -> bool:
    """Claim and execute at most one runnable task from the queue.

    Returns False when no pending task with satisfied dependencies exists;
    returns True when a task was picked up, regardless of whether it
    completed, failed, or was rescheduled for retry.

    Side effects: rewrites tasks.json and state.json, appends events to
    events.ndjson, writes a per-task log under logs/ and a result summary
    under results/.
    """
    tasks_path = orch_dir / "tasks.json"
    state_path = orch_dir / "state.json"
    events_path = orch_dir / "events.ndjson"
    logs_dir = orch_dir / "logs"
    results_dir = orch_dir / "results"
    tasks = load_json(tasks_path, {"tasks": []})
    task_items = tasks.get("tasks", [])
    if not isinstance(task_items, list):
        raise ValueError("tasks.json must contain {'tasks': [...]} structure")
    task = pick_next_task(task_items)
    if not task:
        return False
    task_id = str(task.get("id", "unknown-task"))
    # Per-task settings fall back to worker-level config, then to 1 attempt.
    max_attempts = int(task.get("max_attempts") or config.get("worker", {}).get("max_attempts") or 1)
    attempt = int(task.get("attempts", 0)) + 1
    # Mark the task running and persist immediately so a crash leaves an
    # accurate on-disk record.
    task["attempts"] = attempt
    task["max_attempts"] = max_attempts
    task["status"] = "running"
    task["started_at"] = now_iso()
    save_json(tasks_path, {"tasks": task_items})
    state = load_json(state_path, {"running_task_id": None, "updated_at": None})
    state["running_task_id"] = task_id
    state["updated_at"] = now_iso()
    save_json(state_path, state)
    emit_event(events_path, "task.assigned", task_id, "running", "controller", "Task assigned")
    emit_event(events_path, "task.started", task_id, "running", "worker", "Worker execution started")
    log_path = logs_dir / f"{task_id}.log"
    # Snapshot the task to a file so command templates can reference it
    # via the {task_file} placeholder.
    task_file = orch_dir / f"task-{task_id}.json"
    save_json(task_file, task)
    # Resolve the worker command: explicit per-task command wins, otherwise
    # render the configured worker.command_template.
    cmd = str(task.get("command", "")).strip()
    if not cmd:
        template = str(config.get("worker", {}).get("command_template", "")).strip()
        if template:
            cmd = render_command_template(template, task, task_file)
    if not cmd:
        # No way to run this task — fail it immediately (no retry).
        task["status"] = "failed"
        task["failed_at"] = now_iso()
        task["error"] = "No task command or worker command_template configured."
        save_json(tasks_path, {"tasks": task_items})
        emit_event(events_path, "task.failed", task_id, "failed", "controller", task["error"])
        state["running_task_id"] = None
        state["updated_at"] = now_iso()
        save_json(state_path, state)
        return True
    timeout_sec = int(task.get("timeout_seconds") or config.get("worker", {}).get("timeout_seconds") or 7200)
    rc, _, timed_out = run_shell(cmd, repo_root, log_path, timeout_sec)
    if rc != 0:
        # Worker failed: either requeue for another attempt or fail for good.
        task["error"] = f"Worker command timed out after {timeout_sec}s" if timed_out else f"Worker command failed with exit code {rc}"
        if attempt < max_attempts:
            task["status"] = "pending"
            task["last_failed_at"] = now_iso()
            emit_event(
                events_path,
                "task.retry.scheduled",
                task_id,
                "pending",
                "worker",
                f"{task['error']}; retry {attempt + 1}/{max_attempts}",
            )
        else:
            task["status"] = "failed"
            task["failed_at"] = now_iso()
            emit_event(events_path, "task.failed", task_id, "failed", "worker", task["error"])
        save_json(tasks_path, {"tasks": task_items})
        state["running_task_id"] = None
        state["updated_at"] = now_iso()
        save_json(state_path, state)
        save_json(
            results_dir / f"{task_id}.json",
            {"task_id": task_id, "status": task["status"], "exit_code": rc, "attempt": attempt, "max_attempts": max_attempts},
        )
        return True
    # Worker succeeded — enforce quality gates (task-level gates override
    # controller-level gates). Every gate runs even after one fails, so the
    # result file records the full picture.
    gates = task.get("quality_gates") or config.get("quality_gates") or []
    all_passed = True
    gate_results: list[dict[str, Any]] = []
    for gate in gates:
        gate_cmd = str(gate).strip()
        if not gate_cmd:
            continue
        emit_event(events_path, "rail.check.started", task_id, "running", "quality-gate", f"Running gate: {gate_cmd}")
        gate_rc, _, gate_timed_out = run_shell(gate_cmd, repo_root, log_path, timeout_sec)
        if gate_rc == 0:
            emit_event(events_path, "rail.check.passed", task_id, "running", "quality-gate", f"Gate passed: {gate_cmd}")
        else:
            all_passed = False
            emit_event(
                events_path,
                "rail.check.failed",
                task_id,
                "failed",
                "quality-gate",
                f"Gate timed out after {timeout_sec}s: {gate_cmd}" if gate_timed_out else f"Gate failed ({gate_rc}): {gate_cmd}",
            )
        gate_results.append({"command": gate_cmd, "exit_code": gate_rc})
    if all_passed:
        task["status"] = "completed"
        task["completed_at"] = now_iso()
        emit_event(events_path, "task.completed", task_id, "completed", "controller", "Task completed")
    else:
        # Gate failure follows the same retry policy as a worker failure.
        task["error"] = "One or more quality gates failed"
        if attempt < max_attempts:
            task["status"] = "pending"
            task["last_failed_at"] = now_iso()
            emit_event(
                events_path,
                "task.retry.scheduled",
                task_id,
                "pending",
                "controller",
                f"{task['error']}; retry {attempt + 1}/{max_attempts}",
            )
        else:
            task["status"] = "failed"
            task["failed_at"] = now_iso()
            emit_event(events_path, "task.failed", task_id, "failed", "controller", task["error"])
    # Persist the final task/queue state and the per-task result summary.
    save_json(tasks_path, {"tasks": task_items})
    state["running_task_id"] = None
    state["updated_at"] = now_iso()
    save_json(state_path, state)
    save_json(
        results_dir / f"{task_id}.json",
        {
            "task_id": task_id,
            "status": task["status"],
            "completed_at": task.get("completed_at"),
            "failed_at": task.get("failed_at"),
            "gate_results": gate_results,
        },
    )
    return True
def queue_state(orch_dir: pathlib.Path) -> dict[str, int]:
    """Summarize the queue as {'pending', 'running', 'runnable'} counts.

    'runnable' counts pending tasks whose dependencies are all completed.
    A malformed tasks.json (non-list 'tasks') yields all-zero counts.
    """
    data = load_json(orch_dir / "tasks.json", {"tasks": []})
    items = data.get("tasks", [])
    if not isinstance(items, list):
        return {"pending": 0, "running": 0, "runnable": 0}
    statuses = {str(t.get("id", "")): str(t.get("status", "")) for t in items}
    counts = {"pending": 0, "running": 0, "runnable": 0}
    for item in items:
        item_status = str(item.get("status", "pending"))
        if item_status == "pending":
            counts["pending"] += 1
            deps = parse_dep_list(item.get("depends_on"))
            if all(is_completed_status(statuses.get(dep, "")) for dep in deps):
                counts["runnable"] += 1
        elif item_status == "running":
            counts["running"] += 1
    return counts
def main() -> int:
    """CLI entry point; returns the process exit code.

    Modes:
      --once           process at most one task, then exit 0
      --until-drained  loop until the queue is empty (exit 0) or only
                       dependency-blocked tasks remain (exit 2)
      default          poll forever, sleeping --poll-sec between idle checks
    """
    parser = argparse.ArgumentParser(description="Mosaic deterministic orchestrator controller")
    parser.add_argument("--repo", default=os.getcwd(), help="Repository root (default: cwd)")
    parser.add_argument("--once", action="store_true", help="Process at most one pending task and exit")
    parser.add_argument("--until-drained", action="store_true", help="Run until no pending tasks remain (or blocked)")
    parser.add_argument("--poll-sec", type=int, default=10, help="Polling interval for continuous mode")
    args = parser.parse_args()
    repo_root = pathlib.Path(args.repo).resolve()
    orch_dir = repo_root / ".mosaic" / "orchestrator"
    config_path = orch_dir / "config.json"
    if not config_path.exists():
        print(f"[mosaic-orchestrator] missing config: {config_path}", file=sys.stderr)
        return 1
    config = load_json(config_path, {})
    # Opt-in: the repo must explicitly set enabled=true in its config.
    if not config.get("enabled", False):
        print("[mosaic-orchestrator] disabled in .mosaic/orchestrator/config.json (enabled=false)")
        return 0
    if args.once:
        processed = run_single_task(repo_root, orch_dir, config)
        if not processed:
            print("[mosaic-orchestrator] no pending tasks")
        return 0
    print(f"[mosaic-orchestrator] loop start repo={repo_root} poll={args.poll_sec}s")
    while True:
        try:
            processed = run_single_task(repo_root, orch_dir, config)
            if not processed:
                # Idle: decide whether to stop (drained/blocked) or sleep.
                qs = queue_state(orch_dir)
                if args.until_drained:
                    if qs["pending"] == 0 and qs["running"] == 0:
                        print("[mosaic-orchestrator] drained: no pending tasks")
                        return 0
                    if qs["pending"] > 0 and qs["runnable"] == 0 and qs["running"] == 0:
                        print("[mosaic-orchestrator] blocked: pending tasks remain but dependencies are unmet", file=sys.stderr)
                        return 2
                time.sleep(max(1, args.poll_sec))
        except KeyboardInterrupt:
            print("\n[mosaic-orchestrator] stopping")
            return 0
        except Exception as exc:  # pragma: no cover
            # Keep the loop alive on unexpected errors; they are logged and
            # retried after the normal polling delay.
            print(f"[mosaic-orchestrator] error: {exc}", file=sys.stderr)
            time.sleep(max(1, args.poll_sec))
if __name__ == "__main__":
raise SystemExit(main())

View File

@@ -0,0 +1,195 @@
#!/usr/bin/env python3
"""Sync docs/TASKS.md rows into .mosaic/orchestrator/tasks.json."""
from __future__ import annotations
import argparse
import json
import os
import pathlib
from typing import Any
def load_json(path: pathlib.Path, default: Any) -> Any:
    """Parsed JSON content of *path*, or *default* when the file is missing."""
    try:
        raw = path.read_text(encoding="utf-8")
    except FileNotFoundError:
        return default
    return json.loads(raw)
def save_json(path: pathlib.Path, data: Any) -> None:
    """Write *data* as indented JSON atomically (temp file + rename)."""
    path.parent.mkdir(parents=True, exist_ok=True)
    staging = path.with_suffix(path.suffix + ".tmp")
    staging.write_text(json.dumps(data, indent=2) + "\n", encoding="utf-8")
    staging.replace(path)
def split_pipe_row(line: str) -> list[str]:
    """Split one markdown table row into a list of stripped cell strings.

    At most one leading and one trailing pipe is removed before splitting,
    matching how markdown tables delimit their outer edges.
    """
    row = line.strip().removeprefix("|").removesuffix("|")
    return [cell.strip() for cell in row.split("|")]
def parse_tasks_markdown(path: pathlib.Path) -> list[dict[str, str]]:
    """Parse the first markdown table in *path* with id/status/description columns.

    Returns one dict per data row, keyed by the lower-cased header names.
    Returns an empty list when the file or a matching table is missing.
    """
    if not path.exists():
        return []
    lines = path.read_text(encoding="utf-8").splitlines()
    header_idx = -1
    headers: list[str] = []
    # Locate the header row: the first pipe-delimited line containing the
    # three required column names.
    for i, line in enumerate(lines):
        if "|" not in line:
            continue
        cells = [x.lower() for x in split_pipe_row(line)]
        if "id" in cells and "status" in cells and "description" in cells:
            header_idx = i
            headers = cells
            break
    if header_idx < 0:
        return []
    rows: list[dict[str, str]] = []
    # Start at header_idx + 2 to skip the |---|---| separator row; stop at the
    # first non-pipe line once at least one data row has been collected.
    for line in lines[header_idx + 2 :]:
        if not line.strip().startswith("|"):
            if rows:
                break
            continue
        cells = split_pipe_row(line)
        # Pad short rows so indexing by header position cannot raise.
        if len(cells) < len(headers):
            cells += [""] * (len(headers) - len(cells))
        row = {headers[i]: cells[i] for i in range(len(headers))}
        task_id = row.get("id", "").strip()
        # Skip blank ids and an accidentally repeated header row.
        if not task_id or task_id.lower() == "id":
            continue
        rows.append(row)
    return rows
def map_status(raw: str) -> str:
    """Translate a docs/TASKS.md status cell into an orchestrator status."""
    normalized = raw.strip().lower()
    if normalized in ("done", "completed"):
        return "completed"
    if normalized == "failed":
        return "failed"
    # not-started / todo / pending / in-progress / needs-qa — and any
    # unrecognized value — all map to "pending".
    return "pending"
def parse_depends(raw: str) -> list[str]:
    """Parse a comma-separated dependency cell into a list of task ids."""
    deps: list[str] = []
    for piece in raw.split(","):
        piece = piece.strip()
        if piece:
            deps.append(piece)
    return deps
def build_task(
    row: dict[str, str],
    existing: dict[str, Any],
    runtime_default: str,
    source_path: str,
) -> dict[str, Any]:
    """Merge one markdown table row with an *existing* tasks.json entry.

    The markdown row wins for title/description/status/depends_on; the
    existing JSON entry wins for runtime/command/quality_gates so manual
    edits survive a re-sync. Unknown keys on the existing entry are kept.
    """
    task_id = row.get("id", "").strip()
    description = row.get("description", "").strip()
    issue = row.get("issue", "").strip()
    repo = row.get("repo", "").strip()
    branch = row.get("branch", "").strip()
    depends_on = parse_depends(row.get("depends_on", ""))
    # Copy the existing entry so extra keys are preserved and the input
    # dict is never mutated.
    task = dict(existing)
    task["id"] = task_id
    task["title"] = description or task_id
    task["description"] = description
    task["status"] = map_status(row.get("status", "pending"))
    task["depends_on"] = depends_on
    # Keep previously assigned runtime/command/gates; fall back to defaults.
    task["runtime"] = str(task.get("runtime") or runtime_default or "codex")
    task["command"] = str(task.get("command") or "")
    task["quality_gates"] = task.get("quality_gates") or []
    metadata = dict(task.get("metadata") or {})
    metadata.update(
        {
            "source": source_path,
            "issue": issue,
            "repo": repo,
            "branch": branch,
        }
    )
    task["metadata"] = metadata
    return task
def main() -> int:
    """Sync docs/TASKS.md table rows into .mosaic/orchestrator/tasks.json.

    Dry-run by default; pass --apply to actually write. Always returns 0.
    """
    parser = argparse.ArgumentParser(description="Sync docs/TASKS.md into .mosaic/orchestrator/tasks.json")
    parser.add_argument("--repo", default=os.getcwd(), help="Repository root (default: cwd)")
    parser.add_argument("--docs", default="docs/TASKS.md", help="Path to tasks markdown (repo-relative)")
    parser.add_argument(
        "--tasks-json",
        default=".mosaic/orchestrator/tasks.json",
        help="Path to orchestrator tasks JSON (repo-relative)",
    )
    parser.add_argument("--keep-unlisted", action="store_true", help="Retain tasks already in JSON but missing from docs/TASKS.md")
    parser.add_argument("--apply", action="store_true", help="Write changes (default is dry-run)")
    args = parser.parse_args()
    repo = pathlib.Path(args.repo).resolve()
    docs_path = (repo / args.docs).resolve()
    # Backward compatibility: fall back to legacy lowercase path when default path is absent.
    if args.docs == "docs/TASKS.md" and not docs_path.exists():
        legacy_docs_path = (repo / "docs/tasks.md").resolve()
        if legacy_docs_path.exists():
            docs_path = legacy_docs_path
    tasks_path = (repo / args.tasks_json).resolve()
    config_path = repo / ".mosaic" / "orchestrator" / "config.json"
    config = load_json(config_path, {})
    # Default worker runtime comes from config; falls back to "codex".
    runtime_default = str(config.get("worker", {}).get("runtime") or "codex")
    rows = parse_tasks_markdown(docs_path)
    # Record the markdown source path (repo-relative when possible) for metadata.
    try:
        source_path = str(docs_path.relative_to(repo))
    except ValueError:
        source_path = str(docs_path)
    existing_payload = load_json(tasks_path, {"tasks": []})
    existing_tasks = existing_payload.get("tasks", [])
    if not isinstance(existing_tasks, list):
        existing_tasks = []
    existing_by_id = {str(t.get("id", "")): t for t in existing_tasks}
    out_tasks: list[dict[str, Any]] = []
    seen: set[str] = set()
    # Rebuild the task list in markdown order, merging with existing entries.
    for row in rows:
        task_id = row.get("id", "").strip()
        if not task_id:
            continue
        seen.add(task_id)
        out_tasks.append(
            build_task(
                row,
                existing_by_id.get(task_id, {}),
                runtime_default,
                source_path,
            )
        )
    # Optionally retain JSON-only tasks that were removed from the markdown.
    if args.keep_unlisted:
        for task in existing_tasks:
            task_id = str(task.get("id", ""))
            if task_id and task_id not in seen:
                out_tasks.append(task)
    payload = {"tasks": out_tasks}
    if args.apply:
        save_json(tasks_path, payload)
        print(f"[mosaic-orchestrator-sync] wrote {len(out_tasks)} tasks -> {tasks_path}")
    else:
        print(f"[mosaic-orchestrator-sync] dry-run: {len(out_tasks)} tasks would be written -> {tasks_path}")
    return 0
# Script entry point: propagate main()'s integer return code to the shell.
if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,64 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://mosaicstack.dev/schemas/orchestrator/event.schema.json",
"title": "Mosaic Orchestrator Event",
"type": "object",
"required": [
"event_id",
"event_type",
"task_id",
"status",
"timestamp",
"source"
],
"properties": {
"event_id": {
"type": "string",
"description": "UUID string"
},
"event_type": {
"type": "string",
"enum": [
"task.assigned",
"task.started",
"task.progress",
"task.completed",
"task.failed",
"rail.check.started",
"rail.check.passed",
"rail.check.failed"
]
},
"task_id": {
"type": "string"
},
"status": {
"type": "string",
"enum": [
"pending",
"running",
"completed",
"failed"
]
},
"timestamp": {
"type": "string",
"format": "date-time"
},
"source": {
"type": "string",
"enum": [
"controller",
"worker",
"quality-gate"
]
},
"message": {
"type": "string"
},
"metadata": {
"type": "object"
}
},
"additionalProperties": true
}

View File

@@ -0,0 +1,49 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://mosaicstack.dev/schemas/orchestrator/task.schema.json",
"title": "Mosaic Orchestrator Task",
"type": "object",
"required": [
"id",
"title",
"status"
],
"properties": {
"id": {
"type": "string"
},
"title": {
"type": "string"
},
"description": {
"type": "string"
},
"status": {
"type": "string",
"enum": [
"pending",
"running",
"completed",
"failed"
]
},
"runtime": {
"type": "string",
"description": "Preferred worker runtime, e.g. codex, claude, opencode"
},
"command": {
"type": "string",
"description": "Worker command to execute for this task"
},
"quality_gates": {
"type": "array",
"items": {
"type": "string"
}
},
"metadata": {
"type": "object"
}
},
"additionalProperties": true
}

View File

@@ -0,0 +1,2 @@
__pycache__/
*.pyc

View File

@@ -0,0 +1,200 @@
#!/usr/bin/env python3
"""Matrix transport bridge for Mosaic orchestrator events/tasks."""
from __future__ import annotations
import argparse
import json
import pathlib
import urllib.parse
import urllib.request
import uuid
from typing import Any
def load_json(path: pathlib.Path, default: Any) -> Any:
    """Return the parsed JSON contents of *path*, or *default* if absent."""
    if not path.exists():
        return default
    text = path.read_text(encoding="utf-8")
    return json.loads(text)
def save_json(path: pathlib.Path, data: Any) -> None:
    """Serialize *data* to *path* as indented JSON using a temp-file swap."""
    path.parent.mkdir(parents=True, exist_ok=True)
    scratch = path.with_suffix(path.suffix + ".tmp")
    text = json.dumps(data, indent=2) + "\n"
    with scratch.open("w", encoding="utf-8") as handle:
        handle.write(text)
    # Atomic on POSIX: readers see either the old or the new file.
    scratch.replace(path)
def matrix_request(
    homeserver: str,
    access_token: str,
    method: str,
    path: str,
    payload: dict[str, Any] | None = None,
) -> dict[str, Any]:
    """Issue an authenticated request against the Matrix client-server API.

    *path* must begin with "/"; *payload*, when given, is sent as a JSON
    body. Returns the decoded JSON response ({} for an empty body).
    Raises urllib.error.HTTPError/URLError on HTTP or transport failure.
    """
    url = homeserver.rstrip("/") + path
    body = None
    headers = {"Authorization": f"Bearer {access_token}"}
    if payload is not None:
        body = json.dumps(payload, ensure_ascii=True).encode("utf-8")
        headers["Content-Type"] = "application/json"
    req = urllib.request.Request(url, method=method, data=body, headers=headers)
    # 30s timeout guards against a hung homeserver blocking the bridge.
    with urllib.request.urlopen(req, timeout=30) as resp:
        raw = resp.read().decode("utf-8")
        return json.loads(raw) if raw else {}
def matrix_send_message(homeserver: str, access_token: str, room_id: str, message: str) -> None:
    """Post *message* as a plain-text (m.text) event to *room_id*."""
    # Fresh transaction id per call; Matrix de-duplicates on (token, txn id).
    txn = str(uuid.uuid4())
    path = f"/_matrix/client/v3/rooms/{urllib.parse.quote(room_id, safe='')}/send/m.room.message/{txn}"
    matrix_request(
        homeserver,
        access_token,
        "PUT",
        path,
        {"msgtype": "m.text", "body": message},
    )
def format_event_message(event: dict[str, Any]) -> str:
    """Render one orchestrator event as a single-line Matrix message."""
    fields = (
        event.get("event_type", "unknown"),
        event.get("task_id", "unknown"),
        event.get("status", "unknown"),
        event.get("message", ""),
    )
    return "[mosaic-orch] {} task={} status={} :: {}".format(*fields)
def publish_events(repo: pathlib.Path, config: dict[str, Any]) -> int:
    """Publish new events.ndjson lines to the Matrix control room.

    Progress is tracked in matrix_state.json as a 1-based line counter, so
    only lines after last_published_line are sent. Returns the number of
    events published. Raises ValueError when matrix config is incomplete.
    """
    orch = repo / ".mosaic" / "orchestrator"
    events_path = orch / "events.ndjson"
    bridge_state_path = orch / "matrix_state.json"
    state = load_json(bridge_state_path, {"last_published_line": 0, "since": None})
    homeserver = str(config.get("matrix", {}).get("homeserver_url", "")).strip()
    token = str(config.get("matrix", {}).get("access_token", "")).strip()
    room_id = str(config.get("matrix", {}).get("control_room_id", "")).strip()
    if not homeserver or not token or not room_id:
        raise ValueError("matrix homeserver_url, access_token, and control_room_id are required")
    if not events_path.exists():
        return 0
    lines = events_path.read_text(encoding="utf-8").splitlines()
    start = int(state.get("last_published_line", 0))
    published = 0
    # enumerate from start+1 so the stored counter stays 1-based.
    for idx, line in enumerate(lines[start:], start=start + 1):
        if not line.strip():
            continue
        event = json.loads(line)
        matrix_send_message(homeserver, token, room_id, format_event_message(event))
        state["last_published_line"] = idx
        published += 1
    # NOTE(review): state is only persisted after the loop; a failure mid-loop
    # re-sends already-published events on the next run. Confirm this
    # at-least-once delivery is acceptable.
    save_json(bridge_state_path, state)
    return published
def parse_task_command(body: str) -> dict[str, Any] | None:
raw = body.strip()
if raw.startswith("!mosaic-task "):
payload = raw[len("!mosaic-task ") :].strip()
elif raw.startswith("@mosaic task "):
payload = raw[len("@mosaic task ") :].strip()
else:
return None
task = json.loads(payload)
if not isinstance(task, dict):
raise ValueError("task payload must be a JSON object")
if "id" not in task or "title" not in task:
raise ValueError("task payload requires id and title")
task.setdefault("status", "pending")
return task
def consume_tasks(repo: pathlib.Path, config: dict[str, Any]) -> int:
    """Pull !mosaic-task commands from the Matrix control room into tasks.json.

    Performs an incremental /sync (the "since" token lives in
    matrix_state.json) and appends any commanded tasks whose ids are not
    already present. Returns the number of tasks added. Raises ValueError
    on incomplete matrix config or a malformed tasks.json.
    """
    orch = repo / ".mosaic" / "orchestrator"
    tasks_path = orch / "tasks.json"
    bridge_state_path = orch / "matrix_state.json"
    state = load_json(bridge_state_path, {"last_published_line": 0, "since": None})
    tasks = load_json(tasks_path, {"tasks": []})
    task_items = tasks.get("tasks", [])
    if not isinstance(task_items, list):
        raise ValueError("tasks.json must contain {'tasks': [...]} structure")
    homeserver = str(config.get("matrix", {}).get("homeserver_url", "")).strip()
    token = str(config.get("matrix", {}).get("access_token", "")).strip()
    room_id = str(config.get("matrix", {}).get("control_room_id", "")).strip()
    bot_user_id = str(config.get("matrix", {}).get("bot_user_id", "")).strip()
    if not homeserver or not token or not room_id:
        raise ValueError("matrix homeserver_url, access_token, and control_room_id are required")
    since = state.get("since")
    # Short timeout: this is a poll, not a long-poll.
    path = "/_matrix/client/v3/sync?timeout=1"
    if since:
        path += "&since=" + urllib.parse.quote(str(since), safe="")
    sync = matrix_request(homeserver, token, "GET", path)
    if "next_batch" in sync:
        state["since"] = sync["next_batch"]
    room_timeline = (
        sync.get("rooms", {})
        .get("join", {})
        .get(room_id, {})
        .get("timeline", {})
        .get("events", [])
    )
    added = 0
    existing = {str(t.get("id")) for t in task_items if isinstance(t, dict)}
    for evt in room_timeline:
        if evt.get("type") != "m.room.message":
            continue
        # Skip the bridge bot's own messages to avoid feedback loops.
        sender = str(evt.get("sender", ""))
        if bot_user_id and sender == bot_user_id:
            continue
        body = str(evt.get("content", {}).get("body", ""))
        # NOTE(review): parse_task_command raises on malformed JSON payloads,
        # which aborts the run before state is saved — confirm whether bad
        # commands should instead be skipped.
        task = parse_task_command(body)
        if not task:
            continue
        task_id = str(task.get("id"))
        if task_id in existing:
            continue
        task_items.append(task)
        existing.add(task_id)
        added += 1
    save_json(tasks_path, {"tasks": task_items})
    save_json(bridge_state_path, state)
    return added
def main() -> int:
    """CLI entry: publish events to Matrix or consume tasks from Matrix."""
    p = argparse.ArgumentParser(description="Mosaic Matrix transport bridge")
    p.add_argument("--repo", default=".", help="Repository root")
    p.add_argument("--mode", required=True, choices=["publish", "consume"], help="Bridge mode")
    args = p.parse_args()
    repo = pathlib.Path(args.repo).resolve()
    config = load_json(repo / ".mosaic" / "orchestrator" / "config.json", {})
    # Both modes are gated on the orchestrator being enabled AND the
    # transport being explicitly set to "matrix".
    if not config.get("enabled", False):
        print("[mosaic-orch-matrix] disabled in config (enabled=false)")
        return 0
    if str(config.get("transport", "")).strip() != "matrix":
        print("[mosaic-orch-matrix] config transport != matrix; nothing to do")
        return 0
    if args.mode == "publish":
        count = publish_events(repo, config)
        print(f"[mosaic-orch-matrix] published_events={count}")
        return 0
    count = consume_tasks(repo, config)
    print(f"[mosaic-orch-matrix] consumed_tasks={count}")
    return 0
# Script entry point: propagate main()'s integer return code to the shell.
if __name__ == "__main__":
    raise SystemExit(main())

210
tools/portainer/README.md Normal file
View File

@@ -0,0 +1,210 @@
# Portainer CLI Scripts
CLI tools for managing Portainer stacks via the API.
## Setup
### Environment Variables
Set these environment variables before using the scripts:
```bash
export PORTAINER_URL="https://portainer.example.com:9443"
export PORTAINER_API_KEY="your-api-key-here"
```
You can add these to your shell profile (`~/.bashrc`, `~/.zshrc`) or use a `.env` file.
### Creating an API Key
1. Log in to Portainer
2. Click your username in the top right corner > "My account"
3. Scroll to "Access tokens" section
4. Click "Add access token"
5. Enter a descriptive name (e.g., "CLI scripts")
6. Copy the token immediately (you cannot view it again)
### Dependencies
- `curl` - HTTP client
- `jq` - JSON processor
`curl` is typically pre-installed on most Linux distributions; `jq` may need to be installed from your package manager (e.g. `apt install jq`).
## Scripts
### stack-list.sh
List all Portainer stacks.
```bash
# List all stacks in table format
stack-list.sh
# List stacks in JSON format
stack-list.sh -f json
# List only stack names
stack-list.sh -f names
stack-list.sh -q
# Filter by endpoint ID
stack-list.sh -e 1
```
### stack-status.sh
Show status and containers for a stack.
```bash
# Show stack status
stack-status.sh -n mystack
# Show status in JSON format
stack-status.sh -n mystack -f json
# Use stack ID instead of name
stack-status.sh -i 5
```
### stack-redeploy.sh
Redeploy a stack. For git-based stacks, this pulls the latest from the repository.
```bash
# Redeploy a stack by name
stack-redeploy.sh -n mystack
# Redeploy and pull latest images
stack-redeploy.sh -n mystack -p
# Redeploy by stack ID
stack-redeploy.sh -i 5 -p
```
### stack-logs.sh
View logs for stack services/containers.
```bash
# List available services in a stack
stack-logs.sh -n mystack
# View logs for a specific service
stack-logs.sh -n mystack -s webapp
# Show last 200 lines
stack-logs.sh -n mystack -s webapp -t 200
# Follow logs (stream)
stack-logs.sh -n mystack -s webapp -f
# Include timestamps
stack-logs.sh -n mystack -s webapp --timestamps
```
### stack-start.sh
Start an inactive stack.
```bash
stack-start.sh -n mystack
stack-start.sh -i 5
```
### stack-stop.sh
Stop a running stack.
```bash
stack-stop.sh -n mystack
stack-stop.sh -i 5
```
### endpoint-list.sh
List all Portainer endpoints/environments.
```bash
# List in table format
endpoint-list.sh
# List in JSON format
endpoint-list.sh -f json
```
## Common Workflows
### CI/CD Redeploy
After pushing changes to a git-based stack's repository:
```bash
# Redeploy with latest images
stack-redeploy.sh -n myapp -p
# Check status
stack-status.sh -n myapp
# View logs to verify startup
stack-logs.sh -n myapp -s api -t 50
```
### Debugging a Failing Stack
```bash
# Check overall status
stack-status.sh -n myapp
# List all services
stack-logs.sh -n myapp
# View logs for failing service
stack-logs.sh -n myapp -s worker -t 200
# Follow logs in real-time
stack-logs.sh -n myapp -s worker -f
```
### Restart a Stack
```bash
# Stop the stack
stack-stop.sh -n myapp
# Start it again
stack-start.sh -n myapp
# Or just redeploy (pulls latest images)
stack-redeploy.sh -n myapp -p
```
## Error Handling
All scripts:
- Exit with code 0 on success
- Exit with code 1 on error
- Print errors to stderr
- Validate required environment variables before making API calls
## API Reference
These scripts use the Portainer CE API. Key endpoints:
| Endpoint | Method | Description |
|----------|--------|-------------|
| `/api/stacks` | GET | List all stacks |
| `/api/stacks/{id}` | GET | Get stack details |
| `/api/stacks/{id}/file` | GET | Get stack compose file |
| `/api/stacks/{id}` | PUT | Update/redeploy stack |
| `/api/stacks/{id}/git/redeploy` | PUT | Redeploy git-based stack |
| `/api/stacks/{id}/start` | POST | Start inactive stack |
| `/api/stacks/{id}/stop` | POST | Stop running stack |
| `/api/endpoints` | GET | List all environments |
| `/api/endpoints/{id}/docker/containers/json` | GET | List containers |
| `/api/endpoints/{id}/docker/containers/{id}/logs` | GET | Get container logs |
For full API documentation, see:
- [Portainer API Access](https://docs.portainer.io/api/access)
- [Portainer API Examples](https://docs.portainer.io/api/examples)
- [Portainer API Docs](https://docs.portainer.io/api/docs)

View File

@@ -0,0 +1,85 @@
#!/usr/bin/env bash
#
# endpoint-list.sh - List all Portainer endpoints/environments
#
# Usage: endpoint-list.sh [-f format]
#
# Environment variables:
#   PORTAINER_URL     - Portainer instance URL (e.g., https://portainer.example.com:9443)
#   PORTAINER_API_KEY - API access token
#
# Options:
#   -f format   Output format: table (default), json
#   -h          Show this help

set -euo pipefail

# Print the header comment block as help text.
# Fix: the previous `head -16 | grep "^#"` also matched the shebang and
# emitted a mangled "!/usr/bin/env bash" line; skip line 1 instead.
usage() {
  awk 'NR == 1 { next } /^#/ { sub(/^# ?/, ""); print; next } { exit }' "$0"
}

# Default values
FORMAT="table"

# Parse arguments
while getopts "f:h" opt; do
  case $opt in
    f) FORMAT="$OPTARG" ;;
    h)
      usage
      exit 0
      ;;
    *)
      echo "Usage: $0 [-f format]" >&2
      exit 1
      ;;
  esac
done

# Validate environment
if [[ -z "${PORTAINER_URL:-}" ]]; then
  echo "Error: PORTAINER_URL environment variable not set" >&2
  exit 1
fi
if [[ -z "${PORTAINER_API_KEY:-}" ]]; then
  echo "Error: PORTAINER_API_KEY environment variable not set" >&2
  exit 1
fi

# Remove trailing slash from URL
PORTAINER_URL="${PORTAINER_URL%/}"

# Fetch endpoints; the last line of $response carries the HTTP status code.
response=$(curl -s -w "\n%{http_code}" \
  -H "X-API-Key: ${PORTAINER_API_KEY}" \
  "${PORTAINER_URL}/api/endpoints")

http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

if [[ "$http_code" != "200" ]]; then
  echo "Error: API request failed with status $http_code" >&2
  echo "$body" >&2
  exit 1
fi

# Output based on format
case "$FORMAT" in
  json)
    echo "$body" | jq '.'
    ;;
  table)
    echo "ID   NAME                         TYPE       STATUS   URL"
    echo "---- ---------------------------- ---------- -------- ---"
    echo "$body" | jq -r '.[] | [
      .Id,
      .Name,
      (if .Type == 1 then "docker" elif .Type == 2 then "agent" elif .Type == 3 then "azure" elif .Type == 4 then "edge" elif .Type == 5 then "kubernetes" else "unknown" end),
      (if .Status == 1 then "up" elif .Status == 2 then "down" else "unknown" end),
      .URL
    ] | @tsv' | while IFS=$'\t' read -r id name type status url; do
      printf "%-4s %-28s %-10s %-8s %s\n" "$id" "$name" "$type" "$status" "$url"
    done
    ;;
  *)
    echo "Error: Unknown format '$FORMAT'. Use: table, json" >&2
    exit 1
    ;;
esac

100
tools/portainer/stack-list.sh Executable file
View File

@@ -0,0 +1,100 @@
#!/usr/bin/env bash
#
# stack-list.sh - List all Portainer stacks
#
# Usage: stack-list.sh [-e endpoint_id] [-f format] [-q]
#
# Environment variables:
#   PORTAINER_URL     - Portainer instance URL (e.g., https://portainer.example.com:9443)
#   PORTAINER_API_KEY - API access token
#
# Options:
#   -e endpoint_id  Filter by endpoint/environment ID
#   -f format       Output format: table (default), json, names
#   -q              Quiet mode - only output stack names (shortcut for -f names)
#   -h              Show this help

set -euo pipefail

# Print the header comment block as help text.
# Fix: the previous `head -20 | grep "^#"` also matched the shebang and
# emitted a mangled "!/usr/bin/env bash" line; skip line 1 instead.
usage() {
  awk 'NR == 1 { next } /^#/ { sub(/^# ?/, ""); print; next } { exit }' "$0"
}

# Default values
ENDPOINT_FILTER=""
FORMAT="table"

# Parse arguments (-q is a shortcut for -f names)
while getopts "e:f:qh" opt; do
  case $opt in
    e) ENDPOINT_FILTER="$OPTARG" ;;
    f) FORMAT="$OPTARG" ;;
    q) FORMAT="names" ;;
    h)
      usage
      exit 0
      ;;
    *)
      echo "Usage: $0 [-e endpoint_id] [-f format] [-q]" >&2
      exit 1
      ;;
  esac
done

# Validate environment
if [[ -z "${PORTAINER_URL:-}" ]]; then
  echo "Error: PORTAINER_URL environment variable not set" >&2
  exit 1
fi
if [[ -z "${PORTAINER_API_KEY:-}" ]]; then
  echo "Error: PORTAINER_API_KEY environment variable not set" >&2
  exit 1
fi

# Remove trailing slash from URL
PORTAINER_URL="${PORTAINER_URL%/}"

# Fetch stacks; the last line of $response carries the HTTP status code.
response=$(curl -s -w "\n%{http_code}" \
  -H "X-API-Key: ${PORTAINER_API_KEY}" \
  "${PORTAINER_URL}/api/stacks")

http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

if [[ "$http_code" != "200" ]]; then
  echo "Error: API request failed with status $http_code" >&2
  echo "$body" >&2
  exit 1
fi

# Filter by endpoint if specified
if [[ -n "$ENDPOINT_FILTER" ]]; then
  body=$(echo "$body" | jq --arg eid "$ENDPOINT_FILTER" '[.[] | select(.EndpointId == ($eid | tonumber))]')
fi

# Output based on format
case "$FORMAT" in
  json)
    echo "$body" | jq '.'
    ;;
  names)
    echo "$body" | jq -r '.[].Name'
    ;;
  table)
    echo "ID   NAME                         STATUS   TYPE     ENDPOINT CREATED"
    echo "---- ---------------------------- -------- -------- -------- -------"
    # CreationDate: tostring guards against non-string values — some
    # Portainer versions appear to return a unix-epoch integer here (TODO
    # confirm), and a bare split("T") on a non-string aborts the whole jq
    # program instead of printing the table.
    echo "$body" | jq -r '.[] | [
      .Id,
      .Name,
      (if .Status == 1 then "active" elif .Status == 2 then "inactive" else "unknown" end),
      (if .Type == 1 then "swarm" elif .Type == 2 then "compose" elif .Type == 3 then "k8s" else "unknown" end),
      .EndpointId,
      ((.CreationDate // "N/A") | tostring | split("T")[0])
    ] | @tsv' | while IFS=$'\t' read -r id name status type endpoint created; do
      printf "%-4s %-28s %-8s %-8s %-8s %s\n" "$id" "$name" "$status" "$type" "$endpoint" "$created"
    done
    ;;
  *)
    echo "Error: Unknown format '$FORMAT'. Use: table, json, names" >&2
    exit 1
    ;;
esac

183
tools/portainer/stack-logs.sh Executable file
View File

@@ -0,0 +1,183 @@
#!/usr/bin/env bash
#
# stack-logs.sh - Get logs for a stack service/container
#
# Usage: stack-logs.sh -n <stack-name> [-s service-name] [-t tail] [-f]
#
# Environment variables:
#   PORTAINER_URL     - Portainer instance URL (e.g., https://portainer.example.com:9443)
#   PORTAINER_API_KEY - API access token
#
# Options:
#   -n name        Stack name (required)
#   -s service     Service/container name (optional - if omitted, lists available services)
#   -t tail        Number of lines to show from the end (default: 100)
#   -f             Follow log output (stream logs)
#   --timestamps   Show timestamps
#   -h             Show this help

set -euo pipefail

# Print the header comment block as help text.
# Fix: the previous `head -20 | grep "^#"` also matched the shebang and
# emitted a mangled "!/usr/bin/env bash" line; skip line 1 instead.
usage() {
  awk 'NR == 1 { next } /^#/ { sub(/^# ?/, ""); print; next } { exit }' "$0"
}

# Default values
STACK_NAME=""
SERVICE_NAME=""
TAIL_LINES="100"
FOLLOW=false
TIMESTAMPS=false

# Parse arguments (manual loop: getopts cannot handle the long --timestamps flag)
while [[ $# -gt 0 ]]; do
  case $1 in
    -n) STACK_NAME="$2"; shift 2 ;;
    -s) SERVICE_NAME="$2"; shift 2 ;;
    -t) TAIL_LINES="$2"; shift 2 ;;
    -f) FOLLOW=true; shift ;;
    --timestamps) TIMESTAMPS=true; shift ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "Unknown option: $1" >&2
      echo "Usage: $0 -n <stack-name> [-s service-name] [-t tail] [-f]" >&2
      exit 1
      ;;
  esac
done

# Validate environment
if [[ -z "${PORTAINER_URL:-}" ]]; then
  echo "Error: PORTAINER_URL environment variable not set" >&2
  exit 1
fi
if [[ -z "${PORTAINER_API_KEY:-}" ]]; then
  echo "Error: PORTAINER_API_KEY environment variable not set" >&2
  exit 1
fi
if [[ -z "$STACK_NAME" ]]; then
  echo "Error: -n <stack-name> is required" >&2
  exit 1
fi

# Remove trailing slash from URL
PORTAINER_URL="${PORTAINER_URL%/}"

# Make an API request; the last line of output carries the HTTP status code.
api_request() {
  local method="$1"
  local endpoint="$2"
  curl -s -w "\n%{http_code}" -X "$method" \
    -H "X-API-Key: ${PORTAINER_API_KEY}" \
    "${PORTAINER_URL}${endpoint}"
}

# Get stack info by name
response=$(api_request GET "/api/stacks")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to list stacks (HTTP $http_code)" >&2
  exit 1
fi

stack_info=$(echo "$body" | jq --arg name "$STACK_NAME" '.[] | select(.Name == $name)')
if [[ -z "$stack_info" || "$stack_info" == "null" ]]; then
  echo "Error: Stack '$STACK_NAME' not found" >&2
  exit 1
fi

ENDPOINT_ID=$(echo "$stack_info" | jq -r '.EndpointId')

# Get containers for this stack (all=true includes stopped containers)
response=$(api_request GET "/api/endpoints/${ENDPOINT_ID}/docker/containers/json?all=true")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

if [[ "$http_code" != "200" ]]; then
  echo "Error: Failed to get containers (HTTP $http_code)" >&2
  exit 1
fi

# Filter containers belonging to this stack (compose project or swarm namespace label)
containers=$(echo "$body" | jq --arg name "$STACK_NAME" '[.[] | select(
  (.Labels["com.docker.compose.project"] == $name) or
  (.Labels["com.docker.stack.namespace"] == $name)
)]')

container_count=$(echo "$containers" | jq 'length')
if [[ "$container_count" -eq 0 ]]; then
  echo "Error: No containers found for stack '$STACK_NAME'" >&2
  exit 1
fi

# If no service specified, list available services
if [[ -z "$SERVICE_NAME" ]]; then
  echo "Available services in stack '$STACK_NAME':"
  echo ""
  echo "$containers" | jq -r '.[] |
    (.Labels["com.docker.compose.service"] // .Labels["com.docker.swarm.service.name"] // .Names[0]) as $svc |
    "\(.Names[0] | ltrimstr("/"))  (\($svc // "unknown"))"'
  echo ""
  echo "Use -s <service-name> to view logs for a specific service."
  exit 0
fi

# Find container matching service name
# Match against service label or container name
container=$(echo "$containers" | jq --arg svc "$SERVICE_NAME" 'first(.[] | select(
  (.Labels["com.docker.compose.service"] == $svc) or
  (.Labels["com.docker.swarm.service.name"] == $svc) or
  (.Names[] | contains($svc))
))')

if [[ -z "$container" || "$container" == "null" ]]; then
  echo "Error: Service '$SERVICE_NAME' not found in stack '$STACK_NAME'" >&2
  echo ""
  echo "Available services:"
  echo "$containers" | jq -r '.[] |
    .Labels["com.docker.compose.service"] // .Labels["com.docker.swarm.service.name"] // .Names[0]'
  exit 1
fi

CONTAINER_ID=$(echo "$container" | jq -r '.Id')
CONTAINER_NAME=$(echo "$container" | jq -r '.Names[0]' | sed 's/^\///')

echo "Fetching logs for: $CONTAINER_NAME"
echo "Container ID: ${CONTAINER_ID:0:12}"
echo "---"

# Build query parameters
params="stdout=true&stderr=true&tail=${TAIL_LINES}"
if [[ "$TIMESTAMPS" == "true" ]]; then
  params="${params}&timestamps=true"
fi
if [[ "$FOLLOW" == "true" ]]; then
  params="${params}&follow=true"
fi

# Get logs
# Note: Docker API returns a raw log stream (not JSON). For non-TTY
# containers each frame carries an 8-byte multiplexing header which we
# strip heuristically below.
if [[ "$FOLLOW" == "true" ]]; then
  # Stream logs
  curl -s -N \
    -H "X-API-Key: ${PORTAINER_API_KEY}" \
    "${PORTAINER_URL}/api/endpoints/${ENDPOINT_ID}/docker/containers/${CONTAINER_ID}/logs?${params}" | \
    # NOTE(review): this assumes one frame per line; frames spanning
    # multiple lines will lose their first 8 visible characters — confirm
    # acceptable for interactive use.
    while IFS= read -r line; do
      # Remove docker stream header (first 8 bytes per chunk)
      echo "$line" | cut -c9-
    done
else
  # Get logs (non-streaming)
  curl -s \
    -H "X-API-Key: ${PORTAINER_API_KEY}" \
    "${PORTAINER_URL}/api/endpoints/${ENDPOINT_ID}/docker/containers/${CONTAINER_ID}/logs?${params}" | \
    # Docker log format has 8-byte header per line, attempt to strip it
    sed 's/^.\{8\}//' 2>/dev/null || cat
fi

183
tools/portainer/stack-redeploy.sh Executable file
View File

@@ -0,0 +1,183 @@
#!/usr/bin/env bash
#
# stack-redeploy.sh - Redeploy a Portainer stack
#
# For git-based stacks, this pulls the latest from the repository and redeploys.
# For file-based stacks, this redeploys with the current stack file.
#
# Usage: stack-redeploy.sh -n <stack-name> [-p] [-e endpoint_id]
#
# Environment variables:
#   PORTAINER_URL     - Portainer instance URL (e.g., https://portainer.example.com:9443)
#   PORTAINER_API_KEY - API access token
#
# Options:
#   -n name         Stack name (required)
#   -i id           Stack ID (alternative to -n)
#   -p              Pull latest images before redeploying
#   -e endpoint_id  Endpoint/environment ID (auto-detected from stack if not provided)
#   -h              Show this help

set -euo pipefail

# Print the header comment block as help text.
# Fix: the previous `head -22 | grep "^#"` also matched the shebang and
# emitted a mangled "!/usr/bin/env bash" line; skip line 1 instead.
usage() {
  awk 'NR == 1 { next } /^#/ { sub(/^# ?/, ""); print; next } { exit }' "$0"
}

# Default values
STACK_NAME=""
STACK_ID=""
PULL_IMAGE=false
ENDPOINT_ID=""

# Parse arguments
while getopts "n:i:pe:h" opt; do
  case $opt in
    n) STACK_NAME="$OPTARG" ;;
    i) STACK_ID="$OPTARG" ;;
    p) PULL_IMAGE=true ;;
    e) ENDPOINT_ID="$OPTARG" ;;
    h)
      usage
      exit 0
      ;;
    *)
      echo "Usage: $0 -n <stack-name> [-p] [-e endpoint_id]" >&2
      exit 1
      ;;
  esac
done

# Validate environment
if [[ -z "${PORTAINER_URL:-}" ]]; then
  echo "Error: PORTAINER_URL environment variable not set" >&2
  exit 1
fi
if [[ -z "${PORTAINER_API_KEY:-}" ]]; then
  echo "Error: PORTAINER_API_KEY environment variable not set" >&2
  exit 1
fi
if [[ -z "$STACK_NAME" && -z "$STACK_ID" ]]; then
  echo "Error: Either -n <stack-name> or -i <stack-id> is required" >&2
  exit 1
fi

# Remove trailing slash from URL
PORTAINER_URL="${PORTAINER_URL%/}"

# Make an API request; the last line of output carries the HTTP status code.
api_request() {
  local method="$1"
  local endpoint="$2"
  local data="${3:-}"
  local args=(-s -w "\n%{http_code}" -X "$method" -H "X-API-Key: ${PORTAINER_API_KEY}")
  if [[ -n "$data" ]]; then
    args+=(-H "Content-Type: application/json" -d "$data")
  fi
  curl "${args[@]}" "${PORTAINER_URL}${endpoint}"
}

# Get stack info by name or ID
if [[ -n "$STACK_NAME" ]]; then
  echo "Looking up stack '$STACK_NAME'..."
  response=$(api_request GET "/api/stacks")
  http_code=$(echo "$response" | tail -n1)
  body=$(echo "$response" | sed '$d')
  if [[ "$http_code" != "200" ]]; then
    echo "Error: Failed to list stacks (HTTP $http_code)" >&2
    exit 1
  fi
  stack_info=$(echo "$body" | jq --arg name "$STACK_NAME" '.[] | select(.Name == $name)')
  if [[ -z "$stack_info" || "$stack_info" == "null" ]]; then
    echo "Error: Stack '$STACK_NAME' not found" >&2
    exit 1
  fi
  STACK_ID=$(echo "$stack_info" | jq -r '.Id')
  ENDPOINT_ID_FROM_STACK=$(echo "$stack_info" | jq -r '.EndpointId')
else
  # Get stack info by ID
  response=$(api_request GET "/api/stacks/${STACK_ID}")
  http_code=$(echo "$response" | tail -n1)
  body=$(echo "$response" | sed '$d')
  if [[ "$http_code" != "200" ]]; then
    echo "Error: Failed to get stack (HTTP $http_code)" >&2
    exit 1
  fi
  stack_info="$body"
  STACK_NAME=$(echo "$stack_info" | jq -r '.Name')
  ENDPOINT_ID_FROM_STACK=$(echo "$stack_info" | jq -r '.EndpointId')
fi

# Use endpoint ID from stack if not provided
if [[ -z "$ENDPOINT_ID" ]]; then
  ENDPOINT_ID="$ENDPOINT_ID_FROM_STACK"
fi

# Check if this is a git-based stack
git_config=$(echo "$stack_info" | jq -r '.GitConfig // empty')

if [[ -n "$git_config" && "$git_config" != "null" ]]; then
  echo "Stack '$STACK_NAME' (ID: $STACK_ID) is git-based"
  echo "Triggering git pull and redeploy..."

  # Git-based stack redeploy
  # The git redeploy endpoint pulls from the repository and redeploys
  request_body=$(jq -n \
    --argjson pullImage "$PULL_IMAGE" \
    '{
      "pullImage": $pullImage,
      "prune": false,
      "repositoryReferenceName": "",
      "repositoryAuthentication": false
    }')
  response=$(api_request PUT "/api/stacks/${STACK_ID}/git/redeploy?endpointId=${ENDPOINT_ID}" "$request_body")
else
  echo "Stack '$STACK_NAME' (ID: $STACK_ID) is file-based"

  # Get the current stack file content
  echo "Fetching current stack file..."
  response=$(api_request GET "/api/stacks/${STACK_ID}/file")
  http_code=$(echo "$response" | tail -n1)
  body=$(echo "$response" | sed '$d')
  if [[ "$http_code" != "200" ]]; then
    echo "Error: Failed to get stack file (HTTP $http_code)" >&2
    exit 1
  fi
  stack_file_content=$(echo "$body" | jq -r '.StackFileContent')

  echo "Redeploying..."
  # NOTE(review): this PUT does not resend the stack's Env array — confirm
  # against the Portainer API whether omitting it clears existing
  # environment variables on redeploy.
  request_body=$(jq -n \
    --argjson pullImage "$PULL_IMAGE" \
    --arg stackFile "$stack_file_content" \
    '{
      "pullImage": $pullImage,
      "prune": false,
      "stackFileContent": $stackFile
    }')
  response=$(api_request PUT "/api/stacks/${STACK_ID}?endpointId=${ENDPOINT_ID}" "$request_body")
fi

http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')

if [[ "$http_code" == "200" ]]; then
  echo "Successfully redeployed stack '$STACK_NAME'"
  if [[ "$PULL_IMAGE" == "true" ]]; then
    echo "  - Pulled latest images"
  fi
else
  echo "Error: Redeploy failed (HTTP $http_code)" >&2
  echo "$body" | jq '.' 2>/dev/null || echo "$body" >&2
  exit 1
fi

114
tools/portainer/stack-start.sh Executable file
View File

@@ -0,0 +1,114 @@
#!/usr/bin/env bash
#
# stack-start.sh - Start an inactive Portainer stack
#
# Usage: stack-start.sh -n <stack-name>
#
# Environment variables:
#   PORTAINER_URL - Portainer instance URL (e.g., https://portainer.example.com:9443)
#   PORTAINER_API_KEY - API access token
#
# Options:
#   -n name   Stack name (required)
#   -i id     Stack ID (alternative to -n)
#   -h        Show this help
set -euo pipefail

# Default values
STACK_NAME=""
STACK_ID=""

# Parse arguments
while getopts "n:i:h" opt; do
  case $opt in
    n) STACK_NAME="$OPTARG" ;;
    i) STACK_ID="$OPTARG" ;;
    h)
      # The usage text is the comment header of this very file
      head -16 "$0" | grep "^#" | sed 's/^# \?//'
      exit 0
      ;;
    *)
      echo "Usage: $0 -n <stack-name>" >&2
      exit 1
      ;;
  esac
done

# Validate environment
if [[ -z "${PORTAINER_URL:-}" ]]; then
  echo "Error: PORTAINER_URL environment variable not set" >&2
  exit 1
fi
if [[ -z "${PORTAINER_API_KEY:-}" ]]; then
  echo "Error: PORTAINER_API_KEY environment variable not set" >&2
  exit 1
fi
if [[ -z "$STACK_NAME" && -z "$STACK_ID" ]]; then
  echo "Error: Either -n <stack-name> or -i <stack-id> is required" >&2
  exit 1
fi

# Remove trailing slash from URL so endpoint concatenation stays clean
PORTAINER_URL="${PORTAINER_URL%/}"

# api_request <method> <endpoint>
# Authenticated request to the Portainer API. Prints the response body,
# then the HTTP status code alone on the final line.
api_request() {
  local method="$1"
  local endpoint="$2"
  curl -s -w "\n%{http_code}" -X "$method" \
    -H "X-API-Key: ${PORTAINER_API_KEY}" \
    "${PORTAINER_URL}${endpoint}"
}

# Resolve stack ID and endpoint: by name (-n) or directly by ID (-i)
if [[ -n "$STACK_NAME" ]]; then
  response=$(api_request GET "/api/stacks")
  http_code=$(echo "$response" | tail -n1)
  body=$(echo "$response" | sed '$d')
  if [[ "$http_code" != "200" ]]; then
    echo "Error: Failed to list stacks (HTTP $http_code)" >&2
    exit 1
  fi
  # first() guards against duplicate names producing multiple JSON objects
  stack_info=$(echo "$body" | jq --arg name "$STACK_NAME" 'first(.[] | select(.Name == $name)) // empty')
  if [[ -z "$stack_info" || "$stack_info" == "null" ]]; then
    echo "Error: Stack '$STACK_NAME' not found" >&2
    exit 1
  fi
  STACK_ID=$(echo "$stack_info" | jq -r '.Id')
  ENDPOINT_ID=$(echo "$stack_info" | jq -r '.EndpointId')
else
  response=$(api_request GET "/api/stacks/${STACK_ID}")
  http_code=$(echo "$response" | tail -n1)
  body=$(echo "$response" | sed '$d')
  if [[ "$http_code" != "200" ]]; then
    echo "Error: Failed to get stack (HTTP $http_code)" >&2
    exit 1
  fi
  stack_info="$body"
  STACK_NAME=$(echo "$stack_info" | jq -r '.Name')
  ENDPOINT_ID=$(echo "$stack_info" | jq -r '.EndpointId')
fi

echo "Starting stack '$STACK_NAME' (ID: $STACK_ID)..."
response=$(api_request POST "/api/stacks/${STACK_ID}/start?endpointId=${ENDPOINT_ID}")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" == "200" ]]; then
  echo "Successfully started stack '$STACK_NAME'"
else
  echo "Error: Failed to start stack (HTTP $http_code)" >&2
  # Pretty-print the API error if it is JSON; either way it belongs on stderr
  # (previously the jq output leaked to stdout)
  { echo "$body" | jq '.' 2>/dev/null || echo "$body"; } >&2
  exit 1
fi

185
tools/portainer/stack-status.sh Executable file
View File

@@ -0,0 +1,185 @@
#!/usr/bin/env bash
#
# stack-status.sh - Show stack service status
#
# Usage: stack-status.sh -n <stack-name> [-f format]
#
# Environment variables:
#   PORTAINER_URL - Portainer instance URL (e.g., https://portainer.example.com:9443)
#   PORTAINER_API_KEY - API access token
#
# Options:
#   -n name     Stack name (required)
#   -i id       Stack ID (alternative to -n)
#   -f format   Output format: table (default), json
#   -h          Show this help
set -euo pipefail
# Default values
STACK_NAME=""
STACK_ID=""
FORMAT="table"
# Parse arguments
while getopts "n:i:f:h" opt; do
case $opt in
n) STACK_NAME="$OPTARG" ;;
i) STACK_ID="$OPTARG" ;;
f) FORMAT="$OPTARG" ;;
h)
# Usage text is this file's comment header
head -18 "$0" | grep "^#" | sed 's/^# \?//'
exit 0
;;
*)
echo "Usage: $0 -n <stack-name> [-f format]" >&2
exit 1
;;
esac
done
# Validate environment
if [[ -z "${PORTAINER_URL:-}" ]]; then
echo "Error: PORTAINER_URL environment variable not set" >&2
exit 1
fi
if [[ -z "${PORTAINER_API_KEY:-}" ]]; then
echo "Error: PORTAINER_API_KEY environment variable not set" >&2
exit 1
fi
if [[ -z "$STACK_NAME" && -z "$STACK_ID" ]]; then
echo "Error: Either -n <stack-name> or -i <stack-id> is required" >&2
exit 1
fi
# Remove trailing slash from URL
PORTAINER_URL="${PORTAINER_URL%/}"
# api_request <method> <endpoint>
# Authenticated Portainer API call; prints body, then the HTTP status
# code alone on the final line (split apart with tail/sed below).
api_request() {
local method="$1"
local endpoint="$2"
curl -s -w "\n%{http_code}" -X "$method" \
-H "X-API-Key: ${PORTAINER_API_KEY}" \
"${PORTAINER_URL}${endpoint}"
}
# Resolve stack info: by name via list+filter, or directly by ID
if [[ -n "$STACK_NAME" ]]; then
response=$(api_request GET "/api/stacks")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to list stacks (HTTP $http_code)" >&2
exit 1
fi
stack_info=$(echo "$body" | jq --arg name "$STACK_NAME" '.[] | select(.Name == $name)')
if [[ -z "$stack_info" || "$stack_info" == "null" ]]; then
echo "Error: Stack '$STACK_NAME' not found" >&2
exit 1
fi
STACK_ID=$(echo "$stack_info" | jq -r '.Id')
ENDPOINT_ID=$(echo "$stack_info" | jq -r '.EndpointId')
STACK_NAME=$(echo "$stack_info" | jq -r '.Name')
else
response=$(api_request GET "/api/stacks/${STACK_ID}")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to get stack (HTTP $http_code)" >&2
exit 1
fi
stack_info="$body"
ENDPOINT_ID=$(echo "$stack_info" | jq -r '.EndpointId')
STACK_NAME=$(echo "$stack_info" | jq -r '.Name')
fi
# Get stack type (1=swarm, 2=compose per the mapping used below)
STACK_TYPE=$(echo "$stack_info" | jq -r '.Type')
STACK_STATUS=$(echo "$stack_info" | jq -r 'if .Status == 1 then "active" elif .Status == 2 then "inactive" else "unknown" end')
# Get containers for this stack
# Containers are labeled with com.docker.compose.project or com.docker.stack.namespace
response=$(api_request GET "/api/endpoints/${ENDPOINT_ID}/docker/containers/json?all=true")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" != "200" ]]; then
echo "Error: Failed to get containers (HTTP $http_code)" >&2
exit 1
fi
# Filter containers belonging to this stack
# Check both compose project label and stack namespace label
containers=$(echo "$body" | jq --arg name "$STACK_NAME" '[.[] | select(
(.Labels["com.docker.compose.project"] == $name) or
(.Labels["com.docker.stack.namespace"] == $name)
)]')
container_count=$(echo "$containers" | jq 'length')
# Output based on format
if [[ "$FORMAT" == "json" ]]; then
jq -n \
--arg name "$STACK_NAME" \
--arg id "$STACK_ID" \
--arg status "$STACK_STATUS" \
--arg type "$STACK_TYPE" \
--argjson containers "$containers" \
'{
stack: {
name: $name,
id: ($id | tonumber),
status: $status,
type: (if $type == "1" then "swarm" elif $type == "2" then "compose" else "kubernetes" end)
},
containers: [$containers[] | {
name: .Names[0],
id: .Id[0:12],
image: .Image,
state: .State,
status: .Status,
created: .Created
}]
}'
exit 0
fi
# Table output
echo "Stack: $STACK_NAME (ID: $STACK_ID)"
echo "Status: $STACK_STATUS"
echo "Type: $(if [[ "$STACK_TYPE" == "1" ]]; then echo "swarm"; elif [[ "$STACK_TYPE" == "2" ]]; then echo "compose"; else echo "kubernetes"; fi)"
echo "Containers: $container_count"
echo ""
if [[ "$container_count" -gt 0 ]]; then
# Column widths mirror the printf format string below (12/38/30/10)
echo "CONTAINER ID NAME                                   IMAGE                          STATE      STATUS"
echo "------------ -------------------------------------- ------------------------------ ---------- ------"
echo "$containers" | jq -r '.[] | [
.Id[0:12],
.Names[0],
.Image,
.State,
.Status
] | @tsv' | while IFS=$'\t' read -r id name image state status; do
# Clean up container name (remove leading /)
name="${name#/}"
# Truncate long values so columns stay aligned
name="${name:0:38}"
image="${image:0:30}"
printf "%-12s %-38s %-30s %-10s %s\n" "$id" "$name" "$image" "$state" "$status"
done
else
echo "No containers found for this stack."
echo ""
echo "Note: If the stack was recently created or is inactive, containers may not exist yet."
fi

114
tools/portainer/stack-stop.sh Executable file
View File

@@ -0,0 +1,114 @@
#!/usr/bin/env bash
#
# stack-stop.sh - Stop a running Portainer stack
#
# Usage: stack-stop.sh -n <stack-name>
#
# Environment variables:
#   PORTAINER_URL - Portainer instance URL (e.g., https://portainer.example.com:9443)
#   PORTAINER_API_KEY - API access token
#
# Options:
#   -n name   Stack name (required)
#   -i id     Stack ID (alternative to -n)
#   -h        Show this help
set -euo pipefail

# Default values
STACK_NAME=""
STACK_ID=""

# Parse arguments
while getopts "n:i:h" opt; do
  case $opt in
    n) STACK_NAME="$OPTARG" ;;
    i) STACK_ID="$OPTARG" ;;
    h)
      # The usage text is the comment header of this very file
      head -16 "$0" | grep "^#" | sed 's/^# \?//'
      exit 0
      ;;
    *)
      echo "Usage: $0 -n <stack-name>" >&2
      exit 1
      ;;
  esac
done

# Validate environment
if [[ -z "${PORTAINER_URL:-}" ]]; then
  echo "Error: PORTAINER_URL environment variable not set" >&2
  exit 1
fi
if [[ -z "${PORTAINER_API_KEY:-}" ]]; then
  echo "Error: PORTAINER_API_KEY environment variable not set" >&2
  exit 1
fi
if [[ -z "$STACK_NAME" && -z "$STACK_ID" ]]; then
  echo "Error: Either -n <stack-name> or -i <stack-id> is required" >&2
  exit 1
fi

# Remove trailing slash from URL so endpoint concatenation stays clean
PORTAINER_URL="${PORTAINER_URL%/}"

# api_request <method> <endpoint>
# Authenticated request to the Portainer API. Prints the response body,
# then the HTTP status code alone on the final line.
api_request() {
  local method="$1"
  local endpoint="$2"
  curl -s -w "\n%{http_code}" -X "$method" \
    -H "X-API-Key: ${PORTAINER_API_KEY}" \
    "${PORTAINER_URL}${endpoint}"
}

# Resolve stack ID and endpoint: by name (-n) or directly by ID (-i)
if [[ -n "$STACK_NAME" ]]; then
  response=$(api_request GET "/api/stacks")
  http_code=$(echo "$response" | tail -n1)
  body=$(echo "$response" | sed '$d')
  if [[ "$http_code" != "200" ]]; then
    echo "Error: Failed to list stacks (HTTP $http_code)" >&2
    exit 1
  fi
  # first() guards against duplicate names producing multiple JSON objects
  stack_info=$(echo "$body" | jq --arg name "$STACK_NAME" 'first(.[] | select(.Name == $name)) // empty')
  if [[ -z "$stack_info" || "$stack_info" == "null" ]]; then
    echo "Error: Stack '$STACK_NAME' not found" >&2
    exit 1
  fi
  STACK_ID=$(echo "$stack_info" | jq -r '.Id')
  ENDPOINT_ID=$(echo "$stack_info" | jq -r '.EndpointId')
else
  response=$(api_request GET "/api/stacks/${STACK_ID}")
  http_code=$(echo "$response" | tail -n1)
  body=$(echo "$response" | sed '$d')
  if [[ "$http_code" != "200" ]]; then
    echo "Error: Failed to get stack (HTTP $http_code)" >&2
    exit 1
  fi
  stack_info="$body"
  STACK_NAME=$(echo "$stack_info" | jq -r '.Name')
  ENDPOINT_ID=$(echo "$stack_info" | jq -r '.EndpointId')
fi

echo "Stopping stack '$STACK_NAME' (ID: $STACK_ID)..."
response=$(api_request POST "/api/stacks/${STACK_ID}/stop?endpointId=${ENDPOINT_ID}")
http_code=$(echo "$response" | tail -n1)
body=$(echo "$response" | sed '$d')
if [[ "$http_code" == "200" ]]; then
  echo "Successfully stopped stack '$STACK_NAME'"
else
  echo "Error: Failed to stop stack (HTTP $http_code)" >&2
  # Pretty-print the API error if it is JSON; either way it belongs on stderr
  # (previously the jq output leaked to stdout)
  { echo "$body" | jq '.' 2>/dev/null || echo "$body"; } >&2
  exit 1
fi

15
tools/qa/debug-hook.sh Executable file
View File

@@ -0,0 +1,15 @@
#!/bin/bash
# Debug hook to identify available variables.
# Appends one diagnostic snapshot (args + selected environment) per
# invocation to /tmp/hook-debug.log.
#
# NOTE(review): /tmp/hook-debug.log is a fixed, predictable path in a
# world-writable directory — acceptable for throwaway debugging only.

# Write the whole snapshot through a single redirection instead of
# reopening the log file on every line.
{
  echo "=== Hook Debug ==="
  echo "Date: $(date)"
  echo "All args: $*"
  echo "Arg count: $#"
  echo "Arg 1: ${1:-EMPTY}"
  echo "Arg 2: ${2:-EMPTY}"
  echo "Arg 3: ${3:-EMPTY}"
  echo "Environment:"
  # grep exits non-zero when nothing matches; that is fine here
  env | grep -i file || true
  env | grep -i path || true
  env | grep -i tool || true
  echo "=================="
} >> /tmp/hook-debug.log 2>/dev/null

197
tools/qa/qa-hook-handler.sh Executable file
View File

@@ -0,0 +1,197 @@
#!/bin/bash
# Universal QA hook handler with robust error handling
# Location: ~/.config/mosaic/tools/qa/qa-hook-handler.sh
# Usage: qa-hook-handler.sh <tool-name> <file-path>
# Don't exit on unset variables initially to handle missing params gracefully
set -eo pipefail
# Fall back to the current directory when not inside a git work tree
PROJECT_ROOT=$(git rev-parse --show-toplevel 2>/dev/null || pwd)
TOOL_NAME="${1:-}"
FILE_PATH="${2:-}"
# Debug logging (best effort — the logs/ directory may not exist yet)
echo "[DEBUG] Script called with args: \$1='$1' \$2='$2'" >> "$PROJECT_ROOT/logs/qa-automation.log" 2>/dev/null || true
# Validate inputs
if [ -z "$FILE_PATH" ] || [ -z "$TOOL_NAME" ]; then
echo "[ERROR] Missing required parameters: tool='$TOOL_NAME' file='$FILE_PATH'" >&2
echo "[ERROR] Usage: $0 <tool> <file_path>" >&2
# Log to file if possible
echo "[$(date '+%Y-%m-%d %H:%M:%S')] [ERROR] Missing parameters - tool='$TOOL_NAME' file='$FILE_PATH'" >> "$PROJECT_ROOT/logs/qa-automation.log" 2>/dev/null || true
exit 1
fi
# Now enable strict mode after parameter handling
set -u
# Skip non-JS/TS files — this hook only targets JavaScript/TypeScript
if ! [[ "$FILE_PATH" =~ \.(ts|tsx|js|jsx|mjs|cjs)$ ]]; then
echo "[INFO] Skipping non-JS/TS file: $FILE_PATH"
exit 0
fi
# Generate naming components for the report filename
TIMESTAMP=$(date '+%Y%m%d-%H%M')
# Flatten the path into a single filename-safe token (slashes -> dashes)
SANITIZED_NAME=$(echo "$FILE_PATH" | sed 's/\//-/g' | sed 's/^-//' | sed 's/\.\./\./g')
ITERATION=1
# Log file for debugging
LOG_FILE="$PROJECT_ROOT/logs/qa-automation.log"
mkdir -p "$(dirname "$LOG_FILE")"
# detect_epic <file-path>
# Map a source-file path onto the Epic folder that owns it, based on
# known directory patterns. Prints the Epic name on stdout, or an empty
# line when the path belongs to no Epic (general QA).
detect_epic() {
    local path="$1"
    local match=""
    # Order matters: the adapter-specific backend path must be tested
    # before the general backend catch-all.
    if [[ "$path" == */apps/frontend/src/components/*adapter* || "$path" == */apps/frontend/src/views/*adapter* ]]; then
        match="E.3001-ADAPTER-CONFIG-SYSTEM"
    elif [[ "$path" == */services/backend/src/adapters/* ]]; then
        match="E.3001-ADAPTER-CONFIG-SYSTEM"
    elif [[ "$path" == */services/backend/src/* ]]; then
        match="E.2004-enterprise-data-synchronization-engine"
    elif [[ "$path" == */services/syncagent-debezium/* || "$path" == */services/syncagent-n8n/* ]]; then
        match="E.2004-enterprise-data-synchronization-engine"
    fi
    printf '%s\n' "$match"
}
# Detect Epic association (empty string means general, non-Epic QA)
EPIC_FOLDER=$(detect_epic "$FILE_PATH")
# setup_report_dir <epic> <project-root>
# Resolve (and create if needed) the pending-report directory for the
# given Epic, creating the Epic scaffolding when its parent exists.
#
# Globals:   LOG_FILE (appended)
# Arguments: $1 - Epic folder name ("" for general QA)
#            $2 - project root path
# Outputs:   the report directory path — and ONLY the path — on stdout;
#            informational [INFO]/[WARN] messages go to stderr (and the
#            log) so callers can capture the path without tail hacks.
setup_report_dir() {
    local epic="$1"
    local project_root="$2"
    local report_dir=""
    if [ -n "$epic" ]; then
        # Check if Epic directory exists
        local epic_dir="$project_root/docs/task-management/epics/active/$epic"
        if [ -d "$epic_dir" ]; then
            # Epic exists, use it
            report_dir="$epic_dir/reports/qa-automation/pending"
            echo "[INFO] Using existing Epic: $epic" | tee -a "$LOG_FILE" >&2
        else
            # Epic doesn't exist; create it only if the parent tree exists
            local epic_parent="$project_root/docs/task-management/epics/active"
            if [ -d "$epic_parent" ]; then
                echo "[WARN] Epic $epic not found, creating structure..." | tee -a "$LOG_FILE" >&2
                mkdir -p "$epic_dir/reports/qa-automation/pending" \
                         "$epic_dir/reports/qa-automation/in-progress" \
                         "$epic_dir/reports/qa-automation/done" \
                         "$epic_dir/reports/qa-automation/escalated"
                # Create Epic README (unquoted EOF: $epic and $(date) expand)
                cat > "$epic_dir/README.md" << EOF
# Epic: $epic
**Status**: Active
**Created**: $(date '+%Y-%m-%d')
**Purpose**: Auto-created by QA automation system
## Description
This Epic was automatically created to organize QA remediation reports.
## QA Automation
- Reports are stored in \`reports/qa-automation/\`
- Pending issues: \`reports/qa-automation/pending/\`
- Escalated issues: \`reports/qa-automation/escalated/\`
EOF
                report_dir="$epic_dir/reports/qa-automation/pending"
                echo "[INFO] Created Epic structure: $epic" | tee -a "$LOG_FILE" >&2
            else
                # Epic structure doesn't exist, fall back to general
                echo "[WARN] Epic structure not found, using general QA" | tee -a "$LOG_FILE" >&2
                report_dir="$project_root/docs/reports/qa-automation/pending"
            fi
        fi
    else
        # No Epic association, use general
        report_dir="$project_root/docs/reports/qa-automation/pending"
        echo "[INFO] No Epic association, using general QA" | tee -a "$LOG_FILE" >&2
    fi
    # Ensure directory exists
    mkdir -p "$report_dir"
    echo "$report_dir"
}
# Setup report directory (capture only the last line which is the path —
# tail -1 discards any informational lines the function prints to stdout)
REPORT_DIR=$(setup_report_dir "$EPIC_FOLDER" "$PROJECT_ROOT" | tail -1)
# check_existing_iteration <dir> <name> <timestamp>
# Scan <dir> for earlier reports with the same sanitized name and
# timestamp and print the next iteration number (max found + 1, or 1
# when none exist).
check_existing_iteration() {
    local dir="$1"
    local name="$2"
    local timestamp="$3"
    local max_iter=0
    local file iter
    for file in "$dir"/${name}_${timestamp}_*_remediation_needed.md; do
        # With no match the unexpanded glob remains; -f filters it out
        [ -f "$file" ] || continue
        # Extract iteration number; [0-9][0-9]* instead of GNU-only \+
        # so the BRE also works with BSD sed
        iter=$(echo "$file" | sed 's/.*_\([0-9][0-9]*\)_remediation_needed\.md$/\1/')
        # Guard the comparison: if sed did not match, iter is the whole
        # filename and -gt would abort the script
        if [ "$iter" -gt "$max_iter" ] 2>/dev/null; then
            max_iter=$iter
        fi
    done
    echo $((max_iter + 1))
}
# Next iteration number for this file+timestamp pair
ITERATION=$(check_existing_iteration "$REPORT_DIR" "$SANITIZED_NAME" "$TIMESTAMP")
# Check if we're at max iterations
if [ "$ITERATION" -gt 5 ]; then
echo "[ERROR] Max iterations (5) reached for $FILE_PATH" | tee -a "$LOG_FILE"
# Move to escalated immediately (rewrite the pending/ segment of the path)
REPORT_DIR="${REPORT_DIR/pending/escalated}"
mkdir -p "$REPORT_DIR"
ITERATION=5 # Cap at 5
fi
# Create report filename
REPORT_FILE="${SANITIZED_NAME}_${TIMESTAMP}_${ITERATION}_remediation_needed.md"
REPORT_PATH="$REPORT_DIR/$REPORT_FILE"
# Log the action
echo "[$(date '+%Y-%m-%d %H:%M:%S')] QA Hook: $TOOL_NAME on $FILE_PATH" | tee -a "$LOG_FILE"
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Creating report: $REPORT_PATH" | tee -a "$LOG_FILE"
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Epic: ${EPIC_FOLDER:-general}, Iteration: $ITERATION" | tee -a "$LOG_FILE"
# Create a task file for the QA agent instead of calling Claude directly
# (unquoted EOF: the $VARS and $(date) below expand at write time)
cat > "$REPORT_PATH" << EOF
# QA Remediation Report
**File:** $FILE_PATH
**Tool Used:** $TOOL_NAME
**Epic:** ${EPIC_FOLDER:-general}
**Iteration:** $ITERATION
**Generated:** $(date '+%Y-%m-%d %H:%M:%S')
## Status
Pending QA validation
## Next Steps
This report was created by the QA automation hook.
To process this report, run:
\`\`\`bash
claude -p "Use Task tool to launch universal-qa-agent for report: $REPORT_PATH"
\`\`\`
EOF
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Created report template at: $REPORT_PATH" | tee -a "$LOG_FILE"

59
tools/qa/qa-hook-stdin.sh Executable file
View File

@@ -0,0 +1,59 @@
#!/bin/bash
# QA Hook handler that reads from stdin
# Location: ~/.config/mosaic/tools/qa/qa-hook-stdin.sh
# Reads the hook's JSON payload from stdin, extracts the tool name and
# edited file path, and delegates to qa-hook-handler.sh.
set -eo pipefail
PROJECT_ROOT=$(git rev-parse --show-toplevel 2>/dev/null || pwd)
LOG_FILE="$PROJECT_ROOT/logs/qa-automation.log"
mkdir -p "$(dirname "$LOG_FILE")"
# Read JSON from stdin
JSON_INPUT=$(cat)
# Log raw input for debugging
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Hook triggered with JSON:" >> "$LOG_FILE"
echo "$JSON_INPUT" >> "$LOG_FILE"
# Extract file path using jq if available, otherwise use grep/sed
if command -v jq &> /dev/null; then
# Try multiple paths - tool_input.file_path is the actual structure from Claude Code
FILE_PATH=$(echo "$JSON_INPUT" | jq -r '.tool_input.file_path // .tool_response.filePath // .file_path // .path // .file // empty' 2>/dev/null || echo "")
TOOL_NAME=$(echo "$JSON_INPUT" | jq -r '.tool_name // .tool // .matcher // "Edit"' 2>/dev/null || echo "Edit")
else
# Fallback parsing without jq - search in tool_input first
# NOTE(review): this regex-based JSON parsing is best-effort and will
# break on escaped quotes inside values; jq is the supported path
FILE_PATH=$(echo "$JSON_INPUT" | grep -o '"tool_input"[^}]*}' | grep -o '"file_path"[[:space:]]*:[[:space:]]*"[^"]*"' | sed 's/.*"file_path"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/' | head -1)
if [ -z "$FILE_PATH" ]; then
FILE_PATH=$(echo "$JSON_INPUT" | grep -o '"file_path"[[:space:]]*:[[:space:]]*"[^"]*"' | sed 's/.*"file_path"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/' | head -1)
fi
if [ -z "$FILE_PATH" ]; then
FILE_PATH=$(echo "$JSON_INPUT" | grep -o '"filePath"[[:space:]]*:[[:space:]]*"[^"]*"' | sed 's/.*"filePath"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/' | head -1)
fi
TOOL_NAME=$(echo "$JSON_INPUT" | grep -o '"tool_name"[[:space:]]*:[[:space:]]*"[^"]*"' | sed 's/.*"tool_name"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/' | head -1)
if [ -z "$TOOL_NAME" ]; then
TOOL_NAME=$(echo "$JSON_INPUT" | grep -o '"tool"[[:space:]]*:[[:space:]]*"[^"]*"' | sed 's/.*"tool"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/' | head -1)
fi
[ -z "$TOOL_NAME" ] && TOOL_NAME="Edit"
fi
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Extracted: tool=$TOOL_NAME file=$FILE_PATH" >> "$LOG_FILE"
# Validate we got a file path
if [ -z "$FILE_PATH" ]; then
echo "[$(date '+%Y-%m-%d %H:%M:%S')] [ERROR] Could not extract file path from JSON" >> "$LOG_FILE"
exit 0 # Exit successfully to not block Claude
fi
# Skip non-JS/TS files
if ! [[ "$FILE_PATH" =~ \.(ts|tsx|js|jsx|mjs|cjs)$ ]]; then
echo "[$(date '+%Y-%m-%d %H:%M:%S')] [INFO] Skipping non-JS/TS file: $FILE_PATH" >> "$LOG_FILE"
exit 0
fi
# Call the main QA handler with extracted parameters
if [ -f ~/.config/mosaic/tools/qa/qa-hook-handler.sh ]; then
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Calling QA handler for $FILE_PATH" >> "$LOG_FILE"
~/.config/mosaic/tools/qa/qa-hook-handler.sh "$TOOL_NAME" "$FILE_PATH" 2>&1 | tee -a "$LOG_FILE"
else
echo "[$(date '+%Y-%m-%d %H:%M:%S')] [ERROR] QA handler script not found" >> "$LOG_FILE"
fi

19
tools/qa/qa-hook-wrapper.sh Executable file
View File

@@ -0,0 +1,19 @@
#!/bin/bash
# Wrapper script that handles hook invocation more robustly.
# When the caller supplies no file path, fall back to the JS/TS file
# modified within the last minute, then delegate to the QA handler.

# Fallback candidate: the most recently touched JS/TS file, if any
recent_file=$(find . -type f \( -name "*.ts" -o -name "*.tsx" -o -name "*.js" -o -name "*.jsx" \) -mmin -1 2>/dev/null | head -1)

tool_name="${1:-Edit}"
file_path="${2:-$recent_file}"

# Record the attempt (best effort; logs/ may not exist in this cwd)
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Hook wrapper called: tool=$tool_name file=$file_path" >> logs/qa-automation.log 2>/dev/null || true

if [ -z "$file_path" ]; then
  # Nothing to check — neither an argument nor a recent file was found
  echo "[$(date '+%Y-%m-%d %H:%M:%S')] No file path available for QA check" >> logs/qa-automation.log 2>/dev/null || true
else
  # Delegate to the real QA handler
  ~/.config/mosaic/tools/qa/qa-hook-handler.sh "$tool_name" "$file_path"
fi

91
tools/qa/qa-queue-monitor.sh Executable file
View File

@@ -0,0 +1,91 @@
#!/bin/bash
# Monitor QA queues with graceful handling of missing directories
# Location: ~/.config/mosaic/tools/qa/qa-queue-monitor.sh
# Read-only status report: prints per-Epic and general queue counts,
# escalated reports, and recent log activity.
PROJECT_ROOT=$(git rev-parse --show-toplevel 2>/dev/null || pwd)
echo "=== QA Automation Queue Status ==="
echo "Project: $(basename "$PROJECT_ROOT")"
echo "Time: $(date '+%Y-%m-%d %H:%M:%S')"
echo
# count_files <dir>
# Print the number of (non-hidden) entries in <dir>, or 0 when the
# directory does not exist. Uses a glob array instead of `ls | wc -l`,
# which miscounts names containing newlines and pads its output on BSD.
count_files() {
    local dir="$1"
    local entries=()
    if [ -d "$dir" ]; then
        entries=("$dir"/*)
        # Without nullglob an unmatched glob stays literal; drop it
        [ -e "${entries[0]}" ] || entries=()
        echo "${#entries[@]}"
    else
        echo "0"
    fi
}
# Check Epic-specific queues under docs/task-management/epics/active/
EPIC_BASE="$PROJECT_ROOT/docs/task-management/epics/active"
if [ -d "$EPIC_BASE" ]; then
for EPIC_DIR in "$EPIC_BASE"/*/; do
if [ -d "$EPIC_DIR" ]; then
EPIC_NAME=$(basename "$EPIC_DIR")
QA_DIR="$EPIC_DIR/reports/qa-automation"
if [ -d "$QA_DIR" ]; then
echo "Epic: $EPIC_NAME"
echo "  Pending:     $(count_files "$QA_DIR/pending")"
echo "  In Progress: $(count_files "$QA_DIR/in-progress")"
echo "  Done:        $(count_files "$QA_DIR/done")"
echo "  Escalated:   $(count_files "$QA_DIR/escalated")"
# Show escalated files if any
if [ -d "$QA_DIR/escalated" ] && [ "$(ls "$QA_DIR/escalated" 2>/dev/null | wc -l)" -gt 0 ]; then
echo "  ⚠️  Escalated Issues:"
for file in "$QA_DIR/escalated"/*_remediation_needed.md; do
# -f also filters out the literal unmatched glob
if [ -f "$file" ]; then
echo "    - $(basename "$file")"
fi
done
fi
echo
fi
fi
done
else
echo "[WARN] No Epic structure found at: $EPIC_BASE"
echo
fi
# Check general (non-Epic) queue
GENERAL_DIR="$PROJECT_ROOT/docs/reports/qa-automation"
if [ -d "$GENERAL_DIR" ]; then
echo "General (Non-Epic):"
echo "  Pending:     $(count_files "$GENERAL_DIR/pending")"
echo "  In Progress: $(count_files "$GENERAL_DIR/in-progress")"
echo "  Done:        $(count_files "$GENERAL_DIR/done")"
echo "  Escalated:   $(count_files "$GENERAL_DIR/escalated")"
# Show escalated files
if [ -d "$GENERAL_DIR/escalated" ] && [ "$(ls "$GENERAL_DIR/escalated" 2>/dev/null | wc -l)" -gt 0 ]; then
echo "  ⚠️  Escalated Issues:"
for file in "$GENERAL_DIR/escalated"/*_remediation_needed.md; do
if [ -f "$file" ]; then
echo "    - $(basename "$file")"
fi
done
fi
else
echo "[INFO] No general QA directory found (will be created on first use)"
fi
echo
echo "=== Recent Activity ==="
# Show last 5 log entries
if [ -f "$PROJECT_ROOT/logs/qa-automation.log" ]; then
tail -5 "$PROJECT_ROOT/logs/qa-automation.log"
else
echo "No activity log found"
fi
echo
echo "=== Queue Processing Tips ==="
echo "• View pending reports: ls -la $PROJECT_ROOT/docs/reports/qa-automation/pending/"
echo "• Check stale reports: find $PROJECT_ROOT -path '*/in-progress/*' -mmin +60"
echo "• Manual escalation: mv {report} {path}/escalated/"
echo "• View full log: tail -f $PROJECT_ROOT/logs/qa-automation.log"

View File

@@ -0,0 +1,66 @@
#!/bin/bash
# Universal remediation hook handler with error recovery
# Location: ~/.config/mosaic/tools/qa/remediation-hook-handler.sh
# Usage: remediation-hook-handler.sh <pending-report.md>
# Moves a pending QA report to in-progress and launches the remediation
# agent against it.
set -euo pipefail
PROJECT_ROOT=$(git rev-parse --show-toplevel 2>/dev/null || pwd)
REPORT_FILE="${1:-}"
# Validate input
if [ -z "$REPORT_FILE" ] || [ ! -f "$REPORT_FILE" ]; then
echo "[ERROR] Invalid or missing report file: $REPORT_FILE" >&2
exit 1
fi
LOG_FILE="$PROJECT_ROOT/logs/qa-automation.log"
mkdir -p "$(dirname "$LOG_FILE")"
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Remediation triggered for: $REPORT_FILE" | tee -a "$LOG_FILE"
# Extract components from path and filename
BASE_NAME=$(basename "$REPORT_FILE" _remediation_needed.md)
DIR_PATH=$(dirname "$REPORT_FILE")
# Validate directory structure — only reports in a pending/ queue may be picked up
if [[ ! "$DIR_PATH" =~ /pending$ ]]; then
echo "[ERROR] Report not in pending directory: $DIR_PATH" | tee -a "$LOG_FILE"
exit 1
fi
# Setup in-progress directory (rewrite the pending/ path segment)
IN_PROGRESS_DIR="${DIR_PATH/pending/in-progress}"
# Handle missing in-progress directory
if [ ! -d "$IN_PROGRESS_DIR" ]; then
echo "[WARN] Creating missing in-progress directory: $IN_PROGRESS_DIR" | tee -a "$LOG_FILE"
mkdir -p "$IN_PROGRESS_DIR"
# Also ensure done and escalated exist
mkdir -p "${DIR_PATH/pending/done}"
mkdir -p "${DIR_PATH/pending/escalated}"
fi
# Move from pending to in-progress (with error handling)
if ! mv "$REPORT_FILE" "$IN_PROGRESS_DIR/" 2>/dev/null; then
echo "[ERROR] Failed to move report to in-progress" | tee -a "$LOG_FILE"
# Check if already in progress — another invocation may have claimed it
if [ -f "$IN_PROGRESS_DIR/$(basename "$REPORT_FILE")" ]; then
echo "[WARN] Report already in progress, skipping" | tee -a "$LOG_FILE"
exit 0
fi
exit 1
fi
# Create actions file path (the agent is expected to write this file)
ACTIONS_FILE="${BASE_NAME}_remediation_actions.md"
ACTIONS_PATH="$IN_PROGRESS_DIR/$ACTIONS_FILE"
echo "[$(date '+%Y-%m-%d %H:%M:%S')] Starting remediation: $ACTIONS_PATH" | tee -a "$LOG_FILE"
# Trigger remediation agent (blocking call; output mirrored to the log)
claude -p "Use Task tool to launch auto-remediation-agent for:
- Remediation Report: $IN_PROGRESS_DIR/$(basename "$REPORT_FILE")
- Actions File: $ACTIONS_PATH
- Max Iterations: 5
Process the report, create action plan using Sequential Thinking, research with Context7, and execute fixes systematically." 2>&1 | tee -a "$LOG_FILE"

189
tools/quality/PHILOSOPHY.md Normal file
View File

@@ -0,0 +1,189 @@
# Why Hard Rails Matter
## The Problem We Discovered
In AI-assisted development, we found:
1. **Process adherence fails** - Agents claim to do code review but miss critical issues
2. **Manual review insufficient** - Even AI-assisted review missed hardcoded passwords, SQL injection
3. **Scale breaks quality** - 50 issues in a single patch release despite explicit QA processes
### Real-World Case Study
**Production patch validation:**
After explicit code review and QA processes, we discovered **50 issues**:
**Security Issues (9):**
- 4 hardcoded passwords committed to repository
- 1 SQL injection vulnerability
- World-readable .env files
- XSS vulnerabilities (CSP unsafe-inline)
**Type Safety Issues (11):**
- TypeScript strict mode DISABLED (`"strict": false`)
- ESLint explicitly ALLOWING any types (`no-explicit-any: 'off'`)
- Missing return types
- Type assertion overuse
**Silent Failures (9):**
- Errors swallowed in try/catch blocks
- Functions returning wrong types on error
- No error logging
- Network failures treated as false instead of errors
**Test Coverage Gaps (10):**
- No test coverage requirements
- No testing framework setup
- Code shipped with 0% coverage
**Build Failures (2):**
- Code committed that doesn't compile
- Tests committed that fail
**Dependency Issues (6):**
- Critical CVEs not caught
- Version conflicts between packages
## The Solution: Mechanical Enforcement
Don't **ask** agents to:
- "Please do code review"
- "Make sure to run tests"
- "Check for security issues"
Instead, **BLOCK** commits that:
- Have type errors
- Contain hardcoded secrets
- Don't pass tests
- Have security vulnerabilities
### Why This Works
**Example: Type Safety**
**Process-based (fails):**
```
Human: "Please avoid using 'any' types"
Agent: "I'll make sure to use proper types"
*Agent uses any types anyway*
```
**Mechanically enforced (works):**
```
Agent writes: const x: any = 123;
Git hook runs: ❌ Error: no-explicit-any
Commit blocked
Agent must fix to proceed
```
The agent doesn't get to **claim** it followed the process. The automated gate **determines** if code is acceptable.
## Design Principles
### 1. Fail Fast
Detect issues at commit time, not in CI, not in code review, not in production.
**Timeline:**
- ⚡ Commit time: Type errors, lint errors, secrets → **BLOCKED**
- 🔄 CI time: Build failures, test failures, CVEs → **BLOCKED**
- 👀 Code review: Architecture, design, business logic
- 🚀 Production: (Issues should never reach here)
### 2. Non-Negotiable
No agent can bypass enforcement. No "skip hooks" flag. No emergency override.
If the code doesn't pass gates, it doesn't get committed. Period.
### 3. Portable
Same enforcement across:
- All projects
- All developers (human + AI)
- All environments (local, CI, production)
### 4. Minimal Friction
Auto-fix where possible:
- Prettier formats code automatically
- ESLint --fix corrects simple issues
- Only block when can't auto-fix
### 5. Clear Feedback
When enforcement blocks a commit, tell the agent:
- ❌ What's wrong (type error, lint violation, etc.)
- 📍 Where it is (file:line)
- ✅ How to fix it (expected type, remove 'any', etc.)
## Impact Prediction
Based on a 50-issue production analysis:
| Phase | Enforcement | Issues Prevented |
|-------|-------------|------------------|
| **Phase 1** | Pre-commit + strict mode + ESLint | 25 of 50 (50%) |
| **Phase 2** | + CI expansion + npm audit | 35 of 50 (70%) |
| **Phase 3** | + OWASP + coverage gates | 45 of 50 (90%) |
**The remaining 10%** require human judgment:
- Architecture decisions
- Business logic correctness
- User experience
- Performance optimization
## Agent Behavior Evolution
### Before Quality Rails
```
Agent: "I've completed the feature and run all tests"
Reality: Code has type errors, no tests written, hardcoded password
Result: 50 issues discovered in code review
```
### After Quality Rails
```
Agent writes code with 'any' type
Git hook: ❌ no-explicit-any
Agent rewrites with proper type
Git hook: ✅ Pass
Agent writes code with hardcoded password
Git hook: ❌ Secret detected
Agent moves to environment variable
Git hook: ✅ Pass
Agent commits without tests
CI: ❌ Coverage below 80%
Agent writes tests
CI: ✅ Pass
```
**The agent learns:** Good code passes gates, bad code is rejected.
## Why This Matters for AI Development
AI agents are **consistently bad** at self-enforcement:
- They claim to follow processes
- They **believe** they're following processes
- Output proves otherwise
But AI agents are **good** at responding to mechanical feedback:
- Clear error messages
- Specific line numbers
- Concrete fix requirements
Quality Rails exploits this strength and avoids the weakness.
## Conclusion
**Process compliance:** Agents claim → Output fails
**Mechanical enforcement:** Gates determine → Output succeeds
This is not philosophical. This is pragmatic. Based on 50 real issues from production code.
Quality Rails exists because **process-based quality doesn't work at scale with AI agents.**
Mechanical enforcement does.

166
tools/quality/README.md Normal file
View File

@@ -0,0 +1,166 @@
# Quality Rails
Portable quality enforcement for TypeScript, Python, and Node.js projects.
## 🎯 What This Prevents
Based on real-world validation of 50 issues in a production codebase:
- ❌ Hardcoded passwords
- ❌ SQL injection vulnerabilities
- ❌ Type safety violations (`any` types)
- ❌ Missing test coverage
- ❌ Build failures
- ❌ Dependency vulnerabilities
**70% of these issues are prevented mechanically with Quality Rails.**
## ⚡ Quick Start (Mosaic)
### New Project
```bash
# Apply template from Mosaic
~/.config/mosaic/bin/mosaic-quality-apply --template typescript-node --target /path/to/project
# Install dependencies
cd /path/to/project
npm install
# Initialize git hooks
npx husky install
# Verify enforcement is working
~/.config/mosaic/bin/mosaic-quality-verify --target /path/to/project
```
### Existing Project
```bash
# Same as above - works for new or existing projects
~/.config/mosaic/bin/mosaic-quality-apply --template typescript-node --target /path/to/existing-project
```
## 🛡️ What You Get
**TypeScript strict mode** - All type checks enabled
**ESLint blocking `any` types** - no-explicit-any: error
**Pre-commit hooks** - Type check + lint + format before commit
**Secret scanning** - Block hardcoded passwords/API keys
**CI/CD templates** - Woodpecker, GitHub Actions, GitLab
**Test coverage enforcement** - 80% threshold
**Security scanning** - npm audit, OWASP checks
## 📦 Available Templates
| Template | Language | Framework | Status |
|----------|----------|-----------|--------|
| `typescript-node` | TypeScript | Node.js | ✅ Ready |
| `typescript-nextjs` | TypeScript | Next.js | ✅ Ready |
| `monorepo` | TypeScript | TurboRepo + pnpm | ✅ Ready |
| `python` | Python | - | 🚧 Coming Soon |
### Monorepo Template
Perfect for projects combining **Next.js frontend** + **NestJS backend** in one repository.
Features:
- 🎯 **Multi-package aware** - lint-staged only checks changed packages
- ⚡ **TurboRepo caching** - Faster builds and tests
- 🔀 **Parallel dev servers** - Run web + API simultaneously
- 📦 **pnpm workspaces** - Efficient dependency management
- 🛡️ **Package-specific rules** - Next.js and NestJS get appropriate ESLint configs
Example structure:
```
monorepo/
├── apps/
│ ├── web/ # Next.js frontend
│ └── api/ # NestJS backend
└── packages/
├── shared-types/
├── ui/
└── config/
```
## 🧪 How It Works
### Pre-Commit (Local Enforcement)
```bash
# You try to commit code with a type error
git commit -m "Add feature"
# Quality rails blocks it:
❌ Type error: Type 'number' is not assignable to type 'string'
❌ ESLint: Unexpected any. Specify a different type.
✋ Commit blocked - fix errors and try again
```
### CI/CD (Remote Enforcement)
```yaml
# Woodpecker pipeline runs:
✓ npm audit (dependency security)
✓ eslint (code quality)
✓ tsc --noEmit (type checking)
✓ jest --coverage (tests + coverage)
✓ npm run build (compilation)
# If any step fails, merge is blocked
```
## 🎓 Philosophy
**Process compliance doesn't work.**
Instructing AI agents to "do code review" or "run tests" fails. They claim to follow processes but output quality doesn't match claims.
**Mechanical enforcement works.**
Quality rails don't ask agents to follow processes. They **block commits** that don't pass automated checks.
- Type errors? → **Commit blocked**
- Hardcoded secrets? → **Commit blocked**
- Test failures? → **Commit blocked**
- Missing coverage? → **Commit blocked**
This works for **any agent runtime** (Codex, Claude, OpenCode, Gemini, etc.) because enforcement is mechanical, not instructional.
[Read more: PHILOSOPHY.md](./PHILOSOPHY.md)
## 📖 Documentation
- [TypeScript Setup Guide](./docs/TYPESCRIPT-SETUP.md)
- [CI/CD Configuration](./docs/CI-SETUP.md)
## 🔧 Scripts
| Script | Purpose |
|--------|---------|
| `scripts/install.sh` | Install template to project (Linux/Mac) |
| `scripts/install.ps1` | Install template to project (Windows) |
| `scripts/verify.sh` | Verify enforcement is working (Linux/Mac) |
| `scripts/verify.ps1` | Verify enforcement is working (Windows) |
## 🚀 Roadmap
- [x] TypeScript/Node template
- [x] Pre-commit enforcement (husky + lint-staged)
- [x] CI/CD templates (Woodpecker, GitHub Actions)
- [x] Installation scripts
- [x] Verification testing
- [x] Next.js template
- [x] Monorepo template
- [ ] Python template
- [ ] Coverage visualization
- [ ] IDE integration (VSCode extension)
## 🤝 Contributing
Quality Rails is based on lessons learned from real production codebases. Contributions welcome!
## 📝 License
MIT License - See LICENSE file for details
## 🙏 Credits
Built to solve real problems discovered in AI-assisted development workflows.
Based on validation findings from a production patch milestone.

View File

@@ -0,0 +1,174 @@
# CI/CD Configuration Guide
Configure Woodpecker CI, GitHub Actions, or GitLab CI for quality enforcement.
## Woodpecker CI
Quality Rails includes `.woodpecker.yml` template.
### Pipeline Stages
1. **Install** - Dependencies
2. **Security Audit** - npm audit for CVEs
3. **Lint** - ESLint checks
4. **Type Check** - TypeScript compilation
5. **Test** - Jest with coverage thresholds
6. **Build** - Production build
### Configuration
No additional configuration needed. Push to repository and Woodpecker runs automatically.
### Blocking Merges
Configure Woodpecker to block merges on pipeline failure:
1. Repository Settings → Protected Branches
2. Require Woodpecker pipeline to pass
## GitHub Actions
Copy from `templates/typescript-node/.github/workflows/quality.yml`:
```yaml
name: Quality Enforcement
on: [push, pull_request]
jobs:
quality:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: 20
- run: npm ci
- run: npm audit --audit-level=high
- run: npm run lint
- run: npm run type-check
- run: npm run test -- --coverage
- run: npm run build
```
### Blocking Merges
1. Repository Settings → Branches → Branch protection rules
2. Require status checks to pass: `quality`
## GitLab CI
Copy from `templates/typescript-node/.gitlab-ci.yml`:
```yaml
stages:
- install
- audit
- quality
- build
install:
stage: install
script:
- npm ci
audit:
stage: audit
script:
- npm audit --audit-level=high
lint:
stage: quality
script:
- npm run lint
typecheck:
stage: quality
script:
- npm run type-check
test:
stage: quality
script:
- npm run test -- --coverage
build:
stage: build
script:
- npm run build
```
## Coverage Enforcement
Configure Jest coverage thresholds in `package.json`:
```json
{
"jest": {
"coverageThreshold": {
"global": {
"branches": 80,
"functions": 80,
"lines": 80,
"statements": 80
}
}
}
}
```
CI will fail if coverage drops below threshold.
## Security Scanning
### npm audit
Runs automatically in CI. Adjust sensitivity:
```bash
npm audit --audit-level=moderate # Block moderate+
npm audit --audit-level=high # Block high+critical only
npm audit --audit-level=critical # Block critical only
```
### Snyk Integration
Add to CI for additional security:
```yaml
- run: npx snyk test
```
Requires `SNYK_TOKEN` environment variable.
## Notification Setup
### Woodpecker
Configure in Woodpecker UI:
- Slack/Discord webhooks
- Email notifications
- Status badges
### GitHub Actions
Add notification step:
```yaml
- name: Notify on failure
if: failure()
run: |
curl -X POST $WEBHOOK_URL -d "Build failed"
```
## Troubleshooting
**Pipeline fails but pre-commit passed:**
- CI runs all packages, pre-commit only checks changed files
- Fix issues in all packages, not just changed files
**npm audit blocks on low-severity:**
- Adjust `--audit-level` to `moderate` or `high`
**Coverage threshold too strict:**
- Lower thresholds in package.json
- Add coverage exceptions for specific files

View File

@@ -0,0 +1,164 @@
# TypeScript Project Setup Guide
Step-by-step guide to add Quality Rails to a TypeScript project.
## Prerequisites
- Node.js 18+ and npm/pnpm
- Git repository initialized
- TypeScript project (or create with `npm init` + `tsc --init`)
## Installation
### 1. Clone Quality Rails
```bash
git clone git@git.mosaicstack.dev:mosaic/quality-rails.git
```
### 2. Run Installation Script
```bash
# From your project directory
../quality-rails/scripts/install.sh --template typescript-node --target .
```
This copies:
- `.husky/pre-commit` - Git hooks
- `.lintstagedrc.js` - Pre-commit checks
- `.eslintrc.js` - Strict ESLint rules
- `tsconfig.json` - TypeScript strict mode
- `.woodpecker.yml` - CI pipeline
### 3. Install Dependencies
Add to your `package.json`:
```json
{
"scripts": {
"lint": "eslint 'src/**/*.{ts,tsx}' --max-warnings=0",
"type-check": "tsc --noEmit",
"test": "jest",
"build": "tsc",
"prepare": "husky install"
},
"devDependencies": {
"@typescript-eslint/eslint-plugin": "^7.0.0",
"@typescript-eslint/parser": "^7.0.0",
"eslint": "^9.0.0",
"eslint-config-prettier": "^9.0.0",
"eslint-plugin-prettier": "^5.0.0",
"eslint-plugin-security": "^3.0.0",
"husky": "^9.1.7",
"jest": "^29.0.0",
"lint-staged": "^16.2.7",
"prettier": "^3.0.0",
"typescript": "^5.6.0"
}
}
```
Then run:
```bash
npm install
npx husky install
```
### 4. Verify Enforcement
```bash
../quality-rails/scripts/verify.sh
```
Should output:
```
✅ PASS: Type errors blocked
✅ PASS: 'any' types blocked
✅ PASS: Lint errors blocked
```
## What Gets Enforced
### TypeScript Strict Mode
All strict checks enabled in `tsconfig.json`:
```json
{
"compilerOptions": {
"strict": true,
"noImplicitAny": true,
"strictNullChecks": true,
"strictFunctionTypes": true,
"strictBindCallApply": true,
"strictPropertyInitialization": true,
"noImplicitThis": true,
"alwaysStrict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"noImplicitReturns": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedIndexedAccess": true,
"noImplicitOverride": true,
"noPropertyAccessFromIndexSignature": true
}
}
```
### ESLint Rules
Key rules in `.eslintrc.js`:
```javascript
{
'@typescript-eslint/no-explicit-any': 'error', // Block 'any' types
'@typescript-eslint/explicit-function-return-type': 'warn', // Require return types
'@typescript-eslint/no-floating-promises': 'error', // Catch unhandled promises
'@typescript-eslint/no-misused-promises': 'error', // Prevent promise misuse
}
```
### Pre-Commit Checks
On every `git commit`, runs:
1. ESLint with --max-warnings=0
2. TypeScript type check
3. Prettier formatting
4. Secret scanning (if git-secrets installed)
If any fail → **commit blocked**.
## Troubleshooting
### "husky - pre-commit hook exited with code 1"
This means pre-commit checks failed. Read the error output:
```
src/example.ts:5:14 - error TS2322: Type 'number' is not assignable to type 'string'
```
Fix the error and commit again.
### "Cannot find module '@typescript-eslint/parser'"
Dependencies not installed:
```bash
npm install
```
### Pre-commit hooks not running
Husky not initialized:
```bash
npx husky install
```
## Customization
See [CUSTOMIZATION.md](./CUSTOMIZATION.md) for adjusting strictness levels.
## CI/CD Setup
See [CI-SETUP.md](./CI-SETUP.md) for Woodpecker/GitHub Actions configuration.

View File

@@ -0,0 +1,53 @@
# Quality Rails Installation Script (Windows)
#
# Copies a quality-enforcement template (git hooks, lint/TS configs, CI
# pipeline) into a target project directory.
#
# Usage: .\install.ps1 -Template typescript-node [-TargetDir C:\path\to\project]
param(
    [Parameter(Mandatory=$true)]
    [string]$Template,

    [Parameter(Mandatory=$false)]
    [string]$TargetDir = "."
)

$ErrorActionPreference = "Stop"

$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
$RepoRoot = Split-Path -Parent $ScriptDir
$TemplateDir = Join-Path $RepoRoot "templates\$Template"

if (-not (Test-Path $TemplateDir)) {
    # With $ErrorActionPreference = "Stop", Write-Error terminates the script
    # immediately, so the original's "Available templates" hint printed after
    # Write-Error was dead code. Print the diagnostic and hint, then fail.
    Write-Host "Error: Template '$Template' not found at $TemplateDir" -ForegroundColor Red
    Write-Host "Available templates: typescript-node, typescript-nextjs, python, monorepo"
    exit 1
}

Write-Host "Installing Quality Rails: $Template"
Write-Host "Target directory: $TargetDir"
Write-Host ""

# Copy template files. Each template ships only a subset of these files, so
# missing sources are skipped deliberately (-ErrorAction SilentlyContinue =
# best-effort copy, mirroring the `|| true` pattern in install.sh).
Write-Host "Copying template files..."
if (Test-Path "$TemplateDir\.husky") {
    Copy-Item -Path "$TemplateDir\.husky" -Destination $TargetDir -Recurse -Force
}
Copy-Item -Path "$TemplateDir\.lintstagedrc.js" -Destination $TargetDir -Force -ErrorAction SilentlyContinue
Copy-Item -Path "$TemplateDir\.eslintrc.strict.js" -Destination "$TargetDir\.eslintrc.js" -Force -ErrorAction SilentlyContinue
Copy-Item -Path "$TemplateDir\tsconfig.strict.json" -Destination "$TargetDir\tsconfig.json" -Force -ErrorAction SilentlyContinue
Copy-Item -Path "$TemplateDir\.woodpecker.yml" -Destination $TargetDir -Force -ErrorAction SilentlyContinue
Write-Host "✓ Files copied"

# Dependencies cannot be merged automatically; point the user at the snippet.
if (Test-Path "$TargetDir\package.json") {
    Write-Host ""
    Write-Host "⚠ package.json exists. Please manually merge dependencies from:"
    Write-Host "  $TemplateDir\package.json.snippet"
} else {
    Write-Host "⚠ No package.json found. Create one and add dependencies from:"
    Write-Host "  $TemplateDir\package.json.snippet"
}

Write-Host ""
Write-Host "✓ Quality Rails installed successfully!"
Write-Host ""
Write-Host "Next steps:"
Write-Host "1. Install dependencies: npm install"
Write-Host "2. Initialize husky: npx husky install"
Write-Host "3. Run verification: ..\quality-rails\scripts\verify.ps1"

View File

@@ -0,0 +1,75 @@
#!/bin/bash
#
# Quality Rails Installation Script
#
# Copies a quality-enforcement template (git hooks, lint/TS configs, CI
# pipeline) into a target project directory.
#
# Usage: ./install.sh --template typescript-node [--target /path/to/project]
set -euo pipefail

TEMPLATE=""
TARGET_DIR="."

# Parse arguments. Guard against a flag given without its value, which would
# otherwise make `shift 2` fail and exit with no diagnostic under `set -e`.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --template)
      [[ $# -ge 2 ]] || { echo "Error: --template requires a value" >&2; exit 1; }
      TEMPLATE="$2"
      shift 2
      ;;
    --target)
      [[ $# -ge 2 ]] || { echo "Error: --target requires a value" >&2; exit 1; }
      TARGET_DIR="$2"
      shift 2
      ;;
    *)
      echo "Unknown option: $1" >&2
      echo "Usage: $0 --template <template-name> [--target <directory>]" >&2
      exit 1
      ;;
  esac
done

if [[ -z "$TEMPLATE" ]]; then
  echo "Error: --template is required" >&2
  echo "Available templates: typescript-node, typescript-nextjs, python, monorepo" >&2
  exit 1
fi

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(dirname "$SCRIPT_DIR")"
TEMPLATE_DIR="$REPO_ROOT/templates/$TEMPLATE"

if [[ ! -d "$TEMPLATE_DIR" ]]; then
  echo "Error: Template '$TEMPLATE' not found at $TEMPLATE_DIR" >&2
  # Keep this hint consistent with install.ps1, which lists the templates too.
  echo "Available templates: typescript-node, typescript-nextjs, python, monorepo" >&2
  exit 1
fi

echo "Installing Quality Rails: $TEMPLATE"
echo "Target directory: $TARGET_DIR"
echo ""

# Copy template files. Each template ships only a subset of these files, so
# missing sources are ignored deliberately (`|| true` = best-effort copy).
echo "Copying template files..."
cp -r "$TEMPLATE_DIR/.husky" "$TARGET_DIR/" 2>/dev/null || true
cp "$TEMPLATE_DIR/.lintstagedrc.js" "$TARGET_DIR/" 2>/dev/null || true
cp "$TEMPLATE_DIR/.eslintrc.strict.js" "$TARGET_DIR/.eslintrc.js" 2>/dev/null || true
cp "$TEMPLATE_DIR/tsconfig.strict.json" "$TARGET_DIR/tsconfig.json" 2>/dev/null || true
cp "$TEMPLATE_DIR/.woodpecker.yml" "$TARGET_DIR/" 2>/dev/null || true
echo "✓ Files copied"

# Dependencies cannot be merged automatically; point the user at the snippet.
if [[ -f "$TARGET_DIR/package.json" ]]; then
  echo ""
  echo "⚠ package.json exists. Please manually merge dependencies from:"
  echo "  $TEMPLATE_DIR/package.json.snippet"
else
  echo "⚠ No package.json found. Create one and add dependencies from:"
  echo "  $TEMPLATE_DIR/package.json.snippet"
fi

echo ""
echo "✓ Quality Rails installed successfully!"
echo ""
echo "Next steps:"
echo "1. Install dependencies: npm install"
echo "2. Initialize husky: npx husky install"
echo "3. Run verification: ~/.config/mosaic/bin/mosaic-quality-verify --target $TARGET_DIR"
echo ""

View File

@@ -0,0 +1,57 @@
# Quality Rails Verification Script (Windows)
#
# Proves that enforcement actually works: stages deliberately broken files
# and checks that `git commit` is rejected by the pre-commit hooks.
# Run from the root of a git repository that has Quality Rails installed.
# Exit code: 0 when every check blocked the bad commit, 1 otherwise.

# Clean up after a probe commit. If the hook unexpectedly ALLOWED the commit,
# roll it back (soft reset) so the repository history is untouched — the
# original only unstaged the file, leaving a stray "Test commit" behind on the
# FAIL path. Then unstage and delete the probe file.
function Undo-Attempt {
    param([string]$BeforeHead)
    $now = (git rev-parse HEAD 2>$null)
    if ($BeforeHead -and $now -and ($now -ne $BeforeHead)) {
        git reset --soft HEAD~1 2>$null
    }
    git reset HEAD test-file.ts 2>$null
    Remove-Item test-file.ts -ErrorAction SilentlyContinue
}

Write-Host "═══════════════════════════════════════════"
Write-Host "Quality Rails Enforcement Verification"
Write-Host "═══════════════════════════════════════════"
Write-Host ""

$Passed = 0
$Failed = 0

# Test 1: Type error blocked
Write-Host "Test 1: Type errors should be blocked..."
"const x: string = 123;" | Out-File -FilePath test-file.ts -Encoding utf8
$before = (git rev-parse HEAD 2>$null)   # may be $null in a fresh repo
git add test-file.ts 2>$null
$output = git commit -m "Test commit" 2>&1 | Out-String
if ($output -match "error") {
    Write-Host "✅ PASS: Type errors blocked" -ForegroundColor Green
    $Passed++
} else {
    Write-Host "❌ FAIL: Type errors NOT blocked" -ForegroundColor Red
    $Failed++
}
Undo-Attempt $before

# Test 2: 'any' type blocked
Write-Host ""
Write-Host "Test 2: 'any' types should be blocked..."
"const x: any = 123;" | Out-File -FilePath test-file.ts -Encoding utf8
$before = (git rev-parse HEAD 2>$null)
git add test-file.ts 2>$null
$output = git commit -m "Test commit" 2>&1 | Out-String
if ($output -match "no-explicit-any") {
    Write-Host "✅ PASS: 'any' types blocked" -ForegroundColor Green
    $Passed++
} else {
    Write-Host "❌ FAIL: 'any' types NOT blocked" -ForegroundColor Red
    $Failed++
}
Undo-Attempt $before

# Summary
Write-Host ""
Write-Host "═══════════════════════════════════════════"
Write-Host "Verification Summary"
Write-Host "═══════════════════════════════════════════"
Write-Host "✅ Passed: $Passed"
Write-Host "❌ Failed: $Failed"
Write-Host ""

if ($Failed -eq 0) {
    Write-Host "🎉 All tests passed! Quality enforcement is working." -ForegroundColor Green
    exit 0
} else {
    Write-Host "⚠ Some tests failed. Review configuration." -ForegroundColor Yellow
    exit 1
}

92
tools/quality/scripts/verify.sh Executable file
View File

@@ -0,0 +1,92 @@
#!/bin/bash
#
# Quality Rails Verification Script
#
# Proves that enforcement actually works: stages deliberately broken files
# and checks that `git commit` is rejected by the pre-commit hooks.
# Run from the root of a git repository that has Quality Rails installed.
# Exit status: 0 when every check blocked the bad commit, 1 otherwise.
#
# NOTE: no `set -e` here — probe commits are EXPECTED to fail; we inspect
# their output instead of aborting on non-zero exit codes.

echo "═══════════════════════════════════════════"
echo "Quality Rails Enforcement Verification"
echo "═══════════════════════════════════════════"
echo ""

PASSED=0
FAILED=0

# current_head — HEAD hash, or empty in a repository with no commits yet.
current_head() {
  git rev-parse HEAD 2>/dev/null || true
}

# undo_attempt <before-head>
# Clean up after a probe commit. If the hook unexpectedly ALLOWED the commit,
# roll it back (soft reset) so the repository history is untouched — the
# original only unstaged the file, leaving a stray "Test commit" behind on
# the FAIL path. Then unstage and delete the probe file.
undo_attempt() {
  local before="$1"
  local now
  now="$(current_head)"
  if [ -n "$before" ] && [ -n "$now" ] && [ "$now" != "$before" ]; then
    git reset --soft HEAD~1 2>/dev/null
  fi
  git reset HEAD test-file.ts 2>/dev/null
  rm -f test-file.ts 2>/dev/null
}

# Test 1: Type error blocked
echo "Test 1: Type errors should be blocked..."
echo "const x: string = 123;" > test-file.ts
BEFORE="$(current_head)"
git add test-file.ts 2>/dev/null
if git commit -m "Test commit" 2>&1 | grep -q "error"; then
  echo "✅ PASS: Type errors blocked"
  PASSED=$((PASSED + 1))
else
  echo "❌ FAIL: Type errors NOT blocked"
  FAILED=$((FAILED + 1))
fi
undo_attempt "$BEFORE"

# Test 2: any type blocked
echo ""
echo "Test 2: 'any' types should be blocked..."
echo "const x: any = 123;" > test-file.ts
BEFORE="$(current_head)"
git add test-file.ts 2>/dev/null
if git commit -m "Test commit" 2>&1 | grep -q "no-explicit-any"; then
  echo "✅ PASS: 'any' types blocked"
  PASSED=$((PASSED + 1))
else
  echo "❌ FAIL: 'any' types NOT blocked"
  FAILED=$((FAILED + 1))
fi
undo_attempt "$BEFORE"

# Test 3: Hardcoded secret blocked (only meaningful if git-secrets is installed)
echo ""
echo "Test 3: Hardcoded secrets should be blocked..."
if command -v git-secrets &> /dev/null; then
  echo "const password = 'SuperSecret123!';" > test-file.ts
  BEFORE="$(current_head)"
  git add test-file.ts 2>/dev/null
  if git commit -m "Test commit" 2>&1 | grep -q -i "secret\|password"; then
    echo "✅ PASS: Secrets blocked"
    PASSED=$((PASSED + 1))
  else
    echo "⚠ WARN: Secrets NOT blocked (git-secrets may need configuration)"
    FAILED=$((FAILED + 1))
  fi
  undo_attempt "$BEFORE"
else
  echo "⚠ SKIP: git-secrets not installed"
fi

# Test 4: Lint error blocked
echo ""
echo "Test 4: Lint errors should be blocked..."
echo "const x=123" > test-file.ts # Missing semicolon
BEFORE="$(current_head)"
git add test-file.ts 2>/dev/null
if git commit -m "Test commit" 2>&1 | grep -q "prettier"; then
  echo "✅ PASS: Lint errors blocked"
  PASSED=$((PASSED + 1))
else
  echo "❌ FAIL: Lint errors NOT blocked"
  FAILED=$((FAILED + 1))
fi
undo_attempt "$BEFORE"

# Summary
echo ""
echo "═══════════════════════════════════════════"
echo "Verification Summary"
echo "═══════════════════════════════════════════"
echo "✅ Passed: $PASSED"
echo "❌ Failed: $FAILED"
echo ""

if [ "$FAILED" -eq 0 ]; then
  echo "🎉 All tests passed! Quality enforcement is working."
  exit 0
else
  echo "⚠ Some tests failed. Review configuration."
  exit 1
fi

View File

@@ -0,0 +1,64 @@
// Root ESLint config for monorepo
module.exports = {
root: true,
parser: '@typescript-eslint/parser',
parserOptions: {
ecmaVersion: 2022,
sourceType: 'module',
},
plugins: ['@typescript-eslint', 'security'],
extends: [
'eslint:recommended',
'plugin:@typescript-eslint/recommended',
'plugin:security/recommended',
'plugin:prettier/recommended',
],
rules: {
// Type Safety - STRICT
'@typescript-eslint/no-explicit-any': 'error',
'@typescript-eslint/explicit-function-return-type': 'warn',
'@typescript-eslint/explicit-module-boundary-types': 'error',
'@typescript-eslint/no-unused-vars': ['error', { argsIgnorePattern: '^_' }],
// Promise/Async Safety
'@typescript-eslint/no-floating-promises': 'error',
'@typescript-eslint/no-misused-promises': 'error',
'@typescript-eslint/await-thenable': 'error',
// Code Quality
'@typescript-eslint/no-var-requires': 'error',
'@typescript-eslint/prefer-nullish-coalescing': 'warn',
'@typescript-eslint/prefer-optional-chain': 'warn',
// Prettier
'prettier/prettier': [
'error',
{
endOfLine: 'auto',
},
],
},
ignorePatterns: [
'node_modules',
'dist',
'build',
'.next',
'out',
'coverage',
'.turbo',
],
overrides: [
{
// Next.js apps
files: ['apps/**/app/**/*.{ts,tsx}', 'apps/**/pages/**/*.{ts,tsx}'],
extends: ['next/core-web-vitals'],
},
{
// NestJS apps
files: ['apps/**/*.controller.ts', 'apps/**/*.service.ts', 'apps/**/*.module.ts'],
rules: {
'@typescript-eslint/explicit-function-return-type': 'error',
},
},
],
};

View File

@@ -0,0 +1,2 @@
# Husky pre-commit hook: run staged-file checks, then scan for secrets.
#
# A hook's exit status is the status of its LAST command. In the original,
# `npx git-secrets --scan || echo ...` always succeeded, so (1) a lint-staged
# failure never blocked the commit, and (2) a DETECTED SECRET was conflated
# with "tool not installed" and never blocked the commit either.

# lint-staged runs ESLint / tsc / Prettier on staged files; failure must block.
npx lint-staged || exit 1

# git-secrets is optional tooling: warn when it is absent, but when it IS
# installed a detected secret must block the commit.
if command -v git-secrets >/dev/null 2>&1; then
  git-secrets --scan || exit 1
else
  echo "Warning: git-secrets not installed" >&2
fi

Some files were not shown because too many files have changed in this diff Show More