chore: Sync MokoStandards 04.01.00 #100

Merged
jmiller-moko merged 32 commits from chore/sync-mokostandards-v04.01.00 into main 2026-03-27 00:00:09 +00:00
12 changed files with 2598 additions and 89 deletions

163
.github/workflows/auto-release.yml vendored Normal file
View File

@@ -0,0 +1,163 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Release
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/auto-release.yml
# VERSION: 04.01.00
# BRIEF: Auto-create a GitHub Release on every push to main with version from README.md
# NOTE: Synced via bulk-repo-sync to .github/workflows/auto-release.yml in all governed repos.
# For Dolibarr (crm-module) repos, also updates $this->version in the module descriptor.
name: Auto Release
on:
push:
branches:
- main
- master
permissions:
contents: write
jobs:
release:
name: Create Release
runs-on: ubuntu-latest
# Skip bot commits (version sync, [skip ci]) to avoid infinite loops
if: >-
!contains(github.event.head_commit.message, '[skip ci]') &&
github.actor != 'github-actions[bot]'
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
token: ${{ secrets.GH_TOKEN || github.token }}
fetch-depth: 0
- name: Extract version from README.md
id: version
run: |
VERSION=$(grep -oP '^\s*VERSION:\s*\K[0-9]{2}\.[0-9]{2}\.[0-9]{2}' README.md | head -1)
if [ -z "$VERSION" ]; then
echo "⚠️ No VERSION found in README.md — skipping release"
echo "skip=true" >> "$GITHUB_OUTPUT"
exit 0
fi
echo "version=$VERSION" >> "$GITHUB_OUTPUT"
echo "tag=v${VERSION}" >> "$GITHUB_OUTPUT"
echo "skip=false" >> "$GITHUB_OUTPUT"
echo "✅ Version: $VERSION (tag: v${VERSION})"
- name: Check if tag already exists
  if: steps.version.outputs.skip != 'true'
  id: tag_check
  run: |
    TAG="${{ steps.version.outputs.tag }}"
    # Query the tag namespace explicitly: a bare `git rev-parse "$TAG"` also
    # resolves branches or any other ref sharing the name, which could
    # wrongly skip a release. --verify + refs/tags/ pins it to tags only.
    if git rev-parse -q --verify "refs/tags/${TAG}" >/dev/null 2>&1; then
      echo " Tag $TAG already exists — skipping release"
      echo "exists=true" >> "$GITHUB_OUTPUT"
    else
      echo "exists=false" >> "$GITHUB_OUTPUT"
    fi
- name: Update Dolibarr module version
if: >-
steps.version.outputs.skip != 'true' &&
steps.tag_check.outputs.exists != 'true'
run: |
# Read the governed platform type (e.g. crm-module, waas-component)
# from the optional .moko-standards marker file at the repo root.
PLATFORM=""
if [ -f ".moko-standards" ]; then
PLATFORM=$(grep -E '^platform:' .moko-standards | sed 's/.*:[[:space:]]*//' | tr -d '"')
fi
VERSION="${{ steps.version.outputs.version }}"
if [ "$PLATFORM" = "crm-module" ]; then
echo "📦 Dolibarr release — setting module version to '${VERSION}'"
# Update $this->version in the module descriptor (core/modules/mod*.class.php)
find . -path "*/core/modules/mod*.class.php" -exec \
sed -i "s/\(\$this->version\s*=\s*\)['\"][^'\"]*['\"]/\1'${VERSION}'/" {} + 2>/dev/null || true
fi
if [ "$PLATFORM" = "waas-component" ]; then
echo "📦 Joomla release — setting manifest version to '${VERSION}'"
# Update <version> tag in Joomla XML manifest files
find . -maxdepth 2 -name "*.xml" -exec grep -l '<extension' {} \; 2>/dev/null | while read -r manifest; do
sed -i "s|<version>[^<]*</version>|<version>${VERSION}</version>|" "$manifest" 2>/dev/null || true
done
fi
# Commit the version update if anything changed
# NOTE: `git diff --quiet` only detects modifications to tracked files —
# sufficient here, since sed edits existing descriptors/manifests in place.
# The [skip ci] marker prevents this bot commit from re-triggering the job.
if ! git diff --quiet; then
git config --local user.email "github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
git add -A
git commit -m "chore(release): set version to ${VERSION} [skip ci]" \
--author="github-actions[bot] <github-actions[bot]@users.noreply.github.com>"
git push
fi
- name: Extract changelog entry
  if: >-
    steps.version.outputs.skip != 'true' &&
    steps.tag_check.outputs.exists != 'true'
  id: changelog
  run: |
    VERSION="${{ steps.version.outputs.version }}"
    # Try to extract the section for this version from CHANGELOG.md.
    # BUGFIX: the previous awk range `/^##.*${VERSION}/,/^## /` ended on the
    # same line it started on (the version heading matches BOTH patterns), so
    # the captured section was always empty and the fallback always fired.
    # The dots in "$VERSION" were also unescaped regex metacharacters.
    # Use an explicit state flag and literal substring matching via index().
    NOTES=""
    if [ -f "CHANGELOG.md" ]; then
      NOTES=$(awk -v ver="$VERSION" '
        found && /^## / { exit }
        !found && /^##/ && index($0, ver) { found = 1; next }
        found
      ' CHANGELOG.md | head -50)
    fi
    if [ -z "$NOTES" ]; then
      NOTES="Release ${VERSION}"
    fi
    # Write to file to avoid shell escaping issues
    echo "$NOTES" > /tmp/release_notes.md
    echo "✅ Release notes prepared"
- name: Create tag and release
if: >-
steps.version.outputs.skip != 'true' &&
steps.tag_check.outputs.exists != 'true'
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
run: |
TAG="${{ steps.version.outputs.tag }}"
VERSION="${{ steps.version.outputs.version }}"
# Create the tag
git tag "$TAG"
git push origin "$TAG"
# Create the release
gh release create "$TAG" \
--title "${VERSION}" \
--notes-file /tmp/release_notes.md \
--target main
echo "🚀 Release ${VERSION} created: $TAG"
- name: Summary
if: steps.version.outputs.skip != 'true'
run: |
VERSION="${{ steps.version.outputs.version }}"
TAG="${{ steps.version.outputs.tag }}"
if [ "${{ steps.tag_check.outputs.exists }}" = "true" ]; then
echo "## Release — ${VERSION}" >> $GITHUB_STEP_SUMMARY
echo "Tag \`${TAG}\` already exists — no new release created." >> $GITHUB_STEP_SUMMARY
else
echo "## 🚀 Release — ${VERSION}" >> $GITHUB_STEP_SUMMARY
echo "Created tag \`${TAG}\` and GitHub Release." >> $GITHUB_STEP_SUMMARY
fi

View File

@@ -5,17 +5,15 @@
# SPDX-License-Identifier: GPL-3.0-or-later
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow.Template
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Security
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/generic/codeql-analysis.yml
# VERSION: 04.00.15
# BRIEF: CodeQL security scanning workflow (generic — all repo types)
# NOTE: Deployed to .github/workflows/codeql-analysis.yml in governed repos.
# CodeQL does not support PHP directly; JavaScript scans JSON/YAML/shell.
# For PHP-specific security scanning see standards-compliance.yml.
# PATH: /.github/workflows/codeql-analysis.yml
# VERSION: 04.01.00
# BRIEF: CodeQL security scanning workflow for PHP codebase
# NOTE: Repository is PHP-only (v04.00.04). Python was removed Feb 12, 2026.
name: CodeQL Security Scanning
name: "CodeQL Security Scanning"
on:
push:
@@ -30,7 +28,7 @@ on:
- dev/**
- rc/**
schedule:
# Weekly on Monday at 06:00 UTC
# Run weekly on Monday at 6:00 AM UTC
- cron: '0 6 * * 1'
workflow_dispatch:
@@ -42,60 +40,65 @@ permissions:
jobs:
analyze:
name: Analyze (${{ matrix.language }})
name: Configuration Security Scan
runs-on: ubuntu-latest
timeout-minutes: 360
strategy:
fail-fast: false
matrix:
# CodeQL does not support PHP. Use 'javascript' to scan JSON, YAML,
# and shell scripts. Add 'actions' to scan GitHub Actions workflows.
language: ['javascript', 'actions']
# No language matrix - PHP-only repository
# CodeQL scans workflow files, configs, and scripts for security issues
# PHP security handled by SecurityValidator enterprise library
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
uses: github/codeql-action/init@0d579ffd059c29b07949a3cce3983f0780820c98 # v4
with:
languages: ${{ matrix.language }}
# No languages specified - scan configurations only
# Reference explicit config to scan YAML, JSON, shell scripts
config-file: ./.github/codeql/codeql-config.yml
# Use security-extended query suite for comprehensive coverage
queries: security-extended,security-and-quality
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# Skip autobuild - no code compilation needed for config scanning
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
uses: github/codeql-action/analyze@0d579ffd059c29b07949a3cce3983f0780820c98 # v4
with:
category: "/language:${{ matrix.language }}"
category: "/language:config"
upload: true
output: sarif-results
wait-for-processing: true
- name: Upload SARIF results
- name: Upload SARIF results (optional)
if: always()
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.5.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v4.5.0
with:
name: codeql-results-${{ matrix.language }}
name: codeql-results-config
path: sarif-results
retention-days: 30
- name: Step summary
- name: Check for Critical/High Findings
if: always()
run: |
echo "### 🔍 CodeQL — ${{ matrix.language }}" >> $GITHUB_STEP_SUMMARY
echo "### 🔍 CodeQL Security Analysis Complete" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Scan Type**: Configuration Security" >> $GITHUB_STEP_SUMMARY
echo "**Query Suite**: security-extended, security-and-quality" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Note**: MokoStandards is PHP-only (v04.00.04)." >> $GITHUB_STEP_SUMMARY
echo "This scan analyzes workflow files, JSON configs, YAML, and shell scripts." >> $GITHUB_STEP_SUMMARY
echo "For PHP-specific security: Use PHP SecurityValidator enterprise library." >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
URL="https://github.com/${{ github.repository }}/security/code-scanning"
echo "See the [Security tab]($URL) for findings." >> $GITHUB_STEP_SUMMARY
echo "Check the [Security tab]($URL) for detailed findings." >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "| Severity | SLA |" >> $GITHUB_STEP_SUMMARY
echo "|----------|-----|" >> $GITHUB_STEP_SUMMARY
echo "| Critical | 7 days |" >> $GITHUB_STEP_SUMMARY
echo "| High | 14 days |" >> $GITHUB_STEP_SUMMARY
echo "| Medium | 30 days |" >> $GITHUB_STEP_SUMMARY
echo "| Low | 60 days / next release |" >> $GITHUB_STEP_SUMMARY
echo "**Response Requirements**:" >> $GITHUB_STEP_SUMMARY
echo "- Critical: Fix within 7 days" >> $GITHUB_STEP_SUMMARY
echo "- High: Fix within 14 days" >> $GITHUB_STEP_SUMMARY
echo "- Medium: Fix within 30 days" >> $GITHUB_STEP_SUMMARY
echo "- Low: Fix within 60 days or next release" >> $GITHUB_STEP_SUMMARY
summary:
name: Security Scan Summary
@@ -104,12 +107,17 @@ jobs:
if: always()
steps:
- name: Summary
- name: Generate Summary
run: |
echo "### 🛡️ CodeQL Complete" >> $GITHUB_STEP_SUMMARY
echo "### 🛡️ Security Scanning Complete" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "All CodeQL security scans have completed." >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Trigger**: ${{ github.event_name }}" >> $GITHUB_STEP_SUMMARY
echo "**Branch**: ${{ github.ref_name }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Trigger:** ${{ github.event_name }}" >> $GITHUB_STEP_SUMMARY
echo "**Branch:** ${{ github.ref_name }}" >> $GITHUB_STEP_SUMMARY
SECURITY_URL="https://github.com/${{ github.repository }}/security"
echo "" >> $GITHUB_STEP_SUMMARY
echo "📊 [View all security alerts]($SECURITY_URL)" >> $GITHUB_STEP_SUMMARY
POLICY_URL="https://github.com/${{ github.repository }}"
POLICY_URL="${POLICY_URL}/blob/main/docs/policy/security-scanning.md"
echo "📋 [Security scanning policy]($POLICY_URL)" >> $GITHUB_STEP_SUMMARY

665
.github/workflows/deploy-demo.yml vendored Normal file
View File

@@ -0,0 +1,665 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Deploy
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/deploy-demo.yml
# VERSION: 04.01.00
# BRIEF: SFTP deployment workflow for demo server — synced to all governed repos
# NOTE: Synced via bulk-repo-sync to .github/workflows/deploy-demo.yml in all governed repos.
# Port is resolved in order: DEMO_FTP_PORT variable → :port suffix in DEMO_FTP_HOST → 22.
name: Deploy to Demo Server (SFTP)
# Deploys the contents of the src/ directory to the demo server via SFTP.
# Triggers on push/merge to main — deploys the production-ready build to the demo server.
#
# Required org-level variables: DEMO_FTP_HOST, DEMO_FTP_PATH, DEMO_FTP_USERNAME
# Optional org-level variable: DEMO_FTP_PORT (auto-detected from host or defaults to 22)
# Optional org/repo variable: DEMO_FTP_SUFFIX — when set, appended to DEMO_FTP_PATH to form the
# full remote destination: DEMO_FTP_PATH/DEMO_FTP_SUFFIX
# Ignore rules: Place a .ftp_ignore file in the repository root. Each non-empty,
# non-comment line is a regex pattern tested against the relative path
# of each file (e.g. "subdir/file.txt"). The .gitignore is also
# respected automatically.
# Required org-level secret: DEMO_FTP_KEY (preferred) or DEMO_FTP_PASSWORD
#
# Access control: only users with admin or maintain role on the repository may deploy.
on:
push:
branches:
- main
- master
paths:
- 'src/**'
pull_request:
types: [opened, synchronize, reopened, closed]
branches:
- main
- master
paths:
- 'src/**'
workflow_dispatch:
inputs:
clear_remote:
description: 'Delete all files inside the remote destination folder before uploading'
required: false
default: false
type: boolean
permissions:
contents: read
pull-requests: write
jobs:
check-permission:
name: Verify Deployment Permission
runs-on: ubuntu-latest
steps:
- name: Check actor permission
env:
# Prefer the org-scoped GH_TOKEN secret (needed for the org membership
# fallback). Falls back to the built-in github.token so the collaborator
# endpoint still works even if GH_TOKEN is not configured.
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
run: |
ACTOR="${{ github.actor }}"
REPO="${{ github.repository }}"
ORG="${{ github.repository_owner }}"
METHOD=""
AUTHORIZED="false"
# Hardcoded authorized users — always allowed to deploy
AUTHORIZED_USERS="jmiller-moko github-actions[bot]"
for user in $AUTHORIZED_USERS; do
if [ "$ACTOR" = "$user" ]; then
AUTHORIZED="true"
METHOD="hardcoded allowlist"
PERMISSION="admin"
break
fi
done
# For other actors, check repo/org permissions via API
if [ "$AUTHORIZED" != "true" ]; then
PERMISSION=$(gh api "repos/${REPO}/collaborators/${ACTOR}/permission" \
--jq '.permission' 2>/dev/null)
METHOD="repo collaborator API"
if [ -z "$PERMISSION" ]; then
ORG_ROLE=$(gh api "orgs/${ORG}/memberships/${ACTOR}" \
--jq '.role' 2>/dev/null)
METHOD="org membership API"
if [ "$ORG_ROLE" = "owner" ]; then
PERMISSION="admin"
else
PERMISSION="none"
fi
fi
case "$PERMISSION" in
admin|maintain) AUTHORIZED="true" ;;
esac
fi
# Write detailed summary
{
echo "## 🔐 Deploy Authorization"
echo ""
echo "| Field | Value |"
echo "|-------|-------|"
echo "| **Actor** | \`${ACTOR}\` |"
echo "| **Repository** | \`${REPO}\` |"
echo "| **Permission** | \`${PERMISSION}\` |"
echo "| **Method** | ${METHOD} |"
echo "| **Authorized** | ${AUTHORIZED} |"
echo "| **Trigger** | \`${{ github.event_name }}\` |"
echo "| **Branch** | \`${{ github.ref_name }}\` |"
echo ""
} >> "$GITHUB_STEP_SUMMARY"
if [ "$AUTHORIZED" = "true" ]; then
echo "✅ ${ACTOR} authorized to deploy (${METHOD})" >> "$GITHUB_STEP_SUMMARY"
else
echo "❌ ${ACTOR} is NOT authorized to deploy." >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "Deployment requires one of:" >> "$GITHUB_STEP_SUMMARY"
echo "- Being in the hardcoded allowlist" >> "$GITHUB_STEP_SUMMARY"
echo "- Having \`admin\` or \`maintain\` role on the repository" >> "$GITHUB_STEP_SUMMARY"
exit 1
fi
deploy:
name: SFTP Deploy → Demo
runs-on: ubuntu-latest
needs: [check-permission]
if: >-
!startsWith(github.head_ref || github.ref_name, 'chore/') &&
(github.event_name == 'workflow_dispatch' ||
github.event_name == 'push' ||
(github.event_name == 'pull_request' &&
(github.event.action == 'opened' ||
github.event.action == 'synchronize' ||
github.event.action == 'reopened' ||
github.event.pull_request.merged == true)))
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Resolve source directory
  id: source
  run: |
    SRC="src"
    if [ ! -d "$SRC" ]; then
      echo "⚠️ No src/ directory found — skipping deployment"
      echo "skip=true" >> "$GITHUB_OUTPUT"
    else
      # Count regular files under src/. (The previous version piped a
      # redundant `find -maxdepth 0 -type d > /dev/null &&` directory test
      # into the substitution — the -d check above already guarantees it.)
      COUNT=$(find "$SRC" -type f | wc -l)
      echo "✅ Source: src/ (${COUNT} file(s))"
      echo "skip=false" >> "$GITHUB_OUTPUT"
      echo "dir=${SRC}" >> "$GITHUB_OUTPUT"
    fi
- name: Preview files to deploy
if: steps.source.outputs.skip == 'false'
env:
SOURCE_DIR: ${{ steps.source.outputs.dir }}
run: |
# ── Convert a gitignore-style glob line to an ERE pattern ──────────────
ftp_ignore_to_regex() {
local line="$1"
local anchored=false
# Strip inline comments and whitespace
line=$(printf '%s' "$line" | sed 's/[[:space:]]*#.*$//' | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
[ -z "$line" ] && return
# Skip negation patterns (not supported)
[[ "$line" == !* ]] && return
# Trailing slash = directory marker; strip it
line="${line%/}"
# Leading slash = anchored to root; strip it
if [[ "$line" == /* ]]; then
anchored=true
line="${line#/}"
fi
# Escape ERE special chars, then restore glob semantics
local regex
regex=$(printf '%s' "$line" \
| sed 's/[.+^${}()|[\\]/\\&/g' \
| sed 's/\\\*\\\*/\x01/g' \
| sed 's/\\\*/[^\/]*/g' \
| sed 's/\x01/.*/g' \
| sed 's/\\\?/[^\/]/g')
if $anchored; then
printf '^%s(/|$)' "$regex"
else
printf '(^|/)%s(/|$)' "$regex"
fi
}
# ── Read .ftp_ignore (gitignore-style globs) ─────────────────────────
IGNORE_PATTERNS=()
IGNORE_SOURCES=()
if [ -f ".ftp_ignore" ]; then
while IFS= read -r line; do
[[ "$line" =~ ^[[:space:]]*$ || "$line" =~ ^[[:space:]]*# ]] && continue
regex=$(ftp_ignore_to_regex "$line")
[ -n "$regex" ] && IGNORE_PATTERNS+=("$regex") && IGNORE_SOURCES+=("$line")
done < ".ftp_ignore"
fi
# ── Walk src/ and classify every file ────────────────────────────────
WILL_UPLOAD=()
IGNORED_FILES=()
while IFS= read -r -d '' file; do
  rel="${file#"${SOURCE_DIR}"/}"
  SKIP=false
  # First, test against the compiled .ftp_ignore regex patterns.
  for i in "${!IGNORE_PATTERNS[@]}"; do
    if echo "$rel" | grep -qE "${IGNORE_PATTERNS[$i]}" 2>/dev/null; then
      IGNORED_FILES+=("$rel | .ftp_ignore \`${IGNORE_SOURCES[$i]}\`")
      SKIP=true; break
    fi
  done
  $SKIP && continue
  # BUGFIX: the original duplicated the `if [ -f ".gitignore" ]` guard and
  # never closed the outer `if` before `done`, making the step a bash syntax
  # error. Also check the repo-relative path ("$file", e.g. "src/a.txt"),
  # not "$rel" — src-anchored .gitignore patterns never matched "$rel".
  if [ -f ".gitignore" ]; then
    if git check-ignore -q "$file" 2>/dev/null; then
      IGNORED_FILES+=("$rel | .gitignore")
      continue
    fi
  fi
  WILL_UPLOAD+=("$rel")
done < <(find "$SOURCE_DIR" -type f -print0 | sort -z)
UPLOAD_COUNT="${#WILL_UPLOAD[@]}"
IGNORE_COUNT="${#IGNORED_FILES[@]}"
echo " ${UPLOAD_COUNT} file(s) will be uploaded, ${IGNORE_COUNT} ignored"
# ── Write deployment preview to step summary ──────────────────────────
{
echo "## 📋 Deployment Preview"
echo ""
echo "| Field | Value |"
echo "|---|---|"
echo "| Source | \`${SOURCE_DIR}/\` |"
echo "| Files to upload | **${UPLOAD_COUNT}** |"
echo "| Files ignored | **${IGNORE_COUNT}** |"
echo ""
if [ "${UPLOAD_COUNT}" -gt 0 ]; then
echo "### 📂 Files that will be uploaded"
echo '```'
printf '%s\n' "${WILL_UPLOAD[@]}"
echo '```'
echo ""
fi
if [ "${IGNORE_COUNT}" -gt 0 ]; then
echo "### ⏭️ Files excluded"
echo "| File | Reason |"
echo "|---|---|"
for entry in "${IGNORED_FILES[@]}"; do
f="${entry% | *}"; r="${entry##* | }"
echo "| \`${f}\` | ${r} |"
done
echo ""
fi
} >> "$GITHUB_STEP_SUMMARY"
- name: Resolve SFTP host and port
if: steps.source.outputs.skip == 'false'
id: conn
env:
HOST_RAW: ${{ vars.DEMO_FTP_HOST }}
PORT_VAR: ${{ vars.DEMO_FTP_PORT }}
run: |
HOST="$HOST_RAW"
PORT="$PORT_VAR"
# Priority 1 — explicit DEMO_FTP_PORT variable
if [ -n "$PORT" ]; then
echo " Using explicit DEMO_FTP_PORT=${PORT}"
# Priority 2 — port embedded in DEMO_FTP_HOST (host:port)
elif [[ "$HOST" == *:* ]]; then
PORT="${HOST##*:}"
HOST="${HOST%:*}"
echo " Extracted port ${PORT} from DEMO_FTP_HOST"
# Priority 3 — SFTP default
else
PORT="22"
echo " No port specified — defaulting to SFTP port 22"
fi
echo "host=${HOST}" >> "$GITHUB_OUTPUT"
echo "port=${PORT}" >> "$GITHUB_OUTPUT"
echo "SFTP target: ${HOST}:${PORT}"
- name: Build remote path
if: steps.source.outputs.skip == 'false'
id: remote
env:
DEMO_FTP_PATH: ${{ vars.DEMO_FTP_PATH }}
DEMO_FTP_SUFFIX: ${{ vars.DEMO_FTP_SUFFIX }}
run: |
BASE="$DEMO_FTP_PATH"
if [ -z "$BASE" ]; then
echo "❌ DEMO_FTP_PATH is not set."
echo " Configure it as an org-level variable (Settings → Variables) and"
echo " ensure this repository has been granted access to it."
exit 1
fi
# DEMO_FTP_SUFFIX is required — it identifies the remote subdirectory for this repo.
# Without it we cannot safely determine the deployment target.
if [ -z "$DEMO_FTP_SUFFIX" ]; then
echo "⏭️ DEMO_FTP_SUFFIX variable is not set — skipping deployment."
echo " Set DEMO_FTP_SUFFIX as a repo or org variable to enable deploy-demo."
echo "skip=true" >> "$GITHUB_OUTPUT"
echo "path=" >> "$GITHUB_OUTPUT"
exit 0
fi
REMOTE="${BASE%/}/${DEMO_FTP_SUFFIX#/}"
# ── Platform-specific path safety guards ──────────────────────────────
PLATFORM=""
if [ -f ".moko-standards" ]; then
PLATFORM=$(grep -E '^platform:' .moko-standards | sed 's/.*:[[:space:]]*//' | tr -d '"')
fi
if [ "$PLATFORM" = "crm-module" ]; then
# Dolibarr modules must deploy under htdocs/custom/ — guard against
# accidentally overwriting server root or unrelated directories.
if [[ "$REMOTE" != *custom* ]]; then
echo "❌ Safety check failed: Dolibarr (crm-module) remote path must contain 'custom'."
echo " Current path: ${REMOTE}"
echo " Set DEMO_FTP_SUFFIX to the module's htdocs/custom/ subdirectory."
exit 1
fi
fi
if [ "$PLATFORM" = "waas-component" ]; then
# Joomla extensions may only deploy to the server's tmp/ directory.
if [[ "$REMOTE" != *tmp* ]]; then
echo "❌ Safety check failed: Joomla (waas-component) remote path must contain 'tmp'."
echo " Current path: ${REMOTE}"
echo " Set DEMO_FTP_SUFFIX to a path under the server tmp/ directory."
exit 1
fi
fi
echo " Remote path: ${REMOTE}"
echo "path=${REMOTE}" >> "$GITHUB_OUTPUT"
- name: Detect SFTP authentication method
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
id: auth
env:
HAS_KEY: ${{ secrets.DEMO_FTP_KEY }}
HAS_PASSWORD: ${{ secrets.DEMO_FTP_PASSWORD }}
run: |
if [ -n "$HAS_KEY" ] && [ -n "$HAS_PASSWORD" ]; then
# Both set: key auth with password as passphrase; falls back to password-only if key fails
echo "method=key" >> "$GITHUB_OUTPUT"
echo "use_passphrase=true" >> "$GITHUB_OUTPUT"
echo "has_password=true" >> "$GITHUB_OUTPUT"
echo " Primary: SSH key + passphrase (DEMO_FTP_KEY / DEMO_FTP_PASSWORD)"
echo " Fallback: password-only auth if key authentication fails"
elif [ -n "$HAS_KEY" ]; then
# Key only: no passphrase, no password fallback
echo "method=key" >> "$GITHUB_OUTPUT"
echo "use_passphrase=false" >> "$GITHUB_OUTPUT"
echo "has_password=false" >> "$GITHUB_OUTPUT"
echo " Using SSH key authentication (DEMO_FTP_KEY, no passphrase, no fallback)"
elif [ -n "$HAS_PASSWORD" ]; then
# Password only: direct SFTP password auth
echo "method=password" >> "$GITHUB_OUTPUT"
echo "use_passphrase=false" >> "$GITHUB_OUTPUT"
echo "has_password=true" >> "$GITHUB_OUTPUT"
echo " Using password authentication (DEMO_FTP_PASSWORD)"
else
echo "❌ No SFTP credentials configured."
echo " Set DEMO_FTP_KEY (preferred) or DEMO_FTP_PASSWORD as an org-level secret."
exit 1
fi
- name: Setup PHP
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
uses: shivammathur/setup-php@44454db4f0199b8b9685a5d763dc37cbf79108e1 # v2.31.0
with:
php-version: '8.1'
tools: composer
- name: Setup MokoStandards deploy tools
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
COMPOSER_AUTH: '{"github-oauth":{"github.com":"${{ secrets.GH_TOKEN || github.token }}"}}'
run: |
git clone --depth 1 --quiet \
"https://x-access-token:${GH_TOKEN}@github.com/mokoconsulting-tech/MokoStandards.git" \
/tmp/mokostandards
cd /tmp/mokostandards
composer install --no-dev --no-interaction --quiet
- name: Clear remote destination folder
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
env:
SFTP_HOST: ${{ steps.conn.outputs.host }}
SFTP_PORT: ${{ steps.conn.outputs.port }}
SFTP_USER: ${{ vars.DEMO_FTP_USERNAME }}
SFTP_KEY: ${{ secrets.DEMO_FTP_KEY }}
SFTP_PASSWORD: ${{ secrets.DEMO_FTP_PASSWORD }}
AUTH_METHOD: ${{ steps.auth.outputs.method }}
USE_PASSPHRASE: ${{ steps.auth.outputs.use_passphrase }}
HAS_PASSWORD: ${{ steps.auth.outputs.has_password }}
REMOTE_PATH: ${{ steps.remote.outputs.path }}
run: |
cat > /tmp/moko_clear.php << 'PHPEOF'
<?php
declare(strict_types=1);
require '/tmp/mokostandards/vendor/autoload.php';
use phpseclib3\Net\SFTP;
use phpseclib3\Crypt\PublicKeyLoader;
$host = (string) getenv('SFTP_HOST');
$port = (int) getenv('SFTP_PORT');
$username = (string) getenv('SFTP_USER');
$authMethod = (string) getenv('AUTH_METHOD');
$usePassphrase = getenv('USE_PASSPHRASE') === 'true';
$hasPassword = getenv('HAS_PASSWORD') === 'true';
$remotePath = rtrim((string) getenv('REMOTE_PATH'), '/');
echo "⚠️ Clearing remote folder: {$remotePath}\n";
$sftp = new SFTP($host, $port);
// ── Authentication ──────────────────────────────────────────────
if ($authMethod === 'key') {
$keyData = (string) getenv('SFTP_KEY');
$passphrase = $usePassphrase ? (string) getenv('SFTP_PASSWORD') : false;
$password = $hasPassword ? (string) getenv('SFTP_PASSWORD') : '';
$key = PublicKeyLoader::load($keyData, $passphrase);
if (!$sftp->login($username, $key)) {
if ($password !== '') {
echo "⚠️ Key auth failed — falling back to password\n";
if (!$sftp->login($username, $password)) {
fwrite(STDERR, "❌ Both key and password authentication failed\n");
exit(1);
}
echo "✅ Connected via password authentication (key fallback)\n";
} else {
fwrite(STDERR, "❌ Key authentication failed and no password fallback is available\n");
exit(1);
}
} else {
echo "✅ Connected via SSH key authentication\n";
}
} else {
if (!$sftp->login($username, (string) getenv('SFTP_PASSWORD'))) {
fwrite(STDERR, "❌ Password authentication failed\n");
exit(1);
}
echo "✅ Connected via password authentication\n";
}
// ── Recursive delete ────────────────────────────────────────────
function rmrf(SFTP $sftp, string $path): void
{
$entries = $sftp->nlist($path);
if ($entries === false) {
return; // path does not exist — nothing to clear
}
foreach ($entries as $name) {
if ($name === '.' || $name === '..') {
continue;
}
$entry = "{$path}/{$name}";
if ($sftp->is_dir($entry)) {
rmrf($sftp, $entry);
$sftp->rmdir($entry);
echo " 🗑️ Removed dir: {$entry}\n";
} else {
$sftp->delete($entry);
echo " 🗑️ Removed file: {$entry}\n";
}
}
}
// ── Create remote directory tree ────────────────────────────────
// Creates every prefix of $path, mkdir() by mkdir(); each call silently
// returns false when that directory already exists, which is fine.
function sftpMakedirs(SFTP $sftp, string $path): void
{
    $parts = array_values(array_filter(explode('/', $path), fn(string $p) => $p !== ''));
    // BUGFIX: the original initialized $current with a ternary whose two
    // branches were both '' (dead code). The built path is always rooted
    // with a leading '/' by the concatenation below.
    $current = '';
    foreach ($parts as $part) {
        $current .= '/' . $part;
        $sftp->mkdir($current); // silently returns false if already exists
    }
}
rmrf($sftp, $remotePath);
sftpMakedirs($sftp, $remotePath);
echo "✅ Remote folder ready: {$remotePath}\n";
PHPEOF
php /tmp/moko_clear.php
- name: Deploy via SFTP
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
env:
SFTP_HOST: ${{ steps.conn.outputs.host }}
SFTP_PORT: ${{ steps.conn.outputs.port }}
SFTP_USER: ${{ vars.DEMO_FTP_USERNAME }}
SFTP_KEY: ${{ secrets.DEMO_FTP_KEY }}
SFTP_PASSWORD: ${{ secrets.DEMO_FTP_PASSWORD }}
AUTH_METHOD: ${{ steps.auth.outputs.method }}
USE_PASSPHRASE: ${{ steps.auth.outputs.use_passphrase }}
REMOTE_PATH: ${{ steps.remote.outputs.path }}
SOURCE_DIR: ${{ steps.source.outputs.dir }}
run: |
# ── Write SSH key to temp file (key auth only) ────────────────────────
if [ "$AUTH_METHOD" = "key" ]; then
printf '%s' "$SFTP_KEY" > /tmp/deploy_key
chmod 600 /tmp/deploy_key
fi
# ── Generate sftp-config.json safely via jq ───────────────────────────
if [ "$AUTH_METHOD" = "key" ]; then
jq -n \
--arg host "$SFTP_HOST" \
--argjson port "${SFTP_PORT:-22}" \
--arg user "$SFTP_USER" \
--arg path "$REMOTE_PATH" \
--arg key "/tmp/deploy_key" \
'{host:$host, port:$port, user:$user, remote_path:$path, ssh_key_file:$key}' \
> /tmp/sftp-config.json
else
jq -n \
--arg host "$SFTP_HOST" \
--argjson port "${SFTP_PORT:-22}" \
--arg user "$SFTP_USER" \
--arg path "$REMOTE_PATH" \
--arg pass "$SFTP_PASSWORD" \
'{host:$host, port:$port, user:$user, remote_path:$path, password:$pass}' \
> /tmp/sftp-config.json
fi
# ── Run deploy-sftp.php from MokoStandards ────────────────────────────
DEPLOY_ARGS=(--path . --src-dir "$SOURCE_DIR" --config /tmp/sftp-config.json)
if [ "$USE_PASSPHRASE" = "true" ]; then
DEPLOY_ARGS+=(--key-passphrase "$SFTP_PASSWORD")
fi
php /tmp/mokostandards/api/deploy/deploy-sftp.php "${DEPLOY_ARGS[@]}"
# (deploy-sftp.php handles dotfile skipping and .ftp_ignore natively)
# Remove temp files that should never be left behind
rm -f /tmp/deploy_key /tmp/sftp-config.json
- name: Create or update failure issue
if: failure()
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
run: |
REPO="${{ github.repository }}"
RUN_URL="${{ github.server_url }}/${REPO}/actions/runs/${{ github.run_id }}"
ACTOR="${{ github.actor }}"
BRANCH="${{ github.ref_name }}"
EVENT="${{ github.event_name }}"
NOW=$(date -u '+%Y-%m-%d %H:%M:%S UTC')
LABEL="deploy-failure"
TITLE="fix: Demo deployment failed — ${REPO}"
BODY="## Demo Deployment Failed
A deployment to the demo server failed and requires attention.
| Field | Value |
|-------|-------|
| **Repository** | \`${REPO}\` |
| **Branch** | \`${BRANCH}\` |
| **Trigger** | ${EVENT} |
| **Actor** | @${ACTOR} |
| **Failed at** | ${NOW} |
| **Run** | [View workflow run](${RUN_URL}) |
### Next steps
1. Review the [workflow run log](${RUN_URL}) for the specific error.
2. Fix the underlying issue (credentials, SFTP connectivity, permissions).
3. Re-trigger the deployment via **Actions → Deploy to Demo Server → Run workflow**.
---
*Auto-created by deploy-demo.yml — close this issue once the deployment is resolved.*"
# Ensure the label exists (idempotent — no-op if already present)
gh label create "$LABEL" \
--repo "$REPO" \
--color "CC0000" \
--description "Automated deploy failure tracking" \
--force 2>/dev/null || true
# Look for an existing open deploy-failure issue
EXISTING=$(gh api "repos/${REPO}/issues?labels=${LABEL}&state=open&per_page=1" \
--jq '.[0].number' 2>/dev/null)
if [ -n "$EXISTING" ] && [ "$EXISTING" != "null" ]; then
gh api "repos/${REPO}/issues/${EXISTING}" \
-X PATCH \
-f title="$TITLE" \
-f body="$BODY" \
--silent
echo "📋 Failure issue #${EXISTING} updated: ${REPO}" >> "$GITHUB_STEP_SUMMARY"
else
gh issue create \
--repo "$REPO" \
--title "$TITLE" \
--body "$BODY" \
--label "$LABEL" \
| tee -a "$GITHUB_STEP_SUMMARY"
fi
- name: Deployment summary
  if: always()
  run: |
    # Emit the step summary once, via a single grouped redirect.
    {
      if [ "${{ steps.source.outputs.skip }}" == "true" ]; then
        echo "### ⏭️ Deployment Skipped"
        echo ""
        echo "No \`src/\` directory found in this repository."
      elif [ "${{ job.status }}" == "success" ]; then
        echo ""
        echo "### ✅ Demo Deployment Successful"
        echo ""
        echo "| Field | Value |"
        echo "|-------|-------|"
        echo "| Host | \`${{ steps.conn.outputs.host }}:${{ steps.conn.outputs.port }}\` |"
        echo "| Remote path | \`${{ steps.remote.outputs.path }}\` |"
        echo "| Source | \`src/\` |"
        echo "| Trigger | ${{ github.event_name }} |"
        echo "| Auth | ${{ steps.auth.outputs.method }} |"
        echo "| Clear remote | ${{ inputs.clear_remote || 'false' }} |"
      else
        echo "### ❌ Demo Deployment Failed"
        echo ""
        echo "Check the job log above for error details."
      fi
    } >> "$GITHUB_STEP_SUMMARY"

View File

@@ -22,7 +22,7 @@
# INGROUP: MokoStandards.Deploy
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/deploy-dev.yml
# VERSION: 04.00.27
# VERSION: 04.01.00
# BRIEF: SFTP deployment workflow for development server — synced to all governed repos
# NOTE: Synced via bulk-repo-sync to .github/workflows/deploy-dev.yml in all governed repos.
# Port is resolved in order: DEV_FTP_PORT variable → :port suffix in DEV_FTP_HOST → 22.
@@ -48,19 +48,19 @@ name: Deploy to Dev Server (SFTP)
on:
push:
branches:
- main
- master
- 'dev/**'
- develop
- development
paths:
- 'src/**'
pull_request:
types: [opened, synchronize, reopened, closed]
branches:
- main
- master
- 'dev/**'
- develop
- development
paths:
- 'src/**'
workflow_dispatch:
inputs:
clear_remote:
@@ -89,52 +89,82 @@ jobs:
REPO="${{ github.repository }}"
ORG="${{ github.repository_owner }}"
# Try the per-repo collaborator endpoint first.
# This returns 404 for org owners who are not listed as explicit
# collaborators, so we fall back to the org membership role check.
METHOD=""
AUTHORIZED="false"
# Hardcoded authorized users — always allowed to deploy
AUTHORIZED_USERS="jmiller-moko github-actions[bot]"
for user in $AUTHORIZED_USERS; do
if [ "$ACTOR" = "$user" ]; then
AUTHORIZED="true"
METHOD="hardcoded allowlist"
PERMISSION="admin"
break
fi
done
# For other actors, check repo/org permissions via API
if [ "$AUTHORIZED" != "true" ]; then
PERMISSION=$(gh api "repos/${REPO}/collaborators/${ACTOR}/permission" \
--jq '.permission' 2>/dev/null)
METHOD="repo collaborator API"
if [ -z "$PERMISSION" ]; then
# Collaborator endpoint returned nothing — try org membership.
# Requires a token with read:org scope (secrets.GH_TOKEN).
# github.token alone is insufficient for this endpoint.
ORG_ROLE=$(gh api "orgs/${ORG}/memberships/${ACTOR}" \
--jq '.role' 2>/dev/null)
METHOD="org membership API"
if [ "$ORG_ROLE" = "owner" ]; then
PERMISSION="admin"
echo " ${ACTOR} is an org owner — granting admin access"
else
# Both checks failed — token may lack read:org scope.
echo "⚠️ Could not determine permission for ${ACTOR}."
echo " Add GH_TOKEN (PAT with read:org scope) as an org secret to fix this."
PERMISSION="none"
fi
fi
case "$PERMISSION" in
admin|maintain)
echo "✅ ${ACTOR} has '${PERMISSION}' permission — authorized to deploy"
;;
*)
echo "❌ Deployment requires admin or maintain role."
echo " ${ACTOR} has '${PERMISSION}' — contact your org administrator."
exit 1
;;
admin|maintain) AUTHORIZED="true" ;;
esac
fi
# Write detailed summary
{
echo "## 🔐 Deploy Authorization"
echo ""
echo "| Field | Value |"
echo "|-------|-------|"
echo "| **Actor** | \`${ACTOR}\` |"
echo "| **Repository** | \`${REPO}\` |"
echo "| **Permission** | \`${PERMISSION}\` |"
echo "| **Method** | ${METHOD} |"
echo "| **Authorized** | ${AUTHORIZED} |"
echo "| **Trigger** | \`${{ github.event_name }}\` |"
echo "| **Branch** | \`${{ github.ref_name }}\` |"
echo ""
} >> "$GITHUB_STEP_SUMMARY"
if [ "$AUTHORIZED" = "true" ]; then
echo "✅ ${ACTOR} authorized to deploy (${METHOD})" >> "$GITHUB_STEP_SUMMARY"
else
echo "❌ ${ACTOR} is NOT authorized to deploy." >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "Deployment requires one of:" >> "$GITHUB_STEP_SUMMARY"
echo "- Being in the hardcoded allowlist" >> "$GITHUB_STEP_SUMMARY"
echo "- Having \`admin\` or \`maintain\` role on the repository" >> "$GITHUB_STEP_SUMMARY"
exit 1
fi
deploy:
name: SFTP Deploy → Dev
runs-on: ubuntu-latest
needs: [check-permission]
if: >-
github.event_name == 'workflow_dispatch' ||
!startsWith(github.head_ref || github.ref_name, 'chore/') &&
(github.event_name == 'workflow_dispatch' ||
github.event_name == 'push' ||
(github.event_name == 'pull_request' &&
(github.event.action == 'opened' ||
github.event.action == 'synchronize' ||
github.event.action == 'reopened' ||
github.event.pull_request.merged == true))
github.event.pull_request.merged == true)))
steps:
- name: Checkout repository
@@ -214,9 +244,11 @@ jobs:
fi
done
$SKIP && continue
if [ -f ".gitignore" ] && git check-ignore -q "$rel" 2>/dev/null; then
if [ -f ".gitignore" ]; then
git check-ignore -q "$rel" 2>/dev/null && {
IGNORED_FILES+=("$rel | .gitignore")
continue
} || true
fi
WILL_UPLOAD+=("$rel")
done < <(find "$SOURCE_DIR" -type f -print0 | sort -z)
@@ -542,6 +574,25 @@ jobs:
DEPLOY_ARGS+=(--key-passphrase "$SFTP_PASSWORD")
fi
# ── For Dolibarr (crm-module): set version to "development" before deploy ─
PLATFORM=""
if [ -f ".moko-standards" ]; then
PLATFORM=$(grep -E '^platform:' .moko-standards | sed 's/.*:[[:space:]]*//' | tr -d '"')
fi
if [ "$PLATFORM" = "crm-module" ]; then
echo "📦 Dolibarr dev deploy — setting module version to 'development'"
find "$SOURCE_DIR" -path "*/core/modules/mod*.class.php" -exec \
sed -i "s/\(\$this->version\s*=\s*\)['\"][^'\"]*['\"]/\1'development'/" {} + 2>/dev/null || true
fi
if [ "$PLATFORM" = "waas-component" ]; then
echo "📦 Joomla dev deploy — setting manifest version to 'development'"
find "$SOURCE_DIR" -maxdepth 2 -name "*.xml" -exec grep -l '<extension' {} \; 2>/dev/null | while read -r manifest; do
sed -i "s|<version>[^<]*</version>|<version>development</version>|" "$manifest" 2>/dev/null || true
done
fi
php /tmp/mokostandards/api/deploy/deploy-sftp.php "${DEPLOY_ARGS[@]}"
# (deploy-sftp.php handles dotfile skipping and .ftp_ignore natively)
# Remove temp files that should never be left behind

645
.github/workflows/deploy-rs.yml vendored Normal file
View File

@@ -0,0 +1,645 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Deploy
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/deploy-rs.yml
# VERSION: 04.01.00
# BRIEF: SFTP deployment workflow for release staging server — synced to all governed repos
# NOTE: Synced via bulk-repo-sync to .github/workflows/deploy-rs.yml in all governed repos.
# Port is resolved in order: RS_FTP_PORT variable → :port suffix in RS_FTP_HOST → 22.
name: Deploy to RS Server (SFTP)
# Deploys the contents of the src/ directory to the release staging server via SFTP.
# Triggers on push/merge to main — deploys the production-ready build to the release staging server.
#
# Required org-level variables: RS_FTP_HOST, RS_FTP_PATH, RS_FTP_USERNAME
# Optional org-level variable: RS_FTP_PORT (auto-detected from host or defaults to 22)
# Optional org/repo variable: RS_FTP_SUFFIX — when set, appended to RS_FTP_PATH to form the
# full remote destination: RS_FTP_PATH/RS_FTP_SUFFIX
# Ignore rules: Place a .ftp_ignore file in the repository root. Each non-empty,
# non-comment line is a regex pattern tested against the relative path
# of each file (e.g. "subdir/file.txt"). The .gitignore is also
# respected automatically.
# Required org-level secret: RS_FTP_KEY (preferred) or RS_FTP_PASSWORD
#
# Access control: only users with admin or maintain role on the repository may deploy.
on:
push:
branches:
- main
- master
paths:
- 'src/**'
pull_request:
types: [opened, synchronize, reopened, closed]
branches:
- main
- master
paths:
- 'src/**'
workflow_dispatch:
inputs:
clear_remote:
description: 'Delete all files inside the remote destination folder before uploading'
required: false
default: false
type: boolean
permissions:
contents: read
pull-requests: write
jobs:
check-permission:
name: Verify Deployment Permission
runs-on: ubuntu-latest
steps:
- name: Check actor permission
env:
# Prefer the org-scoped GH_TOKEN secret (needed for the org membership
# fallback). Falls back to the built-in github.token so the collaborator
# endpoint still works even if GH_TOKEN is not configured.
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
run: |
ACTOR="${{ github.actor }}"
REPO="${{ github.repository }}"
ORG="${{ github.repository_owner }}"
METHOD=""
AUTHORIZED="false"
# Hardcoded authorized users — always allowed to deploy
AUTHORIZED_USERS="jmiller-moko github-actions[bot]"
for user in $AUTHORIZED_USERS; do
if [ "$ACTOR" = "$user" ]; then
AUTHORIZED="true"
METHOD="hardcoded allowlist"
PERMISSION="admin"
break
fi
done
# For other actors, check repo/org permissions via API
if [ "$AUTHORIZED" != "true" ]; then
PERMISSION=$(gh api "repos/${REPO}/collaborators/${ACTOR}/permission" \
--jq '.permission' 2>/dev/null)
METHOD="repo collaborator API"
if [ -z "$PERMISSION" ]; then
ORG_ROLE=$(gh api "orgs/${ORG}/memberships/${ACTOR}" \
--jq '.role' 2>/dev/null)
METHOD="org membership API"
if [ "$ORG_ROLE" = "owner" ]; then
PERMISSION="admin"
else
PERMISSION="none"
fi
fi
case "$PERMISSION" in
admin|maintain) AUTHORIZED="true" ;;
esac
fi
# Write detailed summary
{
echo "## 🔐 Deploy Authorization"
echo ""
echo "| Field | Value |"
echo "|-------|-------|"
echo "| **Actor** | \`${ACTOR}\` |"
echo "| **Repository** | \`${REPO}\` |"
echo "| **Permission** | \`${PERMISSION}\` |"
echo "| **Method** | ${METHOD} |"
echo "| **Authorized** | ${AUTHORIZED} |"
echo "| **Trigger** | \`${{ github.event_name }}\` |"
echo "| **Branch** | \`${{ github.ref_name }}\` |"
echo ""
} >> "$GITHUB_STEP_SUMMARY"
if [ "$AUTHORIZED" = "true" ]; then
echo "✅ ${ACTOR} authorized to deploy (${METHOD})" >> "$GITHUB_STEP_SUMMARY"
else
echo "❌ ${ACTOR} is NOT authorized to deploy." >> "$GITHUB_STEP_SUMMARY"
echo "" >> "$GITHUB_STEP_SUMMARY"
echo "Deployment requires one of:" >> "$GITHUB_STEP_SUMMARY"
echo "- Being in the hardcoded allowlist" >> "$GITHUB_STEP_SUMMARY"
echo "- Having \`admin\` or \`maintain\` role on the repository" >> "$GITHUB_STEP_SUMMARY"
exit 1
fi
deploy:
name: SFTP Deploy → RS
runs-on: ubuntu-latest
needs: [check-permission]
if: >-
!startsWith(github.head_ref || github.ref_name, 'chore/') &&
(github.event_name == 'workflow_dispatch' ||
github.event_name == 'push' ||
(github.event_name == 'pull_request' &&
(github.event.action == 'opened' ||
github.event.action == 'synchronize' ||
github.event.action == 'reopened' ||
github.event.pull_request.merged == true)))
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
# Locate the deployable source tree; emits skip/dir outputs for later steps.
- name: Resolve source directory
  id: source
  run: |
    SRC="src"
    if [ ! -d "$SRC" ]; then
      echo "⚠️ No src/ directory found — skipping deployment"
      echo "skip=true" >> "$GITHUB_OUTPUT"
    else
      # Count regular files beneath src/ for the log line. The previous
      # guard (`find "$SRC" -maxdepth 0 -type d > /dev/null &&`) was a
      # no-op: this branch already runs only when the directory exists.
      COUNT=$(find "$SRC" -type f | wc -l)
      echo "✅ Source: src/ (${COUNT} file(s))"
      echo "skip=false" >> "$GITHUB_OUTPUT"
      echo "dir=${SRC}" >> "$GITHUB_OUTPUT"
    fi
- name: Preview files to deploy
if: steps.source.outputs.skip == 'false'
env:
SOURCE_DIR: ${{ steps.source.outputs.dir }}
run: |
# ── Convert a gitignore-style glob line to an ERE pattern ──────────────
# Supported: '*' → any run of non-slash chars, '**' → any run including
# slashes, '?' → one non-slash char, leading '/' anchors to root, trailing
# '/' is a directory marker (stripped). Negation ('!…'), blank lines and
# pure comments produce no output.
ftp_ignore_to_regex() {
  local line="$1"
  local anchored=false
  # Strip inline comments and surrounding whitespace
  line=$(printf '%s' "$line" | sed 's/[[:space:]]*#.*$//' | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
  [ -z "$line" ] && return
  # Skip negation patterns (not supported)
  [[ "$line" == !* ]] && return
  # Trailing slash = directory marker; strip it
  line="${line%/}"
  # Leading slash = anchored to root; strip it
  if [[ "$line" == /* ]]; then
    anchored=true
    line="${line#/}"
  fi
  # Escape ERE special chars, then restore glob semantics.
  # NOTE: the escape pass never backslash-prefixes '*' or '?', so the glob
  # substitutions below must match them bare. The old passes searched for
  # '\*\*', '\*' and '\?' — patterns that never occur — which let raw '*'
  # and '?' leak into the ERE as metacharacters.
  local regex
  regex=$(printf '%s' "$line" \
    | sed 's/[.+^${}()|[\\]/\\&/g' \
    | sed 's/\*\*/\x01/g' \
    | sed 's/\*/[^\/]*/g' \
    | sed 's/\x01/.*/g' \
    | sed 's/?/[^\/]/g')
  if $anchored; then
    printf '^%s(/|$)' "$regex"
  else
    printf '(^|/)%s(/|$)' "$regex"
  fi
}
# ── Read .ftp_ignore (gitignore-style globs) ─────────────────────────
IGNORE_PATTERNS=()
IGNORE_SOURCES=()
if [ -f ".ftp_ignore" ]; then
while IFS= read -r line; do
[[ "$line" =~ ^[[:space:]]*$ || "$line" =~ ^[[:space:]]*# ]] && continue
regex=$(ftp_ignore_to_regex "$line")
[ -n "$regex" ] && IGNORE_PATTERNS+=("$regex") && IGNORE_SOURCES+=("$line")
done < ".ftp_ignore"
fi
# ── Walk src/ and classify every file ────────────────────────────────
WILL_UPLOAD=()
IGNORED_FILES=()
while IFS= read -r -d '' file; do
rel="${file#${SOURCE_DIR}/}"
SKIP=false
for i in "${!IGNORE_PATTERNS[@]}"; do
if echo "$rel" | grep -qE "${IGNORE_PATTERNS[$i]}" 2>/dev/null; then
IGNORED_FILES+=("$rel | .ftp_ignore \`${IGNORE_SOURCES[$i]}\`")
SKIP=true; break
fi
done
$SKIP && continue
if [ -f ".gitignore" ]; then
git check-ignore -q "$rel" 2>/dev/null && {
IGNORED_FILES+=("$rel | .gitignore")
continue
} || true
fi
WILL_UPLOAD+=("$rel")
done < <(find "$SOURCE_DIR" -type f -print0 | sort -z)
UPLOAD_COUNT="${#WILL_UPLOAD[@]}"
IGNORE_COUNT="${#IGNORED_FILES[@]}"
echo " ${UPLOAD_COUNT} file(s) will be uploaded, ${IGNORE_COUNT} ignored"
# ── Write deployment preview to step summary ──────────────────────────
{
echo "## 📋 Deployment Preview"
echo ""
echo "| Field | Value |"
echo "|---|---|"
echo "| Source | \`${SOURCE_DIR}/\` |"
echo "| Files to upload | **${UPLOAD_COUNT}** |"
echo "| Files ignored | **${IGNORE_COUNT}** |"
echo ""
if [ "${UPLOAD_COUNT}" -gt 0 ]; then
echo "### 📂 Files that will be uploaded"
echo '```'
printf '%s\n' "${WILL_UPLOAD[@]}"
echo '```'
echo ""
fi
if [ "${IGNORE_COUNT}" -gt 0 ]; then
echo "### ⏭️ Files excluded"
echo "| File | Reason |"
echo "|---|---|"
for entry in "${IGNORED_FILES[@]}"; do
f="${entry% | *}"; r="${entry##* | }"
echo "| \`${f}\` | ${r} |"
done
echo ""
fi
} >> "$GITHUB_STEP_SUMMARY"
- name: Resolve SFTP host and port
if: steps.source.outputs.skip == 'false'
id: conn
env:
HOST_RAW: ${{ vars.RS_FTP_HOST }}
PORT_VAR: ${{ vars.RS_FTP_PORT }}
run: |
HOST="$HOST_RAW"
PORT="$PORT_VAR"
# Priority 1 — explicit RS_FTP_PORT variable
if [ -n "$PORT" ]; then
echo " Using explicit RS_FTP_PORT=${PORT}"
# Priority 2 — port embedded in RS_FTP_HOST (host:port)
elif [[ "$HOST" == *:* ]]; then
PORT="${HOST##*:}"
HOST="${HOST%:*}"
echo " Extracted port ${PORT} from RS_FTP_HOST"
# Priority 3 — SFTP default
else
PORT="22"
echo " No port specified — defaulting to SFTP port 22"
fi
echo "host=${HOST}" >> "$GITHUB_OUTPUT"
echo "port=${PORT}" >> "$GITHUB_OUTPUT"
echo "SFTP target: ${HOST}:${PORT}"
- name: Build remote path
if: steps.source.outputs.skip == 'false'
id: remote
env:
RS_FTP_PATH: ${{ vars.RS_FTP_PATH }}
RS_FTP_SUFFIX: ${{ vars.RS_FTP_SUFFIX }}
run: |
BASE="$RS_FTP_PATH"
if [ -z "$BASE" ]; then
echo "❌ RS_FTP_PATH is not set."
echo " Configure it as an org-level variable (Settings → Variables) and"
echo " ensure this repository has been granted access to it."
exit 1
fi
# RS_FTP_SUFFIX is required — it identifies the remote subdirectory for this repo.
# Without it we cannot safely determine the deployment target.
if [ -z "$RS_FTP_SUFFIX" ]; then
echo "⏭️ RS_FTP_SUFFIX variable is not set — skipping deployment."
echo " Set RS_FTP_SUFFIX as a repo or org variable to enable deploy-rs."
echo "skip=true" >> "$GITHUB_OUTPUT"
echo "path=" >> "$GITHUB_OUTPUT"
exit 0
fi
REMOTE="${BASE%/}/${RS_FTP_SUFFIX#/}"
# ── Platform-specific path safety guards ──────────────────────────────
PLATFORM=""
if [ -f ".moko-standards" ]; then
PLATFORM=$(grep -E '^platform:' .moko-standards | sed 's/.*:[[:space:]]*//' | tr -d '"')
fi
# RS deployment: no path restrictions for any platform
echo " Remote path: ${REMOTE}"
echo "path=${REMOTE}" >> "$GITHUB_OUTPUT"
- name: Detect SFTP authentication method
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
id: auth
env:
HAS_KEY: ${{ secrets.RS_FTP_KEY }}
HAS_PASSWORD: ${{ secrets.RS_FTP_PASSWORD }}
run: |
if [ -n "$HAS_KEY" ] && [ -n "$HAS_PASSWORD" ]; then
# Both set: key auth with password as passphrase; falls back to password-only if key fails
echo "method=key" >> "$GITHUB_OUTPUT"
echo "use_passphrase=true" >> "$GITHUB_OUTPUT"
echo "has_password=true" >> "$GITHUB_OUTPUT"
echo " Primary: SSH key + passphrase (RS_FTP_KEY / RS_FTP_PASSWORD)"
echo " Fallback: password-only auth if key authentication fails"
elif [ -n "$HAS_KEY" ]; then
# Key only: no passphrase, no password fallback
echo "method=key" >> "$GITHUB_OUTPUT"
echo "use_passphrase=false" >> "$GITHUB_OUTPUT"
echo "has_password=false" >> "$GITHUB_OUTPUT"
echo " Using SSH key authentication (RS_FTP_KEY, no passphrase, no fallback)"
elif [ -n "$HAS_PASSWORD" ]; then
# Password only: direct SFTP password auth
echo "method=password" >> "$GITHUB_OUTPUT"
echo "use_passphrase=false" >> "$GITHUB_OUTPUT"
echo "has_password=true" >> "$GITHUB_OUTPUT"
echo " Using password authentication (RS_FTP_PASSWORD)"
else
echo "❌ No SFTP credentials configured."
echo " Set RS_FTP_KEY (preferred) or RS_FTP_PASSWORD as an org-level secret."
exit 1
fi
- name: Setup PHP
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
uses: shivammathur/setup-php@44454db4f0199b8b9685a5d763dc37cbf79108e1 # v2.31.0
with:
php-version: '8.1'
tools: composer
- name: Setup MokoStandards deploy tools
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
COMPOSER_AUTH: '{"github-oauth":{"github.com":"${{ secrets.GH_TOKEN || github.token }}"}}'
run: |
git clone --depth 1 --quiet \
"https://x-access-token:${GH_TOKEN}@github.com/mokoconsulting-tech/MokoStandards.git" \
/tmp/mokostandards
cd /tmp/mokostandards
composer install --no-dev --no-interaction --quiet
- name: Clear remote destination folder
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
env:
SFTP_HOST: ${{ steps.conn.outputs.host }}
SFTP_PORT: ${{ steps.conn.outputs.port }}
SFTP_USER: ${{ vars.RS_FTP_USERNAME }}
SFTP_KEY: ${{ secrets.RS_FTP_KEY }}
SFTP_PASSWORD: ${{ secrets.RS_FTP_PASSWORD }}
AUTH_METHOD: ${{ steps.auth.outputs.method }}
USE_PASSPHRASE: ${{ steps.auth.outputs.use_passphrase }}
HAS_PASSWORD: ${{ steps.auth.outputs.has_password }}
REMOTE_PATH: ${{ steps.remote.outputs.path }}
run: |
cat > /tmp/moko_clear.php << 'PHPEOF'
<?php
declare(strict_types=1);
require '/tmp/mokostandards/vendor/autoload.php';
use phpseclib3\Net\SFTP;
use phpseclib3\Crypt\PublicKeyLoader;
$host = (string) getenv('SFTP_HOST');
$port = (int) getenv('SFTP_PORT');
$username = (string) getenv('SFTP_USER');
$authMethod = (string) getenv('AUTH_METHOD');
$usePassphrase = getenv('USE_PASSPHRASE') === 'true';
$hasPassword = getenv('HAS_PASSWORD') === 'true';
$remotePath = rtrim((string) getenv('REMOTE_PATH'), '/');
echo "⚠️ Clearing remote folder: {$remotePath}\n";
$sftp = new SFTP($host, $port);
// ── Authentication ──────────────────────────────────────────────
if ($authMethod === 'key') {
$keyData = (string) getenv('SFTP_KEY');
$passphrase = $usePassphrase ? (string) getenv('SFTP_PASSWORD') : false;
$password = $hasPassword ? (string) getenv('SFTP_PASSWORD') : '';
$key = PublicKeyLoader::load($keyData, $passphrase);
if (!$sftp->login($username, $key)) {
if ($password !== '') {
echo "⚠️ Key auth failed — falling back to password\n";
if (!$sftp->login($username, $password)) {
fwrite(STDERR, "❌ Both key and password authentication failed\n");
exit(1);
}
echo "✅ Connected via password authentication (key fallback)\n";
} else {
fwrite(STDERR, "❌ Key authentication failed and no password fallback is available\n");
exit(1);
}
} else {
echo "✅ Connected via SSH key authentication\n";
}
} else {
if (!$sftp->login($username, (string) getenv('SFTP_PASSWORD'))) {
fwrite(STDERR, "❌ Password authentication failed\n");
exit(1);
}
echo "✅ Connected via password authentication\n";
}
// ── Recursive delete ────────────────────────────────────────────
function rmrf(SFTP $sftp, string $path): void
{
$entries = $sftp->nlist($path);
if ($entries === false) {
return; // path does not exist — nothing to clear
}
foreach ($entries as $name) {
if ($name === '.' || $name === '..') {
continue;
}
$entry = "{$path}/{$name}";
if ($sftp->is_dir($entry)) {
rmrf($sftp, $entry);
$sftp->rmdir($entry);
echo " 🗑️ Removed dir: {$entry}\n";
} else {
$sftp->delete($entry);
echo " 🗑️ Removed file: {$entry}\n";
}
}
}
// ── Create remote directory tree ────────────────────────────────
// Creates every missing component of $path, mkdir-ing parents first.
function sftpMakedirs(SFTP $sftp, string $path): void
{
    $parts = array_values(array_filter(explode('/', $path), fn(string $p) => $p !== ''));
    // Keep absolute paths rooted at "/", but build relative paths from the
    // session cwd ("."). The previous ternary had '' in both arms, which
    // silently turned relative remote paths into absolute ones.
    $current = str_starts_with($path, '/') ? '' : '.';
    foreach ($parts as $part) {
        $current .= '/' . $part;
        $sftp->mkdir($current); // returns false if it already exists — harmless
    }
}
rmrf($sftp, $remotePath);
sftpMakedirs($sftp, $remotePath);
echo "✅ Remote folder ready: {$remotePath}\n";
PHPEOF
php /tmp/moko_clear.php
- name: Deploy via SFTP
if: steps.source.outputs.skip == 'false' && steps.remote.outputs.skip != 'true'
env:
SFTP_HOST: ${{ steps.conn.outputs.host }}
SFTP_PORT: ${{ steps.conn.outputs.port }}
SFTP_USER: ${{ vars.RS_FTP_USERNAME }}
SFTP_KEY: ${{ secrets.RS_FTP_KEY }}
SFTP_PASSWORD: ${{ secrets.RS_FTP_PASSWORD }}
AUTH_METHOD: ${{ steps.auth.outputs.method }}
USE_PASSPHRASE: ${{ steps.auth.outputs.use_passphrase }}
REMOTE_PATH: ${{ steps.remote.outputs.path }}
SOURCE_DIR: ${{ steps.source.outputs.dir }}
run: |
# ── Write SSH key to temp file (key auth only) ────────────────────────
if [ "$AUTH_METHOD" = "key" ]; then
printf '%s' "$SFTP_KEY" > /tmp/deploy_key
chmod 600 /tmp/deploy_key
fi
# ── Generate sftp-config.json safely via jq ───────────────────────────
if [ "$AUTH_METHOD" = "key" ]; then
jq -n \
--arg host "$SFTP_HOST" \
--argjson port "${SFTP_PORT:-22}" \
--arg user "$SFTP_USER" \
--arg path "$REMOTE_PATH" \
--arg key "/tmp/deploy_key" \
'{host:$host, port:$port, user:$user, remote_path:$path, ssh_key_file:$key}' \
> /tmp/sftp-config.json
else
jq -n \
--arg host "$SFTP_HOST" \
--argjson port "${SFTP_PORT:-22}" \
--arg user "$SFTP_USER" \
--arg path "$REMOTE_PATH" \
--arg pass "$SFTP_PASSWORD" \
'{host:$host, port:$port, user:$user, remote_path:$path, password:$pass}' \
> /tmp/sftp-config.json
fi
# ── Run deploy-sftp.php from MokoStandards ────────────────────────────
DEPLOY_ARGS=(--path . --src-dir "$SOURCE_DIR" --config /tmp/sftp-config.json)
if [ "$USE_PASSPHRASE" = "true" ]; then
DEPLOY_ARGS+=(--key-passphrase "$SFTP_PASSWORD")
fi
php /tmp/mokostandards/api/deploy/deploy-sftp.php "${DEPLOY_ARGS[@]}"
# (deploy-sftp.php handles dotfile skipping and .ftp_ignore natively)
# Remove temp files that should never be left behind
rm -f /tmp/deploy_key /tmp/sftp-config.json
- name: Create or update failure issue
if: failure()
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
run: |
REPO="${{ github.repository }}"
RUN_URL="${{ github.server_url }}/${REPO}/actions/runs/${{ github.run_id }}"
ACTOR="${{ github.actor }}"
BRANCH="${{ github.ref_name }}"
EVENT="${{ github.event_name }}"
NOW=$(date -u '+%Y-%m-%d %H:%M:%S UTC')
LABEL="deploy-failure"
TITLE="fix: RS deployment failed — ${REPO}"
BODY="## RS Deployment Failed
A deployment to the RS server failed and requires attention.
| Field | Value |
|-------|-------|
| **Repository** | \`${REPO}\` |
| **Branch** | \`${BRANCH}\` |
| **Trigger** | ${EVENT} |
| **Actor** | @${ACTOR} |
| **Failed at** | ${NOW} |
| **Run** | [View workflow run](${RUN_URL}) |
### Next steps
1. Review the [workflow run log](${RUN_URL}) for the specific error.
2. Fix the underlying issue (credentials, SFTP connectivity, permissions).
3. Re-trigger the deployment via **Actions → Deploy to RS Server → Run workflow**.
---
*Auto-created by deploy-rs.yml — close this issue once the deployment is resolved.*"
# Ensure the label exists (idempotent — no-op if already present)
gh label create "$LABEL" \
--repo "$REPO" \
--color "CC0000" \
--description "Automated deploy failure tracking" \
--force 2>/dev/null || true
# Look for an existing open deploy-failure issue
EXISTING=$(gh api "repos/${REPO}/issues?labels=${LABEL}&state=open&per_page=1" \
--jq '.[0].number' 2>/dev/null)
if [ -n "$EXISTING" ] && [ "$EXISTING" != "null" ]; then
gh api "repos/${REPO}/issues/${EXISTING}" \
-X PATCH \
-f title="$TITLE" \
-f body="$BODY" \
--silent
echo "📋 Failure issue #${EXISTING} updated: ${REPO}" >> "$GITHUB_STEP_SUMMARY"
else
gh issue create \
--repo "$REPO" \
--title "$TITLE" \
--body "$BODY" \
--label "$LABEL" \
| tee -a "$GITHUB_STEP_SUMMARY"
fi
# Final status report — runs on every outcome (if: always()) and appends a
# skipped / success / failure section to the job summary.
- name: Deployment summary
  if: always()
  run: |
    # steps.source.outputs.skip is "true" when the repo has no src/ directory.
    if [ "${{ steps.source.outputs.skip }}" == "true" ]; then
      echo "### ⏭️ Deployment Skipped" >> "$GITHUB_STEP_SUMMARY"
      echo "" >> "$GITHUB_STEP_SUMMARY"
      echo "No \`src/\` directory found in this repository." >> "$GITHUB_STEP_SUMMARY"
    # job.status is the job's overall status at the moment this step runs.
    elif [ "${{ job.status }}" == "success" ]; then
      echo "" >> "$GITHUB_STEP_SUMMARY"
      echo "### ✅ RS Deployment Successful" >> "$GITHUB_STEP_SUMMARY"
      echo "" >> "$GITHUB_STEP_SUMMARY"
      echo "| Field | Value |" >> "$GITHUB_STEP_SUMMARY"
      echo "|-------|-------|" >> "$GITHUB_STEP_SUMMARY"
      echo "| Host | \`${{ steps.conn.outputs.host }}:${{ steps.conn.outputs.port }}\` |" >> "$GITHUB_STEP_SUMMARY"
      echo "| Remote path | \`${{ steps.remote.outputs.path }}\` |" >> "$GITHUB_STEP_SUMMARY"
      echo "| Source | \`src/\` |" >> "$GITHUB_STEP_SUMMARY"
      echo "| Trigger | ${{ github.event_name }} |" >> "$GITHUB_STEP_SUMMARY"
      echo "| Auth | ${{ steps.auth.outputs.method }} |" >> "$GITHUB_STEP_SUMMARY"
      echo "| Clear remote | ${{ inputs.clear_remote || 'false' }} |" >> "$GITHUB_STEP_SUMMARY"
    else
      echo "### ❌ RS Deployment Failed" >> "$GITHUB_STEP_SUMMARY"
      echo "" >> "$GITHUB_STEP_SUMMARY"
      echo "Check the job log above for error details." >> "$GITHUB_STEP_SUMMARY"
    fi

View File

@@ -22,7 +22,7 @@
# INGROUP: MokoStandards.Firewall
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/enterprise-firewall-setup.yml
# VERSION: 01.00.00
# VERSION: 04.01.00
# BRIEF: Enterprise firewall configuration — generates outbound allow-rules including SFTP deployment server
# NOTE: Reads DEV_FTP_HOST / DEV_FTP_PORT variables to include SFTP egress rules alongside HTTPS rules.

730
.github/workflows/repo_health.yml vendored Normal file
View File

@@ -0,0 +1,730 @@
# ============================================================================
# Copyright (C) 2025 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Validation
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/repo_health.yml
# VERSION: 04.01.00
# BRIEF: Enforces repository guardrails by validating release configuration, scripts governance, tooling availability, and core repository health artifacts.
# NOTE: Field is user-managed.
# ============================================================================
name: Repo Health
concurrency:
group: repo-health-${{ github.repository }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
on:
workflow_dispatch:
inputs:
profile:
description: Which configuration profile to validate. release checks SFTP variables used by release pipeline. scripts checks baseline script prerequisites. repo runs repository health only. al[...]
required: true
default: all
type: choice
options:
- all
- release
- scripts
- repo
pull_request:
paths:
- .github/workflows/**
- scripts/**
- docs/**
- dev/**
push:
branches:
- main
paths:
- .github/workflows/**
- scripts/**
- docs/**
- dev/**
permissions:
contents: read
env:
# Release policy - Repository Variables Only
RELEASE_REQUIRED_REPO_VARS: RS_FTP_PATH_SUFFIX
RELEASE_OPTIONAL_REPO_VARS: DEV_FTP_SUFFIX
# Scripts governance policy
# Note: directories listed without a trailing slash.
SCRIPTS_REQUIRED_DIRS:
SCRIPTS_ALLOWED_DIRS: scripts,scripts/fix,scripts/lib,scripts/release,scripts/run,scripts/validate
# Repo health policy
# Files are listed as-is; directories must end with a trailing slash.
REPO_REQUIRED_ARTIFACTS: README.md,LICENSE,CHANGELOG.md,CONTRIBUTING.md,CODE_OF_CONDUCT.md,.github/workflows/,src/
REPO_OPTIONAL_FILES: SECURITY.md,GOVERNANCE.md,.editorconfig,.gitattributes,.gitignore,README.md,docs/
REPO_DISALLOWED_DIRS:
REPO_DISALLOWED_FILES: TODO.md,todo.md
# Extended checks toggles
EXTENDED_CHECKS: "true"
# File / directory variables (moved to top-level env)
DOCS_INDEX: docs/docs-index.md
SCRIPT_DIR: scripts
WORKFLOWS_DIR: .github/workflows
SHELLCHECK_PATTERN: '*.sh'
SPDX_FILE_GLOBS: '*.sh,*.php,*.js,*.ts,*.css,*.xml,*.yml,*.yaml'
jobs:
access_check:
name: Access control
runs-on: ubuntu-latest
timeout-minutes: 10
permissions:
contents: read
outputs:
allowed: ${{ steps.perm.outputs.allowed }}
permission: ${{ steps.perm.outputs.permission }}
steps:
# Resolve the actor's effective permission and expose allowed/permission
# outputs for the downstream jobs.
- name: Check actor permission (admin only)
  id: perm
  uses: actions/github-script@v7
  with:
    # Fall back to the built-in token when the org-scoped GH_TOKEN secret
    # is not configured — matches the `secrets.GH_TOKEN || github.token`
    # pattern used by the deploy workflows, so the collaborator-permission
    # lookup keeps working either way.
    github-token: ${{ secrets.GH_TOKEN || github.token }}
    script: |
      const actor = context.actor;
      let permission = "unknown";
      let allowed = false;
      let method = "";
      // Hardcoded authorized users — always allowed
      const authorizedUsers = ["jmiller-moko", "github-actions[bot]"];
      if (authorizedUsers.includes(actor)) {
        allowed = true;
        permission = "admin";
        method = "hardcoded allowlist";
      } else {
        // Check via API for other actors
        try {
          const res = await github.rest.repos.getCollaboratorPermissionLevel({
            owner: context.repo.owner,
            repo: context.repo.repo,
            username: actor,
          });
          permission = (res?.data?.permission || "unknown").toLowerCase();
          allowed = permission === "admin" || permission === "maintain";
          method = "repo collaborator API";
        } catch (error) {
          // API failure (e.g. token lacks access) — deny by default.
          core.warning(`Could not fetch permissions for '${actor}': ${error.message}`);
          permission = "unknown";
          allowed = false;
          method = "API error";
        }
      }
      core.setOutput("permission", permission);
      core.setOutput("allowed", allowed ? "true" : "false");
      const lines = [
        "## 🔐 Access Authorization",
        "",
        "| Field | Value |",
        "|-------|-------|",
        `| **Actor** | \`${actor}\` |`,
        `| **Repository** | \`${context.repo.owner}/${context.repo.repo}\` |`,
        `| **Permission** | \`${permission}\` |`,
        `| **Method** | ${method} |`,
        `| **Authorized** | ${allowed} |`,
        `| **Trigger** | \`${context.eventName}\` |`,
        `| **Branch** | \`${context.ref.replace('refs/heads/', '')}\` |`,
        "",
        allowed
          ? `✅ ${actor} authorized (${method})`
          : `❌ ${actor} is NOT authorized. Requires admin or maintain role, or be in the hardcoded allowlist.`,
      ];
      await core.summary.addRaw(lines.join("\n")).write();
# Hard stop for unauthorized actors; downstream jobs also gate on the output.
- name: Deny execution when not permitted
  if: ${{ steps.perm.outputs.allowed != 'true' }}
  run: |
    set -euo pipefail
    printf '%s\n' 'ERROR: Access denied. Admin permission required.' >> "${GITHUB_STEP_SUMMARY}"
    exit 1
  # Validates that the repository-level variables needed for a release are
  # configured. Runs only when access_check authorized the actor, and only
  # for profiles that actually release ('all' or 'release').
  release_config:
    name: Release configuration
    needs: access_check
    if: ${{ needs.access_check.outputs.allowed == 'true' }}
    runs-on: ubuntu-latest
    timeout-minutes: 20
    permissions:
      contents: read
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Guardrails release vars
        env:
          PROFILE_RAW: ${{ github.event.inputs.profile }}
          RS_FTP_PATH_SUFFIX: ${{ vars.RS_FTP_PATH_SUFFIX }}
          DEV_FTP_SUFFIX: ${{ vars.DEV_FTP_SUFFIX }}
        run: |
          set -euo pipefail
          # Default to the full profile when triggered without inputs
          # (e.g. push or schedule events have no workflow_dispatch input).
          profile="${PROFILE_RAW:-all}"
          case "${profile}" in
            all|release|scripts|repo) ;;
            *)
              printf '%s\n' "ERROR: Unknown profile: ${profile}" >> "${GITHUB_STEP_SUMMARY}"
              exit 1
              ;;
          esac
          # 'scripts' and 'repo' profiles do not release; record SKIPPED and stop.
          if [ "${profile}" = 'scripts' ] || [ "${profile}" = 'repo' ]; then
            {
              printf '%s\n' '### Release configuration (Repository Variables)'
              printf '%s\n' "Profile: ${profile}"
              printf '%s\n' 'Status: SKIPPED'
              printf '%s\n' 'Reason: profile excludes release validation'
              printf '\n'
            } >> "${GITHUB_STEP_SUMMARY}"
            exit 0
          fi
          # NOTE(review): RELEASE_REQUIRED_REPO_VARS / RELEASE_OPTIONAL_REPO_VARS
          # are expected from the workflow-level env block (not visible in this
          # step) — confirm they are defined there; under `set -u` an unset
          # value aborts this step before any summary is written.
          IFS=',' read -r -a required <<< "${RELEASE_REQUIRED_REPO_VARS}"
          IFS=',' read -r -a optional <<< "${RELEASE_OPTIONAL_REPO_VARS}"
          missing=()
          missing_optional=()
          # Indirect expansion: each listed NAME is resolved as env var $NAME;
          # empty or unset values count as missing.
          for k in "${required[@]}"; do
            v="${!k:-}"
            [ -z "${v}" ] && missing+=("${k}")
          done
          for k in "${optional[@]}"; do
            v="${!k:-}"
            [ -z "${v}" ] && missing_optional+=("${k}")
          done
          # Summary table shows the two well-known vars explicitly; the
          # required/optional lists drive the pass/fail decision below.
          {
            printf '%s\n' '### Release configuration (Repository Variables)'
            printf '%s\n' "Profile: ${profile}"
            printf '%s\n' '| Variable | Status |'
            printf '%s\n' '|---|---|'
            printf '%s\n' "| RS_FTP_PATH_SUFFIX | ${RS_FTP_PATH_SUFFIX:-NOT SET} |"
            printf '%s\n' "| DEV_FTP_SUFFIX | ${DEV_FTP_SUFFIX:-NOT SET} |"
            printf '\n'
          } >> "${GITHUB_STEP_SUMMARY}"
          # Missing optional vars are reported but never fail the job.
          if [ "${#missing_optional[@]}" -gt 0 ]; then
            {
              printf '%s\n' '### Missing optional repository variables'
              for m in "${missing_optional[@]}"; do printf '%s\n' "- ${m}"; done
              printf '\n'
            } >> "${GITHUB_STEP_SUMMARY}"
          fi
          # Missing required vars fail the guardrail.
          if [ "${#missing[@]}" -gt 0 ]; then
            {
              printf '%s\n' '### Missing required repository variables'
              for m in "${missing[@]}"; do printf '%s\n' "- ${m}"; done
              printf '%s\n' 'ERROR: Guardrails failed. Missing required repository variables.'
            } >> "${GITHUB_STEP_SUMMARY}"
            exit 1
          fi
          {
            printf '%s\n' '### Repository variables validation result'
            printf '%s\n' 'Status: OK'
            printf '%s\n' 'All required repository variables present.'
            printf '%s\n' ''
            printf '%s\n' '**Note**: Organization secrets (RS_FTP_HOST, RS_FTP_USER, etc.) are validated at deployment time, not in repository health checks.'
            printf '\n'
          } >> "${GITHUB_STEP_SUMMARY}"
  # Advisory policy checks for the optional scripts/ directory: required
  # subfolders must exist and only allowlisted subfolders may appear.
  # This job never fails the pipeline — findings are reported as warnings.
  scripts_governance:
    name: Scripts governance
    needs: access_check
    if: ${{ needs.access_check.outputs.allowed == 'true' }}
    runs-on: ubuntu-latest
    timeout-minutes: 15
    permissions:
      contents: read
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Scripts folder checks
        env:
          PROFILE_RAW: ${{ github.event.inputs.profile }}
        run: |
          set -euo pipefail
          # Default to the full profile when no workflow_dispatch input given.
          profile="${PROFILE_RAW:-all}"
          case "${profile}" in
            all|release|scripts|repo) ;;
            *)
              printf '%s\n' "ERROR: Unknown profile: ${profile}" >> "${GITHUB_STEP_SUMMARY}"
              exit 1
              ;;
          esac
          # 'release' and 'repo' profiles exclude scripts checks; record SKIPPED.
          if [ "${profile}" = 'release' ] || [ "${profile}" = 'repo' ]; then
            {
              printf '%s\n' '### Scripts governance'
              printf '%s\n' "Profile: ${profile}"
              printf '%s\n' 'Status: SKIPPED'
              printf '%s\n' 'Reason: profile excludes scripts governance'
              printf '\n'
            } >> "${GITHUB_STEP_SUMMARY}"
            exit 0
          fi
          # scripts/ is optional: its absence is OK in advisory mode.
          # SCRIPT_DIR comes from the workflow-level env block.
          if [ ! -d "${SCRIPT_DIR}" ]; then
            {
              printf '%s\n' '### Scripts governance'
              printf '%s\n' 'Status: OK (advisory)'
              printf '%s\n' 'scripts/ directory not present. No scripts governance enforced.'
              printf '\n'
            } >> "${GITHUB_STEP_SUMMARY}"
            exit 0
          fi
          # Comma-separated dir lists from workflow env; trailing slashes are
          # tolerated and stripped before comparison.
          IFS=',' read -r -a required_dirs <<< "${SCRIPTS_REQUIRED_DIRS}"
          IFS=',' read -r -a allowed_dirs <<< "${SCRIPTS_ALLOWED_DIRS}"
          missing_dirs=()
          unapproved_dirs=()
          for d in "${required_dirs[@]}"; do
            req="${d%/}"
            [ ! -d "${req}" ] && missing_dirs+=("${req}/")
          done
          # Compare each first-level subdirectory of scripts/ against the
          # allowlist; anything not matched is flagged as unapproved.
          while IFS= read -r d; do
            allowed=false
            for a in "${allowed_dirs[@]}"; do
              a_norm="${a%/}"
              [ "${d%/}" = "${a_norm}" ] && allowed=true
            done
            [ "${allowed}" = false ] && unapproved_dirs+=("${d%/}/")
          done < <(find "${SCRIPT_DIR}" -maxdepth 1 -mindepth 1 -type d 2>/dev/null | sed 's#^\./##')
          # Emit the advisory report; warnings only, never a failure exit.
          {
            printf '%s\n' '### Scripts governance'
            printf '%s\n' "Profile: ${profile}"
            printf '%s\n' '| Area | Status | Notes |'
            printf '%s\n' '|---|---|---|'
            if [ "${#missing_dirs[@]}" -gt 0 ]; then
              printf '%s\n' '| Required directories | Warning | Missing required subfolders |'
            else
              printf '%s\n' '| Required directories | OK | All required subfolders present |'
            fi
            if [ "${#unapproved_dirs[@]}" -gt 0 ]; then
              printf '%s\n' '| Directory policy | Warning | Unapproved directories detected |'
            else
              printf '%s\n' '| Directory policy | OK | No unapproved directories |'
            fi
            printf '%s\n' '| Enforcement mode | Advisory | scripts folder is optional |'
            printf '\n'
            if [ "${#missing_dirs[@]}" -gt 0 ]; then
              printf '%s\n' 'Missing required script directories:'
              for m in "${missing_dirs[@]}"; do printf '%s\n' "- ${m}"; done
              printf '\n'
            else
              printf '%s\n' 'Missing required script directories: none.'
              printf '\n'
            fi
            if [ "${#unapproved_dirs[@]}" -gt 0 ]; then
              printf '%s\n' 'Unapproved script directories detected:'
              for m in "${unapproved_dirs[@]}"; do printf '%s\n' "- ${m}"; done
              printf '\n'
            else
              printf '%s\n' 'Unapproved script directories detected: none.'
              printf '\n'
            fi
            printf '%s\n' 'Scripts governance completed in advisory mode.'
            printf '\n'
          } >> "${GITHUB_STEP_SUMMARY}"
repo_health:
name: Repository health
needs: access_check
if: ${{ needs.access_check.outputs.allowed == 'true' }}
runs-on: ubuntu-latest
timeout-minutes: 20
permissions:
contents: read
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Repository health checks
env:
PROFILE_RAW: ${{ github.event.inputs.profile }}
run: |
set -euo pipefail
profile="${PROFILE_RAW:-all}"
case "${profile}" in
all|release|scripts|repo) ;;
*)
printf '%s\n' "ERROR: Unknown profile: ${profile}" >> "${GITHUB_STEP_SUMMARY}"
exit 1
;;
esac
if [ "${profile}" = 'release' ] || [ "${profile}" = 'scripts' ]; then
{
printf '%s\n' '### Repository health'
printf '%s\n' "Profile: ${profile}"
printf '%s\n' 'Status: SKIPPED'
printf '%s\n' 'Reason: profile excludes repository health'
printf '\n'
} >> "${GITHUB_STEP_SUMMARY}"
exit 0
fi
IFS=',' read -r -a required_artifacts <<< "${REPO_REQUIRED_ARTIFACTS}"
IFS=',' read -r -a optional_files <<< "${REPO_OPTIONAL_FILES}"
IFS=',' read -r -a disallowed_dirs <<< "${REPO_DISALLOWED_DIRS}"
IFS=',' read -r -a disallowed_files <<< "${REPO_DISALLOWED_FILES}"
missing_required=()
missing_optional=()
for item in "${required_artifacts[@]}"; do
if printf '%s' "${item}" | grep -q '/$'; then
d="${item%/}"
[ ! -d "${d}" ] && missing_required+=("${item}")
else
[ ! -f "${item}" ] && missing_required+=("${item}")
fi
done
# Optional entries: handle files and directories (trailing slash indicates dir)
for f in "${optional_files[@]}"; do
if printf '%s' "${f}" | grep -q '/$'; then
d="${f%/}"
[ ! -d "${d}" ] && missing_optional+=("${f}")
else
[ ! -f "${f}" ] && missing_optional+=("${f}")
fi
done
for d in "${disallowed_dirs[@]}"; do
d_norm="${d%/}"
[ -d "${d_norm}" ] && missing_required+=("${d_norm}/ (disallowed)")
done
for f in "${disallowed_files[@]}"; do
[ -f "${f}" ] && missing_required+=("${f} (disallowed)")
done
git fetch origin --prune
dev_paths=()
dev_branches=()
# Look for remote branches matching origin/dev*.
# A plain origin/dev is considered invalid; we require dev/<something> branches.
while IFS= read -r b; do
name="${b#origin/}"
if [ "${name}" = 'dev' ]; then
dev_branches+=("${name}")
else
dev_paths+=("${name}")
fi
done < <(git branch -r --list 'origin/dev*' | sed 's/^ *//')
# If there are no dev/* branches, fail the guardrail.
if [ "${#dev_paths[@]}" -eq 0 ]; then
missing_required+=("dev/* branch (e.g. dev/01.00.00)")
fi
# If a plain dev branch exists (origin/dev), flag it as invalid.
if [ "${#dev_branches[@]}" -gt 0 ]; then
missing_required+=("invalid branch dev (must be dev/<version>)")
fi
content_warnings=()
if [ -f 'CHANGELOG.md' ] && ! grep -Eq '^# Changelog' CHANGELOG.md; then
content_warnings+=("CHANGELOG.md missing '# Changelog' header")
fi
if [ -f 'CHANGELOG.md' ] && grep -Eq '^[# ]*Unreleased' CHANGELOG.md; then
content_warnings+=("CHANGELOG.md contains Unreleased section (review release readiness)")
fi
if [ -f 'LICENSE' ] && ! grep -qiE 'GNU GENERAL PUBLIC LICENSE|GPL' LICENSE; then
content_warnings+=("LICENSE does not look like a GPL text")
fi
if [ -f 'README.md' ] && ! grep -qiE 'moko|Moko' README.md; then
content_warnings+=("README.md missing expected brand keyword")
fi
export PROFILE_RAW="${profile}"
export MISSING_REQUIRED="$(printf '%s\n' "${missing_required[@]:-}")"
export MISSING_OPTIONAL="$(printf '%s\n' "${missing_optional[@]:-}")"
export CONTENT_WARNINGS="$(printf '%s\n' "${content_warnings[@]:-}")"
report_json="$(python3 - <<'PY'
import json
import os
profile = os.environ.get('PROFILE_RAW') or 'all'
missing_required = os.environ.get('MISSING_REQUIRED', '').splitlines() if os.environ.get('MISSING_REQUIRED') else []
missing_optional = os.environ.get('MISSING_OPTIONAL', '').splitlines() if os.environ.get('MISSING_OPTIONAL') else []
content_warnings = os.environ.get('CONTENT_WARNINGS', '').splitlines() if os.environ.get('CONTENT_WARNINGS') else []
out = {
'profile': profile,
'missing_required': [x for x in missing_required if x],
'missing_optional': [x for x in missing_optional if x],
'content_warnings': [x for x in content_warnings if x],
}
print(json.dumps(out, indent=2))
PY
)"
{
printf '%s\n' '### Repository health'
printf '%s\n' "Profile: ${profile}"
printf '%s\n' '| Metric | Value |'
printf '%s\n' '|---|---|'
printf '%s\n' "| Missing required | ${#missing_required[@]} |"
printf '%s\n' "| Missing optional | ${#missing_optional[@]} |"
printf '%s\n' "| Content warnings | ${#content_warnings[@]} |"
printf '\n'
printf '%s\n' '### Guardrails report (JSON)'
printf '%s\n' '```json'
printf '%s\n' "${report_json}"
printf '%s\n' '```'
printf '\n'
} >> "${GITHUB_STEP_SUMMARY}"
if [ "${#missing_required[@]}" -gt 0 ]; then
{
printf '%s\n' '### Missing required repo artifacts'
for m in "${missing_required[@]}"; do printf '%s\n' "- ${m}"; done
printf '%s\n' 'ERROR: Guardrails failed. Missing required repository artifacts.'
printf '\n'
} >> "${GITHUB_STEP_SUMMARY}"
exit 1
fi
if [ "${#missing_optional[@]}" -gt 0 ]; then
{
printf '%s\n' '### Missing optional repo artifacts'
for m in "${missing_optional[@]}"; do printf '%s\n' "- ${m}"; done
printf '\n'
} >> "${GITHUB_STEP_SUMMARY}"
fi
if [ "${#content_warnings[@]}" -gt 0 ]; then
{
printf '%s\n' '### Repo content warnings'
for m in "${content_warnings[@]}"; do printf '%s\n' "- ${m}"; done
printf '\n'
} >> "${GITHUB_STEP_SUMMARY}"
fi
extended_enabled="${EXTENDED_CHECKS:-true}"
extended_findings=()
if [ "${extended_enabled}" = 'true' ]; then
# CODEOWNERS presence
if [ -f '.github/CODEOWNERS' ] || [ -f 'CODEOWNERS' ] || [ -f 'docs/CODEOWNERS' ]; then
:
else
extended_findings+=("CODEOWNERS not found (.github/CODEOWNERS preferred)")
fi
# Workflow pinning advisory: flag uses @main/@master
if ls "${WORKFLOWS_DIR}"/*.yml >/dev/null 2>&1 || ls "${WORKFLOWS_DIR}"/*.yaml >/dev/null 2>&1; then
bad_refs="$(grep -RIn --include='*.yml' --include='*.yaml' -E '^[[:space:]]*uses:[[:space:]]*[^#]+@(main|master)\b' "${WORKFLOWS_DIR}" 2>/dev/null || true)"
if [ -n "${bad_refs}" ]; then
extended_findings+=("Workflows reference actions @main/@master (pin versions): see log excerpt")
{
printf '%s\n' '### Workflow pinning advisory'
printf '%s\n' 'Found uses: entries pinned to main/master:'
printf '%s\n' '```'
printf '%s\n' "${bad_refs}"
printf '%s\n' '```'
printf '\n'
} >> "${GITHUB_STEP_SUMMARY}"
fi
fi
# Docs index link integrity (docs/docs-index.md)
if [ -f "${DOCS_INDEX}" ]; then
missing_links="$(python3 - <<'PY'
import os
import re
idx = os.environ.get('DOCS_INDEX', 'docs/docs-index.md')
base = os.getcwd()
bad = []
pat = re.compile(r'\[[^\]]+\]\(([^)]+)\)')
with open(idx, 'r', encoding='utf-8') as f:
for line in f:
for m in pat.findall(line):
link = m.strip()
if link.startswith('http://') or link.startswith('https://') or link.startswith('#') or link.startswith('mailto:'):
continue
if link.startswith('/'):
rel = link.lstrip('/')
else:
rel = os.path.normpath(os.path.join(os.path.dirname(idx), link))
rel = rel.split('#', 1)[0]
rel = rel.split('?', 1)[0]
if not rel:
continue
p = os.path.join(base, rel)
if not os.path.exists(p):
bad.append(rel)
print('\n'.join(sorted(set(bad))))
PY
)"
if [ -n "${missing_links}" ]; then
extended_findings+=("docs/docs-index.md contains broken relative links")
{
printf '%s\n' '### Docs index link integrity'
printf '%s\n' 'Broken relative links:'
while IFS= read -r l; do [ -n "${l}" ] && printf '%s\n' "- ${l}"; done <<< "${missing_links}"
printf '\n'
} >> "${GITHUB_STEP_SUMMARY}"
fi
fi
# ShellCheck advisory
if [ -d "${SCRIPT_DIR}" ]; then
if ! command -v shellcheck >/dev/null 2>&1; then
sudo apt-get update -qq
sudo apt-get install -y shellcheck >/dev/null
fi
sc_out=''
while IFS= read -r shf; do
[ -z "${shf}" ] && continue
out_one="$(shellcheck -S warning -x "${shf}" 2>/dev/null || true)"
if [ -n "${out_one}" ]; then
sc_out="${sc_out}${out_one}\n"
fi
done < <(find "${SCRIPT_DIR}" -type f -name "${SHELLCHECK_PATTERN}" 2>/dev/null | sort)
if [ -n "${sc_out}" ]; then
extended_findings+=("ShellCheck warnings detected (advisory)")
sc_head="$(printf '%s' "${sc_out}" | head -n 200)"
{
printf '%s\n' '### ShellCheck (advisory)'
printf '%s\n' '```'
printf '%s\n' "${sc_head}"
printf '%s\n' '```'
printf '\n'
} >> "${GITHUB_STEP_SUMMARY}"
fi
fi
# SPDX header advisory for common source types
spdx_missing=()
IFS=',' read -r -a spdx_globs <<< "${SPDX_FILE_GLOBS}"
spdx_args=()
for g in "${spdx_globs[@]}"; do spdx_args+=("${g}"); done
while IFS= read -r f; do
[ -z "${f}" ] && continue
if ! head -n 40 "${f}" | grep -q 'SPDX-License-Identifier:'; then
spdx_missing+=("${f}")
fi
done < <(git ls-files "${spdx_args[@]}" 2>/dev/null || true)
if [ "${#spdx_missing[@]}" -gt 0 ]; then
extended_findings+=("SPDX header missing in some tracked files (advisory)")
{
printf '%s\n' '### SPDX header advisory'
printf '%s\n' 'Files missing SPDX-License-Identifier (first 40 lines scan):'
for f in "${spdx_missing[@]}"; do printf '%s\n' "- ${f}"; done
printf '\n'
} >> "${GITHUB_STEP_SUMMARY}"
fi
# Git hygiene advisory: branches older than 180 days (remote)
stale_cutoff_days=180
stale_branches="$(git for-each-ref --format='%(refname:short) %(committerdate:unix)' refs/remotes/origin 2>/dev/null | awk -v now="$(date +%s)" -v days="${stale_cutoff_days}" '{if (now-$2 [...]
if [ -n "${stale_branches}" ]; then
extended_findings+=("Stale remote branches detected (advisory)")
{
printf '%s\n' '### Git hygiene advisory'
printf '%s\n' "Branches with last commit older than ${stale_cutoff_days} days (sample up to 50):"
while IFS= read -r b; do [ -n "${b}" ] && printf '%s\n' "- ${b}"; done <<< "${stale_branches}"
printf '\n'
} >> "${GITHUB_STEP_SUMMARY}"
fi
fi
{
printf '%s\n' '### Guardrails coverage matrix'
printf '%s\n' '| Domain | Status | Notes |'
printf '%s\n' '|---|---|---|'
printf '%s\n' '| Access control | OK | Admin-only execution gate |'
printf '%s\n' '| Release variables | OK | Repository variables validation |'
printf '%s\n' '| Scripts governance | OK | Directory policy and advisory reporting |'
printf '%s\n' '| Repo required artifacts | OK | Required, optional, disallowed enforcement |'
printf '%s\n' '| Repo content heuristics | OK | Brand, license, changelog structure |'
if [ "${extended_enabled}" = 'true' ]; then
if [ "${#extended_findings[@]}" -gt 0 ]; then
printf '%s\n' '| Extended checks | Warning | See extended findings below |'
else
printf '%s\n' '| Extended checks | OK | No findings |'
fi
else
printf '%s\n' '| Extended checks | SKIPPED | EXTENDED_CHECKS disabled |'
fi
printf '\n'
} >> "${GITHUB_STEP_SUMMARY}"
if [ "${extended_enabled}" = 'true' ] && [ "${#extended_findings[@]}" -gt 0 ]; then
{
printf '%s\n' '### Extended findings (advisory)'
for f in "${extended_findings[@]}"; do printf '%s\n' "- ${f}"; done
printf '\n'
} >> "${GITHUB_STEP_SUMMARY}"
fi
printf '%s\n' 'Repository health guardrails passed.' >> "${GITHUB_STEP_SUMMARY}"

244
.github/workflows/repository-cleanup.yml vendored Normal file
View File

@@ -0,0 +1,244 @@
# Copyright (C) 2026 Moko Consulting <hello@mokoconsulting.tech>
#
# This file is part of a Moko Consulting project.
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# FILE INFORMATION
# DEFGROUP: GitHub.Workflow
# INGROUP: MokoStandards.Maintenance
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/repository-cleanup.yml
# VERSION: 04.01.00
# BRIEF: One-time repository cleanup — reset labels, strip issue template headers, delete old branches
# NOTE: Synced via bulk-repo-sync to .github/workflows/repository-cleanup.yml in all governed repos.
# Run manually via workflow_dispatch. Safe to re-run — all operations are idempotent.
name: Repository Cleanup
on:
workflow_dispatch:
inputs:
reset_labels:
description: 'Delete ALL existing labels and recreate the standard 54-label set'
type: boolean
default: true
clean_branches:
description: 'Delete old chore/sync-mokostandards-* branches (keeps current versioned branch only)'
type: boolean
default: true
fix_templates:
description: 'Strip copyright comment blocks from issue templates'
type: boolean
default: true
permissions:
contents: write
issues: write
jobs:
cleanup:
name: Repository Cleanup
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
with:
token: ${{ secrets.GH_TOKEN || github.token }}
- name: Check actor permission
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
run: |
ACTOR="${{ github.actor }}"
AUTHORIZED_USERS="jmiller-moko github-actions[bot]"
for user in $AUTHORIZED_USERS; do
if [ "$ACTOR" = "$user" ]; then
echo "✅ ${ACTOR} authorized"
exit 0
fi
done
PERMISSION=$(gh api "repos/${{ github.repository }}/collaborators/${ACTOR}/permission" \
--jq '.permission' 2>/dev/null)
case "$PERMISSION" in
admin|maintain) echo "✅ ${ACTOR} has ${PERMISSION}" ;;
*) echo "❌ Admin or maintain required"; exit 1 ;;
esac
# ── LABEL RESET ──────────────────────────────────────────────────────
- name: Reset labels to standard set
if: inputs.reset_labels == true
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
run: |
REPO="${{ github.repository }}"
echo "## 🏷️ Label Reset" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Delete all existing labels
echo "Deleting existing labels..."
DELETED=0
gh api "repos/${REPO}/labels?per_page=100" --paginate --jq '.[].name' | while read -r label; do
ENCODED=$(python3 -c "import urllib.parse; print(urllib.parse.quote('$label', safe=''))")
gh api -X DELETE "repos/${REPO}/labels/${ENCODED}" --silent 2>/dev/null && DELETED=$((DELETED+1)) || true
done
echo "Deleted existing labels" >> $GITHUB_STEP_SUMMARY
# Create the standard 54-label set
echo "Creating standard labels..."
CREATED=0
while IFS='|' read -r name color description; do
[ -z "$name" ] && continue
gh api "repos/${REPO}/labels" \
-f name="$name" -f color="$color" -f description="$description" \
--silent 2>/dev/null && CREATED=$((CREATED+1)) || true
done << 'LABELS'
joomla|7F52FF|Joomla extension or component
dolibarr|FF6B6B|Dolibarr module or extension
generic|808080|Generic project or library
php|4F5D95|PHP code changes
javascript|F7DF1E|JavaScript code changes
typescript|3178C6|TypeScript code changes
python|3776AB|Python code changes
css|1572B6|CSS/styling changes
html|E34F26|HTML template changes
documentation|0075CA|Documentation changes
ci-cd|000000|CI/CD pipeline changes
docker|2496ED|Docker configuration changes
tests|00FF00|Test suite changes
security|FF0000|Security-related changes
dependencies|0366D6|Dependency updates
config|F9D0C4|Configuration file changes
build|FFA500|Build system changes
automation|8B4513|Automated processes or scripts
mokostandards|B60205|MokoStandards compliance
needs-review|FBCA04|Awaiting code review
work-in-progress|D93F0B|Work in progress, not ready for merge
breaking-change|D73A4A|Breaking API or functionality change
priority: critical|B60205|Critical priority, must be addressed immediately
priority: high|D93F0B|High priority
priority: medium|FBCA04|Medium priority
priority: low|0E8A16|Low priority
type: bug|D73A4A|Something isn't working
type: feature|A2EEEF|New feature or request
type: enhancement|84B6EB|Enhancement to existing feature
type: refactor|F9D0C4|Code refactoring
type: chore|FEF2C0|Maintenance tasks
status: pending|FBCA04|Pending action or decision
status: in-progress|0E8A16|Currently being worked on
status: blocked|B60205|Blocked by another issue or dependency
status: on-hold|D4C5F9|Temporarily on hold
status: wontfix|FFFFFF|This will not be worked on
size/xs|C5DEF5|Extra small change (1-10 lines)
size/s|6FD1E2|Small change (11-30 lines)
size/m|F9DD72|Medium change (31-100 lines)
size/l|FFA07A|Large change (101-300 lines)
size/xl|FF6B6B|Extra large change (301-1000 lines)
size/xxl|B60205|Extremely large change (1000+ lines)
health: excellent|0E8A16|Health score 90-100
health: good|FBCA04|Health score 70-89
health: fair|FFA500|Health score 50-69
health: poor|FF6B6B|Health score below 50
standards-update|B60205|MokoStandards sync update
standards-drift|FBCA04|Repository drifted from MokoStandards
sync-report|0075CA|Bulk sync run report
sync-failure|D73A4A|Bulk sync failure requiring attention
push-failure|D73A4A|File push failure requiring attention
health-check|0E8A16|Repository health check results
version-drift|FFA500|Version mismatch detected
deploy-failure|CC0000|Automated deploy failure tracking
template-validation-failure|D73A4A|Template workflow validation failure
LABELS
echo "✅ Standard labels created" >> $GITHUB_STEP_SUMMARY
# ── BRANCH CLEANUP ───────────────────────────────────────────────────
- name: Delete old sync branches
if: inputs.clean_branches == true
env:
GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
run: |
REPO="${{ github.repository }}"
CURRENT="chore/sync-mokostandards-v04.01.00"
echo "## 🌿 Branch Cleanup" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
DELETED=0
gh api "repos/${REPO}/branches?per_page=100" --jq '.[].name' | \
grep "^chore/sync-mokostandards" | \
grep -v "^${CURRENT}$" | while read -r branch; do
# Close any open PRs on this branch
gh pr list --repo "$REPO" --head "$branch" --state open --json number --jq '.[].number' 2>/dev/null | while read -r pr; do
gh pr close "$pr" --repo "$REPO" --comment "Superseded by \`${CURRENT}\`" 2>/dev/null || true
echo " Closed PR #${pr}" >> $GITHUB_STEP_SUMMARY
done
# Delete the branch
gh api -X DELETE "repos/${REPO}/git/refs/heads/${branch}" --silent 2>/dev/null || true
echo " Deleted: \`${branch}\`" >> $GITHUB_STEP_SUMMARY
DELETED=$((DELETED+1))
done
if [ "$DELETED" -eq 0 ] 2>/dev/null; then
echo "✅ No old sync branches found" >> $GITHUB_STEP_SUMMARY
else
echo "✅ Cleanup complete" >> $GITHUB_STEP_SUMMARY
fi
# ── ISSUE TEMPLATE FIX ──────────────────────────────────────────────
- name: Strip copyright headers from issue templates
if: inputs.fix_templates == true
run: |
echo "## 📋 Issue Template Cleanup" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
FIXED=0
for f in .github/ISSUE_TEMPLATE/*.md; do
[ -f "$f" ] || continue
if grep -q '^<!--$' "$f"; then
sed -i '/^<!--$/,/^-->$/d' "$f"
echo " Cleaned: \`$(basename $f)\`" >> $GITHUB_STEP_SUMMARY
FIXED=$((FIXED+1))
fi
done
if [ "$FIXED" -gt 0 ]; then
git config --local user.email "github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
git add .github/ISSUE_TEMPLATE/
git commit -m "fix: strip copyright comment blocks from issue templates [skip ci]" \
--author="github-actions[bot] <github-actions[bot]@users.noreply.github.com>"
git push
echo "✅ ${FIXED} template(s) cleaned and committed" >> $GITHUB_STEP_SUMMARY
else
echo "✅ No templates need cleaning" >> $GITHUB_STEP_SUMMARY
fi
# ── SELF-DELETE ─────────────────────────────────────────────────────
      # One-shot housekeeping: after a fully successful run, the workflow
      # removes its own file so future MokoStandards syncs do not re-run it.
      - name: Delete this workflow (one-time use)
        if: success()
        env:
          GH_TOKEN: ${{ secrets.GH_TOKEN || github.token }}
        run: |
          echo "## 🗑️ Self-Cleanup" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          WORKFLOW_FILE=".github/workflows/repository-cleanup.yml"
          if [ -f "$WORKFLOW_FILE" ]; then
            git config --local user.email "github-actions[bot]@users.noreply.github.com"
            git config --local user.name "github-actions[bot]"
            git rm "$WORKFLOW_FILE"
            # [skip ci] prevents this bot commit from re-triggering workflows.
            git commit -m "chore: remove repository-cleanup.yml after successful run [skip ci]" \
              --author="github-actions[bot] <github-actions[bot]@users.noreply.github.com>"
            git push
            echo "✅ Workflow file deleted — it will not appear in future syncs" >> $GITHUB_STEP_SUMMARY
          else
            echo " Workflow file already removed" >> $GITHUB_STEP_SUMMARY
          fi
      # Always-on footer recording who triggered the run.
      - name: Summary
        if: always()
        run: |
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "---" >> $GITHUB_STEP_SUMMARY
          echo "*Run by @${{ github.actor }} via workflow_dispatch*" >> $GITHUB_STEP_SUMMARY

View File

@@ -5,7 +5,7 @@
# INGROUP: MokoStandards.Compliance
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /.github/workflows/standards-compliance.yml
# VERSION: 04.00.05
# VERSION: 04.01.00
# BRIEF: MokoStandards compliance validation workflow
# NOTE: Validates repository structure, documentation, and coding standards
@@ -1197,28 +1197,28 @@ jobs:
# Find large files (>1MB)
LARGE_FILES=$(find . -type f -size +1M ! -path "./.git/*" ! -path "./vendor/*" ! -path "./node_modules/*" 2>/dev/null | wc -l)
HUGE_FILES=$(find . -type f -size +10M ! -path "./.git/*" ! -path "./vendor/*" ! -path "./node_modules/*" 2>/dev/null | wc -l)
HUGE_FILES=$(find . -type f -size +15M ! -path "./.git/*" ! -path "./vendor/*" ! -path "./node_modules/*" 2>/dev/null | wc -l)
echo "### Size Thresholds" >> $GITHUB_STEP_SUMMARY
echo "- **Warning**: Files >1MB" >> $GITHUB_STEP_SUMMARY
echo "- **Critical**: Files >10MB" >> $GITHUB_STEP_SUMMARY
echo "- **Critical**: Files >15MB" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
if [ "$HUGE_FILES" -gt 0 ]; then
echo "❌ **Critical**: Found $HUGE_FILES file(s) exceeding 10MB" >> $GITHUB_STEP_SUMMARY
echo "❌ **Critical**: Found $HUGE_FILES file(s) exceeding 15MB" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "<details>" >> $GITHUB_STEP_SUMMARY
echo "<summary>View files >10MB</summary>" >> $GITHUB_STEP_SUMMARY
echo "<summary>View files >15MB</summary>" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
find . -type f -size +10M ! -path "./.git/*" ! -path "./vendor/*" ! -path "./node_modules/*" -exec ls -lh {} + 2>/dev/null | awk '{print $5, $9}' >> $GITHUB_STEP_SUMMARY
find . -type f -size +15M ! -path "./.git/*" ! -path "./vendor/*" ! -path "./node_modules/*" -exec ls -lh {} + 2>/dev/null | awk '{print $5, $9}' >> $GITHUB_STEP_SUMMARY
echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
echo "</details>" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Action Required**: Remove or optimize files >10MB" >> $GITHUB_STEP_SUMMARY
echo "**Action Required**: Remove or optimize files >15MB" >> $GITHUB_STEP_SUMMARY
exit 1
elif [ "$LARGE_FILES" -gt 0 ]; then
echo "⚠️ **Warning**: Found $LARGE_FILES file(s) between 1MB and 10MB" >> $GITHUB_STEP_SUMMARY
echo "⚠️ **Warning**: Found $LARGE_FILES file(s) between 1MB and 15MB" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "<details>" >> $GITHUB_STEP_SUMMARY
echo "<summary>View files >1MB</summary>" >> $GITHUB_STEP_SUMMARY

View File

@@ -9,7 +9,7 @@
# INGROUP: MokoStandards.Automation
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/workflows/shared/sync-version-on-merge.yml
# VERSION: 04.00.35
# VERSION: 04.01.00
# BRIEF: Auto-bump patch version on every push to main and propagate to all file headers
# NOTE: Synced via bulk-repo-sync to .github/workflows/sync-version-on-merge.yml in all governed repos.
# README.md is the single source of truth for the repository version.

3
.gitignore vendored
View File

@@ -906,3 +906,6 @@ modulebuilder.txt
# ── MokoStandards sync (auto-appended) ────────────────────────────────
/.claude
# ── MokoStandards sync (auto-appended) ────────────────────────────────
!src/media/vendor/

View File

@@ -5,7 +5,7 @@
# INGROUP: MokoStandards.Templates
# REPO: https://github.com/mokoconsulting-tech/MokoStandards
# PATH: /templates/configs/moko-standards.yml
# VERSION: 04.00.04
# VERSION: 04.01.00
# BRIEF: Governance attachment template — synced to .moko-standards in every governed repository
# NOTE: Tokens replaced at sync time: mokoconsulting-tech, MokoCassiopeia, waas-component, 04.00.04
#